index int64 0 0 | repo_id stringlengths 26 205 | file_path stringlengths 51 246 | content stringlengths 8 433k | __index_level_0__ int64 0 10k |
|---|---|---|---|---|
0 | Create_ds/conductor/core/src/test/java/com/netflix/conductor/core | Create_ds/conductor/core/src/test/java/com/netflix/conductor/core/execution/TestWorkflowDef.java | /*
* Copyright 2020 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import org.junit.Test;
import com.netflix.conductor.common.metadata.tasks.TaskType;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
public class TestWorkflowDef {

    /**
     * containsType() must report task types that appear anywhere in the definition,
     * including inside decision cases, and reject types that are absent.
     */
    @Test
    public void testContainsType() {
        WorkflowDef def = createDecisionWorkflowDef();

        assertTrue(def.containsType(TaskType.SIMPLE.name()));
        assertTrue(def.containsType(TaskType.DECISION.name()));
        assertFalse(def.containsType(TaskType.DO_WHILE.name()));
    }

    /**
     * getNextTask() must follow the linear task order and fall through from the last
     * task of a decision case (including a nested decision) to the task that follows
     * the decision task itself.
     */
    @Test
    public void testGetNextTask_Decision() {
        WorkflowDef def = createDecisionWorkflowDef();

        WorkflowTask next = def.getNextTask("simple_task_1");
        assertNotNull(next);
        assertEquals("simple_task_2", next.getTaskReferenceName());

        next = def.getNextTask("simple_task_2");
        assertNotNull(next);
        assertEquals("decision_task_1", next.getTaskReferenceName());

        next = def.getNextTask("decision_task_1");
        assertNotNull(next);
        assertEquals("simple_task_3", next.getTaskReferenceName());

        next = def.getNextTask("case_1_task_1");
        assertNotNull(next);
        assertEquals("case_1_task_2", next.getTaskReferenceName());

        // Last task of a case falls through to the task after the decision.
        next = def.getNextTask("case_1_task_2");
        assertNotNull(next);
        assertEquals("simple_task_3", next.getTaskReferenceName());

        next = def.getNextTask("case3_def_task");
        assertNotNull(next);
        assertEquals("simple_task_3", next.getTaskReferenceName());

        // Traversal inside the nested decision of Case3.
        next = def.getNextTask("case31_task_1");
        assertNotNull(next);
        assertEquals("case_31_task_2", next.getTaskReferenceName());
    }

    /**
     * getNextTask() must also work when decision cases are built from sub-lists of a
     * shared task list, and must return null after the last task of the workflow.
     */
    @Test
    public void testGetNextTask_Conditional() {
        final String COND_TASK_WF = "COND_TASK_WF";
        List<WorkflowTask> workflowTasks = new ArrayList<>(10);
        for (int i = 0; i < 10; i++) {
            workflowTasks.add(createWorkflowTask("junit_task_" + i));
        }

        WorkflowDef workflowDef = new WorkflowDef();
        workflowDef.setName(COND_TASK_WF);
        workflowDef.setDescription(COND_TASK_WF);

        // Nested decision referenced from case "c1" below.
        WorkflowTask subCaseTask = new WorkflowTask();
        subCaseTask.setType(TaskType.DECISION.name());
        subCaseTask.setCaseValueParam("case2");
        subCaseTask.setName("case2");
        subCaseTask.setTaskReferenceName("case2");
        Map<String, List<WorkflowTask>> dcx = new HashMap<>();
        dcx.put("sc1", workflowTasks.subList(4, 5));
        dcx.put("sc2", workflowTasks.subList(5, 7));
        subCaseTask.setDecisionCases(dcx);

        WorkflowTask caseTask = new WorkflowTask();
        caseTask.setType(TaskType.DECISION.name());
        caseTask.setCaseValueParam("case");
        caseTask.setName("case");
        caseTask.setTaskReferenceName("case");
        Map<String, List<WorkflowTask>> dc = new HashMap<>();
        dc.put("c1", Arrays.asList(workflowTasks.get(0), subCaseTask, workflowTasks.get(1)));
        dc.put("c2", Collections.singletonList(workflowTasks.get(3)));
        caseTask.setDecisionCases(dc);

        workflowDef.getTasks().add(caseTask);
        workflowDef.getTasks().addAll(workflowTasks.subList(8, 9));

        WorkflowTask nextTask = workflowDef.getNextTask("case");
        assertEquals("junit_task_8", nextTask.getTaskReferenceName());

        // junit_task_8 is the last task in the workflow.
        nextTask = workflowDef.getNextTask("junit_task_8");
        assertNull(nextTask);

        nextTask = workflowDef.getNextTask("junit_task_0");
        assertNotNull(nextTask);
        assertEquals("case2", nextTask.getTaskReferenceName());

        nextTask = workflowDef.getNextTask("case2");
        assertNotNull(nextTask);
        assertEquals("junit_task_1", nextTask.getTaskReferenceName());
    }

    /**
     * Builds the workflow shared by testContainsType() and testGetNextTask_Decision():
     * two simple tasks, a DECISION task with three cases (the third containing a
     * nested decision), followed by a final simple task. Extracted to remove the
     * duplicated setup that previously appeared verbatim in both tests.
     */
    private WorkflowDef createDecisionWorkflowDef() {
        WorkflowDef def = new WorkflowDef();
        def.setName("test_workflow");
        def.setVersion(1);
        def.setSchemaVersion(2);
        def.getTasks().add(createWorkflowTask("simple_task_1"));
        def.getTasks().add(createWorkflowTask("simple_task_2"));

        WorkflowTask decision = createWorkflowTask("decision_task_1");
        decision.setType(TaskType.DECISION.name());
        decision.getDecisionCases()
                .put(
                        "Case1",
                        Arrays.asList(
                                createWorkflowTask("case_1_task_1"),
                                createWorkflowTask("case_1_task_2")));
        decision.getDecisionCases()
                .put(
                        "Case2",
                        Arrays.asList(
                                createWorkflowTask("case_2_task_1"),
                                createWorkflowTask("case_2_task_2")));
        decision.getDecisionCases()
                .put(
                        "Case3",
                        Collections.singletonList(
                                deciderTask(
                                        "decision_task_2",
                                        toMap("Case31", "case31_task_1", "case_31_task_2"),
                                        Collections.singletonList("case3_def_task"))));
        def.getTasks().add(decision);
        def.getTasks().add(createWorkflowTask("simple_task_3"));
        return def;
    }

    /** Creates a workflow task whose name and reference name are both {@code name}. */
    private WorkflowTask createWorkflowTask(String name) {
        WorkflowTask task = new WorkflowTask();
        task.setName(name);
        task.setTaskReferenceName(name);
        return task;
    }

    /**
     * Creates a DECISION task with the given cases (case name -> task names) and the
     * given default-case task names.
     */
    private WorkflowTask deciderTask(
            String name, Map<String, List<String>> decisions, List<String> defaultTasks) {
        WorkflowTask task = createWorkflowTask(name);
        task.setType(TaskType.DECISION.name());
        decisions.forEach(
                (key, value) -> {
                    List<WorkflowTask> tasks = new LinkedList<>();
                    value.forEach(taskName -> tasks.add(createWorkflowTask(taskName)));
                    task.getDecisionCases().put(key, tasks);
                });
        List<WorkflowTask> tasks = new LinkedList<>();
        defaultTasks.forEach(defaultTask -> tasks.add(createWorkflowTask(defaultTask)));
        task.setDefaultCase(tasks);
        return task;
    }

    /** Returns a single-entry map from {@code key} to the given values. */
    private Map<String, List<String>> toMap(String key, String... values) {
        Map<String, List<String>> map = new HashMap<>();
        map.put(key, Arrays.asList(values));
        return map;
    }
}
| 6,600 |
0 | Create_ds/conductor/core/src/test/java/com/netflix/conductor/core | Create_ds/conductor/core/src/test/java/com/netflix/conductor/core/execution/TestWorkflowExecutor.java | /*
* Copyright 2022 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution;
import java.time.Duration;
import java.util.*;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.mockito.stubbing.Answer;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.support.DefaultListableBeanFactory;
import org.springframework.context.ApplicationEventPublisher;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringRunner;
import com.netflix.conductor.common.config.TestObjectMapperConfiguration;
import com.netflix.conductor.common.metadata.tasks.PollData;
import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.tasks.TaskResult;
import com.netflix.conductor.common.metadata.tasks.TaskType;
import com.netflix.conductor.common.metadata.workflow.RerunWorkflowRequest;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.common.utils.ExternalPayloadStorage;
import com.netflix.conductor.core.config.ConductorProperties;
import com.netflix.conductor.core.dal.ExecutionDAOFacade;
import com.netflix.conductor.core.event.WorkflowCreationEvent;
import com.netflix.conductor.core.exception.ConflictException;
import com.netflix.conductor.core.exception.NotFoundException;
import com.netflix.conductor.core.exception.TerminateWorkflowException;
import com.netflix.conductor.core.execution.evaluators.Evaluator;
import com.netflix.conductor.core.execution.mapper.*;
import com.netflix.conductor.core.execution.tasks.*;
import com.netflix.conductor.core.listener.TaskStatusListener;
import com.netflix.conductor.core.listener.WorkflowStatusListener;
import com.netflix.conductor.core.metadata.MetadataMapperService;
import com.netflix.conductor.core.operation.StartWorkflowOperation;
import com.netflix.conductor.core.utils.ExternalPayloadStorageUtils;
import com.netflix.conductor.core.utils.IDGenerator;
import com.netflix.conductor.core.utils.ParametersUtils;
import com.netflix.conductor.dao.MetadataDAO;
import com.netflix.conductor.dao.QueueDAO;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import com.netflix.conductor.service.ExecutionLockService;
import com.fasterxml.jackson.databind.ObjectMapper;
import static com.netflix.conductor.common.metadata.tasks.TaskType.*;
import static java.util.Comparator.comparingInt;
import static java.util.stream.Collectors.groupingBy;
import static java.util.stream.Collectors.maxBy;
import static org.junit.Assert.*;
import static org.mockito.ArgumentMatchers.*;
import static org.mockito.Mockito.*;
@ContextConfiguration(
classes = {
TestObjectMapperConfiguration.class,
TestWorkflowExecutor.TestConfiguration.class
})
@RunWith(SpringRunner.class)
public class TestWorkflowExecutor {
private WorkflowExecutor workflowExecutor;
private ExecutionDAOFacade executionDAOFacade;
private MetadataDAO metadataDAO;
private QueueDAO queueDAO;
private WorkflowStatusListener workflowStatusListener;
private TaskStatusListener taskStatusListener;
private ExecutionLockService executionLockService;
private ExternalPayloadStorageUtils externalPayloadStorageUtils;
@Configuration
@ComponentScan(basePackageClasses = {Evaluator.class}) // load all Evaluator beans.
public static class TestConfiguration {
// Real SubWorkflow system task, but with the start-workflow operation mocked so
// no sub-workflow is actually launched by tests.
@Bean(TASK_TYPE_SUB_WORKFLOW)
public SubWorkflow subWorkflow(ObjectMapper objectMapper) {
return new SubWorkflow(objectMapper, mock(StartWorkflowOperation.class));
}
@Bean(TASK_TYPE_LAMBDA)
public Lambda lambda() {
return new Lambda();
}
@Bean(TASK_TYPE_WAIT)
public Wait waitBean() {
return new Wait();
}
// Async stub: tests use it to verify async tasks are queued rather than started.
@Bean("HTTP")
public WorkflowSystemTask http() {
return new WorkflowSystemTaskStub("HTTP") {
@Override
public boolean isAsync() {
return true;
}
};
}
// Default stub (no isAsync override) used as the counterpart to "HTTP".
@Bean("HTTP2")
public WorkflowSystemTask http2() {
return new WorkflowSystemTaskStub("HTTP2");
}
// Synchronous stub that completes immediately when started.
@Bean(TASK_TYPE_JSON_JQ_TRANSFORM)
public WorkflowSystemTask jsonBean() {
return new WorkflowSystemTaskStub("JSON_JQ_TRANSFORM") {
@Override
public boolean isAsync() {
return false;
}
@Override
public void start(
WorkflowModel workflow, TaskModel task, WorkflowExecutor executor) {
task.setStatus(TaskModel.Status.COMPLETED);
}
};
}
// Registry collecting all WorkflowSystemTask beans declared above.
@Bean
public SystemTaskRegistry systemTaskRegistry(Set<WorkflowSystemTask> tasks) {
return new SystemTaskRegistry(tasks);
}
}
@Autowired private ObjectMapper objectMapper;
@Autowired private SystemTaskRegistry systemTaskRegistry;
@Autowired private DefaultListableBeanFactory beanFactory;
@Autowired private Map<String, Evaluator> evaluators;
private ApplicationEventPublisher eventPublisher;
@Before
public void init() {
// Mock every persistence/queue/listener/lock collaborator; the DeciderService,
// task mappers and parameter utilities wired below are real instances.
executionDAOFacade = mock(ExecutionDAOFacade.class);
metadataDAO = mock(MetadataDAO.class);
queueDAO = mock(QueueDAO.class);
workflowStatusListener = mock(WorkflowStatusListener.class);
taskStatusListener = mock(TaskStatusListener.class);
externalPayloadStorageUtils = mock(ExternalPayloadStorageUtils.class);
executionLockService = mock(ExecutionLockService.class);
eventPublisher = mock(ApplicationEventPublisher.class);
ParametersUtils parametersUtils = new ParametersUtils(objectMapper);
IDGenerator idGenerator = new IDGenerator();
// One TaskMapper per task type the decider may need to schedule.
Map<String, TaskMapper> taskMappers = new HashMap<>();
taskMappers.put(DECISION.name(), new DecisionTaskMapper());
taskMappers.put(SWITCH.name(), new SwitchTaskMapper(evaluators));
taskMappers.put(DYNAMIC.name(), new DynamicTaskMapper(parametersUtils, metadataDAO));
taskMappers.put(FORK_JOIN.name(), new ForkJoinTaskMapper());
taskMappers.put(JOIN.name(), new JoinTaskMapper());
taskMappers.put(
FORK_JOIN_DYNAMIC.name(),
new ForkJoinDynamicTaskMapper(
idGenerator, parametersUtils, objectMapper, metadataDAO));
taskMappers.put(
USER_DEFINED.name(), new UserDefinedTaskMapper(parametersUtils, metadataDAO));
taskMappers.put(SIMPLE.name(), new SimpleTaskMapper(parametersUtils));
taskMappers.put(
SUB_WORKFLOW.name(), new SubWorkflowTaskMapper(parametersUtils, metadataDAO));
taskMappers.put(EVENT.name(), new EventTaskMapper(parametersUtils));
taskMappers.put(WAIT.name(), new WaitTaskMapper(parametersUtils));
taskMappers.put(HTTP.name(), new HTTPTaskMapper(parametersUtils, metadataDAO));
taskMappers.put(LAMBDA.name(), new LambdaTaskMapper(parametersUtils, metadataDAO));
taskMappers.put(INLINE.name(), new InlineTaskMapper(parametersUtils, metadataDAO));
DeciderService deciderService =
new DeciderService(
idGenerator,
parametersUtils,
metadataDAO,
externalPayloadStorageUtils,
systemTaskRegistry,
taskMappers,
Duration.ofMinutes(60));
MetadataMapperService metadataMapperService = new MetadataMapperService(metadataDAO);
// Fixed timeout values consumed by the executor in several tests below.
ConductorProperties properties = mock(ConductorProperties.class);
when(properties.getActiveWorkerLastPollTimeout()).thenReturn(Duration.ofSeconds(100));
when(properties.getTaskExecutionPostponeDuration()).thenReturn(Duration.ofSeconds(60));
when(properties.getWorkflowOffsetTimeout()).thenReturn(Duration.ofSeconds(30));
workflowExecutor =
new WorkflowExecutor(
deciderService,
metadataDAO,
queueDAO,
metadataMapperService,
workflowStatusListener,
taskStatusListener,
executionDAOFacade,
properties,
executionLockService,
systemTaskRegistry,
parametersUtils,
idGenerator,
eventPublisher);
}
// Schedules a mix of async (HTTP), in-progress (WAIT) and default-stub (HTTP2)
// tasks and verifies which ones are started inline vs. only pushed to a queue.
@Test
public void testScheduleTask() {
IDGenerator idGenerator = new IDGenerator();
// Stub beans record whether start() was invoked on them.
WorkflowSystemTaskStub httpTask = beanFactory.getBean("HTTP", WorkflowSystemTaskStub.class);
WorkflowSystemTaskStub http2Task =
beanFactory.getBean("HTTP2", WorkflowSystemTaskStub.class);
WorkflowModel workflow = new WorkflowModel();
workflow.setWorkflowId("1");
WorkflowDef workflowDef = new WorkflowDef();
workflowDef.setName("1");
workflowDef.setVersion(1);
workflow.setWorkflowDefinition(workflowDef);
List<TaskModel> tasks = new LinkedList<>();
WorkflowTask taskToSchedule = new WorkflowTask();
taskToSchedule.setWorkflowTaskType(TaskType.USER_DEFINED);
taskToSchedule.setType("HTTP");
WorkflowTask taskToSchedule2 = new WorkflowTask();
taskToSchedule2.setWorkflowTaskType(TaskType.USER_DEFINED);
taskToSchedule2.setType("HTTP2");
WorkflowTask wait = new WorkflowTask();
wait.setWorkflowTaskType(TaskType.WAIT);
wait.setType("WAIT");
wait.setTaskReferenceName("wait");
// task1: HTTP task in SCHEDULED state (its bean is async — see isAsync() above).
TaskModel task1 = new TaskModel();
task1.setTaskType(taskToSchedule.getType());
task1.setTaskDefName(taskToSchedule.getName());
task1.setReferenceTaskName(taskToSchedule.getTaskReferenceName());
task1.setWorkflowInstanceId(workflow.getWorkflowId());
task1.setCorrelationId(workflow.getCorrelationId());
task1.setScheduledTime(System.currentTimeMillis());
task1.setTaskId(idGenerator.generate());
task1.setInputData(new HashMap<>());
task1.setStatus(TaskModel.Status.SCHEDULED);
task1.setRetryCount(0);
task1.setCallbackAfterSeconds(taskToSchedule.getStartDelay());
task1.setWorkflowTask(taskToSchedule);
// task2: WAIT task already IN_PROGRESS.
TaskModel task2 = new TaskModel();
task2.setTaskType(TASK_TYPE_WAIT);
task2.setTaskDefName(taskToSchedule.getName());
task2.setReferenceTaskName(taskToSchedule.getTaskReferenceName());
task2.setWorkflowInstanceId(workflow.getWorkflowId());
task2.setCorrelationId(workflow.getCorrelationId());
task2.setScheduledTime(System.currentTimeMillis());
task2.setInputData(new HashMap<>());
task2.setTaskId(idGenerator.generate());
task2.setStatus(TaskModel.Status.IN_PROGRESS);
task2.setWorkflowTask(taskToSchedule);
// task3: HTTP2 task in SCHEDULED state (default stub bean).
TaskModel task3 = new TaskModel();
task3.setTaskType(taskToSchedule2.getType());
task3.setTaskDefName(taskToSchedule.getName());
task3.setReferenceTaskName(taskToSchedule.getTaskReferenceName());
task3.setWorkflowInstanceId(workflow.getWorkflowId());
task3.setCorrelationId(workflow.getCorrelationId());
task3.setScheduledTime(System.currentTimeMillis());
task3.setTaskId(idGenerator.generate());
task3.setInputData(new HashMap<>());
task3.setStatus(TaskModel.Status.SCHEDULED);
task3.setRetryCount(0);
task3.setCallbackAfterSeconds(taskToSchedule.getStartDelay());
task3.setWorkflowTask(taskToSchedule);
tasks.add(task1);
tasks.add(task2);
tasks.add(task3);
when(executionDAOFacade.createTasks(tasks)).thenReturn(tasks);
// Count inline task starts (observed via updateTask) and queue pushes separately.
AtomicInteger startedTaskCount = new AtomicInteger(0);
doAnswer(
invocation -> {
startedTaskCount.incrementAndGet();
return null;
})
.when(executionDAOFacade)
.updateTask(any());
AtomicInteger queuedTaskCount = new AtomicInteger(0);
final Answer answer =
invocation -> {
String queueName = invocation.getArgument(0, String.class);
queuedTaskCount.incrementAndGet();
return null;
};
doAnswer(answer).when(queueDAO).push(any(), any(), anyLong());
doAnswer(answer).when(queueDAO).push(any(), any(), anyInt(), anyLong());
boolean stateChanged = workflowExecutor.scheduleTask(workflow, tasks);
// Wait task is not async, so it will be queued.
assertEquals(1, startedTaskCount.get());
assertEquals(2, queuedTaskCount.get());
assertTrue(stateChanged);
// Async HTTP was only queued; HTTP2 was started inline.
assertFalse(httpTask.isStarted());
assertTrue(http2Task.isStarted());
}
/**
 * scheduleTask() must surface task-persistence failures as a
 * TerminateWorkflowException.
 */
@Test(expected = TerminateWorkflowException.class)
public void testScheduleTaskFailure() {
    WorkflowModel workflow = new WorkflowModel();
    workflow.setWorkflowId("wid_01");

    TaskModel simpleTask = new TaskModel();
    simpleTask.setTaskType(TaskType.TASK_TYPE_SIMPLE);
    simpleTask.setTaskDefName("task_1");
    simpleTask.setReferenceTaskName("task_1");
    simpleTask.setWorkflowInstanceId(workflow.getWorkflowId());
    simpleTask.setTaskId("tid_01");
    simpleTask.setStatus(TaskModel.Status.SCHEDULED);
    simpleTask.setRetryCount(0);

    List<TaskModel> tasks = new LinkedList<>();
    tasks.add(simpleTask);

    // Persisting the tasks blows up -> scheduleTask must translate the failure.
    when(executionDAOFacade.createTasks(tasks)).thenThrow(new RuntimeException());
    workflowExecutor.scheduleTask(workflow, tasks);
}
/** Simulate Queue push failures and assert that scheduleTask doesn't throw an exception. */
@Test
public void testQueueFailuresDuringScheduleTask() {
WorkflowModel workflow = new WorkflowModel();
workflow.setWorkflowId("wid_01");
WorkflowDef workflowDef = new WorkflowDef();
workflowDef.setName("wid");
workflowDef.setVersion(1);
workflow.setWorkflowDefinition(workflowDef);
List<TaskModel> tasks = new LinkedList<>();
TaskModel task1 = new TaskModel();
task1.setTaskType(TaskType.TASK_TYPE_SIMPLE);
task1.setTaskDefName("task_1");
task1.setReferenceTaskName("task_1");
task1.setWorkflowInstanceId(workflow.getWorkflowId());
task1.setTaskId("tid_01");
task1.setStatus(TaskModel.Status.SCHEDULED);
task1.setRetryCount(0);
tasks.add(task1);
when(executionDAOFacade.createTasks(tasks)).thenReturn(tasks);
// Every queue push fails.
doThrow(new RuntimeException())
.when(queueDAO)
.push(anyString(), anyString(), anyInt(), anyLong());
// scheduleTask must report "no state change" (false) instead of propagating.
assertFalse(workflowExecutor.scheduleTask(workflow, tasks));
}
/**
 * completeWorkflow() on a RUNNING workflow must set COMPLETED, persist the workflow
 * exactly once, leave tasks and queue entries untouched, and notify the completion
 * listener but never the finalization listener.
 */
@Test
public void testCompleteWorkflow() {
    WorkflowDef def = new WorkflowDef();
    def.setName("test");

    WorkflowModel workflow = new WorkflowModel();
    workflow.setWorkflowDefinition(def);
    workflow.setWorkflowId("1");
    workflow.setStatus(WorkflowModel.Status.RUNNING);
    workflow.setOwnerApp("junit_test");
    workflow.setCreateTime(10L);
    workflow.setEndTime(100L);
    // Type-safe replacement for the raw Collections.EMPTY_MAP; removes the need
    // for @SuppressWarnings("unchecked") on this test.
    workflow.setOutput(Collections.emptyMap());

    when(executionDAOFacade.getWorkflowModel(anyString(), anyBoolean())).thenReturn(workflow);

    // Count persistence and queue interactions triggered by completion.
    AtomicInteger updateWorkflowCalledCounter = new AtomicInteger(0);
    doAnswer(
                    invocation -> {
                        updateWorkflowCalledCounter.incrementAndGet();
                        return null;
                    })
            .when(executionDAOFacade)
            .updateWorkflow(any());
    AtomicInteger updateTasksCalledCounter = new AtomicInteger(0);
    doAnswer(
                    invocation -> {
                        updateTasksCalledCounter.incrementAndGet();
                        return null;
                    })
            .when(executionDAOFacade)
            .updateTasks(any());
    AtomicInteger removeQueueEntryCalledCounter = new AtomicInteger(0);
    doAnswer(
                    invocation -> {
                        removeQueueEntryCalledCounter.incrementAndGet();
                        return null;
                    })
            .when(queueDAO)
            .remove(anyString(), anyString());

    workflowExecutor.completeWorkflow(workflow);
    assertEquals(WorkflowModel.Status.COMPLETED, workflow.getStatus());
    assertEquals(1, updateWorkflowCalledCounter.get());
    assertEquals(0, updateTasksCalledCounter.get());
    assertEquals(0, removeQueueEntryCalledCounter.get());
    verify(workflowStatusListener, times(1))
            .onWorkflowCompletedIfEnabled(any(WorkflowModel.class));
    verify(workflowStatusListener, times(0))
            .onWorkflowFinalizedIfEnabled(any(WorkflowModel.class));

    // Completing again with the status listener explicitly enabled still only
    // notifies the completion listener.
    def.setWorkflowStatusListenerEnabled(true);
    workflow.setStatus(WorkflowModel.Status.RUNNING);
    workflowExecutor.completeWorkflow(workflow);
    verify(workflowStatusListener, times(2))
            .onWorkflowCompletedIfEnabled(any(WorkflowModel.class));
    verify(workflowStatusListener, times(0))
            .onWorkflowFinalizedIfEnabled(any(WorkflowModel.class));
}
/**
 * terminateWorkflow() must set TERMINATED, persist the workflow once, remove its
 * queue entry, and notify both the termination and finalization listeners.
 */
@Test
public void testTerminateWorkflow() {
    WorkflowDef def = new WorkflowDef();
    def.setName("test");

    WorkflowModel workflow = new WorkflowModel();
    workflow.setWorkflowDefinition(def);
    workflow.setWorkflowId("1");
    workflow.setStatus(WorkflowModel.Status.RUNNING);
    workflow.setOwnerApp("junit_test");
    workflow.setCreateTime(10L);
    workflow.setEndTime(100L);
    // Type-safe replacement for the raw Collections.EMPTY_MAP; removes the need
    // for @SuppressWarnings("unchecked") on this test.
    workflow.setOutput(Collections.emptyMap());

    when(executionDAOFacade.getWorkflowModel(anyString(), anyBoolean())).thenReturn(workflow);

    // Count persistence and queue interactions triggered by termination.
    AtomicInteger updateWorkflowCalledCounter = new AtomicInteger(0);
    doAnswer(
                    invocation -> {
                        updateWorkflowCalledCounter.incrementAndGet();
                        return null;
                    })
            .when(executionDAOFacade)
            .updateWorkflow(any());
    AtomicInteger updateTasksCalledCounter = new AtomicInteger(0);
    doAnswer(
                    invocation -> {
                        updateTasksCalledCounter.incrementAndGet();
                        return null;
                    })
            .when(executionDAOFacade)
            .updateTasks(any());
    AtomicInteger removeQueueEntryCalledCounter = new AtomicInteger(0);
    doAnswer(
                    invocation -> {
                        removeQueueEntryCalledCounter.incrementAndGet();
                        return null;
                    })
            .when(queueDAO)
            .remove(anyString(), anyString());

    workflowExecutor.terminateWorkflow("workflowId", "reason");
    assertEquals(WorkflowModel.Status.TERMINATED, workflow.getStatus());
    assertEquals(1, updateWorkflowCalledCounter.get());
    assertEquals(1, removeQueueEntryCalledCounter.get());
    verify(workflowStatusListener, times(1))
            .onWorkflowTerminatedIfEnabled(any(WorkflowModel.class));
    verify(workflowStatusListener, times(1))
            .onWorkflowFinalizedIfEnabled(any(WorkflowModel.class));

    // A subsequent completion with the listener enabled notifies the completion
    // listener without another finalization call.
    def.setWorkflowStatusListenerEnabled(true);
    workflow.setStatus(WorkflowModel.Status.RUNNING);
    workflowExecutor.completeWorkflow(workflow);
    verify(workflowStatusListener, times(1))
            .onWorkflowCompletedIfEnabled(any(WorkflowModel.class));
    verify(workflowStatusListener, times(1))
            .onWorkflowFinalizedIfEnabled(any(WorkflowModel.class));
}
/**
 * Failures while uploading workflow output to external payload storage must not
 * prevent termination: the workflow still ends TERMINATED, is persisted once, and
 * the termination listener is notified.
 */
@Test
public void testUploadOutputFailuresDuringTerminateWorkflow() {
    WorkflowDef def = new WorkflowDef();
    def.setName("test");
    def.setWorkflowStatusListenerEnabled(true);

    WorkflowModel workflow = new WorkflowModel();
    workflow.setWorkflowDefinition(def);
    workflow.setWorkflowId("1");
    workflow.setStatus(WorkflowModel.Status.RUNNING);
    workflow.setOwnerApp("junit_test");
    workflow.setCreateTime(10L);
    workflow.setEndTime(100L);
    // Type-safe replacement for the raw Collections.EMPTY_MAP (avoids the
    // unchecked-assignment warning the raw constant produced).
    workflow.setOutput(Collections.emptyMap());

    List<TaskModel> tasks = new LinkedList<>();
    TaskModel task = new TaskModel();
    task.setScheduledTime(1L);
    task.setSeq(1);
    task.setTaskId(UUID.randomUUID().toString());
    task.setReferenceTaskName("t1");
    task.setWorkflowInstanceId(workflow.getWorkflowId());
    task.setTaskDefName("task1");
    task.setStatus(TaskModel.Status.IN_PROGRESS);
    tasks.add(task);
    workflow.setTasks(tasks);

    when(executionDAOFacade.getWorkflowModel(anyString(), anyBoolean())).thenReturn(workflow);
    AtomicInteger updateWorkflowCalledCounter = new AtomicInteger(0);
    doAnswer(
                    invocation -> {
                        updateWorkflowCalledCounter.incrementAndGet();
                        return null;
                    })
            .when(executionDAOFacade)
            .updateWorkflow(any());
    // Simulate the external payload storage upload blowing up.
    doThrow(new RuntimeException("any exception"))
            .when(externalPayloadStorageUtils)
            .verifyAndUpload(workflow, ExternalPayloadStorage.PayloadType.WORKFLOW_OUTPUT);

    workflowExecutor.terminateWorkflow(workflow.getWorkflowId(), "reason");
    assertEquals(WorkflowModel.Status.TERMINATED, workflow.getStatus());
    assertEquals(1, updateWorkflowCalledCounter.get());
    verify(workflowStatusListener, times(1))
            .onWorkflowTerminatedIfEnabled(any(WorkflowModel.class));
}
/**
 * Queue-removal failures must be swallowed during termination: the workflow still
 * ends TERMINATED, is persisted once, and the termination listener is notified.
 */
@Test
public void testQueueExceptionsIgnoredDuringTerminateWorkflow() {
    WorkflowDef def = new WorkflowDef();
    def.setName("test");
    def.setWorkflowStatusListenerEnabled(true);

    WorkflowModel workflow = new WorkflowModel();
    workflow.setWorkflowDefinition(def);
    workflow.setWorkflowId("1");
    workflow.setStatus(WorkflowModel.Status.RUNNING);
    workflow.setOwnerApp("junit_test");
    workflow.setCreateTime(10L);
    workflow.setEndTime(100L);
    // Type-safe replacement for the raw Collections.EMPTY_MAP; removes the need
    // for @SuppressWarnings("unchecked") on this test.
    workflow.setOutput(Collections.emptyMap());

    when(executionDAOFacade.getWorkflowModel(anyString(), anyBoolean())).thenReturn(workflow);
    AtomicInteger updateWorkflowCalledCounter = new AtomicInteger(0);
    doAnswer(
                    invocation -> {
                        updateWorkflowCalledCounter.incrementAndGet();
                        return null;
                    })
            .when(executionDAOFacade)
            .updateWorkflow(any());
    AtomicInteger updateTasksCalledCounter = new AtomicInteger(0);
    doAnswer(
                    invocation -> {
                        updateTasksCalledCounter.incrementAndGet();
                        return null;
                    })
            .when(executionDAOFacade)
            .updateTasks(any());
    // Simulate the queue entry removal failing.
    doThrow(new RuntimeException()).when(queueDAO).remove(anyString(), anyString());

    workflowExecutor.terminateWorkflow("workflowId", "reason");
    assertEquals(WorkflowModel.Status.TERMINATED, workflow.getStatus());
    assertEquals(1, updateWorkflowCalledCounter.get());
    verify(workflowStatusListener, times(1))
            .onWorkflowTerminatedIfEnabled(any(WorkflowModel.class));
}
// Restarts a FAILED workflow twice: first against its embedded definition, then
// against the latest definition version, checking state resets and persistence.
@Test
public void testRestartWorkflow() {
WorkflowTask workflowTask = new WorkflowTask();
workflowTask.setName("test_task");
workflowTask.setTaskReferenceName("task_ref");
WorkflowDef workflowDef = new WorkflowDef();
workflowDef.setName("testDef");
workflowDef.setVersion(1);
workflowDef.setRestartable(true);
workflowDef.getTasks().add(workflowTask);
// Two failed attempts of the same task in the workflow being restarted.
TaskModel task_1 = new TaskModel();
task_1.setTaskId(UUID.randomUUID().toString());
task_1.setSeq(1);
task_1.setStatus(TaskModel.Status.FAILED);
task_1.setTaskDefName(workflowTask.getName());
task_1.setReferenceTaskName(workflowTask.getTaskReferenceName());
TaskModel task_2 = new TaskModel();
task_2.setTaskId(UUID.randomUUID().toString());
task_2.setSeq(2);
task_2.setStatus(TaskModel.Status.FAILED);
task_2.setTaskDefName(workflowTask.getName());
task_2.setReferenceTaskName(workflowTask.getTaskReferenceName());
WorkflowModel workflow = new WorkflowModel();
workflow.setWorkflowDefinition(workflowDef);
workflow.setWorkflowId("test-workflow-id");
workflow.getTasks().addAll(Arrays.asList(task_1, task_2));
workflow.setStatus(WorkflowModel.Status.FAILED);
workflow.setEndTime(500);
workflow.setLastRetriedTime(100);
when(executionDAOFacade.getWorkflowModel(anyString(), anyBoolean())).thenReturn(workflow);
doNothing().when(executionDAOFacade).removeTask(any());
when(metadataDAO.getWorkflowDef(workflow.getWorkflowName(), workflow.getWorkflowVersion()))
.thenReturn(Optional.of(workflowDef));
when(metadataDAO.getTaskDef(workflowTask.getName())).thenReturn(new TaskDef());
when(executionDAOFacade.updateWorkflow(any())).thenReturn("");
// Restart using the workflow's current definition (useLatestDefinitions=false).
workflowExecutor.restart(workflow.getWorkflowId(), false);
assertEquals(WorkflowModel.Status.FAILED, workflow.getPreviousStatus());
assertEquals(WorkflowModel.Status.RUNNING, workflow.getStatus());
// End time and last-retried time must be cleared on restart.
assertEquals(0, workflow.getEndTime());
assertEquals(0, workflow.getLastRetriedTime());
verify(metadataDAO, never()).getLatestWorkflowDef(any());
ArgumentCaptor<WorkflowModel> argumentCaptor = ArgumentCaptor.forClass(WorkflowModel.class);
verify(executionDAOFacade, times(1)).createWorkflow(argumentCaptor.capture());
assertEquals(
workflow.getWorkflowId(), argumentCaptor.getAllValues().get(0).getWorkflowId());
assertEquals(
workflow.getWorkflowDefinition(),
argumentCaptor.getAllValues().get(0).getWorkflowDefinition());
// add a new version of the workflow definition and restart with latest
workflow.setStatus(WorkflowModel.Status.COMPLETED);
workflow.setEndTime(500);
workflow.setLastRetriedTime(100);
workflowDef = new WorkflowDef();
workflowDef.setName("testDef");
workflowDef.setVersion(2);
workflowDef.setRestartable(true);
workflowDef.getTasks().addAll(Collections.singletonList(workflowTask));
when(metadataDAO.getLatestWorkflowDef(workflow.getWorkflowName()))
.thenReturn(Optional.of(workflowDef));
// Restart using the latest definition (useLatestDefinitions=true).
workflowExecutor.restart(workflow.getWorkflowId(), true);
assertEquals(WorkflowModel.Status.COMPLETED, workflow.getPreviousStatus());
assertEquals(WorkflowModel.Status.RUNNING, workflow.getStatus());
assertEquals(0, workflow.getEndTime());
assertEquals(0, workflow.getLastRetriedTime());
verify(metadataDAO, times(1)).getLatestWorkflowDef(anyString());
argumentCaptor = ArgumentCaptor.forClass(WorkflowModel.class);
verify(executionDAOFacade, times(2)).createWorkflow(argumentCaptor.capture());
assertEquals(
workflow.getWorkflowId(), argumentCaptor.getAllValues().get(1).getWorkflowId());
// The second restart must have been created with the v2 definition.
assertEquals(workflowDef, argumentCaptor.getAllValues().get(1).getWorkflowDefinition());
}
/** Retrying a workflow that is still RUNNING must be rejected with NotFoundException. */
@Test(expected = NotFoundException.class)
public void testRetryNonTerminalWorkflow() {
    String workflowId = "testRetryNonTerminalWorkflow";
    WorkflowModel runningWorkflow = new WorkflowModel();
    runningWorkflow.setWorkflowId(workflowId);
    runningWorkflow.setStatus(WorkflowModel.Status.RUNNING);
    // Any lookup returns the non-terminal workflow.
    when(executionDAOFacade.getWorkflowModel(anyString(), anyBoolean()))
            .thenReturn(runningWorkflow);
    workflowExecutor.retry(workflowId, false);
}
/** Retrying a terminal workflow that has no tasks at all must raise ConflictException. */
@Test(expected = ConflictException.class)
public void testRetryWorkflowNoTasks() {
    WorkflowModel failedWorkflow = new WorkflowModel();
    failedWorkflow.setWorkflowId("ApplicationException");
    failedWorkflow.setStatus(WorkflowModel.Status.FAILED);
    failedWorkflow.setTasks(Collections.emptyList());
    // Any lookup returns the task-less failed workflow.
    when(executionDAOFacade.getWorkflowModel(anyString(), anyBoolean()))
            .thenReturn(failedWorkflow);
    workflowExecutor.retry(failedWorkflow.getWorkflowId(), false);
}
/**
 * Retrying a FAILED workflow whose latest task attempt is not in a retryable state
 * (the only FAILED attempt was already retried and its retry COMPLETED) must be
 * rejected with ConflictException.
 */
@Test(expected = ConflictException.class)
public void testRetryWorkflowNoFailedTasks() {
    // setup
    WorkflowModel workflow = new WorkflowModel();
    workflow.setWorkflowId("testRetryWorkflowId");
    WorkflowDef workflowDef = new WorkflowDef();
    workflowDef.setName("testRetryWorkflowId");
    workflowDef.setVersion(1);
    workflow.setWorkflowDefinition(workflowDef);
    workflow.setOwnerApp("junit_testRetryWorkflowId");
    workflow.setCreateTime(10L);
    workflow.setEndTime(100L);
    // Type-safe replacement for the raw Collections.EMPTY_MAP (no warning to
    // suppress with //noinspection anymore).
    workflow.setOutput(Collections.emptyMap());
    workflow.setStatus(WorkflowModel.Status.FAILED);

    // First attempt of task1_ref1 failed...
    TaskModel task_1_1 = new TaskModel();
    task_1_1.setTaskId(UUID.randomUUID().toString());
    task_1_1.setSeq(1);
    task_1_1.setRetryCount(0);
    task_1_1.setTaskType(TaskType.SIMPLE.toString());
    task_1_1.setStatus(TaskModel.Status.FAILED);
    task_1_1.setTaskDefName("task1");
    task_1_1.setReferenceTaskName("task1_ref1");

    // ...but its retry (retryCount 1) completed, leaving nothing retryable.
    TaskModel task_1_2 = new TaskModel();
    task_1_2.setTaskId(UUID.randomUUID().toString());
    task_1_2.setSeq(2);
    task_1_2.setRetryCount(1);
    task_1_2.setTaskType(TaskType.SIMPLE.toString());
    task_1_2.setStatus(TaskModel.Status.COMPLETED);
    task_1_2.setTaskDefName("task1");
    task_1_2.setReferenceTaskName("task1_ref1");

    workflow.getTasks().addAll(Arrays.asList(task_1_1, task_1_2));
    // end of setup

    // when:
    when(executionDAOFacade.getWorkflowModel(anyString(), anyBoolean())).thenReturn(workflow);
    when(metadataDAO.getWorkflowDef(anyString(), anyInt()))
            .thenReturn(Optional.of(new WorkflowDef()));
    workflowExecutor.retry(workflow.getWorkflowId(), false);
}
@Test
public void testRetryWorkflow() {
// setup
WorkflowModel workflow = new WorkflowModel();
workflow.setWorkflowId("testRetryWorkflowId");
WorkflowDef workflowDef = new WorkflowDef();
workflowDef.setName("testRetryWorkflowId");
workflowDef.setVersion(1);
workflow.setWorkflowDefinition(workflowDef);
workflow.setOwnerApp("junit_testRetryWorkflowId");
workflow.setCreateTime(10L);
workflow.setEndTime(100L);
//noinspection unchecked
workflow.setOutput(Collections.EMPTY_MAP);
workflow.setStatus(WorkflowModel.Status.FAILED);
AtomicInteger updateWorkflowCalledCounter = new AtomicInteger(0);
doAnswer(
invocation -> {
updateWorkflowCalledCounter.incrementAndGet();
return null;
})
.when(executionDAOFacade)
.updateWorkflow(any());
AtomicInteger updateTasksCalledCounter = new AtomicInteger(0);
doAnswer(
invocation -> {
updateTasksCalledCounter.incrementAndGet();
return null;
})
.when(executionDAOFacade)
.updateTasks(any());
AtomicInteger updateTaskCalledCounter = new AtomicInteger(0);
doAnswer(
invocation -> {
updateTaskCalledCounter.incrementAndGet();
return null;
})
.when(executionDAOFacade)
.updateTask(any());
// add 2 failed task in 2 forks and 1 cancelled in the 3rd fork
TaskModel task_1_1 = new TaskModel();
task_1_1.setTaskId(UUID.randomUUID().toString());
task_1_1.setSeq(20);
task_1_1.setRetryCount(1);
task_1_1.setTaskType(TaskType.SIMPLE.toString());
task_1_1.setStatus(TaskModel.Status.CANCELED);
task_1_1.setRetried(true);
task_1_1.setTaskDefName("task1");
task_1_1.setWorkflowTask(new WorkflowTask());
task_1_1.setReferenceTaskName("task1_ref1");
TaskModel task_1_2 = new TaskModel();
task_1_2.setTaskId(UUID.randomUUID().toString());
task_1_2.setSeq(21);
task_1_2.setRetryCount(1);
task_1_2.setTaskType(TaskType.SIMPLE.toString());
task_1_2.setStatus(TaskModel.Status.FAILED);
task_1_2.setTaskDefName("task1");
task_1_2.setWorkflowTask(new WorkflowTask());
task_1_2.setReferenceTaskName("task1_ref1");
TaskModel task_2_1 = new TaskModel();
task_2_1.setTaskId(UUID.randomUUID().toString());
task_2_1.setSeq(22);
task_2_1.setRetryCount(1);
task_2_1.setStatus(TaskModel.Status.FAILED);
task_2_1.setTaskType(TaskType.SIMPLE.toString());
task_2_1.setTaskDefName("task2");
task_2_1.setWorkflowTask(new WorkflowTask());
task_2_1.setReferenceTaskName("task2_ref1");
TaskModel task_3_1 = new TaskModel();
task_3_1.setTaskId(UUID.randomUUID().toString());
task_3_1.setSeq(23);
task_3_1.setRetryCount(1);
task_3_1.setStatus(TaskModel.Status.CANCELED);
task_3_1.setTaskType(TaskType.SIMPLE.toString());
task_3_1.setTaskDefName("task3");
task_3_1.setWorkflowTask(new WorkflowTask());
task_3_1.setReferenceTaskName("task3_ref1");
TaskModel task_4_1 = new TaskModel();
task_4_1.setTaskId(UUID.randomUUID().toString());
task_4_1.setSeq(122);
task_4_1.setRetryCount(1);
task_4_1.setStatus(TaskModel.Status.FAILED);
task_4_1.setTaskType(TaskType.SIMPLE.toString());
task_4_1.setTaskDefName("task1");
task_4_1.setWorkflowTask(new WorkflowTask());
task_4_1.setReferenceTaskName("task4_refABC");
workflow.getTasks().addAll(Arrays.asList(task_1_1, task_1_2, task_2_1, task_3_1, task_4_1));
// end of setup
// when:
when(executionDAOFacade.getWorkflowModel(anyString(), anyBoolean())).thenReturn(workflow);
when(metadataDAO.getWorkflowDef(anyString(), anyInt()))
.thenReturn(Optional.of(new WorkflowDef()));
workflowExecutor.retry(workflow.getWorkflowId(), false);
// then:
assertEquals(WorkflowModel.Status.FAILED, workflow.getPreviousStatus());
assertEquals(WorkflowModel.Status.RUNNING, workflow.getStatus());
assertEquals(1, updateWorkflowCalledCounter.get());
assertEquals(1, updateTasksCalledCounter.get());
assertEquals(0, updateTaskCalledCounter.get());
}
@Test
public void testRetryWorkflowReturnsNoDuplicates() {
// setup
WorkflowModel workflow = new WorkflowModel();
workflow.setWorkflowId("testRetryWorkflowId");
WorkflowDef workflowDef = new WorkflowDef();
workflowDef.setName("testRetryWorkflowId");
workflowDef.setVersion(1);
workflow.setWorkflowDefinition(workflowDef);
workflow.setOwnerApp("junit_testRetryWorkflowId");
workflow.setCreateTime(10L);
workflow.setEndTime(100L);
//noinspection unchecked
workflow.setOutput(Collections.EMPTY_MAP);
workflow.setStatus(WorkflowModel.Status.FAILED);
TaskModel task_1_1 = new TaskModel();
task_1_1.setTaskId(UUID.randomUUID().toString());
task_1_1.setSeq(10);
task_1_1.setRetryCount(0);
task_1_1.setTaskType(TaskType.SIMPLE.toString());
task_1_1.setStatus(TaskModel.Status.FAILED);
task_1_1.setTaskDefName("task1");
task_1_1.setWorkflowTask(new WorkflowTask());
task_1_1.setReferenceTaskName("task1_ref1");
TaskModel task_1_2 = new TaskModel();
task_1_2.setTaskId(UUID.randomUUID().toString());
task_1_2.setSeq(11);
task_1_2.setRetryCount(1);
task_1_2.setTaskType(TaskType.SIMPLE.toString());
task_1_2.setStatus(TaskModel.Status.COMPLETED);
task_1_2.setTaskDefName("task1");
task_1_2.setWorkflowTask(new WorkflowTask());
task_1_2.setReferenceTaskName("task1_ref1");
TaskModel task_2_1 = new TaskModel();
task_2_1.setTaskId(UUID.randomUUID().toString());
task_2_1.setSeq(21);
task_2_1.setRetryCount(0);
task_2_1.setStatus(TaskModel.Status.CANCELED);
task_2_1.setTaskType(TaskType.SIMPLE.toString());
task_2_1.setTaskDefName("task2");
task_2_1.setWorkflowTask(new WorkflowTask());
task_2_1.setReferenceTaskName("task2_ref1");
TaskModel task_3_1 = new TaskModel();
task_3_1.setTaskId(UUID.randomUUID().toString());
task_3_1.setSeq(31);
task_3_1.setRetryCount(1);
task_3_1.setStatus(TaskModel.Status.FAILED_WITH_TERMINAL_ERROR);
task_3_1.setTaskType(TaskType.SIMPLE.toString());
task_3_1.setTaskDefName("task1");
task_3_1.setWorkflowTask(new WorkflowTask());
task_3_1.setReferenceTaskName("task3_ref1");
TaskModel task_4_1 = new TaskModel();
task_4_1.setTaskId(UUID.randomUUID().toString());
task_4_1.setSeq(41);
task_4_1.setRetryCount(0);
task_4_1.setStatus(TaskModel.Status.TIMED_OUT);
task_4_1.setTaskType(TaskType.SIMPLE.toString());
task_4_1.setTaskDefName("task1");
task_4_1.setWorkflowTask(new WorkflowTask());
task_4_1.setReferenceTaskName("task4_ref1");
workflow.getTasks().addAll(Arrays.asList(task_1_1, task_1_2, task_2_1, task_3_1, task_4_1));
// end of setup
// when:
when(executionDAOFacade.getWorkflowModel(anyString(), anyBoolean())).thenReturn(workflow);
when(metadataDAO.getWorkflowDef(anyString(), anyInt()))
.thenReturn(Optional.of(new WorkflowDef()));
workflowExecutor.retry(workflow.getWorkflowId(), false);
assertEquals(8, workflow.getTasks().size());
}
@Test
public void testRetryWorkflowMultipleRetries() {
// setup
WorkflowModel workflow = new WorkflowModel();
workflow.setWorkflowId("testRetryWorkflowId");
WorkflowDef workflowDef = new WorkflowDef();
workflowDef.setName("testRetryWorkflowId");
workflowDef.setVersion(1);
workflow.setWorkflowDefinition(workflowDef);
workflow.setOwnerApp("junit_testRetryWorkflowId");
workflow.setCreateTime(10L);
workflow.setEndTime(100L);
//noinspection unchecked
workflow.setOutput(Collections.EMPTY_MAP);
workflow.setStatus(WorkflowModel.Status.FAILED);
TaskModel task_1_1 = new TaskModel();
task_1_1.setTaskId(UUID.randomUUID().toString());
task_1_1.setSeq(10);
task_1_1.setRetryCount(0);
task_1_1.setTaskType(TaskType.SIMPLE.toString());
task_1_1.setStatus(TaskModel.Status.FAILED);
task_1_1.setTaskDefName("task1");
task_1_1.setWorkflowTask(new WorkflowTask());
task_1_1.setReferenceTaskName("task1_ref1");
TaskModel task_2_1 = new TaskModel();
task_2_1.setTaskId(UUID.randomUUID().toString());
task_2_1.setSeq(20);
task_2_1.setRetryCount(0);
task_2_1.setTaskType(TaskType.SIMPLE.toString());
task_2_1.setStatus(TaskModel.Status.CANCELED);
task_2_1.setTaskDefName("task1");
task_2_1.setWorkflowTask(new WorkflowTask());
task_2_1.setReferenceTaskName("task2_ref1");
workflow.getTasks().addAll(Arrays.asList(task_1_1, task_2_1));
// end of setup
// when:
when(executionDAOFacade.getWorkflowModel(anyString(), anyBoolean())).thenReturn(workflow);
when(metadataDAO.getWorkflowDef(anyString(), anyInt()))
.thenReturn(Optional.of(new WorkflowDef()));
workflowExecutor.retry(workflow.getWorkflowId(), false);
assertEquals(4, workflow.getTasks().size());
// Reset Last Workflow Task to FAILED.
TaskModel lastTask =
workflow.getTasks().stream()
.filter(t -> t.getReferenceTaskName().equals("task1_ref1"))
.collect(
groupingBy(
TaskModel::getReferenceTaskName,
maxBy(comparingInt(TaskModel::getSeq))))
.values()
.stream()
.map(Optional::get)
.collect(Collectors.toList())
.get(0);
lastTask.setStatus(TaskModel.Status.FAILED);
workflow.setStatus(WorkflowModel.Status.FAILED);
workflowExecutor.retry(workflow.getWorkflowId(), false);
assertEquals(5, workflow.getTasks().size());
// Reset Last Workflow Task to FAILED.
// Reset Last Workflow Task to FAILED.
TaskModel lastTask2 =
workflow.getTasks().stream()
.filter(t -> t.getReferenceTaskName().equals("task1_ref1"))
.collect(
groupingBy(
TaskModel::getReferenceTaskName,
maxBy(comparingInt(TaskModel::getSeq))))
.values()
.stream()
.map(Optional::get)
.collect(Collectors.toList())
.get(0);
lastTask2.setStatus(TaskModel.Status.FAILED);
workflow.setStatus(WorkflowModel.Status.FAILED);
workflowExecutor.retry(workflow.getWorkflowId(), false);
assertEquals(6, workflow.getTasks().size());
}
@Test
public void testRetryWorkflowWithJoinTask() {
// setup
WorkflowModel workflow = new WorkflowModel();
workflow.setWorkflowId("testRetryWorkflowId");
WorkflowDef workflowDef = new WorkflowDef();
workflowDef.setName("testRetryWorkflowId");
workflowDef.setVersion(1);
workflow.setWorkflowDefinition(workflowDef);
workflow.setOwnerApp("junit_testRetryWorkflowId");
workflow.setCreateTime(10L);
workflow.setEndTime(100L);
//noinspection unchecked
workflow.setOutput(Collections.EMPTY_MAP);
workflow.setStatus(WorkflowModel.Status.FAILED);
TaskModel forkTask = new TaskModel();
forkTask.setTaskType(TaskType.FORK_JOIN.toString());
forkTask.setTaskId(UUID.randomUUID().toString());
forkTask.setSeq(1);
forkTask.setRetryCount(1);
forkTask.setStatus(TaskModel.Status.COMPLETED);
forkTask.setReferenceTaskName("task_fork");
TaskModel task_1_1 = new TaskModel();
task_1_1.setTaskId(UUID.randomUUID().toString());
task_1_1.setSeq(20);
task_1_1.setRetryCount(1);
task_1_1.setTaskType(TaskType.SIMPLE.toString());
task_1_1.setStatus(TaskModel.Status.FAILED);
task_1_1.setTaskDefName("task1");
task_1_1.setWorkflowTask(new WorkflowTask());
task_1_1.setReferenceTaskName("task1_ref1");
TaskModel task_2_1 = new TaskModel();
task_2_1.setTaskId(UUID.randomUUID().toString());
task_2_1.setSeq(22);
task_2_1.setRetryCount(1);
task_2_1.setStatus(TaskModel.Status.CANCELED);
task_2_1.setTaskType(TaskType.SIMPLE.toString());
task_2_1.setTaskDefName("task2");
task_2_1.setWorkflowTask(new WorkflowTask());
task_2_1.setReferenceTaskName("task2_ref1");
TaskModel joinTask = new TaskModel();
joinTask.setTaskType(TaskType.JOIN.toString());
joinTask.setTaskId(UUID.randomUUID().toString());
joinTask.setSeq(25);
joinTask.setRetryCount(1);
joinTask.setStatus(TaskModel.Status.CANCELED);
joinTask.setReferenceTaskName("task_join");
joinTask.getInputData()
.put(
"joinOn",
Arrays.asList(
task_1_1.getReferenceTaskName(), task_2_1.getReferenceTaskName()));
workflow.getTasks().addAll(Arrays.asList(forkTask, task_1_1, task_2_1, joinTask));
// end of setup
// when:
when(executionDAOFacade.getWorkflowModel(anyString(), anyBoolean())).thenReturn(workflow);
when(metadataDAO.getWorkflowDef(anyString(), anyInt()))
.thenReturn(Optional.of(new WorkflowDef()));
workflowExecutor.retry(workflow.getWorkflowId(), false);
assertEquals(6, workflow.getTasks().size());
assertEquals(WorkflowModel.Status.FAILED, workflow.getPreviousStatus());
assertEquals(WorkflowModel.Status.RUNNING, workflow.getStatus());
}
@Test
public void testRetryFromLastFailedSubWorkflowTaskThenStartWithLastFailedTask() {
IDGenerator idGenerator = new IDGenerator();
// given
String id = idGenerator.generate();
String workflowInstanceId = idGenerator.generate();
TaskModel task = new TaskModel();
task.setTaskType(TaskType.SIMPLE.name());
task.setTaskDefName("task");
task.setReferenceTaskName("task_ref");
task.setWorkflowInstanceId(workflowInstanceId);
task.setScheduledTime(System.currentTimeMillis());
task.setTaskId(idGenerator.generate());
task.setStatus(TaskModel.Status.COMPLETED);
task.setRetryCount(0);
task.setWorkflowTask(new WorkflowTask());
task.setOutputData(new HashMap<>());
task.setSubWorkflowId(id);
task.setSeq(1);
TaskModel task1 = new TaskModel();
task1.setTaskType(TaskType.SIMPLE.name());
task1.setTaskDefName("task1");
task1.setReferenceTaskName("task1_ref");
task1.setWorkflowInstanceId(workflowInstanceId);
task1.setScheduledTime(System.currentTimeMillis());
task1.setTaskId(idGenerator.generate());
task1.setStatus(TaskModel.Status.FAILED);
task1.setRetryCount(0);
task1.setWorkflowTask(new WorkflowTask());
task1.setOutputData(new HashMap<>());
task1.setSubWorkflowId(id);
task1.setSeq(2);
WorkflowModel subWorkflow = new WorkflowModel();
subWorkflow.setWorkflowId(id);
WorkflowDef workflowDef = new WorkflowDef();
workflowDef.setName("subworkflow");
workflowDef.setVersion(1);
subWorkflow.setWorkflowDefinition(workflowDef);
subWorkflow.setStatus(WorkflowModel.Status.FAILED);
subWorkflow.getTasks().addAll(Arrays.asList(task, task1));
subWorkflow.setParentWorkflowId("testRunWorkflowId");
TaskModel task2 = new TaskModel();
task2.setWorkflowInstanceId(subWorkflow.getWorkflowId());
task2.setScheduledTime(System.currentTimeMillis());
task2.setTaskId(idGenerator.generate());
task2.setStatus(TaskModel.Status.FAILED);
task2.setRetryCount(0);
task2.setOutputData(new HashMap<>());
task2.setSubWorkflowId(id);
task2.setTaskType(TaskType.SUB_WORKFLOW.name());
WorkflowModel workflow = new WorkflowModel();
workflow.setWorkflowId("testRunWorkflowId");
workflow.setStatus(WorkflowModel.Status.FAILED);
workflow.setTasks(Collections.singletonList(task2));
workflowDef = new WorkflowDef();
workflowDef.setName("first_workflow");
workflow.setWorkflowDefinition(workflowDef);
// when
when(executionDAOFacade.getWorkflowModel(workflow.getWorkflowId(), true))
.thenReturn(workflow);
when(executionDAOFacade.getWorkflowModel(task.getSubWorkflowId(), true))
.thenReturn(subWorkflow);
when(metadataDAO.getWorkflowDef(anyString(), anyInt()))
.thenReturn(Optional.of(workflowDef));
when(executionDAOFacade.getTaskModel(subWorkflow.getParentWorkflowTaskId()))
.thenReturn(task1);
when(executionDAOFacade.getWorkflowModel(subWorkflow.getParentWorkflowId(), false))
.thenReturn(workflow);
workflowExecutor.retry(workflow.getWorkflowId(), true);
// then
assertEquals(task.getStatus(), TaskModel.Status.COMPLETED);
assertEquals(task1.getStatus(), TaskModel.Status.IN_PROGRESS);
assertEquals(workflow.getPreviousStatus(), WorkflowModel.Status.FAILED);
assertEquals(workflow.getStatus(), WorkflowModel.Status.RUNNING);
assertEquals(subWorkflow.getPreviousStatus(), WorkflowModel.Status.FAILED);
assertEquals(subWorkflow.getStatus(), WorkflowModel.Status.RUNNING);
}
@Test
public void testRetryTimedOutWorkflowWithoutFailedTasks() {
// setup
WorkflowModel workflow = new WorkflowModel();
workflow.setWorkflowId("testRetryWorkflowId");
WorkflowDef workflowDef = new WorkflowDef();
workflowDef.setName("testRetryWorkflowId");
workflowDef.setVersion(1);
workflow.setWorkflowDefinition(workflowDef);
workflow.setOwnerApp("junit_testRetryWorkflowId");
workflow.setCreateTime(10L);
workflow.setEndTime(100L);
//noinspection unchecked
workflow.setOutput(Collections.EMPTY_MAP);
workflow.setStatus(WorkflowModel.Status.TIMED_OUT);
TaskModel task_1_1 = new TaskModel();
task_1_1.setTaskId(UUID.randomUUID().toString());
task_1_1.setSeq(20);
task_1_1.setRetryCount(1);
task_1_1.setTaskType(TaskType.SIMPLE.toString());
task_1_1.setStatus(TaskModel.Status.COMPLETED);
task_1_1.setRetried(true);
task_1_1.setTaskDefName("task1");
task_1_1.setWorkflowTask(new WorkflowTask());
task_1_1.setReferenceTaskName("task1_ref1");
TaskModel task_2_1 = new TaskModel();
task_2_1.setTaskId(UUID.randomUUID().toString());
task_2_1.setSeq(22);
task_2_1.setRetryCount(1);
task_2_1.setStatus(TaskModel.Status.COMPLETED);
task_2_1.setTaskType(TaskType.SIMPLE.toString());
task_2_1.setTaskDefName("task2");
task_2_1.setWorkflowTask(new WorkflowTask());
task_2_1.setReferenceTaskName("task2_ref1");
workflow.getTasks().addAll(Arrays.asList(task_1_1, task_2_1));
AtomicInteger updateWorkflowCalledCounter = new AtomicInteger(0);
doAnswer(
invocation -> {
updateWorkflowCalledCounter.incrementAndGet();
return null;
})
.when(executionDAOFacade)
.updateWorkflow(any());
AtomicInteger updateTasksCalledCounter = new AtomicInteger(0);
doAnswer(
invocation -> {
updateTasksCalledCounter.incrementAndGet();
return null;
})
.when(executionDAOFacade)
.updateTasks(any());
// end of setup
// when
when(executionDAOFacade.getWorkflowModel(anyString(), anyBoolean())).thenReturn(workflow);
when(metadataDAO.getWorkflowDef(anyString(), anyInt()))
.thenReturn(Optional.of(new WorkflowDef()));
workflowExecutor.retry(workflow.getWorkflowId(), false);
// then
assertEquals(WorkflowModel.Status.TIMED_OUT, workflow.getPreviousStatus());
assertEquals(WorkflowModel.Status.RUNNING, workflow.getStatus());
assertTrue(workflow.getLastRetriedTime() > 0);
assertEquals(1, updateWorkflowCalledCounter.get());
assertEquals(1, updateTasksCalledCounter.get());
}
@Test(expected = ConflictException.class)
public void testRerunNonTerminalWorkflow() {
WorkflowModel workflow = new WorkflowModel();
workflow.setWorkflowId("testRetryNonTerminalWorkflow");
workflow.setStatus(WorkflowModel.Status.RUNNING);
when(executionDAOFacade.getWorkflowModel(anyString(), anyBoolean())).thenReturn(workflow);
RerunWorkflowRequest rerunWorkflowRequest = new RerunWorkflowRequest();
rerunWorkflowRequest.setReRunFromWorkflowId(workflow.getWorkflowId());
workflowExecutor.rerun(rerunWorkflowRequest);
}
@Test
public void testRerunWorkflow() {
// setup
WorkflowModel workflow = new WorkflowModel();
workflow.setWorkflowId("testRerunWorkflowId");
WorkflowDef workflowDef = new WorkflowDef();
workflowDef.setName("testRerunWorkflowId");
workflowDef.setVersion(1);
workflow.setWorkflowDefinition(workflowDef);
workflow.setOwnerApp("junit_testRerunWorkflowId");
workflow.setCreateTime(10L);
workflow.setEndTime(100L);
//noinspection unchecked
workflow.setOutput(Collections.EMPTY_MAP);
workflow.setStatus(WorkflowModel.Status.FAILED);
workflow.setReasonForIncompletion("task1 failed");
workflow.setFailedReferenceTaskNames(
new HashSet<>() {
{
add("task1_ref1");
}
});
workflow.setFailedTaskNames(
new HashSet<>() {
{
add("task1");
}
});
TaskModel task_1_1 = new TaskModel();
task_1_1.setTaskId(UUID.randomUUID().toString());
task_1_1.setSeq(20);
task_1_1.setRetryCount(1);
task_1_1.setTaskType(TaskType.SIMPLE.toString());
task_1_1.setStatus(TaskModel.Status.FAILED);
task_1_1.setRetried(true);
task_1_1.setTaskDefName("task1");
task_1_1.setWorkflowTask(new WorkflowTask());
task_1_1.setReferenceTaskName("task1_ref1");
TaskModel task_2_1 = new TaskModel();
task_2_1.setTaskId(UUID.randomUUID().toString());
task_2_1.setSeq(22);
task_2_1.setRetryCount(1);
task_2_1.setStatus(TaskModel.Status.CANCELED);
task_2_1.setTaskType(TaskType.SIMPLE.toString());
task_2_1.setTaskDefName("task2");
task_2_1.setWorkflowTask(new WorkflowTask());
task_2_1.setReferenceTaskName("task2_ref1");
workflow.getTasks().addAll(Arrays.asList(task_1_1, task_2_1));
// end of setup
// when:
when(executionDAOFacade.getWorkflowModel(anyString(), anyBoolean())).thenReturn(workflow);
when(metadataDAO.getWorkflowDef(anyString(), anyInt()))
.thenReturn(Optional.of(new WorkflowDef()));
RerunWorkflowRequest rerunWorkflowRequest = new RerunWorkflowRequest();
rerunWorkflowRequest.setReRunFromWorkflowId(workflow.getWorkflowId());
workflowExecutor.rerun(rerunWorkflowRequest);
// when:
when(executionDAOFacade.getWorkflowModel(anyString(), anyBoolean())).thenReturn(workflow);
assertEquals(WorkflowModel.Status.FAILED, workflow.getPreviousStatus());
assertEquals(WorkflowModel.Status.RUNNING, workflow.getStatus());
assertNull(workflow.getReasonForIncompletion());
assertEquals(new HashSet<>(), workflow.getFailedReferenceTaskNames());
assertEquals(new HashSet<>(), workflow.getFailedTaskNames());
}
@Test
public void testRerunSubWorkflow() {
IDGenerator idGenerator = new IDGenerator();
// setup
String parentWorkflowId = idGenerator.generate();
String subWorkflowId = idGenerator.generate();
// sub workflow setup
TaskModel task1 = new TaskModel();
task1.setTaskType(TaskType.SIMPLE.name());
task1.setTaskDefName("task1");
task1.setReferenceTaskName("task1_ref");
task1.setWorkflowInstanceId(subWorkflowId);
task1.setScheduledTime(System.currentTimeMillis());
task1.setTaskId(idGenerator.generate());
task1.setStatus(TaskModel.Status.COMPLETED);
task1.setWorkflowTask(new WorkflowTask());
task1.setOutputData(new HashMap<>());
TaskModel task2 = new TaskModel();
task2.setTaskType(TaskType.SIMPLE.name());
task2.setTaskDefName("task2");
task2.setReferenceTaskName("task2_ref");
task2.setWorkflowInstanceId(subWorkflowId);
task2.setScheduledTime(System.currentTimeMillis());
task2.setTaskId(idGenerator.generate());
task2.setStatus(TaskModel.Status.COMPLETED);
task2.setWorkflowTask(new WorkflowTask());
task2.setOutputData(new HashMap<>());
WorkflowModel subWorkflow = new WorkflowModel();
subWorkflow.setParentWorkflowId(parentWorkflowId);
subWorkflow.setWorkflowId(subWorkflowId);
WorkflowDef subworkflowDef = new WorkflowDef();
subworkflowDef.setName("subworkflow");
subworkflowDef.setVersion(1);
subWorkflow.setWorkflowDefinition(subworkflowDef);
subWorkflow.setOwnerApp("junit_testRerunWorkflowId");
subWorkflow.setStatus(WorkflowModel.Status.COMPLETED);
subWorkflow.getTasks().addAll(Arrays.asList(task1, task2));
// parent workflow setup
TaskModel task = new TaskModel();
task.setWorkflowInstanceId(parentWorkflowId);
task.setScheduledTime(System.currentTimeMillis());
task.setTaskId(idGenerator.generate());
task.setStatus(TaskModel.Status.COMPLETED);
task.setOutputData(new HashMap<>());
task.setSubWorkflowId(subWorkflowId);
task.setTaskType(TaskType.SUB_WORKFLOW.name());
task.setWorkflowTask(new WorkflowTask());
WorkflowModel workflow = new WorkflowModel();
workflow.setWorkflowId(parentWorkflowId);
WorkflowDef workflowDef = new WorkflowDef();
workflowDef.setName("parentworkflow");
workflowDef.setVersion(1);
workflow.setWorkflowDefinition(workflowDef);
workflow.setOwnerApp("junit_testRerunWorkflowId");
workflow.setStatus(WorkflowModel.Status.COMPLETED);
workflow.getTasks().addAll(Arrays.asList(task));
// end of setup
// when:
when(executionDAOFacade.getWorkflowModel(workflow.getWorkflowId(), true))
.thenReturn(workflow);
when(executionDAOFacade.getWorkflowModel(task.getSubWorkflowId(), true))
.thenReturn(subWorkflow);
when(executionDAOFacade.getTaskModel(subWorkflow.getParentWorkflowTaskId()))
.thenReturn(task);
when(executionDAOFacade.getWorkflowModel(subWorkflow.getParentWorkflowId(), false))
.thenReturn(workflow);
RerunWorkflowRequest rerunWorkflowRequest = new RerunWorkflowRequest();
rerunWorkflowRequest.setReRunFromWorkflowId(subWorkflow.getWorkflowId());
workflowExecutor.rerun(rerunWorkflowRequest);
// then:
assertEquals(TaskModel.Status.IN_PROGRESS, task.getStatus());
assertEquals(WorkflowModel.Status.COMPLETED, subWorkflow.getPreviousStatus());
assertEquals(WorkflowModel.Status.RUNNING, subWorkflow.getStatus());
assertEquals(WorkflowModel.Status.COMPLETED, workflow.getPreviousStatus());
assertEquals(WorkflowModel.Status.RUNNING, workflow.getStatus());
}
@Test
public void testRerunWorkflowWithTaskId() {
// setup
WorkflowModel workflow = new WorkflowModel();
workflow.setWorkflowId("testRerunWorkflowId");
WorkflowDef workflowDef = new WorkflowDef();
workflowDef.setName("testRetryWorkflowId");
workflowDef.setVersion(1);
workflow.setWorkflowDefinition(workflowDef);
workflow.setOwnerApp("junit_testRerunWorkflowId");
workflow.setCreateTime(10L);
workflow.setEndTime(100L);
//noinspection unchecked
workflow.setOutput(Collections.EMPTY_MAP);
workflow.setStatus(WorkflowModel.Status.FAILED);
workflow.setReasonForIncompletion("task1 failed");
workflow.setFailedReferenceTaskNames(
new HashSet<>() {
{
add("task1_ref1");
}
});
workflow.setFailedTaskNames(
new HashSet<>() {
{
add("task1");
}
});
TaskModel task_1_1 = new TaskModel();
task_1_1.setTaskId(UUID.randomUUID().toString());
task_1_1.setSeq(20);
task_1_1.setRetryCount(1);
task_1_1.setTaskType(TaskType.SIMPLE.toString());
task_1_1.setStatus(TaskModel.Status.FAILED);
task_1_1.setRetried(true);
task_1_1.setTaskDefName("task1");
task_1_1.setWorkflowTask(new WorkflowTask());
task_1_1.setReferenceTaskName("task1_ref1");
TaskModel task_2_1 = new TaskModel();
task_2_1.setTaskId(UUID.randomUUID().toString());
task_2_1.setSeq(22);
task_2_1.setRetryCount(1);
task_2_1.setStatus(TaskModel.Status.CANCELED);
task_2_1.setTaskType(TaskType.SIMPLE.toString());
task_2_1.setTaskDefName("task2");
task_2_1.setWorkflowTask(new WorkflowTask());
task_2_1.setReferenceTaskName("task2_ref1");
workflow.getTasks().addAll(Arrays.asList(task_1_1, task_2_1));
// end of setup
// when:
when(executionDAOFacade.getWorkflowModel(anyString(), anyBoolean())).thenReturn(workflow);
when(metadataDAO.getWorkflowDef(anyString(), anyInt()))
.thenReturn(Optional.of(new WorkflowDef()));
RerunWorkflowRequest rerunWorkflowRequest = new RerunWorkflowRequest();
rerunWorkflowRequest.setReRunFromWorkflowId(workflow.getWorkflowId());
rerunWorkflowRequest.setReRunFromTaskId(task_1_1.getTaskId());
workflowExecutor.rerun(rerunWorkflowRequest);
// when:
when(executionDAOFacade.getWorkflowModel(anyString(), anyBoolean())).thenReturn(workflow);
assertEquals(WorkflowModel.Status.FAILED, workflow.getPreviousStatus());
assertEquals(WorkflowModel.Status.RUNNING, workflow.getStatus());
assertNull(workflow.getReasonForIncompletion());
assertEquals(new HashSet<>(), workflow.getFailedReferenceTaskNames());
assertEquals(new HashSet<>(), workflow.getFailedTaskNames());
}
@Test
public void testRerunWorkflowWithSyncSystemTaskId() {
IDGenerator idGenerator = new IDGenerator();
// setup
String workflowId = idGenerator.generate();
TaskModel task1 = new TaskModel();
task1.setTaskType(TaskType.SIMPLE.name());
task1.setTaskDefName("task1");
task1.setReferenceTaskName("task1_ref");
task1.setWorkflowInstanceId(workflowId);
task1.setScheduledTime(System.currentTimeMillis());
task1.setTaskId(idGenerator.generate());
task1.setStatus(TaskModel.Status.COMPLETED);
task1.setWorkflowTask(new WorkflowTask());
task1.setOutputData(new HashMap<>());
TaskModel task2 = new TaskModel();
task2.setTaskType(TaskType.JSON_JQ_TRANSFORM.name());
task2.setReferenceTaskName("task2_ref");
task2.setWorkflowInstanceId(workflowId);
task2.setScheduledTime(System.currentTimeMillis());
task2.setTaskId("system-task-id");
task2.setStatus(TaskModel.Status.FAILED);
WorkflowModel workflow = new WorkflowModel();
workflow.setWorkflowId(workflowId);
WorkflowDef workflowDef = new WorkflowDef();
workflowDef.setName("workflow");
workflowDef.setVersion(1);
workflow.setWorkflowDefinition(workflowDef);
workflow.setOwnerApp("junit_testRerunWorkflowId");
workflow.setStatus(WorkflowModel.Status.FAILED);
workflow.setReasonForIncompletion("task2 failed");
workflow.setFailedReferenceTaskNames(
new HashSet<>() {
{
add("task2_ref");
}
});
workflow.setFailedTaskNames(
new HashSet<>() {
{
add("task2");
}
});
workflow.getTasks().addAll(Arrays.asList(task1, task2));
// end of setup
// when:
when(executionDAOFacade.getWorkflowModel(workflow.getWorkflowId(), true))
.thenReturn(workflow);
RerunWorkflowRequest rerunWorkflowRequest = new RerunWorkflowRequest();
rerunWorkflowRequest.setReRunFromWorkflowId(workflow.getWorkflowId());
rerunWorkflowRequest.setReRunFromTaskId(task2.getTaskId());
workflowExecutor.rerun(rerunWorkflowRequest);
// then:
assertEquals(TaskModel.Status.COMPLETED, task2.getStatus());
assertEquals(WorkflowModel.Status.FAILED, workflow.getPreviousStatus());
assertEquals(WorkflowModel.Status.RUNNING, workflow.getStatus());
assertNull(workflow.getReasonForIncompletion());
assertEquals(new HashSet<>(), workflow.getFailedReferenceTaskNames());
assertEquals(new HashSet<>(), workflow.getFailedTaskNames());
}
@Test
public void testRerunSubWorkflowWithTaskId() {
IDGenerator idGenerator = new IDGenerator();
// setup
String parentWorkflowId = idGenerator.generate();
String subWorkflowId = idGenerator.generate();
// sub workflow setup
TaskModel task1 = new TaskModel();
task1.setTaskType(TaskType.SIMPLE.name());
task1.setTaskDefName("task1");
task1.setReferenceTaskName("task1_ref");
task1.setWorkflowInstanceId(subWorkflowId);
task1.setScheduledTime(System.currentTimeMillis());
task1.setTaskId(idGenerator.generate());
task1.setStatus(TaskModel.Status.COMPLETED);
task1.setWorkflowTask(new WorkflowTask());
task1.setOutputData(new HashMap<>());
TaskModel task2 = new TaskModel();
task2.setTaskType(TaskType.SIMPLE.name());
task2.setTaskDefName("task2");
task2.setReferenceTaskName("task2_ref");
task2.setWorkflowInstanceId(subWorkflowId);
task2.setScheduledTime(System.currentTimeMillis());
task2.setTaskId(idGenerator.generate());
task2.setStatus(TaskModel.Status.COMPLETED);
task2.setWorkflowTask(new WorkflowTask());
task2.setOutputData(new HashMap<>());
WorkflowModel subWorkflow = new WorkflowModel();
subWorkflow.setParentWorkflowId(parentWorkflowId);
subWorkflow.setWorkflowId(subWorkflowId);
WorkflowDef subworkflowDef = new WorkflowDef();
subworkflowDef.setName("subworkflow");
subworkflowDef.setVersion(1);
subWorkflow.setWorkflowDefinition(subworkflowDef);
subWorkflow.setOwnerApp("junit_testRerunWorkflowId");
subWorkflow.setStatus(WorkflowModel.Status.COMPLETED);
subWorkflow.getTasks().addAll(Arrays.asList(task1, task2));
// parent workflow setup
TaskModel task = new TaskModel();
task.setWorkflowInstanceId(parentWorkflowId);
task.setScheduledTime(System.currentTimeMillis());
task.setTaskId(idGenerator.generate());
task.setStatus(TaskModel.Status.COMPLETED);
task.setOutputData(new HashMap<>());
task.setSubWorkflowId(subWorkflowId);
task.setTaskType(TaskType.SUB_WORKFLOW.name());
task.setWorkflowTask(new WorkflowTask());
WorkflowModel workflow = new WorkflowModel();
workflow.setWorkflowId(parentWorkflowId);
WorkflowDef workflowDef = new WorkflowDef();
workflowDef.setName("parentworkflow");
workflowDef.setVersion(1);
workflow.setWorkflowDefinition(workflowDef);
workflow.setOwnerApp("junit_testRerunWorkflowId");
workflow.setStatus(WorkflowModel.Status.COMPLETED);
workflow.getTasks().addAll(Arrays.asList(task));
// end of setup
// when:
when(executionDAOFacade.getWorkflowModel(workflow.getWorkflowId(), true))
.thenReturn(workflow);
when(executionDAOFacade.getWorkflowModel(task.getSubWorkflowId(), true))
.thenReturn(subWorkflow);
when(executionDAOFacade.getTaskModel(subWorkflow.getParentWorkflowTaskId()))
.thenReturn(task);
when(executionDAOFacade.getWorkflowModel(subWorkflow.getParentWorkflowId(), false))
.thenReturn(workflow);
RerunWorkflowRequest rerunWorkflowRequest = new RerunWorkflowRequest();
rerunWorkflowRequest.setReRunFromWorkflowId(subWorkflow.getWorkflowId());
rerunWorkflowRequest.setReRunFromTaskId(task2.getTaskId());
workflowExecutor.rerun(rerunWorkflowRequest);
// then:
assertEquals(TaskModel.Status.SCHEDULED, task2.getStatus());
assertEquals(TaskModel.Status.IN_PROGRESS, task.getStatus());
assertEquals(WorkflowModel.Status.COMPLETED, subWorkflow.getPreviousStatus());
assertEquals(WorkflowModel.Status.RUNNING, subWorkflow.getStatus());
assertEquals(WorkflowModel.Status.COMPLETED, workflow.getPreviousStatus());
assertEquals(WorkflowModel.Status.RUNNING, workflow.getStatus());
}
    @Test
    public void testGetActiveDomain() throws Exception {
        // Exercises active-domain selection over an ordered domain list: a domain
        // with recent poll data is preferred; selection falls through otherwise.
        String taskType = "test-task";
        String[] domains = new String[] {"domain1", "domain2"};
        // domain1 polled ~99s ago — presumably within the active window, so it wins
        PollData pollData1 =
                new PollData(
                        "queue1", domains[0], "worker1", System.currentTimeMillis() - 99 * 1000);
        when(executionDAOFacade.getTaskPollDataByDomain(taskType, domains[0]))
                .thenReturn(pollData1);
        String activeDomain = workflowExecutor.getActiveDomain(taskType, domains);
        assertEquals(domains[0], activeDomain);
        // let domain1's poll data age, then give domain2 fresher poll data
        Thread.sleep(2000L);
        PollData pollData2 =
                new PollData(
                        "queue2", domains[1], "worker2", System.currentTimeMillis() - 99 * 1000);
        when(executionDAOFacade.getTaskPollDataByDomain(taskType, domains[1]))
                .thenReturn(pollData2);
        activeDomain = workflowExecutor.getActiveDomain(taskType, domains);
        assertEquals(domains[1], activeDomain);
        // selection stays on domain2 while its poll data remains the freshest
        Thread.sleep(2000L);
        activeDomain = workflowExecutor.getActiveDomain(taskType, domains);
        assertEquals(domains[1], activeDomain);
        // a single empty-string domain with poll data is returned as-is
        domains = new String[] {""};
        when(executionDAOFacade.getTaskPollDataByDomain(any(), any())).thenReturn(new PollData());
        activeDomain = workflowExecutor.getActiveDomain(taskType, domains);
        assertNotNull(activeDomain);
        assertEquals("", activeDomain);
        // no domains configured (empty array or null) -> no active domain
        domains = new String[] {};
        activeDomain = workflowExecutor.getActiveDomain(taskType, domains);
        assertNull(activeDomain);
        activeDomain = workflowExecutor.getActiveDomain(taskType, null);
        assertNull(activeDomain);
        // a sole domain without any poll data is still returned as the fallback
        domains = new String[] {"test-domain"};
        when(executionDAOFacade.getTaskPollDataByDomain(anyString(), anyString())).thenReturn(null);
        activeDomain = workflowExecutor.getActiveDomain(taskType, domains);
        assertNotNull(activeDomain);
        assertEquals("test-domain", activeDomain);
    }
@Test
public void testInactiveDomains() {
String taskType = "test-task";
String[] domains = new String[] {"domain1", "domain2"};
PollData pollData1 =
new PollData(
"queue1", domains[0], "worker1", System.currentTimeMillis() - 99 * 10000);
when(executionDAOFacade.getTaskPollDataByDomain(taskType, domains[0]))
.thenReturn(pollData1);
when(executionDAOFacade.getTaskPollDataByDomain(taskType, domains[1])).thenReturn(null);
String activeDomain = workflowExecutor.getActiveDomain(taskType, domains);
assertEquals("domain2", activeDomain);
}
@Test
public void testDefaultDomain() {
String taskType = "test-task";
String[] domains = new String[] {"domain1", "domain2", "NO_DOMAIN"};
PollData pollData1 =
new PollData(
"queue1", domains[0], "worker1", System.currentTimeMillis() - 99 * 10000);
when(executionDAOFacade.getTaskPollDataByDomain(taskType, domains[0]))
.thenReturn(pollData1);
when(executionDAOFacade.getTaskPollDataByDomain(taskType, domains[1])).thenReturn(null);
String activeDomain = workflowExecutor.getActiveDomain(taskType, domains);
assertNull(activeDomain);
}
@Test
public void testTaskToDomain() {
WorkflowModel workflow = generateSampleWorkflow();
List<TaskModel> tasks = generateSampleTasks(3);
Map<String, String> taskToDomain = new HashMap<>();
taskToDomain.put("*", "mydomain");
workflow.setTaskToDomain(taskToDomain);
PollData pollData1 =
new PollData(
"queue1", "mydomain", "worker1", System.currentTimeMillis() - 99 * 100);
when(executionDAOFacade.getTaskPollDataByDomain(anyString(), anyString()))
.thenReturn(pollData1);
workflowExecutor.setTaskDomains(tasks, workflow);
assertNotNull(tasks);
tasks.forEach(task -> assertEquals("mydomain", task.getDomain()));
}
@Test
public void testTaskToDomainsPerTask() {
WorkflowModel workflow = generateSampleWorkflow();
List<TaskModel> tasks = generateSampleTasks(2);
Map<String, String> taskToDomain = new HashMap<>();
taskToDomain.put("*", "mydomain, NO_DOMAIN");
workflow.setTaskToDomain(taskToDomain);
PollData pollData1 =
new PollData(
"queue1", "mydomain", "worker1", System.currentTimeMillis() - 99 * 100);
when(executionDAOFacade.getTaskPollDataByDomain(eq("task1"), anyString()))
.thenReturn(pollData1);
when(executionDAOFacade.getTaskPollDataByDomain(eq("task2"), anyString())).thenReturn(null);
workflowExecutor.setTaskDomains(tasks, workflow);
assertEquals("mydomain", tasks.get(0).getDomain());
assertNull(tasks.get(1).getDomain());
}
@Test
public void testTaskToDomainOverrides() {
WorkflowModel workflow = generateSampleWorkflow();
List<TaskModel> tasks = generateSampleTasks(4);
Map<String, String> taskToDomain = new HashMap<>();
taskToDomain.put("*", "mydomain");
taskToDomain.put("task2", "someInactiveDomain, NO_DOMAIN");
taskToDomain.put("task3", "someActiveDomain, NO_DOMAIN");
taskToDomain.put("task4", "someInactiveDomain, someInactiveDomain2");
workflow.setTaskToDomain(taskToDomain);
PollData pollData1 =
new PollData(
"queue1", "mydomain", "worker1", System.currentTimeMillis() - 99 * 100);
PollData pollData2 =
new PollData(
"queue2",
"someActiveDomain",
"worker2",
System.currentTimeMillis() - 99 * 100);
when(executionDAOFacade.getTaskPollDataByDomain(anyString(), eq("mydomain")))
.thenReturn(pollData1);
when(executionDAOFacade.getTaskPollDataByDomain(anyString(), eq("someInactiveDomain")))
.thenReturn(null);
when(executionDAOFacade.getTaskPollDataByDomain(anyString(), eq("someActiveDomain")))
.thenReturn(pollData2);
when(executionDAOFacade.getTaskPollDataByDomain(anyString(), eq("someInactiveDomain")))
.thenReturn(null);
workflowExecutor.setTaskDomains(tasks, workflow);
assertEquals("mydomain", tasks.get(0).getDomain());
assertNull(tasks.get(1).getDomain());
assertEquals("someActiveDomain", tasks.get(2).getDomain());
assertEquals("someInactiveDomain2", tasks.get(3).getDomain());
}
@Test
public void testDedupAndAddTasks() {
WorkflowModel workflow = new WorkflowModel();
TaskModel task1 = new TaskModel();
task1.setReferenceTaskName("task1");
task1.setRetryCount(1);
TaskModel task2 = new TaskModel();
task2.setReferenceTaskName("task2");
task2.setRetryCount(2);
List<TaskModel> tasks = new ArrayList<>(Arrays.asList(task1, task2));
List<TaskModel> taskList = workflowExecutor.dedupAndAddTasks(workflow, tasks);
assertEquals(2, taskList.size());
assertEquals(tasks, taskList);
assertEquals(workflow.getTasks(), taskList);
// Adding the same tasks again
taskList = workflowExecutor.dedupAndAddTasks(workflow, tasks);
assertEquals(0, taskList.size());
assertEquals(workflow.getTasks(), tasks);
// Adding 2 new tasks
TaskModel newTask = new TaskModel();
newTask.setReferenceTaskName("newTask");
newTask.setRetryCount(0);
taskList = workflowExecutor.dedupAndAddTasks(workflow, Collections.singletonList(newTask));
assertEquals(1, taskList.size());
assertEquals(newTask, taskList.get(0));
assertEquals(3, workflow.getTasks().size());
}
@Test(expected = ConflictException.class)
public void testTerminateCompletedWorkflow() {
WorkflowModel workflow = new WorkflowModel();
workflow.setWorkflowId("testTerminateTerminalWorkflow");
workflow.setStatus(WorkflowModel.Status.COMPLETED);
when(executionDAOFacade.getWorkflowModel(anyString(), anyBoolean())).thenReturn(workflow);
workflowExecutor.terminateWorkflow(
workflow.getWorkflowId(), "test terminating terminal workflow");
}
@Test
public void testResetCallbacksForWorkflowTasks() {
String workflowId = "test-workflow-id";
WorkflowModel workflow = new WorkflowModel();
workflow.setWorkflowId(workflowId);
workflow.setStatus(WorkflowModel.Status.RUNNING);
TaskModel completedTask = new TaskModel();
completedTask.setTaskType(TaskType.SIMPLE.name());
completedTask.setReferenceTaskName("completedTask");
completedTask.setWorkflowInstanceId(workflowId);
completedTask.setScheduledTime(System.currentTimeMillis());
completedTask.setCallbackAfterSeconds(300);
completedTask.setTaskId("simple-task-id");
completedTask.setStatus(TaskModel.Status.COMPLETED);
TaskModel systemTask = new TaskModel();
systemTask.setTaskType(TaskType.WAIT.name());
systemTask.setReferenceTaskName("waitTask");
systemTask.setWorkflowInstanceId(workflowId);
systemTask.setScheduledTime(System.currentTimeMillis());
systemTask.setTaskId("system-task-id");
systemTask.setStatus(TaskModel.Status.SCHEDULED);
TaskModel simpleTask = new TaskModel();
simpleTask.setTaskType(TaskType.SIMPLE.name());
simpleTask.setReferenceTaskName("simpleTask");
simpleTask.setWorkflowInstanceId(workflowId);
simpleTask.setScheduledTime(System.currentTimeMillis());
simpleTask.setCallbackAfterSeconds(300);
simpleTask.setTaskId("simple-task-id");
simpleTask.setStatus(TaskModel.Status.SCHEDULED);
TaskModel noCallbackTask = new TaskModel();
noCallbackTask.setTaskType(TaskType.SIMPLE.name());
noCallbackTask.setReferenceTaskName("noCallbackTask");
noCallbackTask.setWorkflowInstanceId(workflowId);
noCallbackTask.setScheduledTime(System.currentTimeMillis());
noCallbackTask.setCallbackAfterSeconds(0);
noCallbackTask.setTaskId("no-callback-task-id");
noCallbackTask.setStatus(TaskModel.Status.SCHEDULED);
workflow.getTasks()
.addAll(Arrays.asList(completedTask, systemTask, simpleTask, noCallbackTask));
when(executionDAOFacade.getWorkflowModel(workflowId, true)).thenReturn(workflow);
workflowExecutor.resetCallbacksForWorkflow(workflowId);
verify(queueDAO, times(1)).resetOffsetTime(anyString(), anyString());
}
@Test
public void testUpdateParentWorkflowTask() {
String parentWorkflowTaskId = "parent_workflow_task_id";
String workflowId = "workflow_id";
WorkflowModel subWorkflow = new WorkflowModel();
subWorkflow.setWorkflowId(workflowId);
subWorkflow.setParentWorkflowTaskId(parentWorkflowTaskId);
subWorkflow.setStatus(WorkflowModel.Status.COMPLETED);
TaskModel subWorkflowTask = new TaskModel();
subWorkflowTask.setSubWorkflowId(workflowId);
subWorkflowTask.setStatus(TaskModel.Status.IN_PROGRESS);
subWorkflowTask.setExternalOutputPayloadStoragePath(null);
when(executionDAOFacade.getTaskModel(parentWorkflowTaskId)).thenReturn(subWorkflowTask);
when(executionDAOFacade.getWorkflowModel(workflowId, false)).thenReturn(subWorkflow);
workflowExecutor.updateParentWorkflowTask(subWorkflow);
ArgumentCaptor<TaskModel> argumentCaptor = ArgumentCaptor.forClass(TaskModel.class);
verify(executionDAOFacade, times(1)).updateTask(argumentCaptor.capture());
assertEquals(TaskModel.Status.COMPLETED, argumentCaptor.getAllValues().get(0).getStatus());
assertEquals(workflowId, argumentCaptor.getAllValues().get(0).getSubWorkflowId());
}
@Test
public void testScheduleNextIteration() {
WorkflowModel workflow = generateSampleWorkflow();
workflow.setTaskToDomain(
new HashMap<>() {
{
put("TEST", "domain1");
}
});
TaskModel loopTask = mock(TaskModel.class);
WorkflowTask loopWfTask = mock(WorkflowTask.class);
when(loopTask.getWorkflowTask()).thenReturn(loopWfTask);
List<WorkflowTask> loopOver =
new ArrayList<>() {
{
WorkflowTask workflowTask = new WorkflowTask();
workflowTask.setType(TaskType.TASK_TYPE_SIMPLE);
workflowTask.setName("TEST");
workflowTask.setTaskDefinition(new TaskDef());
add(workflowTask);
}
};
when(loopWfTask.getLoopOver()).thenReturn(loopOver);
workflowExecutor.scheduleNextIteration(loopTask, workflow);
verify(executionDAOFacade).getTaskPollDataByDomain("TEST", "domain1");
}
@Test
public void testCancelNonTerminalTasks() {
WorkflowDef def = new WorkflowDef();
def.setWorkflowStatusListenerEnabled(true);
WorkflowModel workflow = generateSampleWorkflow();
workflow.setWorkflowDefinition(def);
TaskModel subWorkflowTask = new TaskModel();
subWorkflowTask.setTaskId(UUID.randomUUID().toString());
subWorkflowTask.setTaskType(TaskType.SUB_WORKFLOW.name());
subWorkflowTask.setStatus(TaskModel.Status.IN_PROGRESS);
TaskModel lambdaTask = new TaskModel();
lambdaTask.setTaskId(UUID.randomUUID().toString());
lambdaTask.setTaskType(TaskType.LAMBDA.name());
lambdaTask.setStatus(TaskModel.Status.SCHEDULED);
TaskModel simpleTask = new TaskModel();
simpleTask.setTaskId(UUID.randomUUID().toString());
simpleTask.setTaskType(TaskType.SIMPLE.name());
simpleTask.setStatus(TaskModel.Status.COMPLETED);
workflow.getTasks().addAll(Arrays.asList(subWorkflowTask, lambdaTask, simpleTask));
List<String> erroredTasks = workflowExecutor.cancelNonTerminalTasks(workflow);
assertTrue(erroredTasks.isEmpty());
ArgumentCaptor<TaskModel> argumentCaptor = ArgumentCaptor.forClass(TaskModel.class);
verify(executionDAOFacade, times(2)).updateTask(argumentCaptor.capture());
assertEquals(2, argumentCaptor.getAllValues().size());
assertEquals(
TaskType.SUB_WORKFLOW.name(), argumentCaptor.getAllValues().get(0).getTaskType());
assertEquals(TaskModel.Status.CANCELED, argumentCaptor.getAllValues().get(0).getStatus());
assertEquals(TaskType.LAMBDA.name(), argumentCaptor.getAllValues().get(1).getTaskType());
assertEquals(TaskModel.Status.CANCELED, argumentCaptor.getAllValues().get(1).getStatus());
verify(workflowStatusListener, times(1))
.onWorkflowFinalizedIfEnabled(any(WorkflowModel.class));
}
    @Test
    public void testPauseWorkflow() {
        // Lock acquisition always succeeds so the pause logic itself is exercised.
        when(executionLockService.acquireLock(anyString(), anyLong())).thenReturn(true);
        doNothing().when(executionLockService).releaseLock(anyString());
        String workflowId = "testPauseWorkflowId";
        WorkflowModel workflow = new WorkflowModel();
        workflow.setWorkflowId(workflowId);
        // if workflow is in terminal state
        workflow.setStatus(WorkflowModel.Status.COMPLETED);
        when(executionDAOFacade.getWorkflowModel(workflowId, false)).thenReturn(workflow);
        try {
            workflowExecutor.pauseWorkflow(workflowId);
            fail("Expected " + ConflictException.class);
        } catch (ConflictException e) {
            // terminal workflows cannot be paused: no persistence, no queue removal
            verify(executionDAOFacade, never()).updateWorkflow(any(WorkflowModel.class));
            verify(queueDAO, never()).remove(anyString(), anyString());
        }
        // if workflow is already PAUSED
        workflow.setStatus(WorkflowModel.Status.PAUSED);
        when(executionDAOFacade.getWorkflowModel(workflowId, false)).thenReturn(workflow);
        workflowExecutor.pauseWorkflow(workflowId);
        assertEquals(WorkflowModel.Status.PAUSED, workflow.getStatus());
        // pausing an already-paused workflow is a no-op
        // (Mockito verify counts are cumulative across all three phases)
        verify(executionDAOFacade, never()).updateWorkflow(any(WorkflowModel.class));
        verify(queueDAO, never()).remove(anyString(), anyString());
        // if workflow is RUNNING
        workflow.setStatus(WorkflowModel.Status.RUNNING);
        when(executionDAOFacade.getWorkflowModel(workflowId, false)).thenReturn(workflow);
        workflowExecutor.pauseWorkflow(workflowId);
        assertEquals(WorkflowModel.Status.PAUSED, workflow.getStatus());
        // only the RUNNING -> PAUSED transition persists the workflow and
        // removes it from the decider queue
        verify(executionDAOFacade, times(1)).updateWorkflow(any(WorkflowModel.class));
        verify(queueDAO, times(1)).remove(anyString(), anyString());
    }
@Test
public void testResumeWorkflow() {
String workflowId = "testResumeWorkflowId";
WorkflowModel workflow = new WorkflowModel();
workflow.setWorkflowId(workflowId);
// if workflow is not in PAUSED state
workflow.setStatus(WorkflowModel.Status.COMPLETED);
when(executionDAOFacade.getWorkflowModel(workflowId, false)).thenReturn(workflow);
try {
workflowExecutor.resumeWorkflow(workflowId);
} catch (Exception e) {
assertTrue(e instanceof IllegalStateException);
verify(executionDAOFacade, never()).updateWorkflow(any(WorkflowModel.class));
verify(queueDAO, never()).push(anyString(), anyString(), anyInt(), anyLong());
}
// if workflow is in PAUSED state
workflow.setStatus(WorkflowModel.Status.PAUSED);
when(executionDAOFacade.getWorkflowModel(workflowId, false)).thenReturn(workflow);
workflowExecutor.resumeWorkflow(workflowId);
assertEquals(WorkflowModel.Status.RUNNING, workflow.getStatus());
assertTrue(workflow.getLastRetriedTime() > 0);
verify(executionDAOFacade, times(1)).updateWorkflow(any(WorkflowModel.class));
verify(queueDAO, times(1)).push(anyString(), anyString(), anyInt(), anyLong());
}
@Test
@SuppressWarnings("unchecked")
public void testTerminateWorkflowWithFailureWorkflow() {
WorkflowDef workflowDef = new WorkflowDef();
workflowDef.setName("workflow");
workflowDef.setFailureWorkflow("failure_workflow");
WorkflowModel workflow = new WorkflowModel();
workflow.setWorkflowId("1");
workflow.setCorrelationId("testid");
workflow.setWorkflowDefinition(new WorkflowDef());
workflow.setStatus(WorkflowModel.Status.RUNNING);
workflow.setOwnerApp("junit_test");
workflow.setEndTime(100L);
workflow.setOutput(Collections.EMPTY_MAP);
workflow.setWorkflowDefinition(workflowDef);
TaskModel successTask = new TaskModel();
successTask.setTaskId("taskid1");
successTask.setReferenceTaskName("success");
successTask.setStatus(TaskModel.Status.COMPLETED);
TaskModel failedTask = new TaskModel();
failedTask.setTaskId("taskid2");
failedTask.setReferenceTaskName("failed");
failedTask.setStatus(TaskModel.Status.FAILED);
workflow.getTasks().addAll(Arrays.asList(successTask, failedTask));
WorkflowDef failureWorkflowDef = new WorkflowDef();
failureWorkflowDef.setName("failure_workflow");
when(metadataDAO.getLatestWorkflowDef(failureWorkflowDef.getName()))
.thenReturn(Optional.of(failureWorkflowDef));
when(executionDAOFacade.getWorkflowModel(workflow.getWorkflowId(), true))
.thenReturn(workflow);
when(executionLockService.acquireLock(anyString())).thenReturn(true);
workflowExecutor.decide(workflow.getWorkflowId());
assertEquals(WorkflowModel.Status.FAILED, workflow.getStatus());
ArgumentCaptor<WorkflowCreationEvent> argumentCaptor =
ArgumentCaptor.forClass(WorkflowCreationEvent.class);
verify(eventPublisher, times(1)).publishEvent(argumentCaptor.capture());
StartWorkflowInput startWorkflowInput = argumentCaptor.getValue().getStartWorkflowInput();
assertEquals(workflow.getCorrelationId(), startWorkflowInput.getCorrelationId());
assertEquals(
workflow.getWorkflowId(), startWorkflowInput.getWorkflowInput().get("workflowId"));
assertEquals(
failedTask.getTaskId(), startWorkflowInput.getWorkflowInput().get("failureTaskId"));
assertNotNull(
failedTask.getTaskId(),
startWorkflowInput.getWorkflowInput().get("failedWorkflow"));
}
    /**
     * Rerun of a FAILED sub-workflow whose parent SUB_WORKFLOW task is optional
     * (COMPLETED_WITH_ERRORS): the sub-workflow restarts while the already
     * COMPLETED parent workflow and its task are left untouched.
     */
    // NOTE(review): this setup is duplicated across the three optional
    // sub-workflow tests — consider extracting a shared fixture helper.
    @Test
    public void testRerunOptionalSubWorkflow() {
        IDGenerator idGenerator = new IDGenerator();
        // setup
        String parentWorkflowId = idGenerator.generate();
        String subWorkflowId = idGenerator.generate();
        // sub workflow setup
        // task1 completed, task2 failed -> the sub-workflow ended FAILED
        TaskModel task1 = new TaskModel();
        task1.setTaskType(TaskType.SIMPLE.name());
        task1.setTaskDefName("task1");
        task1.setReferenceTaskName("task1_ref");
        task1.setWorkflowInstanceId(subWorkflowId);
        task1.setScheduledTime(System.currentTimeMillis());
        task1.setTaskId(idGenerator.generate());
        task1.setStatus(TaskModel.Status.COMPLETED);
        task1.setWorkflowTask(new WorkflowTask());
        task1.setOutputData(new HashMap<>());
        TaskModel task2 = new TaskModel();
        task2.setTaskType(TaskType.SIMPLE.name());
        task2.setTaskDefName("task2");
        task2.setReferenceTaskName("task2_ref");
        task2.setWorkflowInstanceId(subWorkflowId);
        task2.setScheduledTime(System.currentTimeMillis());
        task2.setTaskId(idGenerator.generate());
        task2.setStatus(TaskModel.Status.FAILED);
        task2.setWorkflowTask(new WorkflowTask());
        task2.setOutputData(new HashMap<>());
        WorkflowModel subWorkflow = new WorkflowModel();
        subWorkflow.setParentWorkflowId(parentWorkflowId);
        subWorkflow.setWorkflowId(subWorkflowId);
        WorkflowDef subworkflowDef = new WorkflowDef();
        subworkflowDef.setName("subworkflow");
        subworkflowDef.setVersion(1);
        subWorkflow.setWorkflowDefinition(subworkflowDef);
        subWorkflow.setOwnerApp("junit_testRerunWorkflowId");
        subWorkflow.setStatus(WorkflowModel.Status.FAILED);
        subWorkflow.getTasks().addAll(Arrays.asList(task1, task2));
        // parent workflow setup
        // the optional SUB_WORKFLOW task absorbed the failure as COMPLETED_WITH_ERRORS,
        // letting the parent workflow complete
        TaskModel task = new TaskModel();
        task.setWorkflowInstanceId(parentWorkflowId);
        task.setScheduledTime(System.currentTimeMillis());
        task.setTaskId(idGenerator.generate());
        task.setStatus(TaskModel.Status.COMPLETED_WITH_ERRORS);
        task.setOutputData(new HashMap<>());
        task.setSubWorkflowId(subWorkflowId);
        task.setTaskType(TaskType.SUB_WORKFLOW.name());
        WorkflowTask workflowTask = new WorkflowTask();
        workflowTask.setOptional(true);
        task.setWorkflowTask(workflowTask);
        WorkflowModel workflow = new WorkflowModel();
        workflow.setWorkflowId(parentWorkflowId);
        WorkflowDef workflowDef = new WorkflowDef();
        workflowDef.setName("parentworkflow");
        workflowDef.setVersion(1);
        workflow.setWorkflowDefinition(workflowDef);
        workflow.setOwnerApp("junit_testRerunWorkflowId");
        workflow.setStatus(WorkflowModel.Status.COMPLETED);
        workflow.getTasks().addAll(Arrays.asList(task));
        // end of setup
        // when:
        when(executionDAOFacade.getWorkflowModel(workflow.getWorkflowId(), true))
                .thenReturn(workflow);
        when(executionDAOFacade.getWorkflowModel(task.getSubWorkflowId(), true))
                .thenReturn(subWorkflow);
        when(executionDAOFacade.getTaskModel(subWorkflow.getParentWorkflowTaskId()))
                .thenReturn(task);
        when(executionDAOFacade.getWorkflowModel(subWorkflow.getParentWorkflowId(), false))
                .thenReturn(workflow);
        // rerun the whole sub-workflow (no specific task id)
        RerunWorkflowRequest rerunWorkflowRequest = new RerunWorkflowRequest();
        rerunWorkflowRequest.setReRunFromWorkflowId(subWorkflow.getWorkflowId());
        workflowExecutor.rerun(rerunWorkflowRequest);
        // then: parent workflow remains the same
        // the sub-workflow transitions FAILED -> RUNNING (previousStatus retained)
        assertEquals(WorkflowModel.Status.FAILED, subWorkflow.getPreviousStatus());
        assertEquals(WorkflowModel.Status.RUNNING, subWorkflow.getStatus());
        assertEquals(TaskModel.Status.COMPLETED_WITH_ERRORS, task.getStatus());
        assertEquals(WorkflowModel.Status.COMPLETED, workflow.getStatus());
    }
    /**
     * Restart of a FAILED sub-workflow whose parent SUB_WORKFLOW task is optional
     * (COMPLETED_WITH_ERRORS): the sub-workflow restarts while the already
     * COMPLETED parent workflow and its task are left untouched.
     */
    @Test
    public void testRestartOptionalSubWorkflow() {
        IDGenerator idGenerator = new IDGenerator();
        // setup
        String parentWorkflowId = idGenerator.generate();
        String subWorkflowId = idGenerator.generate();
        // sub workflow setup
        // task1 completed, task2 failed -> the sub-workflow ended FAILED
        TaskModel task1 = new TaskModel();
        task1.setTaskType(TaskType.SIMPLE.name());
        task1.setTaskDefName("task1");
        task1.setReferenceTaskName("task1_ref");
        task1.setWorkflowInstanceId(subWorkflowId);
        task1.setScheduledTime(System.currentTimeMillis());
        task1.setTaskId(idGenerator.generate());
        task1.setStatus(TaskModel.Status.COMPLETED);
        task1.setWorkflowTask(new WorkflowTask());
        task1.setOutputData(new HashMap<>());
        TaskModel task2 = new TaskModel();
        task2.setTaskType(TaskType.SIMPLE.name());
        task2.setTaskDefName("task2");
        task2.setReferenceTaskName("task2_ref");
        task2.setWorkflowInstanceId(subWorkflowId);
        task2.setScheduledTime(System.currentTimeMillis());
        task2.setTaskId(idGenerator.generate());
        task2.setStatus(TaskModel.Status.FAILED);
        task2.setWorkflowTask(new WorkflowTask());
        task2.setOutputData(new HashMap<>());
        WorkflowModel subWorkflow = new WorkflowModel();
        subWorkflow.setParentWorkflowId(parentWorkflowId);
        subWorkflow.setWorkflowId(subWorkflowId);
        WorkflowDef subworkflowDef = new WorkflowDef();
        subworkflowDef.setName("subworkflow");
        subworkflowDef.setVersion(1);
        subWorkflow.setWorkflowDefinition(subworkflowDef);
        subWorkflow.setOwnerApp("junit_testRerunWorkflowId");
        subWorkflow.setStatus(WorkflowModel.Status.FAILED);
        subWorkflow.getTasks().addAll(Arrays.asList(task1, task2));
        // parent workflow setup
        // the optional SUB_WORKFLOW task absorbed the failure as COMPLETED_WITH_ERRORS,
        // letting the parent workflow complete
        TaskModel task = new TaskModel();
        task.setWorkflowInstanceId(parentWorkflowId);
        task.setScheduledTime(System.currentTimeMillis());
        task.setTaskId(idGenerator.generate());
        task.setStatus(TaskModel.Status.COMPLETED_WITH_ERRORS);
        task.setOutputData(new HashMap<>());
        task.setSubWorkflowId(subWorkflowId);
        task.setTaskType(TaskType.SUB_WORKFLOW.name());
        WorkflowTask workflowTask = new WorkflowTask();
        workflowTask.setOptional(true);
        task.setWorkflowTask(workflowTask);
        WorkflowModel workflow = new WorkflowModel();
        workflow.setWorkflowId(parentWorkflowId);
        WorkflowDef workflowDef = new WorkflowDef();
        workflowDef.setName("parentworkflow");
        workflowDef.setVersion(1);
        workflow.setWorkflowDefinition(workflowDef);
        workflow.setOwnerApp("junit_testRerunWorkflowId");
        workflow.setStatus(WorkflowModel.Status.COMPLETED);
        workflow.getTasks().addAll(Arrays.asList(task));
        // end of setup
        // when:
        when(executionDAOFacade.getWorkflowModel(workflow.getWorkflowId(), true))
                .thenReturn(workflow);
        when(executionDAOFacade.getWorkflowModel(task.getSubWorkflowId(), true))
                .thenReturn(subWorkflow);
        when(executionDAOFacade.getTaskModel(subWorkflow.getParentWorkflowTaskId()))
                .thenReturn(task);
        when(executionDAOFacade.getWorkflowModel(subWorkflow.getParentWorkflowId(), false))
                .thenReturn(workflow);
        // restart the sub-workflow using its current (not latest) definition
        workflowExecutor.restart(subWorkflowId, false);
        // then: parent workflow remains the same
        // the sub-workflow transitions FAILED -> RUNNING (previousStatus retained)
        assertEquals(WorkflowModel.Status.FAILED, subWorkflow.getPreviousStatus());
        assertEquals(WorkflowModel.Status.RUNNING, subWorkflow.getStatus());
        assertEquals(TaskModel.Status.COMPLETED_WITH_ERRORS, task.getStatus());
        assertEquals(WorkflowModel.Status.COMPLETED, workflow.getStatus());
    }
    /**
     * Retry of a FAILED sub-workflow whose parent SUB_WORKFLOW task is optional
     * (COMPLETED_WITH_ERRORS): the sub-workflow resumes while the already
     * COMPLETED parent workflow and its task are left untouched.
     */
    @Test
    public void testRetryOptionalSubWorkflow() {
        IDGenerator idGenerator = new IDGenerator();
        // setup
        String parentWorkflowId = idGenerator.generate();
        String subWorkflowId = idGenerator.generate();
        // sub workflow setup
        // task1 completed, task2 failed -> the sub-workflow ended FAILED
        TaskModel task1 = new TaskModel();
        task1.setTaskType(TaskType.SIMPLE.name());
        task1.setTaskDefName("task1");
        task1.setReferenceTaskName("task1_ref");
        task1.setWorkflowInstanceId(subWorkflowId);
        task1.setScheduledTime(System.currentTimeMillis());
        task1.setTaskId(idGenerator.generate());
        task1.setStatus(TaskModel.Status.COMPLETED);
        task1.setWorkflowTask(new WorkflowTask());
        task1.setOutputData(new HashMap<>());
        TaskModel task2 = new TaskModel();
        task2.setTaskType(TaskType.SIMPLE.name());
        task2.setTaskDefName("task2");
        task2.setReferenceTaskName("task2_ref");
        task2.setWorkflowInstanceId(subWorkflowId);
        task2.setScheduledTime(System.currentTimeMillis());
        task2.setTaskId(idGenerator.generate());
        task2.setStatus(TaskModel.Status.FAILED);
        task2.setWorkflowTask(new WorkflowTask());
        task2.setOutputData(new HashMap<>());
        WorkflowModel subWorkflow = new WorkflowModel();
        subWorkflow.setParentWorkflowId(parentWorkflowId);
        subWorkflow.setWorkflowId(subWorkflowId);
        WorkflowDef subworkflowDef = new WorkflowDef();
        subworkflowDef.setName("subworkflow");
        subworkflowDef.setVersion(1);
        subWorkflow.setWorkflowDefinition(subworkflowDef);
        subWorkflow.setOwnerApp("junit_testRerunWorkflowId");
        subWorkflow.setStatus(WorkflowModel.Status.FAILED);
        subWorkflow.getTasks().addAll(Arrays.asList(task1, task2));
        // parent workflow setup
        // the optional SUB_WORKFLOW task absorbed the failure as COMPLETED_WITH_ERRORS,
        // letting the parent workflow complete
        TaskModel task = new TaskModel();
        task.setWorkflowInstanceId(parentWorkflowId);
        task.setScheduledTime(System.currentTimeMillis());
        task.setTaskId(idGenerator.generate());
        task.setStatus(TaskModel.Status.COMPLETED_WITH_ERRORS);
        task.setOutputData(new HashMap<>());
        task.setSubWorkflowId(subWorkflowId);
        task.setTaskType(TaskType.SUB_WORKFLOW.name());
        WorkflowTask workflowTask = new WorkflowTask();
        workflowTask.setOptional(true);
        task.setWorkflowTask(workflowTask);
        WorkflowModel workflow = new WorkflowModel();
        workflow.setWorkflowId(parentWorkflowId);
        WorkflowDef workflowDef = new WorkflowDef();
        workflowDef.setName("parentworkflow");
        workflowDef.setVersion(1);
        workflow.setWorkflowDefinition(workflowDef);
        workflow.setOwnerApp("junit_testRerunWorkflowId");
        workflow.setStatus(WorkflowModel.Status.COMPLETED);
        workflow.getTasks().addAll(Arrays.asList(task));
        // end of setup
        // when:
        when(executionDAOFacade.getWorkflowModel(workflow.getWorkflowId(), true))
                .thenReturn(workflow);
        when(executionDAOFacade.getWorkflowModel(task.getSubWorkflowId(), true))
                .thenReturn(subWorkflow);
        when(executionDAOFacade.getTaskModel(subWorkflow.getParentWorkflowTaskId()))
                .thenReturn(task);
        when(executionDAOFacade.getWorkflowModel(subWorkflow.getParentWorkflowId(), false))
                .thenReturn(workflow);
        // retry the sub-workflow from the last failed task
        workflowExecutor.retry(subWorkflowId, true);
        // then: parent workflow remains the same
        // the sub-workflow transitions FAILED -> RUNNING (previousStatus retained)
        assertEquals(WorkflowModel.Status.FAILED, subWorkflow.getPreviousStatus());
        assertEquals(WorkflowModel.Status.RUNNING, subWorkflow.getStatus());
        assertEquals(TaskModel.Status.COMPLETED_WITH_ERRORS, task.getStatus());
        assertEquals(WorkflowModel.Status.COMPLETED, workflow.getStatus());
    }
@Test
public void testUpdateTaskWithCallbackAfterSeconds() {
String workflowId = "test-workflow-id";
WorkflowModel workflow = new WorkflowModel();
workflow.setWorkflowId(workflowId);
workflow.setStatus(WorkflowModel.Status.RUNNING);
workflow.setWorkflowDefinition(new WorkflowDef());
TaskModel simpleTask = new TaskModel();
simpleTask.setTaskType(TaskType.SIMPLE.name());
simpleTask.setReferenceTaskName("simpleTask");
simpleTask.setWorkflowInstanceId(workflowId);
simpleTask.setScheduledTime(System.currentTimeMillis());
simpleTask.setCallbackAfterSeconds(0);
simpleTask.setTaskId("simple-task-id");
simpleTask.setStatus(TaskModel.Status.IN_PROGRESS);
workflow.getTasks().add(simpleTask);
when(executionDAOFacade.getWorkflowModel(workflowId, false)).thenReturn(workflow);
when(executionDAOFacade.getTaskModel(simpleTask.getTaskId())).thenReturn(simpleTask);
TaskResult taskResult = new TaskResult();
taskResult.setWorkflowInstanceId(workflowId);
taskResult.setTaskId(simpleTask.getTaskId());
taskResult.setWorkerId("test-worker-id");
taskResult.log("not ready yet");
taskResult.setCallbackAfterSeconds(300);
taskResult.setStatus(TaskResult.Status.IN_PROGRESS);
workflowExecutor.updateTask(taskResult);
verify(queueDAO, times(1)).postpone(anyString(), anyString(), anyInt(), anyLong());
ArgumentCaptor<TaskModel> argumentCaptor = ArgumentCaptor.forClass(TaskModel.class);
verify(executionDAOFacade, times(1)).updateTask(argumentCaptor.capture());
assertEquals(TaskModel.Status.SCHEDULED, argumentCaptor.getAllValues().get(0).getStatus());
assertEquals(
taskResult.getCallbackAfterSeconds(),
argumentCaptor.getAllValues().get(0).getCallbackAfterSeconds());
assertEquals(taskResult.getWorkerId(), argumentCaptor.getAllValues().get(0).getWorkerId());
}
@Test
public void testUpdateTaskWithOutCallbackAfterSeconds() {
String workflowId = "test-workflow-id";
WorkflowModel workflow = new WorkflowModel();
workflow.setWorkflowId(workflowId);
workflow.setStatus(WorkflowModel.Status.RUNNING);
workflow.setWorkflowDefinition(new WorkflowDef());
TaskModel simpleTask = new TaskModel();
simpleTask.setTaskType(TaskType.SIMPLE.name());
simpleTask.setReferenceTaskName("simpleTask");
simpleTask.setWorkflowInstanceId(workflowId);
simpleTask.setScheduledTime(System.currentTimeMillis());
simpleTask.setCallbackAfterSeconds(0);
simpleTask.setTaskId("simple-task-id");
simpleTask.setStatus(TaskModel.Status.IN_PROGRESS);
workflow.getTasks().add(simpleTask);
when(executionDAOFacade.getWorkflowModel(workflowId, false)).thenReturn(workflow);
when(executionDAOFacade.getTaskModel(simpleTask.getTaskId())).thenReturn(simpleTask);
TaskResult taskResult = new TaskResult();
taskResult.setWorkflowInstanceId(workflowId);
taskResult.setTaskId(simpleTask.getTaskId());
taskResult.setWorkerId("test-worker-id");
taskResult.log("not ready yet");
taskResult.setStatus(TaskResult.Status.IN_PROGRESS);
workflowExecutor.updateTask(taskResult);
verify(queueDAO, times(1)).postpone(anyString(), anyString(), anyInt(), anyLong());
ArgumentCaptor<TaskModel> argumentCaptor = ArgumentCaptor.forClass(TaskModel.class);
verify(executionDAOFacade, times(1)).updateTask(argumentCaptor.capture());
assertEquals(TaskModel.Status.SCHEDULED, argumentCaptor.getAllValues().get(0).getStatus());
assertEquals(0, argumentCaptor.getAllValues().get(0).getCallbackAfterSeconds());
assertEquals(taskResult.getWorkerId(), argumentCaptor.getAllValues().get(0).getWorkerId());
}
    /**
     * Exercises {@code WorkflowExecutor#isLazyEvaluateWorkflow} against a definition containing a
     * standalone SIMPLE task, a FORK_JOIN with two branches joined on only one of them, and a
     * DO_WHILE loop. Lazy evaluation is expected for tasks that are not part of the static
     * definition (dynamic) and for fork branches listed in the JOIN's joinOn; it is not expected
     * for plain definition tasks, loop iterations, or non-COMPLETED tasks.
     */
    @Test
    public void testIsLazyEvaluateWorkflow() {
        // setup
        WorkflowDef workflowDef = new WorkflowDef();
        workflowDef.setName("lazyEvaluate");
        workflowDef.setVersion(1);
        // Plain SIMPLE task at the top level of the definition.
        WorkflowTask simpleTask = new WorkflowTask();
        simpleTask.setType(SIMPLE.name());
        simpleTask.setName("simple");
        simpleTask.setTaskReferenceName("simple");
        // FORK_JOIN with two parallel SIMPLE branches.
        WorkflowTask forkTask = new WorkflowTask();
        forkTask.setType(FORK_JOIN.name());
        forkTask.setName("fork");
        forkTask.setTaskReferenceName("fork");
        WorkflowTask branchTask1 = new WorkflowTask();
        branchTask1.setType(SIMPLE.name());
        branchTask1.setName("branchTask1");
        branchTask1.setTaskReferenceName("branchTask1");
        WorkflowTask branchTask2 = new WorkflowTask();
        branchTask2.setType(SIMPLE.name());
        branchTask2.setName("branchTask2");
        branchTask2.setTaskReferenceName("branchTask2");
        forkTask.getForkTasks().add(Arrays.asList(branchTask1, branchTask2));
        // JOIN waits only on branchTask2, so branchTask1's completion cannot unblock it.
        WorkflowTask joinTask = new WorkflowTask();
        joinTask.setType(JOIN.name());
        joinTask.setName("join");
        joinTask.setTaskReferenceName("join");
        joinTask.setJoinOn(List.of("branchTask2"));
        // DO_WHILE loop containing a single SIMPLE task.
        WorkflowTask doWhile = new WorkflowTask();
        doWhile.setType(DO_WHILE.name());
        doWhile.setName("doWhile");
        doWhile.setTaskReferenceName("doWhile");
        WorkflowTask loopTask = new WorkflowTask();
        loopTask.setType(SIMPLE.name());
        loopTask.setName("loopTask");
        loopTask.setTaskReferenceName("loopTask");
        doWhile.setLoopOver(List.of(loopTask));
        workflowDef.getTasks().addAll(List.of(simpleTask, forkTask, joinTask, doWhile));
        TaskModel task = new TaskModel();
        task.setStatus(TaskModel.Status.COMPLETED);
        // when:
        // "dynamic" is not in the definition at all -> lazy evaluate.
        task.setReferenceTaskName("dynamic");
        assertTrue(workflowExecutor.isLazyEvaluateWorkflow(workflowDef, task));
        // branchTask1 is not in joinOn -> no lazy evaluation.
        task.setReferenceTaskName("branchTask1");
        assertFalse(workflowExecutor.isLazyEvaluateWorkflow(workflowDef, task));
        // branchTask2 is the join target -> lazy evaluate.
        task.setReferenceTaskName("branchTask2");
        assertTrue(workflowExecutor.isLazyEvaluateWorkflow(workflowDef, task));
        // Plain definition task -> no lazy evaluation.
        task.setReferenceTaskName("simple");
        assertFalse(workflowExecutor.isLazyEvaluateWorkflow(workflowDef, task));
        // Loop-iteration reference name (suffix "__1") inside DO_WHILE -> no lazy evaluation.
        task.setReferenceTaskName("loopTask__1");
        task.setIteration(1);
        assertFalse(workflowExecutor.isLazyEvaluateWorkflow(workflowDef, task));
        // A non-COMPLETED task never triggers lazy evaluation, even for a join target's sibling.
        task.setReferenceTaskName("branchTask1");
        task.setStatus(TaskModel.Status.FAILED);
        assertFalse(workflowExecutor.isLazyEvaluateWorkflow(workflowDef, task));
    }
@Test
public void testTaskExtendLease() {
TaskModel simpleTask = new TaskModel();
simpleTask.setTaskType(TaskType.SIMPLE.name());
simpleTask.setReferenceTaskName("simpleTask");
simpleTask.setWorkflowInstanceId("test-workflow-id");
simpleTask.setScheduledTime(System.currentTimeMillis());
simpleTask.setCallbackAfterSeconds(0);
simpleTask.setTaskId("simple-task-id");
simpleTask.setStatus(TaskModel.Status.IN_PROGRESS);
when(executionDAOFacade.getTaskModel(simpleTask.getTaskId())).thenReturn(simpleTask);
TaskResult taskResult = new TaskResult();
taskResult.setWorkflowInstanceId(simpleTask.getWorkflowInstanceId());
taskResult.setTaskId(simpleTask.getTaskId());
taskResult.log("extend lease");
taskResult.setExtendLease(true);
workflowExecutor.updateTask(taskResult);
verify(executionDAOFacade, times(1)).extendLease(simpleTask);
verify(queueDAO, times(0)).postpone(anyString(), anyString(), anyInt(), anyLong());
verify(executionDAOFacade, times(0)).updateTask(any());
}
private WorkflowModel generateSampleWorkflow() {
// setup
WorkflowModel workflow = new WorkflowModel();
workflow.setWorkflowId("testRetryWorkflowId");
WorkflowDef workflowDef = new WorkflowDef();
workflowDef.setName("testRetryWorkflowId");
workflowDef.setVersion(1);
workflow.setWorkflowDefinition(workflowDef);
workflow.setOwnerApp("junit_testRetryWorkflowId");
workflow.setCreateTime(10L);
workflow.setEndTime(100L);
//noinspection unchecked
workflow.setOutput(Collections.EMPTY_MAP);
workflow.setStatus(WorkflowModel.Status.FAILED);
return workflow;
}
private List<TaskModel> generateSampleTasks(int count) {
if (count == 0) {
return null;
}
List<TaskModel> tasks = new ArrayList<>();
for (int i = 0; i < count; i++) {
TaskModel task = new TaskModel();
task.setTaskId(UUID.randomUUID().toString());
task.setSeq(i);
task.setRetryCount(1);
task.setTaskType("task" + (i + 1));
task.setStatus(TaskModel.Status.COMPLETED);
task.setTaskDefName("taskX");
task.setReferenceTaskName("task_ref" + (i + 1));
tasks.add(task);
}
return tasks;
}
}
| 6,601 |
0 | Create_ds/conductor/core/src/test/java/com/netflix/conductor/core | Create_ds/conductor/core/src/test/java/com/netflix/conductor/core/execution/TestDeciderOutcomes.java | /*
* Copyright 2022 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution;
import java.io.InputStream;
import java.time.Duration;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.io.ClassPathResource;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.util.unit.DataSize;
import com.netflix.conductor.common.config.TestObjectMapperConfiguration;
import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.tasks.TaskType;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.core.config.ConductorProperties;
import com.netflix.conductor.core.execution.DeciderService.DeciderOutcome;
import com.netflix.conductor.core.execution.evaluators.Evaluator;
import com.netflix.conductor.core.execution.mapper.DecisionTaskMapper;
import com.netflix.conductor.core.execution.mapper.DynamicTaskMapper;
import com.netflix.conductor.core.execution.mapper.EventTaskMapper;
import com.netflix.conductor.core.execution.mapper.ForkJoinDynamicTaskMapper;
import com.netflix.conductor.core.execution.mapper.ForkJoinTaskMapper;
import com.netflix.conductor.core.execution.mapper.HTTPTaskMapper;
import com.netflix.conductor.core.execution.mapper.JoinTaskMapper;
import com.netflix.conductor.core.execution.mapper.SimpleTaskMapper;
import com.netflix.conductor.core.execution.mapper.SubWorkflowTaskMapper;
import com.netflix.conductor.core.execution.mapper.SwitchTaskMapper;
import com.netflix.conductor.core.execution.mapper.TaskMapper;
import com.netflix.conductor.core.execution.mapper.UserDefinedTaskMapper;
import com.netflix.conductor.core.execution.mapper.WaitTaskMapper;
import com.netflix.conductor.core.execution.tasks.Decision;
import com.netflix.conductor.core.execution.tasks.Join;
import com.netflix.conductor.core.execution.tasks.Switch;
import com.netflix.conductor.core.execution.tasks.SystemTaskRegistry;
import com.netflix.conductor.core.execution.tasks.WorkflowSystemTask;
import com.netflix.conductor.core.utils.ExternalPayloadStorageUtils;
import com.netflix.conductor.core.utils.IDGenerator;
import com.netflix.conductor.core.utils.ParametersUtils;
import com.netflix.conductor.dao.MetadataDAO;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import com.fasterxml.jackson.databind.ObjectMapper;
import static com.netflix.conductor.common.metadata.tasks.TaskType.DECISION;
import static com.netflix.conductor.common.metadata.tasks.TaskType.DYNAMIC;
import static com.netflix.conductor.common.metadata.tasks.TaskType.EVENT;
import static com.netflix.conductor.common.metadata.tasks.TaskType.FORK_JOIN;
import static com.netflix.conductor.common.metadata.tasks.TaskType.FORK_JOIN_DYNAMIC;
import static com.netflix.conductor.common.metadata.tasks.TaskType.HTTP;
import static com.netflix.conductor.common.metadata.tasks.TaskType.JOIN;
import static com.netflix.conductor.common.metadata.tasks.TaskType.SIMPLE;
import static com.netflix.conductor.common.metadata.tasks.TaskType.SUB_WORKFLOW;
import static com.netflix.conductor.common.metadata.tasks.TaskType.SWITCH;
import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_DECISION;
import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_FORK;
import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_JOIN;
import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_SWITCH;
import static com.netflix.conductor.common.metadata.tasks.TaskType.USER_DEFINED;
import static com.netflix.conductor.common.metadata.tasks.TaskType.WAIT;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNotSame;
import static org.junit.Assert.assertTrue;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
@ContextConfiguration(
classes = {
TestObjectMapperConfiguration.class,
TestDeciderOutcomes.TestConfiguration.class
})
@RunWith(SpringRunner.class)
public class TestDeciderOutcomes {
private DeciderService deciderService;
@Autowired private Map<String, Evaluator> evaluators;
@Autowired private ObjectMapper objectMapper;
@Autowired private SystemTaskRegistry systemTaskRegistry;
    /**
     * Spring configuration for this test: registers the system tasks (DECISION, SWITCH, JOIN)
     * under their canonical bean names and exposes a {@link SystemTaskRegistry} built from all
     * {@link WorkflowSystemTask} beans. Component-scanning the Evaluator package makes every
     * {@link Evaluator} implementation available for SWITCH-task expression evaluation.
     */
    @Configuration
    @ComponentScan(basePackageClasses = {Evaluator.class}) // load all Evaluator beans.
    public static class TestConfiguration {

        // Bean names must match the TASK_TYPE_* constants: the registry looks tasks up by name.
        @Bean(TASK_TYPE_DECISION)
        public Decision decision() {
            return new Decision();
        }

        @Bean(TASK_TYPE_SWITCH)
        public Switch switchTask() {
            return new Switch();
        }

        @Bean(TASK_TYPE_JOIN)
        public Join join() {
            return new Join();
        }

        @Bean
        public SystemTaskRegistry systemTaskRegistry(Set<WorkflowSystemTask> tasks) {
            return new SystemTaskRegistry(tasks);
        }
    }
    /**
     * Wires a real {@link DeciderService} backed by mocks: the metadata DAO returns a fixed
     * retryable task definition for any name, payload-size thresholds are stubbed, and a task
     * mapper is registered for every built-in task type so decide() can schedule anything the
     * test workflow definitions contain.
     */
    @Before
    public void init() {
        MetadataDAO metadataDAO = mock(MetadataDAO.class);
        ExternalPayloadStorageUtils externalPayloadStorageUtils =
                mock(ExternalPayloadStorageUtils.class);
        ConductorProperties properties = mock(ConductorProperties.class);
        // Generous payload thresholds so externalization never kicks in during these tests.
        when(properties.getTaskInputPayloadSizeThreshold()).thenReturn(DataSize.ofKilobytes(10L));
        when(properties.getMaxTaskInputPayloadSizeThreshold())
                .thenReturn(DataSize.ofKilobytes(10240L));
        // Every task-def lookup resolves to this single retryable definition.
        TaskDef taskDef = new TaskDef();
        taskDef.setRetryCount(1);
        taskDef.setName("mockTaskDef");
        taskDef.setResponseTimeoutSeconds(60 * 60);
        when(metadataDAO.getTaskDef(anyString())).thenReturn(taskDef);
        ParametersUtils parametersUtils = new ParametersUtils(objectMapper);
        // One mapper per task type, keyed by the TaskType name the decider uses for dispatch.
        Map<String, TaskMapper> taskMappers = new HashMap<>();
        taskMappers.put(DECISION.name(), new DecisionTaskMapper());
        taskMappers.put(SWITCH.name(), new SwitchTaskMapper(evaluators));
        taskMappers.put(DYNAMIC.name(), new DynamicTaskMapper(parametersUtils, metadataDAO));
        taskMappers.put(FORK_JOIN.name(), new ForkJoinTaskMapper());
        taskMappers.put(JOIN.name(), new JoinTaskMapper());
        taskMappers.put(
                FORK_JOIN_DYNAMIC.name(),
                new ForkJoinDynamicTaskMapper(
                        new IDGenerator(), parametersUtils, objectMapper, metadataDAO));
        taskMappers.put(
                USER_DEFINED.name(), new UserDefinedTaskMapper(parametersUtils, metadataDAO));
        taskMappers.put(SIMPLE.name(), new SimpleTaskMapper(parametersUtils));
        taskMappers.put(
                SUB_WORKFLOW.name(), new SubWorkflowTaskMapper(parametersUtils, metadataDAO));
        taskMappers.put(EVENT.name(), new EventTaskMapper(parametersUtils));
        taskMappers.put(WAIT.name(), new WaitTaskMapper(parametersUtils));
        taskMappers.put(HTTP.name(), new HTTPTaskMapper(parametersUtils, metadataDAO));
        this.deciderService =
                new DeciderService(
                        new IDGenerator(),
                        parametersUtils,
                        metadataDAO,
                        externalPayloadStorageUtils,
                        systemTaskRegistry,
                        taskMappers,
                        Duration.ofMinutes(60));
    }
    /**
     * Drives the conditional_flow.json definition (DECISION-based) through two decide() cycles:
     * the first schedules three tasks, and after completing them the second cycle schedules
     * junit_task_3 while updating the three completed tasks.
     */
    @Test
    public void testWorkflowWithNoTasks() throws Exception {
        InputStream stream = new ClassPathResource("./conditional_flow.json").getInputStream();
        WorkflowDef def = objectMapper.readValue(stream, WorkflowDef.class);
        assertNotNull(def);

        WorkflowModel workflow = new WorkflowModel();
        workflow.setWorkflowDefinition(def);
        workflow.setCreateTime(0L);
        // Inputs select the "nested"/"one" path of the conditional flow.
        workflow.getInput().put("param1", "nested");
        workflow.getInput().put("param2", "one");

        // First decide: three tasks scheduled, nothing to update, workflow still running.
        DeciderOutcome outcome = deciderService.decide(workflow);
        assertNotNull(outcome);
        assertFalse(outcome.isComplete);
        assertTrue(outcome.tasksToBeUpdated.isEmpty());
        assertEquals(3, outcome.tasksToBeScheduled.size());

        // Complete the scheduled tasks and decide again.
        outcome.tasksToBeScheduled.forEach(t -> t.setStatus(TaskModel.Status.COMPLETED));
        workflow.getTasks().addAll(outcome.tasksToBeScheduled);
        outcome = deciderService.decide(workflow);
        assertFalse(outcome.isComplete);
        assertEquals(outcome.tasksToBeUpdated.toString(), 3, outcome.tasksToBeUpdated.size());
        assertEquals(1, outcome.tasksToBeScheduled.size());
        assertEquals("junit_task_3", outcome.tasksToBeScheduled.get(0).getTaskDefName());
    }
    /**
     * Same scenario as {@link #testWorkflowWithNoTasks()} but against the SWITCH-based variant
     * of the definition (conditional_flow_with_switch.json), asserting identical scheduling
     * behavior across the two branching task types.
     */
    @Test
    public void testWorkflowWithNoTasksWithSwitch() throws Exception {
        InputStream stream =
                new ClassPathResource("./conditional_flow_with_switch.json").getInputStream();
        WorkflowDef def = objectMapper.readValue(stream, WorkflowDef.class);
        assertNotNull(def);

        WorkflowModel workflow = new WorkflowModel();
        workflow.setWorkflowDefinition(def);
        workflow.setCreateTime(0L);
        // Inputs select the "nested"/"one" path of the conditional flow.
        workflow.getInput().put("param1", "nested");
        workflow.getInput().put("param2", "one");

        // First decide: three tasks scheduled, nothing to update, workflow still running.
        DeciderOutcome outcome = deciderService.decide(workflow);
        assertNotNull(outcome);
        assertFalse(outcome.isComplete);
        assertTrue(outcome.tasksToBeUpdated.isEmpty());
        assertEquals(3, outcome.tasksToBeScheduled.size());

        // Complete the scheduled tasks and decide again.
        outcome.tasksToBeScheduled.forEach(t -> t.setStatus(TaskModel.Status.COMPLETED));
        workflow.getTasks().addAll(outcome.tasksToBeScheduled);
        outcome = deciderService.decide(workflow);
        assertFalse(outcome.isComplete);
        assertEquals(outcome.tasksToBeUpdated.toString(), 3, outcome.tasksToBeUpdated.size());
        assertEquals(1, outcome.tasksToBeScheduled.size());
        assertEquals("junit_task_3", outcome.tasksToBeScheduled.get(0).getTaskDefName());
    }
    /**
     * Verifies retry scheduling in two settings: (1) a plain USER_TASK that fails is rescheduled
     * with a new task id, the old id recorded as retriedTaskId, and its input parameters
     * (including ${CPEWF_TASK_ID}) re-evaluated for the new attempt; (2) the same holds for a
     * task spawned inside a FORK_JOIN_DYNAMIC, where the forked task's per-branch inputs are
     * preserved across the retry.
     */
    @Test
    public void testRetries() {
        WorkflowDef def = new WorkflowDef();
        def.setName("test");

        // A single USER_TASK whose inputs reference the generated task id and workflow input.
        WorkflowTask workflowTask = new WorkflowTask();
        workflowTask.setName("test_task");
        workflowTask.setType("USER_TASK");
        workflowTask.setTaskReferenceName("t0");
        workflowTask.getInputParameters().put("taskId", "${CPEWF_TASK_ID}");
        workflowTask.getInputParameters().put("requestId", "${workflow.input.requestId}");
        workflowTask.setTaskDefinition(new TaskDef("test_task"));

        def.getTasks().add(workflowTask);
        def.setSchemaVersion(2);

        WorkflowModel workflow = new WorkflowModel();
        workflow.setWorkflowDefinition(def);
        workflow.getInput().put("requestId", 123);
        workflow.setCreateTime(System.currentTimeMillis());
        DeciderOutcome outcome = deciderService.decide(workflow);
        assertNotNull(outcome);

        // First attempt: ${CPEWF_TASK_ID} resolves to this attempt's own task id.
        assertEquals(1, outcome.tasksToBeScheduled.size());
        assertEquals(
                workflowTask.getTaskReferenceName(),
                outcome.tasksToBeScheduled.get(0).getReferenceTaskName());
        String task1Id = outcome.tasksToBeScheduled.get(0).getTaskId();
        assertEquals(task1Id, outcome.tasksToBeScheduled.get(0).getInputData().get("taskId"));
        assertEquals(123, outcome.tasksToBeScheduled.get(0).getInputData().get("requestId"));

        // Fail the attempt; the retry must get a fresh id and point back at the failed one.
        outcome.tasksToBeScheduled.get(0).setStatus(TaskModel.Status.FAILED);
        workflow.getTasks().addAll(outcome.tasksToBeScheduled);

        outcome = deciderService.decide(workflow);
        assertNotNull(outcome);
        assertEquals(1, outcome.tasksToBeUpdated.size());
        assertEquals(1, outcome.tasksToBeScheduled.size());
        assertEquals(task1Id, outcome.tasksToBeUpdated.get(0).getTaskId());
        assertNotSame(task1Id, outcome.tasksToBeScheduled.get(0).getTaskId());
        assertEquals(
                outcome.tasksToBeScheduled.get(0).getTaskId(),
                outcome.tasksToBeScheduled.get(0).getInputData().get("taskId"));
        assertEquals(task1Id, outcome.tasksToBeScheduled.get(0).getRetriedTaskId());
        assertEquals(123, outcome.tasksToBeScheduled.get(0).getInputData().get("requestId"));

        // Second setting: the same retry semantics inside a dynamic fork.
        WorkflowTask fork = new WorkflowTask();
        fork.setName("fork0");
        fork.setWorkflowTaskType(TaskType.FORK_JOIN_DYNAMIC);
        fork.setTaskReferenceName("fork0");
        fork.setDynamicForkTasksInputParamName("forkedInputs");
        fork.setDynamicForkTasksParam("forks");
        fork.getInputParameters().put("forks", "${workflow.input.forks}");
        fork.getInputParameters().put("forkedInputs", "${workflow.input.forkedInputs}");

        WorkflowTask join = new WorkflowTask();
        join.setName("join0");
        join.setType("JOIN");
        join.setTaskReferenceName("join0");

        def.getTasks().clear();
        def.getTasks().add(fork);
        def.getTasks().add(join);

        // One forked SIMPLE task ("f0") with its own branch-specific input map.
        List<WorkflowTask> forks = new LinkedList<>();
        Map<String, Map<String, Object>> forkedInputs = new HashMap<>();

        for (int i = 0; i < 1; i++) {
            WorkflowTask wft = new WorkflowTask();
            wft.setName("f" + i);
            wft.setTaskReferenceName("f" + i);
            wft.setWorkflowTaskType(TaskType.SIMPLE);
            wft.getInputParameters().put("requestId", "${workflow.input.requestId}");
            wft.getInputParameters().put("taskId", "${CPEWF_TASK_ID}");
            wft.setTaskDefinition(new TaskDef("f" + i));
            forks.add(wft);
            Map<String, Object> input = new HashMap<>();
            input.put("k", "v");
            input.put("k1", 1);
            forkedInputs.put(wft.getTaskReferenceName(), input);
        }
        workflow = new WorkflowModel();
        workflow.setWorkflowDefinition(def);
        workflow.getInput().put("requestId", 123);
        workflow.setCreateTime(System.currentTimeMillis());

        workflow.getInput().put("forks", forks);
        workflow.getInput().put("forkedInputs", forkedInputs);

        // First decide schedules fork + forked task + join (3 tasks total).
        outcome = deciderService.decide(workflow);
        assertNotNull(outcome);
        assertEquals(3, outcome.tasksToBeScheduled.size());
        assertEquals(0, outcome.tasksToBeUpdated.size());

        // The forked task (index 1) carries its branch input and its own task id.
        assertEquals("v", outcome.tasksToBeScheduled.get(1).getInputData().get("k"));
        assertEquals(1, outcome.tasksToBeScheduled.get(1).getInputData().get("k1"));
        assertEquals(
                outcome.tasksToBeScheduled.get(1).getTaskId(),
                outcome.tasksToBeScheduled.get(1).getInputData().get("taskId"));
        task1Id = outcome.tasksToBeScheduled.get(1).getTaskId();

        // Fail the forked task and re-decide; a retry of "f0" must be scheduled.
        outcome.tasksToBeScheduled.get(1).setStatus(TaskModel.Status.FAILED);
        for (TaskModel taskToBeScheduled : outcome.tasksToBeScheduled) {
            taskToBeScheduled.setUpdateTime(System.currentTimeMillis());
        }
        workflow.getTasks().addAll(outcome.tasksToBeScheduled);

        outcome = deciderService.decide(workflow);
        assertTrue(
                outcome.tasksToBeScheduled.stream()
                        .anyMatch(task1 -> task1.getReferenceTaskName().equals("f0")));

        Optional<TaskModel> optionalTask =
                outcome.tasksToBeScheduled.stream()
                        .filter(t -> t.getReferenceTaskName().equals("f0"))
                        .findFirst();
        assertTrue(optionalTask.isPresent());
        TaskModel task = optionalTask.get();
        // Retry keeps the branch input, re-resolves taskId, and links to the failed attempt.
        assertEquals("v", task.getInputData().get("k"));
        assertEquals(1, task.getInputData().get("k1"));
        assertEquals(task.getTaskId(), task.getInputData().get("taskId"));
        assertNotSame(task1Id, task.getTaskId());
        assertEquals(task1Id, task.getRetriedTaskId());
    }
    /**
     * Verifies retry-then-continue semantics for an optional task: an optional task that keeps
     * failing is retried up to its retry count (retryCount=1 from the mocked TaskDef, iterated
     * here), and once retries are exhausted it is marked COMPLETED_WITH_ERRORS so the workflow
     * proceeds to the next task instead of failing.
     */
    @Test
    public void testOptional() {
        WorkflowDef def = new WorkflowDef();
        def.setName("test");

        // t0 is optional; t1 is the task that must run after t0 gives up.
        WorkflowTask task1 = new WorkflowTask();
        task1.setName("task0");
        task1.setType("SIMPLE");
        task1.setTaskReferenceName("t0");
        task1.getInputParameters().put("taskId", "${CPEWF_TASK_ID}");
        task1.setOptional(true);
        task1.setTaskDefinition(new TaskDef("task0"));

        WorkflowTask task2 = new WorkflowTask();
        task2.setName("task1");
        task2.setType("SIMPLE");
        task2.setTaskReferenceName("t1");
        task2.setTaskDefinition(new TaskDef("task1"));

        def.getTasks().add(task1);
        def.getTasks().add(task2);
        def.setSchemaVersion(2);

        WorkflowModel workflow = new WorkflowModel();
        workflow.setWorkflowDefinition(def);
        workflow.setCreateTime(System.currentTimeMillis());
        DeciderOutcome outcome = deciderService.decide(workflow);
        assertNotNull(outcome);

        assertEquals(1, outcome.tasksToBeScheduled.size());
        assertEquals(
                task1.getTaskReferenceName(),
                outcome.tasksToBeScheduled.get(0).getReferenceTaskName());

        // Fail t0 repeatedly: each decide() schedules a retry with an incremented retryCount.
        for (int i = 0; i < 3; i++) {
            String task1Id = outcome.tasksToBeScheduled.get(0).getTaskId();
            assertEquals(task1Id, outcome.tasksToBeScheduled.get(0).getInputData().get("taskId"));

            workflow.getTasks().clear();
            workflow.getTasks().addAll(outcome.tasksToBeScheduled);
            workflow.getTasks().get(0).setStatus(TaskModel.Status.FAILED);

            outcome = deciderService.decide(workflow);

            assertNotNull(outcome);
            assertEquals(1, outcome.tasksToBeUpdated.size());
            assertEquals(1, outcome.tasksToBeScheduled.size());

            assertEquals(TaskModel.Status.FAILED, workflow.getTasks().get(0).getStatus());
            assertEquals(task1Id, outcome.tasksToBeUpdated.get(0).getTaskId());
            assertEquals(
                    task1.getTaskReferenceName(),
                    outcome.tasksToBeScheduled.get(0).getReferenceTaskName());
            assertEquals(i + 1, outcome.tasksToBeScheduled.get(0).getRetryCount());
        }

        // Final failure: the optional task is COMPLETED_WITH_ERRORS and t1 gets scheduled.
        String task1Id = outcome.tasksToBeScheduled.get(0).getTaskId();

        workflow.getTasks().clear();
        workflow.getTasks().addAll(outcome.tasksToBeScheduled);
        workflow.getTasks().get(0).setStatus(TaskModel.Status.FAILED);

        outcome = deciderService.decide(workflow);
        assertNotNull(outcome);
        assertEquals(1, outcome.tasksToBeUpdated.size());
        assertEquals(1, outcome.tasksToBeScheduled.size());

        assertEquals(
                TaskModel.Status.COMPLETED_WITH_ERRORS, workflow.getTasks().get(0).getStatus());
        assertEquals(task1Id, outcome.tasksToBeUpdated.get(0).getTaskId());
        assertEquals(
                task2.getTaskReferenceName(),
                outcome.tasksToBeScheduled.get(0).getReferenceTaskName());
    }
    /**
     * Verifies optional-task semantics inside a FORK_JOIN_DYNAMIC: three optional forked tasks
     * that keep failing are retried, end up COMPLETED_WITH_ERRORS after exhausting retries, and
     * the JOIN then completes with COMPLETED_WITH_ERRORS rather than blocking or failing.
     */
    @Test
    public void testOptionalWithDynamicFork() {
        WorkflowDef def = new WorkflowDef();
        def.setName("test");

        // Dynamic fork reading its branch list and branch inputs from workflow input.
        WorkflowTask task1 = new WorkflowTask();
        task1.setName("fork0");
        task1.setWorkflowTaskType(TaskType.FORK_JOIN_DYNAMIC);
        task1.setTaskReferenceName("fork0");
        task1.setDynamicForkTasksInputParamName("forkedInputs");
        task1.setDynamicForkTasksParam("forks");
        task1.getInputParameters().put("forks", "${workflow.input.forks}");
        task1.getInputParameters().put("forkedInputs", "${workflow.input.forkedInputs}");

        WorkflowTask task2 = new WorkflowTask();
        task2.setName("join0");
        task2.setType("JOIN");
        task2.setTaskReferenceName("join0");

        def.getTasks().add(task1);
        def.getTasks().add(task2);
        def.setSchemaVersion(2);

        WorkflowModel workflow = new WorkflowModel();
        workflow.setWorkflowDefinition(def);
        // Three optional SIMPLE branches f0..f2, each with an empty input map.
        List<WorkflowTask> forks = new LinkedList<>();
        Map<String, Map<String, Object>> forkedInputs = new HashMap<>();

        for (int i = 0; i < 3; i++) {
            WorkflowTask workflowTask = new WorkflowTask();
            workflowTask.setName("f" + i);
            workflowTask.setTaskReferenceName("f" + i);
            workflowTask.setWorkflowTaskType(TaskType.SIMPLE);
            workflowTask.setOptional(true);
            workflowTask.setTaskDefinition(new TaskDef("f" + i));
            forks.add(workflowTask);
            forkedInputs.put(workflowTask.getTaskReferenceName(), new HashMap<>());
        }
        workflow.getInput().put("forks", forks);
        workflow.getInput().put("forkedInputs", forkedInputs);

        workflow.setCreateTime(System.currentTimeMillis());
        DeciderOutcome outcome = deciderService.decide(workflow);
        assertNotNull(outcome);

        // Fork + three branches + join = 5 tasks; the fork marker itself completes immediately.
        assertEquals(5, outcome.tasksToBeScheduled.size());
        assertEquals(0, outcome.tasksToBeUpdated.size());

        assertEquals(TASK_TYPE_FORK, outcome.tasksToBeScheduled.get(0).getTaskType());
        assertEquals(TaskModel.Status.COMPLETED, outcome.tasksToBeScheduled.get(0).getStatus());

        // Repeatedly fail every branch until retries are exhausted for all of them.
        for (int retryCount = 0; retryCount < 4; retryCount++) {

            for (TaskModel taskToBeScheduled : outcome.tasksToBeScheduled) {
                if (taskToBeScheduled.getTaskDefName().equals("join0")) {
                    assertEquals(TaskModel.Status.IN_PROGRESS, taskToBeScheduled.getStatus());
                } else if (taskToBeScheduled.getTaskType().matches("(f0|f1|f2)")) {
                    assertEquals(TaskModel.Status.SCHEDULED, taskToBeScheduled.getStatus());
                    taskToBeScheduled.setStatus(TaskModel.Status.FAILED);
                }

                taskToBeScheduled.setUpdateTime(System.currentTimeMillis());
            }
            workflow.getTasks().addAll(outcome.tasksToBeScheduled);
            outcome = deciderService.decide(workflow);
            assertNotNull(outcome);
        }

        // After exhaustion only the JOIN remains; all branches are COMPLETED_WITH_ERRORS.
        assertEquals(TASK_TYPE_JOIN, outcome.tasksToBeScheduled.get(0).getTaskType());

        for (int i = 0; i < 3; i++) {
            assertEquals(
                    TaskModel.Status.COMPLETED_WITH_ERRORS,
                    outcome.tasksToBeUpdated.get(i).getStatus());
            assertEquals("f" + (i), outcome.tasksToBeUpdated.get(i).getTaskDefName());
        }

        // Executing the JOIN resolves it to COMPLETED_WITH_ERRORS.
        assertEquals(TaskModel.Status.IN_PROGRESS, outcome.tasksToBeScheduled.get(0).getStatus());
        new Join().execute(workflow, outcome.tasksToBeScheduled.get(0), null);
        assertEquals(
                TaskModel.Status.COMPLETED_WITH_ERRORS,
                outcome.tasksToBeScheduled.get(0).getStatus());
    }
    /**
     * Exercises a DECISION task's case expression across three inputs: a missing Id falls
     * through to the default branch ("bad input"), an odd Id with location=usa selects the
     * "even" branch via the OR clause, and an odd Id elsewhere selects "odd". In each case the
     * decision's caseOutput and the branch task scheduled right after it are asserted.
     */
    @Test
    public void testDecisionCases() {
        WorkflowDef def = new WorkflowDef();
        def.setName("test");

        // Branch tasks for the two named cases plus the default case.
        WorkflowTask even = new WorkflowTask();
        even.setName("even");
        even.setType("SIMPLE");
        even.setTaskReferenceName("even");
        even.setTaskDefinition(new TaskDef("even"));

        WorkflowTask odd = new WorkflowTask();
        odd.setName("odd");
        odd.setType("SIMPLE");
        odd.setTaskReferenceName("odd");
        odd.setTaskDefinition(new TaskDef("odd"));

        WorkflowTask defaultt = new WorkflowTask();
        defaultt.setName("defaultt");
        defaultt.setType("SIMPLE");
        defaultt.setTaskReferenceName("defaultt");
        defaultt.setTaskDefinition(new TaskDef("defaultt"));

        // The decision evaluates a JS case expression over Id and location from workflow input.
        WorkflowTask decide = new WorkflowTask();
        decide.setName("decide");
        decide.setWorkflowTaskType(TaskType.DECISION);
        decide.setTaskReferenceName("d0");
        decide.getInputParameters().put("Id", "${workflow.input.Id}");
        decide.getInputParameters().put("location", "${workflow.input.location}");
        decide.setCaseExpression(
                "if ($.Id == null) 'bad input'; else if ( ($.Id != null && $.Id % 2 == 0) || $.location == 'usa') 'even'; else 'odd'; ");

        decide.getDecisionCases().put("even", Collections.singletonList(even));
        decide.getDecisionCases().put("odd", Collections.singletonList(odd));
        decide.setDefaultCase(Collections.singletonList(defaultt));

        def.getTasks().add(decide);
        def.setSchemaVersion(2);

        WorkflowModel workflow = new WorkflowModel();
        workflow.setWorkflowDefinition(def);
        workflow.setCreateTime(System.currentTimeMillis());
        // No Id at all -> expression yields 'bad input' -> default branch.
        DeciderOutcome outcome = deciderService.decide(workflow);
        assertNotNull(outcome);

        assertEquals(2, outcome.tasksToBeScheduled.size());
        assertEquals(
                decide.getTaskReferenceName(),
                outcome.tasksToBeScheduled.get(0).getReferenceTaskName());
        assertEquals(
                defaultt.getTaskReferenceName(),
                outcome.tasksToBeScheduled.get(1).getReferenceTaskName()); // default
        assertEquals(
                Collections.singletonList("bad input"),
                outcome.tasksToBeScheduled.get(0).getOutputData().get("caseOutput"));

        // Odd Id but location=usa -> the OR clause selects the 'even' branch.
        workflow.getInput().put("Id", 9);
        workflow.getInput().put("location", "usa");
        outcome = deciderService.decide(workflow);
        assertEquals(2, outcome.tasksToBeScheduled.size());
        assertEquals(
                decide.getTaskReferenceName(),
                outcome.tasksToBeScheduled.get(0).getReferenceTaskName());
        assertEquals(
                even.getTaskReferenceName(),
                outcome.tasksToBeScheduled
                        .get(1)
                        .getReferenceTaskName()); // even because of location == usa
        assertEquals(
                Collections.singletonList("even"),
                outcome.tasksToBeScheduled.get(0).getOutputData().get("caseOutput"));

        // Odd Id and location!=usa -> 'odd' branch.
        workflow.getInput().put("Id", 9);
        workflow.getInput().put("location", "canada");
        outcome = deciderService.decide(workflow);
        assertEquals(2, outcome.tasksToBeScheduled.size());
        assertEquals(
                decide.getTaskReferenceName(),
                outcome.tasksToBeScheduled.get(0).getReferenceTaskName());
        assertEquals(
                odd.getTaskReferenceName(),
                outcome.tasksToBeScheduled.get(1).getReferenceTaskName()); // odd
        assertEquals(
                Collections.singletonList("odd"),
                outcome.tasksToBeScheduled.get(0).getOutputData().get("caseOutput"));
    }
}
| 6,602 |
0 | Create_ds/conductor/core/src/test/java/com/netflix/conductor/core | Create_ds/conductor/core/src/test/java/com/netflix/conductor/core/execution/WorkflowSystemTaskStub.java | /*
* Copyright 2022 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution;
import com.netflix.conductor.core.execution.tasks.WorkflowSystemTask;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
/**
 * Test double for {@link WorkflowSystemTask} that records whether {@code start} was invoked and
 * immediately completes the task before delegating to the superclass.
 */
public class WorkflowSystemTaskStub extends WorkflowSystemTask {

    // Flipped to true on the first start() invocation; defaults to false.
    private boolean startInvoked;

    public WorkflowSystemTaskStub(String taskType) {
        super(taskType);
    }

    @Override
    public void start(WorkflowModel workflow, TaskModel task, WorkflowExecutor executor) {
        // Complete the task up front so callers observe a finished system task.
        task.setStatus(TaskModel.Status.COMPLETED);
        startInvoked = true;
        super.start(workflow, task, executor);
    }

    /** @return true once {@link #start} has been called at least once */
    public boolean isStarted() {
        return startInvoked;
    }
}
| 6,603 |
0 | Create_ds/conductor/core/src/test/java/com/netflix/conductor/core/execution | Create_ds/conductor/core/src/test/java/com/netflix/conductor/core/execution/tasks/TestLambda.java | /*
* Copyright 2022 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution.tasks;
import java.util.HashMap;
import java.util.Map;
import org.junit.Test;
import com.netflix.conductor.core.execution.WorkflowExecutor;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.mock;
/**
* @author x-ultra
*/
/**
 * Tests for the Lambda system task: a missing script expression fails the task, a valid
 * expression completes it with the script's return value under "result", and a script that
 * throws (referencing an undefined property) fails the task.
 *
 * @author x-ultra
 */
public class TestLambda {

    private final WorkflowModel workflow = new WorkflowModel();
    private final WorkflowExecutor executor = mock(WorkflowExecutor.class);

    @Test
    public void start() {
        Lambda lambda = new Lambda();
        // Generic types replace the previous raw Map/HashMap (and the rawtypes suppression).
        Map<String, Object> inputObj = new HashMap<>();
        inputObj.put("a", 1);

        // test for scriptExpression == null
        TaskModel task = new TaskModel();
        task.getInputData().put("input", inputObj);
        lambda.execute(workflow, task, executor);
        assertEquals(TaskModel.Status.FAILED, task.getStatus());

        // test for normal
        task = new TaskModel();
        task.getInputData().put("input", inputObj);
        task.getInputData().put("scriptExpression", "if ($.input.a==1){return 1}else{return 0 } ");
        lambda.execute(workflow, task, executor);
        assertEquals(TaskModel.Status.COMPLETED, task.getStatus());
        // JUnit's assertEquals takes (expected, actual) — expected value goes first.
        assertEquals("{result=1}", task.getOutputData().toString());

        // test for scriptExpression ScriptException
        task = new TaskModel();
        task.getInputData().put("input", inputObj);
        task.getInputData().put("scriptExpression", "if ($.a.size==1){return 1}else{return 0 } ");
        lambda.execute(workflow, task, executor);
        assertEquals(TaskModel.Status.FAILED, task.getStatus());
    }
}
| 6,604 |
0 | Create_ds/conductor/core/src/test/java/com/netflix/conductor/core/execution | Create_ds/conductor/core/src/test/java/com/netflix/conductor/core/execution/tasks/EventQueueResolutionTest.java | /*
* Copyright 2022 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution.tasks;
import java.util.HashMap;
import java.util.Map;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringRunner;
import com.netflix.conductor.common.config.TestObjectMapperConfiguration;
import com.netflix.conductor.common.metadata.tasks.TaskType;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.core.events.EventQueueProvider;
import com.netflix.conductor.core.events.EventQueues;
import com.netflix.conductor.core.events.MockQueueProvider;
import com.netflix.conductor.core.events.queue.ObservableQueue;
import com.netflix.conductor.core.utils.ParametersUtils;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import com.fasterxml.jackson.databind.ObjectMapper;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
/**
* Tests the {@link Event#computeQueueName(WorkflowModel, TaskModel)} and {@link
* Event#getQueue(String, String)} methods with a real {@link ParametersUtils} object.
*/
@ContextConfiguration(classes = {TestObjectMapperConfiguration.class})
@RunWith(SpringRunner.class)
public class EventQueueResolutionTest {

    private WorkflowDef testWorkflowDefinition;
    private EventQueues eventQueues;
    private ParametersUtils parametersUtils;

    @Autowired private ObjectMapper objectMapper;

    @Before
    public void setup() {
        Map<String, EventQueueProvider> providers = new HashMap<>();
        providers.put("sqs", new MockQueueProvider("sqs"));
        providers.put("conductor", new MockQueueProvider("conductor"));
        parametersUtils = new ParametersUtils(objectMapper);
        eventQueues = new EventQueues(providers, parametersUtils);
        testWorkflowDefinition = new WorkflowDef();
        testWorkflowDefinition.setName("testWorkflow");
        testWorkflowDefinition.setVersion(2);
    }

    /**
     * Verifies queue resolution for static sinks, sinks templated on sibling task outputs,
     * and the bare "conductor" default sink.
     */
    @Test
    public void testSinkParam() {
        WorkflowDef wfDef = new WorkflowDef();
        wfDef.setName("wf0");

        WorkflowModel wf = new WorkflowModel();
        wf.setWorkflowDefinition(wfDef);

        TaskModel firstTask = new TaskModel();
        firstTask.setReferenceTaskName("t1");
        firstTask.addOutput("q", "t1_queue");
        wf.getTasks().add(firstTask);

        TaskModel secondTask = new TaskModel();
        secondTask.setReferenceTaskName("t2");
        secondTask.addOutput("q", "task2_queue");
        wf.getTasks().add(secondTask);

        TaskModel eventTask = new TaskModel();
        eventTask.setReferenceTaskName("event");
        eventTask.getInputData().put("sink", "sqs:queue_name");
        eventTask.setTaskType(TaskType.EVENT.name());
        wf.getTasks().add(eventTask);

        Event event = new Event(eventQueues, parametersUtils, objectMapper);

        // Static sink: "sqs:queue_name" resolves directly.
        ObservableQueue queue = resolveQueue(event, wf, eventTask);
        assertNotNull(eventTask.getReasonForIncompletion(), queue);
        assertEquals("queue_name", queue.getName());
        assertEquals("sqs", queue.getType());

        // Templated sink resolved from the first task's output.
        eventTask.getInputData().put("sink", "sqs:${t1.output.q}");
        queue = resolveQueue(event, wf, eventTask);
        assertNotNull(queue);
        assertEquals("t1_queue", queue.getName());
        assertEquals("sqs", queue.getType());

        // Templated sink resolved from the second task's output.
        eventTask.getInputData().put("sink", "sqs:${t2.output.q}");
        queue = resolveQueue(event, wf, eventTask);
        assertNotNull(queue);
        assertEquals("task2_queue", queue.getName());
        assertEquals("sqs", queue.getType());

        // Bare "conductor" sink defaults to "<workflowName>:<taskRefName>".
        eventTask.getInputData().put("sink", "conductor");
        queue = resolveQueue(event, wf, eventTask);
        assertNotNull(queue);
        assertEquals(
                wf.getWorkflowName() + ":" + eventTask.getReferenceTaskName(), queue.getName());
        assertEquals("conductor", queue.getType());

        // Another static sqs sink value.
        eventTask.getInputData().put("sink", "sqs:static_value");
        queue = resolveQueue(event, wf, eventTask);
        assertNotNull(queue);
        assertEquals("static_value", queue.getName());
        assertEquals("sqs", queue.getType());
    }

    /** Verifies dynamic sink resolution and that resolution never flips the task status. */
    @Test
    public void testDynamicSinks() {
        Event event = new Event(eventQueues, parametersUtils, objectMapper);
        WorkflowModel wf = new WorkflowModel();
        wf.setWorkflowDefinition(testWorkflowDefinition);

        TaskModel task = new TaskModel();
        task.setReferenceTaskName("task0");
        task.setTaskId("task_id_0");
        task.setStatus(TaskModel.Status.IN_PROGRESS);

        task.getInputData().put("sink", "conductor:some_arbitary_queue");
        ObservableQueue queue = resolveQueue(event, wf, task);
        assertEquals(TaskModel.Status.IN_PROGRESS, task.getStatus());
        assertNotNull(queue);
        assertEquals("testWorkflow:some_arbitary_queue", queue.getName());
        assertEquals("testWorkflow:some_arbitary_queue", queue.getURI());
        assertEquals("conductor", queue.getType());

        task.getInputData().put("sink", "conductor");
        queue = resolveQueue(event, wf, task);
        assertEquals(
                "not in progress: " + task.getReasonForIncompletion(),
                TaskModel.Status.IN_PROGRESS,
                task.getStatus());
        assertNotNull(queue);
        assertEquals("testWorkflow:task0", queue.getName());

        task.getInputData().put("sink", "sqs:my_sqs_queue_name");
        queue = resolveQueue(event, wf, task);
        assertEquals(
                "not in progress: " + task.getReasonForIncompletion(),
                TaskModel.Status.IN_PROGRESS,
                task.getStatus());
        assertNotNull(queue);
        assertEquals("my_sqs_queue_name", queue.getName());
        assertEquals("sqs", queue.getType());
    }

    /** Computes the queue name for the task and looks up the corresponding queue. */
    private ObservableQueue resolveQueue(Event event, WorkflowModel workflow, TaskModel task) {
        String queueName = event.computeQueueName(workflow, task);
        return event.getQueue(queueName, task.getTaskId());
    }
}
| 6,605 |
0 | Create_ds/conductor/core/src/test/java/com/netflix/conductor/core/execution | Create_ds/conductor/core/src/test/java/com/netflix/conductor/core/execution/tasks/TestSystemTaskWorkerCoordinator.java | /*
* Copyright 2021 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution.tasks;
import java.time.Duration;
import java.util.Collections;
import org.junit.Before;
import org.junit.Test;
import com.netflix.conductor.core.config.ConductorProperties;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class TestSystemTaskWorkerCoordinator {

    private static final String TEST_QUEUE = "test";
    private static final String EXECUTION_NAMESPACE_CONSTANT = "@exeNS";

    private SystemTaskWorker systemTaskWorker;
    private ConductorProperties properties;

    @Before
    public void setUp() {
        properties = mock(ConductorProperties.class);
        systemTaskWorker = mock(SystemTaskWorker.class);
        when(properties.getSystemTaskWorkerPollInterval()).thenReturn(Duration.ofMillis(50));
        when(properties.getSystemTaskWorkerExecutionNamespace()).thenReturn("");
    }

    /** A system task whose queue name carries the "@exeNS" execution-namespace suffix. */
    static class TaskWithExecutionNamespace extends WorkflowSystemTask {
        public TaskWithExecutionNamespace() {
            super(TEST_QUEUE + EXECUTION_NAMESPACE_CONSTANT);
        }
    }

    /** Tasks tagged with the coordinator's configured execution namespace must be recognized. */
    @Test
    public void testIsFromCoordinatorExecutionNameSpace() {
        doReturn("exeNS").when(properties).getSystemTaskWorkerExecutionNamespace();
        SystemTaskWorkerCoordinator coordinator =
                new SystemTaskWorkerCoordinator(
                        systemTaskWorker, properties, Collections.emptySet());
        assertTrue(
                coordinator.isFromCoordinatorExecutionNameSpace(
                        new TaskWithExecutionNamespace()));
    }
}
| 6,606 |
0 | Create_ds/conductor/core/src/test/java/com/netflix/conductor/core/execution | Create_ds/conductor/core/src/test/java/com/netflix/conductor/core/execution/tasks/TestSystemTaskWorker.java | /*
* Copyright 2021 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution.tasks;
import java.time.Duration;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mockito;
import com.netflix.conductor.core.config.ConductorProperties;
import com.netflix.conductor.core.execution.AsyncSystemTaskExecutor;
import com.netflix.conductor.dao.QueueDAO;
import com.netflix.conductor.service.ExecutionService;
import static org.junit.Assert.assertEquals;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyInt;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
/** Unit tests for {@link SystemTaskWorker}: execution-config sizing, polling and error paths. */
public class TestSystemTaskWorker {

    private static final String TEST_TASK = "system_task";
    private static final String ISOLATED_TASK = "system_task-isolated";

    private AsyncSystemTaskExecutor asyncSystemTaskExecutor;
    private ExecutionService executionService;
    private QueueDAO queueDAO;
    private ConductorProperties properties;
    private SystemTaskWorker systemTaskWorker;

    @Before
    public void setUp() {
        asyncSystemTaskExecutor = mock(AsyncSystemTaskExecutor.class);
        executionService = mock(ExecutionService.class);
        queueDAO = mock(QueueDAO.class);
        properties = mock(ConductorProperties.class);
        when(properties.getSystemTaskWorkerThreadCount()).thenReturn(10);
        when(properties.getIsolatedSystemTaskWorkerThreadCount()).thenReturn(10);
        when(properties.getSystemTaskWorkerCallbackDuration()).thenReturn(Duration.ofSeconds(30));
        when(properties.getSystemTaskWorkerPollInterval()).thenReturn(Duration.ofSeconds(30));
        systemTaskWorker =
                new SystemTaskWorker(
                        queueDAO, asyncSystemTaskExecutor, properties, executionService);
        systemTaskWorker.start();
    }

    @After
    public void tearDown() {
        systemTaskWorker.queueExecutionConfigMap.clear();
        systemTaskWorker.stop();
    }

    /** A non-isolated queue name gets the regular worker thread count as its semaphore size. */
    @Test
    public void testGetExecutionConfigForSystemTask() {
        when(properties.getSystemTaskWorkerThreadCount()).thenReturn(5);
        systemTaskWorker =
                new SystemTaskWorker(
                        queueDAO, asyncSystemTaskExecutor, properties, executionService);
        // Fixed argument order: JUnit's assertEquals takes the expected value first.
        assertEquals(
                5, systemTaskWorker.getExecutionConfig("").getSemaphoreUtil().availableSlots());
    }

    /** An isolated queue name gets the isolated worker thread count as its semaphore size. */
    @Test
    public void testGetExecutionConfigForIsolatedSystemTask() {
        when(properties.getIsolatedSystemTaskWorkerThreadCount()).thenReturn(7);
        systemTaskWorker =
                new SystemTaskWorker(
                        queueDAO, asyncSystemTaskExecutor, properties, executionService);
        // Fixed argument order: JUnit's assertEquals takes the expected value first.
        assertEquals(
                7,
                systemTaskWorker.getExecutionConfig("test-iso").getSemaphoreUtil().availableSlots());
    }

    /** A single popped task id is handed to the async executor exactly once. */
    @Test
    public void testPollAndExecuteSystemTask() throws Exception {
        when(queueDAO.pop(anyString(), anyInt(), anyInt()))
                .thenReturn(Collections.singletonList("taskId"));
        // Latch released from the mocked executor so the async hand-off can be awaited.
        CountDownLatch latch = new CountDownLatch(1);
        doAnswer(
                        invocation -> {
                            latch.countDown();
                            return null;
                        })
                .when(asyncSystemTaskExecutor)
                .execute(any(), anyString());
        systemTaskWorker.pollAndExecute(new TestTask(), TEST_TASK);
        latch.await();
        verify(asyncSystemTaskExecutor).execute(any(), anyString());
    }

    /** A batch poll returning two ids triggers two async executions. */
    @Test
    public void testBatchPollAndExecuteSystemTask() throws Exception {
        when(queueDAO.pop(anyString(), anyInt(), anyInt())).thenReturn(List.of("t1", "t1"))
        ;
        CountDownLatch latch = new CountDownLatch(2);
        doAnswer(
                        invocation -> {
                            latch.countDown();
                            return null;
                        })
                .when(asyncSystemTaskExecutor)
                .execute(any(), eq("t1"));
        systemTaskWorker.pollAndExecute(new TestTask(), TEST_TASK);
        latch.await();
        verify(asyncSystemTaskExecutor, Mockito.times(2)).execute(any(), eq("t1"));
    }

    /** Isolated queues are polled and executed the same way as regular ones. */
    @Test
    public void testPollAndExecuteIsolatedSystemTask() throws Exception {
        when(queueDAO.pop(anyString(), anyInt(), anyInt())).thenReturn(List.of("isolated_taskId"));
        CountDownLatch latch = new CountDownLatch(1);
        doAnswer(
                        invocation -> {
                            latch.countDown();
                            return null;
                        })
                .when(asyncSystemTaskExecutor)
                .execute(any(), eq("isolated_taskId"));
        systemTaskWorker.pollAndExecute(new IsolatedTask(), ISOLATED_TASK);
        latch.await();
        verify(asyncSystemTaskExecutor, Mockito.times(1)).execute(any(), eq("isolated_taskId"));
    }

    /** A failing poll must be swallowed by the worker and never reach the executor. */
    @Test
    public void testPollException() {
        when(properties.getSystemTaskWorkerThreadCount()).thenReturn(1);
        when(queueDAO.pop(anyString(), anyInt(), anyInt())).thenThrow(RuntimeException.class);
        systemTaskWorker.pollAndExecute(new TestTask(), TEST_TASK);
        verify(asyncSystemTaskExecutor, Mockito.never()).execute(any(), anyString());
    }

    /** A failing batch poll must likewise never reach the executor. */
    @Test
    public void testBatchPollException() {
        when(properties.getSystemTaskWorkerThreadCount()).thenReturn(2);
        when(queueDAO.pop(anyString(), anyInt(), anyInt())).thenThrow(RuntimeException.class);
        systemTaskWorker.pollAndExecute(new TestTask(), TEST_TASK);
        verify(asyncSystemTaskExecutor, Mockito.never()).execute(any(), anyString());
    }

    /** Minimal system task registered under the regular test queue. */
    static class TestTask extends WorkflowSystemTask {
        public TestTask() {
            super(TEST_TASK);
        }
    }

    /** Minimal system task registered under the isolated test queue. */
    static class IsolatedTask extends WorkflowSystemTask {
        public IsolatedTask() {
            super(ISOLATED_TASK);
        }
    }
}
| 6,607 |
0 | Create_ds/conductor/core/src/test/java/com/netflix/conductor/core/execution | Create_ds/conductor/core/src/test/java/com/netflix/conductor/core/execution/tasks/TestNoop.java | /*
* Copyright 2023 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution.tasks;
import org.junit.Test;
import com.netflix.conductor.core.execution.WorkflowExecutor;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import static org.junit.Assert.*;
import static org.mockito.Mockito.*;
public class TestNoop {

    private final WorkflowExecutor executor = mock(WorkflowExecutor.class);

    /** Executing a NOOP task does no work and simply marks the task COMPLETED. */
    @Test
    public void should_do_nothing() {
        Noop noopTask = new Noop();
        TaskModel task = new TaskModel();
        WorkflowModel workflow = new WorkflowModel();

        noopTask.execute(workflow, task, executor);

        assertEquals(TaskModel.Status.COMPLETED, task.getStatus());
    }
}
| 6,608 |
0 | Create_ds/conductor/core/src/test/java/com/netflix/conductor/core/execution | Create_ds/conductor/core/src/test/java/com/netflix/conductor/core/execution/tasks/TestSubWorkflow.java | /*
* Copyright 2022 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution.tasks;
import java.util.HashMap;
import java.util.Map;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringRunner;
import com.netflix.conductor.common.config.TestObjectMapperConfiguration;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.core.exception.NonTransientException;
import com.netflix.conductor.core.exception.TransientException;
import com.netflix.conductor.core.execution.StartWorkflowInput;
import com.netflix.conductor.core.execution.WorkflowExecutor;
import com.netflix.conductor.core.operation.StartWorkflowOperation;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import com.fasterxml.jackson.databind.ObjectMapper;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
@ContextConfiguration(classes = {TestObjectMapperConfiguration.class})
@RunWith(SpringRunner.class)
// Unit tests for the SUB_WORKFLOW system task: starting, status propagation and cancellation.
// NOTE: the mocked StartWorkflowOperation is stubbed with exact-equality StartWorkflowInput
// instances, so each test builds an input that must match the one the task constructs.
public class TestSubWorkflow {
    private WorkflowExecutor workflowExecutor;
    private SubWorkflow subWorkflow;
    private StartWorkflowOperation startWorkflowOperation;
    @Autowired private ObjectMapper objectMapper;
    @Before
    public void setup() {
        workflowExecutor = mock(WorkflowExecutor.class);
        startWorkflowOperation = mock(StartWorkflowOperation.class);
        subWorkflow = new SubWorkflow(objectMapper, startWorkflowOperation);
    }
    // Starting the sub-workflow maps the started workflow's terminal/active status onto the task:
    // RUNNING -> IN_PROGRESS, TERMINATED -> CANCELED, COMPLETED -> COMPLETED.
    @Test
    public void testStartSubWorkflow() {
        WorkflowDef workflowDef = new WorkflowDef();
        WorkflowModel workflowInstance = new WorkflowModel();
        workflowInstance.setWorkflowDefinition(workflowDef);
        TaskModel task = new TaskModel();
        task.setOutputData(new HashMap<>());
        Map<String, Object> inputData = new HashMap<>();
        inputData.put("subWorkflowName", "UnitWorkFlow");
        inputData.put("subWorkflowVersion", 3);
        task.setInputData(inputData);
        String workflowId = "workflow_1";
        WorkflowModel workflow = new WorkflowModel();
        workflow.setWorkflowId(workflowId);
        StartWorkflowInput startWorkflowInput = new StartWorkflowInput();
        startWorkflowInput.setName("UnitWorkFlow");
        startWorkflowInput.setVersion(3);
        startWorkflowInput.setWorkflowInput(inputData);
        startWorkflowInput.setTaskToDomain(workflowInstance.getTaskToDomain());
        when(startWorkflowOperation.execute(startWorkflowInput)).thenReturn(workflowId);
        when(workflowExecutor.getWorkflow(anyString(), eq(false))).thenReturn(workflow);
        workflow.setStatus(WorkflowModel.Status.RUNNING);
        subWorkflow.start(workflowInstance, task, workflowExecutor);
        assertEquals("workflow_1", task.getSubWorkflowId());
        assertEquals(TaskModel.Status.IN_PROGRESS, task.getStatus());
        workflow.setStatus(WorkflowModel.Status.TERMINATED);
        subWorkflow.start(workflowInstance, task, workflowExecutor);
        assertEquals("workflow_1", task.getSubWorkflowId());
        assertEquals(TaskModel.Status.CANCELED, task.getStatus());
        workflow.setStatus(WorkflowModel.Status.COMPLETED);
        subWorkflow.start(workflowInstance, task, workflowExecutor);
        assertEquals("workflow_1", task.getSubWorkflowId());
        assertEquals(TaskModel.Status.COMPLETED, task.getStatus());
    }
    // A TransientException (e.g. queue failure) must leave the task SCHEDULED and untouched
    // so the start can be retried later.
    @Test
    public void testStartSubWorkflowQueueFailure() {
        WorkflowDef workflowDef = new WorkflowDef();
        WorkflowModel workflowInstance = new WorkflowModel();
        workflowInstance.setWorkflowDefinition(workflowDef);
        TaskModel task = new TaskModel();
        task.setOutputData(new HashMap<>());
        task.setStatus(TaskModel.Status.SCHEDULED);
        Map<String, Object> inputData = new HashMap<>();
        inputData.put("subWorkflowName", "UnitWorkFlow");
        inputData.put("subWorkflowVersion", 3);
        task.setInputData(inputData);
        StartWorkflowInput startWorkflowInput = new StartWorkflowInput();
        startWorkflowInput.setName("UnitWorkFlow");
        startWorkflowInput.setVersion(3);
        startWorkflowInput.setWorkflowInput(inputData);
        startWorkflowInput.setTaskToDomain(workflowInstance.getTaskToDomain());
        when(startWorkflowOperation.execute(startWorkflowInput))
                .thenThrow(new TransientException("QueueDAO failure"));
        subWorkflow.start(workflowInstance, task, workflowExecutor);
        assertNull("subWorkflowId should be null", task.getSubWorkflowId());
        assertEquals(TaskModel.Status.SCHEDULED, task.getStatus());
        assertTrue("Output data should be empty", task.getOutputData().isEmpty());
    }
    // A NonTransientException must fail the task immediately and record the reason.
    @Test
    public void testStartSubWorkflowStartError() {
        WorkflowDef workflowDef = new WorkflowDef();
        WorkflowModel workflowInstance = new WorkflowModel();
        workflowInstance.setWorkflowDefinition(workflowDef);
        TaskModel task = new TaskModel();
        task.setOutputData(new HashMap<>());
        task.setStatus(TaskModel.Status.SCHEDULED);
        Map<String, Object> inputData = new HashMap<>();
        inputData.put("subWorkflowName", "UnitWorkFlow");
        inputData.put("subWorkflowVersion", 3);
        task.setInputData(inputData);
        StartWorkflowInput startWorkflowInput = new StartWorkflowInput();
        startWorkflowInput.setName("UnitWorkFlow");
        startWorkflowInput.setVersion(3);
        startWorkflowInput.setWorkflowInput(inputData);
        startWorkflowInput.setTaskToDomain(workflowInstance.getTaskToDomain());
        String failureReason = "non transient failure";
        when(startWorkflowOperation.execute(startWorkflowInput))
                .thenThrow(new NonTransientException(failureReason));
        subWorkflow.start(workflowInstance, task, workflowExecutor);
        assertNull("subWorkflowId should be null", task.getSubWorkflowId());
        assertEquals(TaskModel.Status.FAILED, task.getStatus());
        assertEquals(failureReason, task.getReasonForIncompletion());
        assertTrue("Output data should be empty", task.getOutputData().isEmpty());
    }
    // An empty 'workflowInput' map is ignored: the task's full input is used instead.
    @Test
    public void testStartSubWorkflowWithEmptyWorkflowInput() {
        WorkflowDef workflowDef = new WorkflowDef();
        WorkflowModel workflowInstance = new WorkflowModel();
        workflowInstance.setWorkflowDefinition(workflowDef);
        TaskModel task = new TaskModel();
        task.setOutputData(new HashMap<>());
        Map<String, Object> inputData = new HashMap<>();
        inputData.put("subWorkflowName", "UnitWorkFlow");
        inputData.put("subWorkflowVersion", 3);
        Map<String, Object> workflowInput = new HashMap<>();
        inputData.put("workflowInput", workflowInput);
        task.setInputData(inputData);
        StartWorkflowInput startWorkflowInput = new StartWorkflowInput();
        startWorkflowInput.setName("UnitWorkFlow");
        startWorkflowInput.setVersion(3);
        startWorkflowInput.setWorkflowInput(inputData);
        startWorkflowInput.setTaskToDomain(workflowInstance.getTaskToDomain());
        when(startWorkflowOperation.execute(startWorkflowInput)).thenReturn("workflow_1");
        subWorkflow.start(workflowInstance, task, workflowExecutor);
        assertEquals("workflow_1", task.getSubWorkflowId());
    }
    // A non-empty 'workflowInput' map is passed through as the sub-workflow's input.
    @Test
    public void testStartSubWorkflowWithWorkflowInput() {
        WorkflowDef workflowDef = new WorkflowDef();
        WorkflowModel workflowInstance = new WorkflowModel();
        workflowInstance.setWorkflowDefinition(workflowDef);
        TaskModel task = new TaskModel();
        task.setOutputData(new HashMap<>());
        Map<String, Object> inputData = new HashMap<>();
        inputData.put("subWorkflowName", "UnitWorkFlow");
        inputData.put("subWorkflowVersion", 3);
        Map<String, Object> workflowInput = new HashMap<>();
        workflowInput.put("test", "value");
        inputData.put("workflowInput", workflowInput);
        task.setInputData(inputData);
        StartWorkflowInput startWorkflowInput = new StartWorkflowInput();
        startWorkflowInput.setName("UnitWorkFlow");
        startWorkflowInput.setVersion(3);
        startWorkflowInput.setWorkflowInput(workflowInput);
        startWorkflowInput.setTaskToDomain(workflowInstance.getTaskToDomain());
        when(startWorkflowOperation.execute(startWorkflowInput)).thenReturn("workflow_1");
        subWorkflow.start(workflowInstance, task, workflowExecutor);
        assertEquals("workflow_1", task.getSubWorkflowId());
    }
    // 'subWorkflowTaskToDomain' from the task input is forwarded as the task-to-domain mapping.
    @Test
    public void testStartSubWorkflowTaskToDomain() {
        WorkflowDef workflowDef = new WorkflowDef();
        WorkflowModel workflowInstance = new WorkflowModel();
        workflowInstance.setWorkflowDefinition(workflowDef);
        Map<String, String> taskToDomain =
                new HashMap<>() {
                    {
                        put("*", "unittest");
                    }
                };
        TaskModel task = new TaskModel();
        task.setOutputData(new HashMap<>());
        Map<String, Object> inputData = new HashMap<>();
        inputData.put("subWorkflowName", "UnitWorkFlow");
        inputData.put("subWorkflowVersion", 2);
        inputData.put("subWorkflowTaskToDomain", taskToDomain);
        task.setInputData(inputData);
        StartWorkflowInput startWorkflowInput = new StartWorkflowInput();
        startWorkflowInput.setName("UnitWorkFlow");
        startWorkflowInput.setVersion(2);
        startWorkflowInput.setWorkflowInput(inputData);
        startWorkflowInput.setTaskToDomain(taskToDomain);
        when(startWorkflowOperation.execute(startWorkflowInput)).thenReturn("workflow_1");
        subWorkflow.start(workflowInstance, task, workflowExecutor);
        assertEquals("workflow_1", task.getSubWorkflowId());
    }
    // Without a sub-workflow id recorded on the task, execute() cannot make progress.
    @Test
    public void testExecuteSubWorkflowWithoutId() {
        WorkflowDef workflowDef = new WorkflowDef();
        WorkflowModel workflowInstance = new WorkflowModel();
        workflowInstance.setWorkflowDefinition(workflowDef);
        TaskModel task = new TaskModel();
        task.setOutputData(new HashMap<>());
        Map<String, Object> inputData = new HashMap<>();
        inputData.put("subWorkflowName", "UnitWorkFlow");
        inputData.put("subWorkflowVersion", 2);
        task.setInputData(inputData);
        StartWorkflowInput startWorkflowInput = new StartWorkflowInput();
        startWorkflowInput.setName("UnitWorkFlow");
        startWorkflowInput.setVersion(2);
        startWorkflowInput.setWorkflowInput(inputData);
        startWorkflowInput.setTaskToDomain(workflowInstance.getTaskToDomain());
        when(startWorkflowOperation.execute(startWorkflowInput)).thenReturn("workflow_1");
        assertFalse(subWorkflow.execute(workflowInstance, task, workflowExecutor));
    }
    // execute() maps every sub-workflow status onto the task: non-terminal states return false
    // and leave the task untouched; terminal states return true and set the matching task status
    // plus the sub-workflow's reasonForIncompletion.
    @Test
    public void testExecuteWorkflowStatus() {
        WorkflowDef workflowDef = new WorkflowDef();
        WorkflowModel workflowInstance = new WorkflowModel();
        WorkflowModel subWorkflowInstance = new WorkflowModel();
        workflowInstance.setWorkflowDefinition(workflowDef);
        Map<String, String> taskToDomain =
                new HashMap<>() {
                    {
                        put("*", "unittest");
                    }
                };
        TaskModel task = new TaskModel();
        Map<String, Object> outputData = new HashMap<>();
        task.setOutputData(outputData);
        task.setSubWorkflowId("sub-workflow-id");
        Map<String, Object> inputData = new HashMap<>();
        inputData.put("subWorkflowName", "UnitWorkFlow");
        inputData.put("subWorkflowVersion", 2);
        inputData.put("subWorkflowTaskToDomain", taskToDomain);
        task.setInputData(inputData);
        StartWorkflowInput startWorkflowInput = new StartWorkflowInput();
        startWorkflowInput.setName("UnitWorkFlow");
        startWorkflowInput.setVersion(2);
        startWorkflowInput.setWorkflowInput(inputData);
        startWorkflowInput.setTaskToDomain(taskToDomain);
        when(startWorkflowOperation.execute(startWorkflowInput)).thenReturn("workflow_1");
        when(workflowExecutor.getWorkflow(eq("sub-workflow-id"), eq(false)))
                .thenReturn(subWorkflowInstance);
        subWorkflowInstance.setStatus(WorkflowModel.Status.RUNNING);
        assertFalse(subWorkflow.execute(workflowInstance, task, workflowExecutor))
        ;
        assertNull(task.getStatus());
        assertNull(task.getReasonForIncompletion());
        subWorkflowInstance.setStatus(WorkflowModel.Status.PAUSED);
        assertFalse(subWorkflow.execute(workflowInstance, task, workflowExecutor));
        assertNull(task.getStatus());
        assertNull(task.getReasonForIncompletion());
        subWorkflowInstance.setStatus(WorkflowModel.Status.COMPLETED);
        assertTrue(subWorkflow.execute(workflowInstance, task, workflowExecutor));
        assertEquals(TaskModel.Status.COMPLETED, task.getStatus());
        assertNull(task.getReasonForIncompletion());
        subWorkflowInstance.setStatus(WorkflowModel.Status.FAILED);
        subWorkflowInstance.setReasonForIncompletion("unit1");
        assertTrue(subWorkflow.execute(workflowInstance, task, workflowExecutor));
        assertEquals(TaskModel.Status.FAILED, task.getStatus());
        assertTrue(task.getReasonForIncompletion().contains("unit1"));
        subWorkflowInstance.setStatus(WorkflowModel.Status.TIMED_OUT);
        subWorkflowInstance.setReasonForIncompletion("unit2");
        assertTrue(subWorkflow.execute(workflowInstance, task, workflowExecutor));
        assertEquals(TaskModel.Status.TIMED_OUT, task.getStatus());
        assertTrue(task.getReasonForIncompletion().contains("unit2"));
        subWorkflowInstance.setStatus(WorkflowModel.Status.TERMINATED);
        subWorkflowInstance.setReasonForIncompletion("unit3");
        assertTrue(subWorkflow.execute(workflowInstance, task, workflowExecutor));
        assertEquals(TaskModel.Status.CANCELED, task.getStatus());
        assertTrue(task.getReasonForIncompletion().contains("unit3"));
    }
    // cancel() with a known sub-workflow id terminates the (non-terminal) sub-workflow.
    @Test
    public void testCancelWithWorkflowId() {
        WorkflowDef workflowDef = new WorkflowDef();
        WorkflowModel workflowInstance = new WorkflowModel();
        WorkflowModel subWorkflowInstance = new WorkflowModel();
        workflowInstance.setWorkflowDefinition(workflowDef);
        TaskModel task = new TaskModel();
        task.setSubWorkflowId("sub-workflow-id");
        Map<String, Object> inputData = new HashMap<>();
        inputData.put("subWorkflowName", "UnitWorkFlow");
        inputData.put("subWorkflowVersion", 2);
        task.setInputData(inputData);
        StartWorkflowInput startWorkflowInput = new StartWorkflowInput();
        startWorkflowInput.setName("UnitWorkFlow");
        startWorkflowInput.setVersion(2);
        startWorkflowInput.setWorkflowInput(inputData);
        startWorkflowInput.setTaskToDomain(workflowInstance.getTaskToDomain());
        when(startWorkflowOperation.execute(startWorkflowInput)).thenReturn("workflow_1");
        when(workflowExecutor.getWorkflow(eq("sub-workflow-id"), eq(true)))
                .thenReturn(subWorkflowInstance);
        workflowInstance.setStatus(WorkflowModel.Status.TIMED_OUT);
        subWorkflow.cancel(workflowInstance, task, workflowExecutor);
        assertEquals(WorkflowModel.Status.TERMINATED, subWorkflowInstance.getStatus());
    }
    // cancel() without a sub-workflow id on the task is a no-op: the sub-workflow keeps
    // its default (RUNNING) status.
    @Test
    public void testCancelWithoutWorkflowId() {
        WorkflowDef workflowDef = new WorkflowDef();
        WorkflowModel workflowInstance = new WorkflowModel();
        WorkflowModel subWorkflowInstance = new WorkflowModel();
        workflowInstance.setWorkflowDefinition(workflowDef);
        TaskModel task = new TaskModel();
        Map<String, Object> outputData = new HashMap<>();
        task.setOutputData(outputData);
        Map<String, Object> inputData = new HashMap<>();
        inputData.put("subWorkflowName", "UnitWorkFlow");
        inputData.put("subWorkflowVersion", 2);
        task.setInputData(inputData);
        StartWorkflowInput startWorkflowInput = new StartWorkflowInput();
        startWorkflowInput.setName("UnitWorkFlow");
        startWorkflowInput.setVersion(2);
        startWorkflowInput.setWorkflowInput(inputData);
        startWorkflowInput.setTaskToDomain(workflowInstance.getTaskToDomain());
        when(startWorkflowOperation.execute(startWorkflowInput)).thenReturn("workflow_1");
        when(workflowExecutor.getWorkflow(eq("sub-workflow-id"), eq(false)))
                .thenReturn(subWorkflowInstance);
        subWorkflow.cancel(workflowInstance, task, workflowExecutor);
        assertEquals(WorkflowModel.Status.RUNNING, subWorkflowInstance.getStatus());
    }
    // SUB_WORKFLOW is an asynchronous system task.
    @Test
    public void testIsAsync() {
        assertTrue(subWorkflow.isAsync());
    }
    // An inline 'subWorkflowDefinition' overrides the name lookup: its name ("subWorkflow_1")
    // and the definition itself are forwarded in the StartWorkflowInput.
    @Test
    public void testStartSubWorkflowWithSubWorkflowDefinition() {
        WorkflowDef workflowDef = new WorkflowDef();
        WorkflowModel workflowInstance = new WorkflowModel();
        workflowInstance.setWorkflowDefinition(workflowDef);
        WorkflowDef subWorkflowDef = new WorkflowDef();
        subWorkflowDef.setName("subWorkflow_1");
        TaskModel task = new TaskModel();
        task.setOutputData(new HashMap<>());
        Map<String, Object> inputData = new HashMap<>();
        inputData.put("subWorkflowName", "UnitWorkFlow");
        inputData.put("subWorkflowVersion", 2);
        inputData.put("subWorkflowDefinition", subWorkflowDef);
        task.setInputData(inputData);
        StartWorkflowInput startWorkflowInput = new StartWorkflowInput();
        startWorkflowInput.setName("subWorkflow_1");
        startWorkflowInput.setVersion(2);
        startWorkflowInput.setWorkflowInput(inputData);
        startWorkflowInput.setWorkflowDefinition(subWorkflowDef);
        startWorkflowInput.setTaskToDomain(workflowInstance.getTaskToDomain());
        when(startWorkflowOperation.execute(startWorkflowInput)).thenReturn("workflow_1");
        subWorkflow.start(workflowInstance, task, workflowExecutor);
        assertEquals("workflow_1", task.getSubWorkflowId());
    }
}
| 6,609 |
0 | Create_ds/conductor/core/src/test/java/com/netflix/conductor/core/execution | Create_ds/conductor/core/src/test/java/com/netflix/conductor/core/execution/tasks/InlineTest.java | /*
* Copyright 2022 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution.tasks;
import java.util.HashMap;
import java.util.Map;
import org.junit.Test;
import com.netflix.conductor.core.execution.WorkflowExecutor;
import com.netflix.conductor.core.execution.evaluators.Evaluator;
import com.netflix.conductor.core.execution.evaluators.JavascriptEvaluator;
import com.netflix.conductor.core.execution.evaluators.ValueParamEvaluator;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import static org.junit.jupiter.api.Assertions.*;
import static org.mockito.Mockito.mock;
/** Unit tests for the INLINE system task covering validation and both evaluators. */
public class InlineTest {

    private final WorkflowModel workflow = new WorkflowModel();
    private final WorkflowExecutor executor = mock(WorkflowExecutor.class);

    /** Copies {@code input} into a fresh TaskModel and executes it through the given Inline task. */
    private TaskModel runInline(Inline inline, Map<String, Object> input) {
        TaskModel taskModel = new TaskModel();
        taskModel.getInputData().putAll(input);
        inline.execute(workflow, taskModel, executor);
        return taskModel;
    }

    /** Registry containing the two evaluators exercised by these tests. */
    private Map<String, Evaluator> getStringEvaluatorMap() {
        return Map.of(
                ValueParamEvaluator.NAME, new ValueParamEvaluator(),
                JavascriptEvaluator.NAME, new JavascriptEvaluator());
    }

    @Test
    public void testInlineTaskValidationFailures() {
        Inline inline = new Inline(getStringEvaluatorMap());

        // An empty 'expression' must terminate the task with a descriptive reason.
        Map<String, Object> input = new HashMap<>();
        input.put("value", 1);
        input.put("expression", "");
        input.put("evaluatorType", "value-param");
        TaskModel result = runInline(inline, input);
        assertEquals(TaskModel.Status.FAILED_WITH_TERMINAL_ERROR, result.getStatus());
        assertEquals(
                "Empty 'expression' in Inline task's input parameters. A non-empty String value must be provided.",
                result.getReasonForIncompletion());

        // An empty 'evaluatorType' must likewise fail terminally.
        input = new HashMap<>();
        input.put("value", 1);
        input.put("expression", "value");
        input.put("evaluatorType", "");
        result = runInline(inline, input);
        assertEquals(TaskModel.Status.FAILED_WITH_TERMINAL_ERROR, result.getStatus());
        assertEquals(
                "Empty 'evaluatorType' in INLINE task's input parameters. A non-empty String value must be provided.",
                result.getReasonForIncompletion());
    }

    @Test
    public void testInlineValueParamExpression() {
        Inline inline = new Inline(getStringEvaluatorMap());

        // value-param echoes the named input straight back as the result (integer case).
        Map<String, Object> input = new HashMap<>();
        input.put("value", 101);
        input.put("expression", "value");
        input.put("evaluatorType", "value-param");
        TaskModel result = runInline(inline, input);
        assertEquals(TaskModel.Status.COMPLETED, result.getStatus());
        assertNull(result.getReasonForIncompletion());
        assertEquals(101, result.getOutputData().get("result"));

        // Same round-trip for a String value.
        input = new HashMap<>();
        input.put("value", "StringValue");
        input.put("expression", "value");
        input.put("evaluatorType", "value-param");
        result = runInline(inline, input);
        assertEquals(TaskModel.Status.COMPLETED, result.getStatus());
        assertNull(result.getReasonForIncompletion());
        assertEquals("StringValue", result.getOutputData().get("result"));
    }

    @SuppressWarnings("unchecked")
    @Test
    public void testInlineJavascriptExpression() {
        Inline inline = new Inline(getStringEvaluatorMap());

        // Script compares $.value against an integer literal.
        Map<String, Object> input = new HashMap<>();
        input.put("value", 101);
        input.put(
                "expression",
                "function e() { if ($.value == 101){return {\"evalResult\": true}} else { return {\"evalResult\": false}}} e();");
        input.put("evaluatorType", "javascript");
        TaskModel result = runInline(inline, input);
        assertEquals(TaskModel.Status.COMPLETED, result.getStatus());
        assertNull(result.getReasonForIncompletion());
        assertEquals(
                true, ((Map<String, Object>) result.getOutputData().get("result")).get("evalResult"));

        // Script compares $.value against a string literal.
        input = new HashMap<>();
        input.put("value", "StringValue");
        input.put(
                "expression",
                "function e() { if ($.value == 'StringValue'){return {\"evalResult\": true}} else { return {\"evalResult\": false}}} e();");
        input.put("evaluatorType", "javascript");
        result = runInline(inline, input);
        assertEquals(TaskModel.Status.COMPLETED, result.getStatus());
        assertNull(result.getReasonForIncompletion());
        assertEquals(
                true, ((Map<String, Object>) result.getOutputData().get("result")).get("evalResult"));
    }
}
| 6,610 |
0 | Create_ds/conductor/core/src/test/java/com/netflix/conductor/core/execution | Create_ds/conductor/core/src/test/java/com/netflix/conductor/core/execution/tasks/TestTerminate.java | /*
* Copyright 2022 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution.tasks;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import org.junit.Test;
import com.netflix.conductor.core.execution.WorkflowExecutor;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import static com.netflix.conductor.core.execution.tasks.Terminate.getTerminationStatusParameter;
import static com.netflix.conductor.core.execution.tasks.Terminate.getTerminationWorkflowOutputParameter;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.mock;
/** Unit tests for the TERMINATE system task: status validation and workflow-output handling. */
public class TestTerminate {

    private final WorkflowExecutor executor = mock(WorkflowExecutor.class);

    /** Seeds a fresh TaskModel with {@code input} and executes the Terminate task against it. */
    private TaskModel runTerminate(WorkflowModel workflow, Map<String, Object> input) {
        Terminate terminateTask = new Terminate();
        TaskModel task = new TaskModel();
        task.getInputData().putAll(input);
        terminateTask.execute(workflow, task, executor);
        return task;
    }

    @Test
    public void should_fail_if_input_status_is_not_valid() {
        // PAUSED is not an accepted terminal status for the terminate task.
        Map<String, Object> input = new HashMap<>();
        input.put(getTerminationStatusParameter(), "PAUSED");
        TaskModel task = runTerminate(new WorkflowModel(), input);
        assertEquals(TaskModel.Status.FAILED, task.getStatus());
    }

    @Test
    public void should_fail_if_input_status_is_empty() {
        Map<String, Object> input = new HashMap<>();
        input.put(getTerminationStatusParameter(), "");
        TaskModel task = runTerminate(new WorkflowModel(), input);
        assertEquals(TaskModel.Status.FAILED, task.getStatus());
    }

    @Test
    public void should_fail_if_input_status_is_null() {
        Map<String, Object> input = new HashMap<>();
        input.put(getTerminationStatusParameter(), null);
        TaskModel task = runTerminate(new WorkflowModel(), input);
        assertEquals(TaskModel.Status.FAILED, task.getStatus());
    }

    @Test
    public void should_complete_workflow_on_terminate_task_success() {
        WorkflowModel workflow = new WorkflowModel();
        workflow.setOutput(Collections.singletonMap("output", "${task1.output.value}"));

        // The workflow-output parameter replaces the workflow's own output expression.
        Map<String, Object> expectedOutput = Map.of("output", "${task0.output.value}");

        Map<String, Object> input = new HashMap<>();
        input.put(getTerminationStatusParameter(), "COMPLETED");
        input.put(getTerminationWorkflowOutputParameter(), "${task0.output.value}");

        TaskModel task = runTerminate(workflow, input);
        assertEquals(TaskModel.Status.COMPLETED, task.getStatus());
        assertEquals(expectedOutput, task.getOutputData());
    }

    @Test
    public void should_fail_workflow_on_terminate_task_success() {
        WorkflowModel workflow = new WorkflowModel();
        workflow.setOutput(Collections.singletonMap("output", "${task1.output.value}"));

        Map<String, Object> expectedOutput = Map.of("output", "${task0.output.value}");

        Map<String, Object> input = new HashMap<>();
        input.put(getTerminationStatusParameter(), "FAILED");
        input.put(getTerminationWorkflowOutputParameter(), "${task0.output.value}");

        // The terminate task itself completes even when it fails the workflow.
        TaskModel task = runTerminate(workflow, input);
        assertEquals(TaskModel.Status.COMPLETED, task.getStatus());
        assertEquals(expectedOutput, task.getOutputData());
    }

    @Test
    public void should_fail_workflow_on_terminate_task_success_with_empty_output() {
        // Without a workflow-output parameter the task output stays empty.
        Map<String, Object> input = new HashMap<>();
        input.put(getTerminationStatusParameter(), "FAILED");
        TaskModel task = runTerminate(new WorkflowModel(), input);
        assertEquals(TaskModel.Status.COMPLETED, task.getStatus());
        assertTrue(task.getOutputData().isEmpty());
    }

    @Test
    public void should_fail_workflow_on_terminate_task_success_with_resolved_output() {
        Map<String, Object> expectedOutput = Map.of("result", 1);

        Map<String, Object> input = new HashMap<>();
        input.put(getTerminationStatusParameter(), "FAILED");
        input.put(getTerminationWorkflowOutputParameter(), expectedOutput);

        TaskModel task = runTerminate(new WorkflowModel(), input);
        assertEquals(TaskModel.Status.COMPLETED, task.getStatus());
        // An already-resolved map passed as the workflow-output parameter should be
        // carried into the task output verbatim. This assertion was missing in the
        // original test — expectedOutput was built but never checked.
        assertEquals(expectedOutput, task.getOutputData());
    }
}
| 6,611 |
0 | Create_ds/conductor/core/src/test/java/com/netflix/conductor/core/execution | Create_ds/conductor/core/src/test/java/com/netflix/conductor/core/execution/mapper/SimpleTaskMapperTest.java | /*
* Copyright 2022 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution.mapper;
import java.util.HashMap;
import java.util.List;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.core.exception.TerminateWorkflowException;
import com.netflix.conductor.core.utils.IDGenerator;
import com.netflix.conductor.core.utils.ParametersUtils;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.mockito.Mockito.mock;
/** Unit tests for {@link SimpleTaskMapper}. */
public class SimpleTaskMapperTest {

    private SimpleTaskMapper simpleTaskMapper;
    private final IDGenerator idGenerator = new IDGenerator();

    @Rule public ExpectedException expectedException = ExpectedException.none();

    @Before
    public void setUp() {
        // The mapper only needs a ParametersUtils collaborator; a plain mock suffices.
        simpleTaskMapper = new SimpleTaskMapper(mock(ParametersUtils.class));
    }

    /** A SIMPLE task that carries a task definition maps to exactly one TaskModel. */
    @Test
    public void getMappedTasks() {
        WorkflowTask simpleTask = new WorkflowTask();
        simpleTask.setName("simple_task");
        simpleTask.setTaskDefinition(new TaskDef("simple_task"));

        WorkflowModel workflowModel = new WorkflowModel();
        workflowModel.setWorkflowDefinition(new WorkflowDef());

        TaskMapperContext context =
                TaskMapperContext.newBuilder()
                        .withWorkflowModel(workflowModel)
                        .withTaskDefinition(new TaskDef())
                        .withWorkflowTask(simpleTask)
                        .withTaskInput(new HashMap<>())
                        .withRetryCount(0)
                        .withRetryTaskId(idGenerator.generate())
                        .withTaskId(idGenerator.generate())
                        .build();

        List<TaskModel> mappedTasks = simpleTaskMapper.getMappedTasks(context);
        assertNotNull(mappedTasks);
        assertEquals(1, mappedTasks.size());
    }

    /** Mapping a SIMPLE task that has no task definition must terminate the workflow. */
    @Test
    public void getMappedTasksException() {
        // Given: a task without setTaskDefinition(...)
        WorkflowTask simpleTask = new WorkflowTask();
        simpleTask.setName("simple_task");

        WorkflowModel workflowModel = new WorkflowModel();
        workflowModel.setWorkflowDefinition(new WorkflowDef());

        TaskMapperContext context =
                TaskMapperContext.newBuilder()
                        .withWorkflowModel(workflowModel)
                        .withTaskDefinition(new TaskDef())
                        .withWorkflowTask(simpleTask)
                        .withTaskInput(new HashMap<>())
                        .withRetryCount(0)
                        .withRetryTaskId(idGenerator.generate())
                        .withTaskId(idGenerator.generate())
                        .build();

        // Then: the mapper is expected to abort the workflow with a descriptive message.
        expectedException.expect(TerminateWorkflowException.class);
        expectedException.expectMessage(
                String.format(
                        "Invalid task. Task %s does not have a definition",
                        simpleTask.getName()));

        // When
        simpleTaskMapper.getMappedTasks(context);
    }
}
| 6,612 |
0 | Create_ds/conductor/core/src/test/java/com/netflix/conductor/core/execution | Create_ds/conductor/core/src/test/java/com/netflix/conductor/core/execution/mapper/JsonJQTransformTaskMapperTest.java | /*
* Copyright 2022 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution.mapper;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.junit.Before;
import org.junit.Test;
import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.tasks.TaskType;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.core.utils.IDGenerator;
import com.netflix.conductor.core.utils.ParametersUtils;
import com.netflix.conductor.dao.MetadataDAO;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.mockito.Mockito.mock;
/** Unit tests for {@link JsonJQTransformTaskMapper}, with and without a task definition. */
public class JsonJQTransformTaskMapperTest {

    private IDGenerator idGenerator;
    private ParametersUtils parametersUtils;
    private MetadataDAO metadataDAO;

    @Before
    public void setUp() {
        parametersUtils = mock(ParametersUtils.class);
        metadataDAO = mock(MetadataDAO.class);
        idGenerator = new IDGenerator();
    }

    /** Standard JQ task input: two string arrays plus the query that concatenates them. */
    private Map<String, Object> buildJqTaskInput() {
        Map<String, Object> taskInput = new HashMap<>();
        taskInput.put("in1", new String[] {"a", "b"});
        taskInput.put("in2", new String[] {"c", "d"});
        taskInput.put("queryExpression", "{ out: (.in1 + .in2) }");
        return taskInput;
    }

    @Test
    public void getMappedTasks() {
        WorkflowTask workflowTask = new WorkflowTask();
        workflowTask.setName("json_jq_transform_task");
        workflowTask.setType(TaskType.JSON_JQ_TRANSFORM.name());
        workflowTask.setTaskDefinition(new TaskDef("json_jq_transform_task"));
        Map<String, Object> taskInput = buildJqTaskInput();
        workflowTask.setInputParameters(taskInput);

        WorkflowModel workflow = new WorkflowModel();
        workflow.setWorkflowDefinition(new WorkflowDef());

        TaskMapperContext taskMapperContext =
                TaskMapperContext.newBuilder()
                        .withWorkflowModel(workflow)
                        .withTaskDefinition(new TaskDef())
                        .withWorkflowTask(workflowTask)
                        .withTaskInput(taskInput)
                        .withRetryCount(0)
                        .withTaskId(idGenerator.generate())
                        .build();

        List<TaskModel> mappedTasks =
                new JsonJQTransformTaskMapper(parametersUtils, metadataDAO)
                        .getMappedTasks(taskMapperContext);

        // Null-check BEFORE dereferencing: the original asserted size() first, which
        // would surface a null result as an NPE instead of an assertion failure.
        assertNotNull(mappedTasks);
        assertEquals(1, mappedTasks.size());
        assertEquals(TaskType.JSON_JQ_TRANSFORM.name(), mappedTasks.get(0).getTaskType());
    }

    @Test
    public void getMappedTasks_WithoutTaskDef() {
        // Same as above, but neither the workflow task nor the context carries a TaskDef.
        WorkflowTask workflowTask = new WorkflowTask();
        workflowTask.setName("json_jq_transform_task");
        workflowTask.setType(TaskType.JSON_JQ_TRANSFORM.name());
        Map<String, Object> taskInput = buildJqTaskInput();
        workflowTask.setInputParameters(taskInput);

        WorkflowModel workflow = new WorkflowModel();
        workflow.setWorkflowDefinition(new WorkflowDef());

        TaskMapperContext taskMapperContext =
                TaskMapperContext.newBuilder()
                        .withWorkflowModel(workflow)
                        .withTaskDefinition(null)
                        .withWorkflowTask(workflowTask)
                        .withTaskInput(taskInput)
                        .withRetryCount(0)
                        .withTaskId(idGenerator.generate())
                        .build();

        List<TaskModel> mappedTasks =
                new JsonJQTransformTaskMapper(parametersUtils, metadataDAO)
                        .getMappedTasks(taskMapperContext);

        assertNotNull(mappedTasks);
        assertEquals(1, mappedTasks.size());
        assertEquals(TaskType.JSON_JQ_TRANSFORM.name(), mappedTasks.get(0).getTaskType());
    }
}
| 6,613 |
0 | Create_ds/conductor/core/src/test/java/com/netflix/conductor/core/execution | Create_ds/conductor/core/src/test/java/com/netflix/conductor/core/execution/mapper/TerminateTaskMapperTest.java | /*
* Copyright 2022 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution.mapper;
import java.util.List;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.tasks.TaskType;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.core.utils.IDGenerator;
import com.netflix.conductor.core.utils.ParametersUtils;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import static org.mockito.Mockito.mock;
/** Unit tests for {@link TerminateTaskMapper}. */
public class TerminateTaskMapperTest {

    private ParametersUtils parametersUtils;

    @Before
    public void setUp() {
        parametersUtils = mock(ParametersUtils.class);
    }

    /** A TERMINATE task maps to exactly one TaskModel of type TERMINATE. */
    @Test
    public void getMappedTasks() {
        WorkflowTask terminateTask = new WorkflowTask();
        terminateTask.setType(TaskType.TASK_TYPE_TERMINATE);

        WorkflowModel workflowModel = new WorkflowModel();
        workflowModel.setWorkflowDefinition(new WorkflowDef());

        TaskMapperContext context =
                TaskMapperContext.newBuilder()
                        .withWorkflowModel(workflowModel)
                        .withTaskDefinition(new TaskDef())
                        .withWorkflowTask(terminateTask)
                        .withRetryCount(0)
                        .withTaskId(new IDGenerator().generate())
                        .build();

        List<TaskModel> mappedTasks =
                new TerminateTaskMapper(parametersUtils).getMappedTasks(context);

        Assert.assertNotNull(mappedTasks);
        Assert.assertEquals(1, mappedTasks.size());
        Assert.assertEquals(TaskType.TASK_TYPE_TERMINATE, mappedTasks.get(0).getTaskType());
    }
}
| 6,614 |
0 | Create_ds/conductor/core/src/test/java/com/netflix/conductor/core/execution | Create_ds/conductor/core/src/test/java/com/netflix/conductor/core/execution/mapper/InlineTaskMapperTest.java | /*
* Copyright 2022 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution.mapper;
import java.util.List;
import org.junit.Before;
import org.junit.Test;
import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.tasks.TaskType;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.core.execution.evaluators.JavascriptEvaluator;
import com.netflix.conductor.core.utils.IDGenerator;
import com.netflix.conductor.core.utils.ParametersUtils;
import com.netflix.conductor.dao.MetadataDAO;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.mockito.Mockito.mock;
/** Unit tests for {@link InlineTaskMapper}, with and without a task definition. */
public class InlineTaskMapperTest {

    // Javascript expression shared by both tests; branches on $.input.a.
    private static final String TEST_SCRIPT =
            "function scriptFun() {if ($.input.a==1){return {testValue: true}} else{return "
                    + "{testValue: false} }}; scriptFun();";

    private ParametersUtils parametersUtils;
    private MetadataDAO metadataDAO;

    @Before
    public void setUp() {
        parametersUtils = mock(ParametersUtils.class);
        metadataDAO = mock(MetadataDAO.class);
    }

    /** Builds a mapper context for the given task; {@code taskDef} may be null. */
    private TaskMapperContext buildContext(WorkflowTask workflowTask, TaskDef taskDef) {
        WorkflowModel workflow = new WorkflowModel();
        workflow.setWorkflowDefinition(new WorkflowDef());
        return TaskMapperContext.newBuilder()
                .withWorkflowModel(workflow)
                .withTaskDefinition(taskDef)
                .withWorkflowTask(workflowTask)
                .withRetryCount(0)
                .withTaskId(new IDGenerator().generate())
                .build();
    }

    @Test
    public void getMappedTasks() {
        WorkflowTask workflowTask = new WorkflowTask();
        workflowTask.setName("inline_task");
        workflowTask.setType(TaskType.INLINE.name());
        workflowTask.setTaskDefinition(new TaskDef("inline_task"));
        workflowTask.setEvaluatorType(JavascriptEvaluator.NAME);
        workflowTask.setExpression(TEST_SCRIPT);

        List<TaskModel> mappedTasks =
                new InlineTaskMapper(parametersUtils, metadataDAO)
                        .getMappedTasks(buildContext(workflowTask, new TaskDef()));

        // Null-check BEFORE dereferencing: the original asserted size() first, which
        // would surface a null result as an NPE instead of an assertion failure.
        assertNotNull(mappedTasks);
        assertEquals(1, mappedTasks.size());
        assertEquals(TaskType.INLINE.name(), mappedTasks.get(0).getTaskType());
    }

    @Test
    public void getMappedTasks_WithoutTaskDef() {
        // Same as above, but no TaskDef on either the workflow task or the context.
        WorkflowTask workflowTask = new WorkflowTask();
        workflowTask.setType(TaskType.INLINE.name());
        workflowTask.setEvaluatorType(JavascriptEvaluator.NAME);
        workflowTask.setExpression(TEST_SCRIPT);

        List<TaskModel> mappedTasks =
                new InlineTaskMapper(parametersUtils, metadataDAO)
                        .getMappedTasks(buildContext(workflowTask, null));

        assertNotNull(mappedTasks);
        assertEquals(1, mappedTasks.size());
        assertEquals(TaskType.INLINE.name(), mappedTasks.get(0).getTaskType());
    }
}
| 6,615 |
0 | Create_ds/conductor/core/src/test/java/com/netflix/conductor/core/execution | Create_ds/conductor/core/src/test/java/com/netflix/conductor/core/execution/mapper/HumanTaskMapperTest.java | /*
* Copyright 2022 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution.mapper;
import java.util.HashMap;
import java.util.List;
import org.junit.Test;
import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.tasks.TaskType;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.core.utils.IDGenerator;
import com.netflix.conductor.core.utils.ParametersUtils;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_HUMAN;
import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.mock;
/** Unit tests for {@link HumanTaskMapper}. */
public class HumanTaskMapperTest {

    /** A HUMAN task maps to a single TaskModel of type HUMAN. */
    @Test
    public void getMappedTasks() {
        // Given a workflow task of type HUMAN
        WorkflowTask humanTask = new WorkflowTask();
        humanTask.setName("human_task");
        humanTask.setType(TaskType.HUMAN.name());

        WorkflowModel workflowModel = new WorkflowModel();
        workflowModel.setWorkflowDefinition(new WorkflowDef());

        TaskMapperContext context =
                TaskMapperContext.newBuilder()
                        .withWorkflowModel(workflowModel)
                        .withTaskDefinition(new TaskDef())
                        .withWorkflowTask(humanTask)
                        .withTaskInput(new HashMap<>())
                        .withRetryCount(0)
                        .withTaskId(new IDGenerator().generate())
                        .build();

        // When the mapper runs
        HumanTaskMapper humanTaskMapper = new HumanTaskMapper(mock(ParametersUtils.class));
        List<TaskModel> mappedTasks = humanTaskMapper.getMappedTasks(context);

        // Then exactly one HUMAN task is produced
        assertEquals(1, mappedTasks.size());
        assertEquals(TASK_TYPE_HUMAN, mappedTasks.get(0).getTaskType());
    }
}
| 6,616 |
0 | Create_ds/conductor/core/src/test/java/com/netflix/conductor/core/execution | Create_ds/conductor/core/src/test/java/com/netflix/conductor/core/execution/mapper/KafkaPublishTaskMapperTest.java | /*
* Copyright 2022 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution.mapper;
import java.util.HashMap;
import java.util.List;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.tasks.TaskType;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.core.utils.IDGenerator;
import com.netflix.conductor.core.utils.ParametersUtils;
import com.netflix.conductor.dao.MetadataDAO;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.mock;
/** Unit tests for {@link KafkaPublishTaskMapper}. */
public class KafkaPublishTaskMapperTest {
    // Recreated per test in setUp().
    private IDGenerator idGenerator;
    private KafkaPublishTaskMapper kafkaTaskMapper;
    @Rule public ExpectedException expectedException = ExpectedException.none();
    @Before
    public void setUp() {
        // The mapper's collaborators are not exercised here, so plain mocks suffice.
        ParametersUtils parametersUtils = mock(ParametersUtils.class);
        MetadataDAO metadataDAO = mock(MetadataDAO.class);
        kafkaTaskMapper = new KafkaPublishTaskMapper(parametersUtils, metadataDAO);
        idGenerator = new IDGenerator();
    }
    /** A KAFKA_PUBLISH task with a task definition maps to one TaskModel of the same type. */
    @Test
    public void getMappedTasks() {
        // Given
        WorkflowTask workflowTask = new WorkflowTask();
        workflowTask.setName("kafka_task");
        workflowTask.setType(TaskType.KAFKA_PUBLISH.name());
        workflowTask.setTaskDefinition(new TaskDef("kafka_task"));
        String taskId = idGenerator.generate();
        String retriedTaskId = idGenerator.generate();
        WorkflowModel workflow = new WorkflowModel();
        WorkflowDef workflowDef = new WorkflowDef();
        workflow.setWorkflowDefinition(workflowDef);
        TaskMapperContext taskMapperContext =
                TaskMapperContext.newBuilder()
                        .withWorkflowModel(workflow)
                        .withTaskDefinition(new TaskDef())
                        .withWorkflowTask(workflowTask)
                        .withTaskInput(new HashMap<>())
                        .withRetryCount(0)
                        .withRetryTaskId(retriedTaskId)
                        .withTaskId(taskId)
                        .build();
        // when
        List<TaskModel> mappedTasks = kafkaTaskMapper.getMappedTasks(taskMapperContext);
        // Then
        assertEquals(1, mappedTasks.size());
        assertEquals(TaskType.KAFKA_PUBLISH.name(), mappedTasks.get(0).getTaskType());
    }
    /**
     * "WithoutTaskDef" refers to the WorkflowTask itself carrying no task definition;
     * the context still supplies one, and its executionNameSpace / isolationGroupId
     * are expected to propagate onto the mapped task.
     */
    @Test
    public void getMappedTasks_WithoutTaskDef() {
        // Given: note the absence of workflowTask.setTaskDefinition(...)
        WorkflowTask workflowTask = new WorkflowTask();
        workflowTask.setName("kafka_task");
        workflowTask.setType(TaskType.KAFKA_PUBLISH.name());
        String taskId = idGenerator.generate();
        String retriedTaskId = idGenerator.generate();
        WorkflowModel workflow = new WorkflowModel();
        WorkflowDef workflowDef = new WorkflowDef();
        workflow.setWorkflowDefinition(workflowDef);
        // Context-level TaskDef whose isolation settings must survive the mapping.
        TaskDef taskdefinition = new TaskDef();
        String testExecutionNameSpace = "testExecutionNameSpace";
        taskdefinition.setExecutionNameSpace(testExecutionNameSpace);
        String testIsolationGroupId = "testIsolationGroupId";
        taskdefinition.setIsolationGroupId(testIsolationGroupId);
        TaskMapperContext taskMapperContext =
                TaskMapperContext.newBuilder()
                        .withWorkflowModel(workflow)
                        .withTaskDefinition(taskdefinition)
                        .withWorkflowTask(workflowTask)
                        .withTaskInput(new HashMap<>())
                        .withRetryCount(0)
                        .withRetryTaskId(retriedTaskId)
                        .withTaskId(taskId)
                        .build();
        // when
        List<TaskModel> mappedTasks = kafkaTaskMapper.getMappedTasks(taskMapperContext);
        // Then
        assertEquals(1, mappedTasks.size());
        assertEquals(TaskType.KAFKA_PUBLISH.name(), mappedTasks.get(0).getTaskType());
        assertEquals(testExecutionNameSpace, mappedTasks.get(0).getExecutionNameSpace());
        assertEquals(testIsolationGroupId, mappedTasks.get(0).getIsolationGroupId());
    }
}
| 6,617 |
0 | Create_ds/conductor/core/src/test/java/com/netflix/conductor/core/execution | Create_ds/conductor/core/src/test/java/com/netflix/conductor/core/execution/mapper/EventTaskMapperTest.java | /*
* Copyright 2022 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution.mapper;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.junit.Test;
import org.mockito.Mockito;
import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.core.utils.IDGenerator;
import com.netflix.conductor.core.utils.ParametersUtils;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import static org.junit.Assert.assertEquals;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyMap;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Mockito.when;
public class EventTaskMapperTest {

    @Test
    public void getMappedTasks() {
        // Stub ParametersUtils so the mapper receives a fixed event-task input.
        ParametersUtils paramUtils = Mockito.mock(ParametersUtils.class);
        Map<String, Object> stubbedInput = new HashMap<>();
        stubbedInput.put("sink", "SQSSINK");
        when(paramUtils.getTaskInput(
                        anyMap(), any(WorkflowModel.class), any(TaskDef.class), anyString()))
                .thenReturn(stubbedInput);

        // Workflow task pointing at the SQS sink.
        WorkflowTask eventWorkflowTask = new WorkflowTask();
        eventWorkflowTask.setSink("SQSSINK");

        // Minimal workflow model backing the mapper context.
        WorkflowModel workflowModel = new WorkflowModel();
        workflowModel.setWorkflowDefinition(new WorkflowDef());

        String generatedTaskId = new IDGenerator().generate();
        TaskMapperContext context =
                TaskMapperContext.newBuilder()
                        .withWorkflowModel(workflowModel)
                        .withTaskDefinition(new TaskDef())
                        .withWorkflowTask(eventWorkflowTask)
                        .withRetryCount(0)
                        .withTaskId(generatedTaskId)
                        .build();

        List<TaskModel> mappedTasks = new EventTaskMapper(paramUtils).getMappedTasks(context);

        // Exactly one event task is produced and it keeps the supplied task id.
        assertEquals(1, mappedTasks.size());
        assertEquals(generatedTaskId, mappedTasks.get(0).getTaskId());
    }
}
| 6,618 |
0 | Create_ds/conductor/core/src/test/java/com/netflix/conductor/core/execution | Create_ds/conductor/core/src/test/java/com/netflix/conductor/core/execution/mapper/ForkJoinTaskMapperTest.java | /*
* Copyright 2022 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution.mapper;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.mockito.Mockito;
import com.netflix.conductor.common.metadata.tasks.TaskType;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.core.exception.TerminateWorkflowException;
import com.netflix.conductor.core.execution.DeciderService;
import com.netflix.conductor.core.utils.IDGenerator;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_FORK;
import static org.junit.Assert.assertEquals;
public class ForkJoinTaskMapperTest {

    private DeciderService deciderService;
    private ForkJoinTaskMapper forkJoinTaskMapper;
    private IDGenerator idGenerator;

    // Workflow tasks shared between the tests; populated by
    // createForkJoinWorkflowDef so tests can reference them when stubbing.
    private WorkflowTask forkTask;
    private WorkflowTask wft1;
    private WorkflowTask wft2;
    private WorkflowTask wft3;

    @Rule public ExpectedException expectedException = ExpectedException.none();

    @Before
    public void setUp() {
        deciderService = Mockito.mock(DeciderService.class);
        forkJoinTaskMapper = new ForkJoinTaskMapper();
        idGenerator = new IDGenerator();
    }

    /**
     * Builds the FORK_JOIN workflow definition shared by both tests.
     *
     * @param includeJoin when {@code true} the fork is followed by a JOIN task (the valid
     *     blueprint); when {@code false} the join is omitted to trigger the error path
     */
    private WorkflowDef createForkJoinWorkflowDef(boolean includeJoin) {
        WorkflowDef def = new WorkflowDef();
        def.setName("FORK_JOIN_WF");
        def.setDescription(def.getName());
        def.setVersion(1);
        def.setInputParameters(Arrays.asList("param1", "param2"));

        forkTask = new WorkflowTask();
        forkTask.setType(TaskType.FORK_JOIN.name());
        forkTask.setTaskReferenceName("forktask");

        Map<String, Object> ip1 = new HashMap<>();
        ip1.put("p1", "workflow.input.param1");
        ip1.put("p2", "workflow.input.param2");

        wft1 = new WorkflowTask();
        wft1.setName("junit_task_1");
        wft1.setInputParameters(ip1);
        wft1.setTaskReferenceName("t1");

        wft3 = new WorkflowTask();
        wft3.setName("junit_task_3");
        wft3.setInputParameters(ip1);
        wft3.setTaskReferenceName("t3");

        Map<String, Object> ip2 = new HashMap<>();
        ip2.put("tp1", "workflow.input.param1");

        wft2 = new WorkflowTask();
        wft2.setName("junit_task_2");
        wft2.setInputParameters(ip2);
        wft2.setTaskReferenceName("t2");

        WorkflowTask wft4 = new WorkflowTask();
        wft4.setName("junit_task_4");
        wft4.setInputParameters(ip2);
        wft4.setTaskReferenceName("t4");

        // Two parallel branches: [t1 -> t3] and [t2].
        forkTask.getForkTasks().add(Arrays.asList(wft1, wft3));
        forkTask.getForkTasks().add(Collections.singletonList(wft2));
        def.getTasks().add(forkTask);

        if (includeJoin) {
            WorkflowTask join = new WorkflowTask();
            join.setType(TaskType.JOIN.name());
            join.setTaskReferenceName("forktask_join");
            join.setJoinOn(Arrays.asList("t3", "t2"));
            def.getTasks().add(join);
        }
        def.getTasks().add(wft4);
        return def;
    }

    /**
     * Wraps the definition in a workflow model and stubs the decider so each fork branch
     * root schedules exactly one task.
     */
    private WorkflowModel createWorkflowWithStubbedDecider(WorkflowDef def) {
        WorkflowModel workflow = new WorkflowModel();
        workflow.setWorkflowDefinition(def);

        TaskModel task1 = new TaskModel();
        task1.setReferenceTaskName(wft1.getTaskReferenceName());
        TaskModel task3 = new TaskModel();
        task3.setReferenceTaskName(wft3.getTaskReferenceName());

        Mockito.when(deciderService.getTasksToBeScheduled(workflow, wft1, 0))
                .thenReturn(Collections.singletonList(task1));
        Mockito.when(deciderService.getTasksToBeScheduled(workflow, wft2, 0))
                .thenReturn(Collections.singletonList(task3));
        return workflow;
    }

    /** Builds a mapper context for the current fork task with a fresh task id. */
    private TaskMapperContext createTaskMapperContext(WorkflowModel workflow) {
        return TaskMapperContext.newBuilder()
                .withWorkflowModel(workflow)
                .withWorkflowTask(forkTask)
                .withRetryCount(0)
                .withTaskId(idGenerator.generate())
                .withDeciderService(deciderService)
                .build();
    }

    @Test
    public void getMappedTasks() {
        WorkflowModel workflow =
                createWorkflowWithStubbedDecider(createForkJoinWorkflowDef(true));
        TaskMapperContext taskMapperContext = createTaskMapperContext(workflow);

        List<TaskModel> mappedTasks = forkJoinTaskMapper.getMappedTasks(taskMapperContext);

        // One FORK system task plus one scheduled task per branch.
        assertEquals(3, mappedTasks.size());
        assertEquals(TASK_TYPE_FORK, mappedTasks.get(0).getTaskType());
    }

    @Test
    public void getMappedTasksException() {
        // No JOIN after the fork: the mapper must reject the blueprint.
        WorkflowModel workflow =
                createWorkflowWithStubbedDecider(createForkJoinWorkflowDef(false));
        TaskMapperContext taskMapperContext = createTaskMapperContext(workflow);

        expectedException.expect(TerminateWorkflowException.class);
        expectedException.expectMessage(
                "Fork task definition is not followed by a join task. Check the blueprint");
        forkJoinTaskMapper.getMappedTasks(taskMapperContext);
    }
}
| 6,619 |
0 | Create_ds/conductor/core/src/test/java/com/netflix/conductor/core/execution | Create_ds/conductor/core/src/test/java/com/netflix/conductor/core/execution/mapper/DecisionTaskMapperTest.java | /*
* Copyright 2022 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution.mapper;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringRunner;
import com.netflix.conductor.common.config.TestObjectMapperConfiguration;
import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.tasks.TaskType;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.core.exception.TerminateWorkflowException;
import com.netflix.conductor.core.execution.DeciderService;
import com.netflix.conductor.core.utils.IDGenerator;
import com.netflix.conductor.core.utils.ParametersUtils;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import com.fasterxml.jackson.databind.ObjectMapper;
import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
@ContextConfiguration(classes = {TestObjectMapperConfiguration.class})
@RunWith(SpringRunner.class)
public class DecisionTaskMapperTest {
private IDGenerator idGenerator;
private ParametersUtils parametersUtils;
private DeciderService deciderService;
// Subject
private DecisionTaskMapper decisionTaskMapper;
@Autowired private ObjectMapper objectMapper;
@Rule public ExpectedException expectedException = ExpectedException.none();
Map<String, Object> ip1;
WorkflowTask task1;
WorkflowTask task2;
WorkflowTask task3;
@Before
public void setUp() {
parametersUtils = new ParametersUtils(objectMapper);
idGenerator = new IDGenerator();
ip1 = new HashMap<>();
ip1.put("p1", "${workflow.input.param1}");
ip1.put("p2", "${workflow.input.param2}");
ip1.put("case", "${workflow.input.case}");
task1 = new WorkflowTask();
task1.setName("Test1");
task1.setInputParameters(ip1);
task1.setTaskReferenceName("t1");
task2 = new WorkflowTask();
task2.setName("Test2");
task2.setInputParameters(ip1);
task2.setTaskReferenceName("t2");
task3 = new WorkflowTask();
task3.setName("Test3");
task3.setInputParameters(ip1);
task3.setTaskReferenceName("t3");
deciderService = mock(DeciderService.class);
decisionTaskMapper = new DecisionTaskMapper();
}
@Test
public void getMappedTasks() {
// Given
// Task Definition
TaskDef taskDef = new TaskDef();
Map<String, Object> inputMap = new HashMap<>();
inputMap.put("Id", "${workflow.input.Id}");
List<Map<String, Object>> taskDefinitionInput = new LinkedList<>();
taskDefinitionInput.add(inputMap);
// Decision task instance
WorkflowTask decisionTask = new WorkflowTask();
decisionTask.setType(TaskType.DECISION.name());
decisionTask.setName("Decision");
decisionTask.setTaskReferenceName("decisionTask");
decisionTask.setDefaultCase(Collections.singletonList(task1));
decisionTask.setCaseValueParam("case");
decisionTask.getInputParameters().put("Id", "${workflow.input.Id}");
decisionTask.setCaseExpression(
"if ($.Id == null) 'bad input'; else if ( ($.Id != null && $.Id % 2 == 0)) 'even'; else 'odd'; ");
Map<String, List<WorkflowTask>> decisionCases = new HashMap<>();
decisionCases.put("even", Collections.singletonList(task2));
decisionCases.put("odd", Collections.singletonList(task3));
decisionTask.setDecisionCases(decisionCases);
// Workflow instance
WorkflowDef workflowDef = new WorkflowDef();
workflowDef.setSchemaVersion(2);
WorkflowModel workflowModel = new WorkflowModel();
workflowModel.setWorkflowDefinition(workflowDef);
Map<String, Object> workflowInput = new HashMap<>();
workflowInput.put("Id", "22");
workflowModel.setInput(workflowInput);
Map<String, Object> body = new HashMap<>();
body.put("input", taskDefinitionInput);
taskDef.getInputTemplate().putAll(body);
Map<String, Object> input =
parametersUtils.getTaskInput(
decisionTask.getInputParameters(), workflowModel, null, null);
TaskModel theTask = new TaskModel();
theTask.setReferenceTaskName("Foo");
theTask.setTaskId(idGenerator.generate());
when(deciderService.getTasksToBeScheduled(workflowModel, task2, 0, null))
.thenReturn(Collections.singletonList(theTask));
TaskMapperContext taskMapperContext =
TaskMapperContext.newBuilder()
.withWorkflowModel(workflowModel)
.withWorkflowTask(decisionTask)
.withTaskInput(input)
.withRetryCount(0)
.withTaskId(idGenerator.generate())
.withDeciderService(deciderService)
.build();
// When
List<TaskModel> mappedTasks = decisionTaskMapper.getMappedTasks(taskMapperContext);
// Then
assertEquals(2, mappedTasks.size());
assertEquals("decisionTask", mappedTasks.get(0).getReferenceTaskName());
assertEquals("Foo", mappedTasks.get(1).getReferenceTaskName());
}
@Test
public void getEvaluatedCaseValue() {
WorkflowTask decisionTask = new WorkflowTask();
decisionTask.setType(TaskType.DECISION.name());
decisionTask.setName("Decision");
decisionTask.setTaskReferenceName("decisionTask");
decisionTask.setInputParameters(ip1);
decisionTask.setDefaultCase(Collections.singletonList(task1));
decisionTask.setCaseValueParam("case");
Map<String, List<WorkflowTask>> decisionCases = new HashMap<>();
decisionCases.put("0", Collections.singletonList(task2));
decisionCases.put("1", Collections.singletonList(task3));
decisionTask.setDecisionCases(decisionCases);
WorkflowModel workflowModel = new WorkflowModel();
workflowModel.setWorkflowDefinition(new WorkflowDef());
Map<String, Object> workflowInput = new HashMap<>();
workflowInput.put("param1", "test1");
workflowInput.put("param2", "test2");
workflowInput.put("case", "0");
workflowModel.setInput(workflowInput);
Map<String, Object> input =
parametersUtils.getTaskInput(
decisionTask.getInputParameters(), workflowModel, null, null);
assertEquals("0", decisionTaskMapper.getEvaluatedCaseValue(decisionTask, input));
}
@Test
public void getEvaluatedCaseValueUsingExpression() {
// Given
// Task Definition
TaskDef taskDef = new TaskDef();
Map<String, Object> inputMap = new HashMap<>();
inputMap.put("Id", "${workflow.input.Id}");
List<Map<String, Object>> taskDefinitionInput = new LinkedList<>();
taskDefinitionInput.add(inputMap);
// Decision task instance
WorkflowTask decisionTask = new WorkflowTask();
decisionTask.setType(TaskType.DECISION.name());
decisionTask.setName("Decision");
decisionTask.setTaskReferenceName("decisionTask");
decisionTask.setDefaultCase(Collections.singletonList(task1));
decisionTask.setCaseValueParam("case");
decisionTask.getInputParameters().put("Id", "${workflow.input.Id}");
decisionTask.setCaseExpression(
"if ($.Id == null) 'bad input'; else if ( ($.Id != null && $.Id % 2 == 0)) 'even'; else 'odd'; ");
Map<String, List<WorkflowTask>> decisionCases = new HashMap<>();
decisionCases.put("even", Collections.singletonList(task2));
decisionCases.put("odd", Collections.singletonList(task3));
decisionTask.setDecisionCases(decisionCases);
// Workflow instance
WorkflowDef def = new WorkflowDef();
def.setSchemaVersion(2);
WorkflowModel workflowModel = new WorkflowModel();
workflowModel.setWorkflowDefinition(def);
Map<String, Object> workflowInput = new HashMap<>();
workflowInput.put("Id", "22");
workflowModel.setInput(workflowInput);
Map<String, Object> body = new HashMap<>();
body.put("input", taskDefinitionInput);
taskDef.getInputTemplate().putAll(body);
Map<String, Object> evaluatorInput =
parametersUtils.getTaskInput(
decisionTask.getInputParameters(), workflowModel, taskDef, null);
assertEquals(
"even", decisionTaskMapper.getEvaluatedCaseValue(decisionTask, evaluatorInput));
}
@Test
public void getEvaluatedCaseValueException() {
// Given
// Task Definition
TaskDef taskDef = new TaskDef();
Map<String, Object> inputMap = new HashMap<>();
inputMap.put("Id", "${workflow.input.Id}");
List<Map<String, Object>> taskDefinitionInput = new LinkedList<>();
taskDefinitionInput.add(inputMap);
// Decision task instance
WorkflowTask decisionTask = new WorkflowTask();
decisionTask.setType(TaskType.DECISION.name());
decisionTask.setName("Decision");
decisionTask.setTaskReferenceName("decisionTask");
decisionTask.setDefaultCase(Collections.singletonList(task1));
decisionTask.setCaseValueParam("case");
decisionTask.getInputParameters().put("Id", "${workflow.input.Id}");
decisionTask.setCaseExpression(
"if ($Id == null) 'bad input'; else if ( ($Id != null && $Id % 2 == 0)) 'even'; else 'odd'; ");
Map<String, List<WorkflowTask>> decisionCases = new HashMap<>();
decisionCases.put("even", Collections.singletonList(task2));
decisionCases.put("odd", Collections.singletonList(task3));
decisionTask.setDecisionCases(decisionCases);
// Workflow instance
WorkflowDef def = new WorkflowDef();
def.setSchemaVersion(2);
WorkflowModel workflowModel = new WorkflowModel();
workflowModel.setWorkflowDefinition(def);
Map<String, Object> workflowInput = new HashMap<>();
workflowInput.put(".Id", "22");
workflowModel.setInput(workflowInput);
Map<String, Object> body = new HashMap<>();
body.put("input", taskDefinitionInput);
taskDef.getInputTemplate().putAll(body);
Map<String, Object> evaluatorInput =
parametersUtils.getTaskInput(
decisionTask.getInputParameters(), workflowModel, taskDef, null);
expectedException.expect(TerminateWorkflowException.class);
expectedException.expectMessage(
"Error while evaluating script: " + decisionTask.getCaseExpression());
decisionTaskMapper.getEvaluatedCaseValue(decisionTask, evaluatorInput);
}
}
| 6,620 |
0 | Create_ds/conductor/core/src/test/java/com/netflix/conductor/core/execution | Create_ds/conductor/core/src/test/java/com/netflix/conductor/core/execution/mapper/WaitTaskMapperTest.java | /*
* Copyright 2022 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution.mapper;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.junit.Test;
import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.tasks.TaskType;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.core.execution.tasks.Wait;
import com.netflix.conductor.core.utils.IDGenerator;
import com.netflix.conductor.core.utils.ParametersUtils;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_WAIT;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;
public class WaitTaskMapperTest {

    /** Creates the WAIT workflow task used by every test. */
    private WorkflowTask createWaitTask() {
        WorkflowTask workflowTask = new WorkflowTask();
        workflowTask.setName("Wait_task");
        workflowTask.setType(TaskType.WAIT.name());
        return workflowTask;
    }

    /** Builds a mapper context around a fresh workflow for the given task/input/id. */
    private TaskMapperContext createTaskMapperContext(
            WorkflowTask workflowTask, Map<String, Object> taskInput, String taskId) {
        WorkflowModel workflow = new WorkflowModel();
        workflow.setWorkflowDefinition(new WorkflowDef());
        return TaskMapperContext.newBuilder()
                .withWorkflowModel(workflow)
                .withTaskDefinition(new TaskDef())
                .withWorkflowTask(workflowTask)
                .withTaskInput(taskInput)
                .withRetryCount(0)
                .withTaskId(taskId)
                .build();
    }

    /** Mocks ParametersUtils so getTaskInputV2 returns the given input unchanged. */
    private ParametersUtils mockParametersUtils(Map<String, Object> input) {
        ParametersUtils parametersUtils = mock(ParametersUtils.class);
        doReturn(input).when(parametersUtils).getTaskInputV2(any(), any(), any(), any());
        return parametersUtils;
    }

    @Test
    public void getMappedTasks() {
        // Given
        WorkflowTask workflowTask = createWaitTask();
        String taskId = new IDGenerator().generate();
        ParametersUtils parametersUtils = mock(ParametersUtils.class);
        TaskMapperContext taskMapperContext =
                createTaskMapperContext(workflowTask, new HashMap<>(), taskId);
        WaitTaskMapper waitTaskMapper = new WaitTaskMapper(parametersUtils);

        // When
        List<TaskModel> mappedTasks = waitTaskMapper.getMappedTasks(taskMapperContext);

        // Then
        assertEquals(1, mappedTasks.size());
        assertEquals(TASK_TYPE_WAIT, mappedTasks.get(0).getTaskType());
    }

    @Test
    public void testWaitForever() {
        // No "until"/"duration" input: the task stays IN_PROGRESS indefinitely.
        WorkflowTask workflowTask = createWaitTask();
        String taskId = new IDGenerator().generate();
        ParametersUtils parametersUtils = mock(ParametersUtils.class);
        TaskMapperContext taskMapperContext =
                createTaskMapperContext(workflowTask, new HashMap<>(), taskId);
        WaitTaskMapper waitTaskMapper = new WaitTaskMapper(parametersUtils);

        List<TaskModel> mappedTasks = waitTaskMapper.getMappedTasks(taskMapperContext);

        assertEquals(1, mappedTasks.size());
        assertEquals(TaskModel.Status.IN_PROGRESS, mappedTasks.get(0).getStatus());
        assertTrue(mappedTasks.get(0).getOutputData().isEmpty());
    }

    @Test
    public void testWaitUntil() {
        // "until" set to the current minute: the callback delay resolves to 0 seconds.
        String dateFormat = "yyyy-MM-dd HH:mm";
        DateTimeFormatter formatter = DateTimeFormatter.ofPattern(dateFormat);
        String formatted = formatter.format(LocalDateTime.now());

        WorkflowTask workflowTask = createWaitTask();
        Map<String, Object> input = Map.of(Wait.UNTIL_INPUT, formatted);
        workflowTask.setInputParameters(input);
        String taskId = new IDGenerator().generate();
        ParametersUtils parametersUtils = mockParametersUtils(input);
        TaskMapperContext taskMapperContext =
                createTaskMapperContext(workflowTask, input, taskId);
        WaitTaskMapper waitTaskMapper = new WaitTaskMapper(parametersUtils);

        List<TaskModel> mappedTasks = waitTaskMapper.getMappedTasks(taskMapperContext);

        assertEquals(1, mappedTasks.size());
        assertEquals(TaskModel.Status.IN_PROGRESS, mappedTasks.get(0).getStatus());
        assertEquals(0L, mappedTasks.get(0).getCallbackAfterSeconds());
    }

    @Test
    public void testWaitDuration() {
        // "duration" of one second: the callback delay is at most 1 second.
        WorkflowTask workflowTask = createWaitTask();
        Map<String, Object> input = Map.of(Wait.DURATION_INPUT, "1s");
        workflowTask.setInputParameters(input);
        String taskId = new IDGenerator().generate();
        ParametersUtils parametersUtils = mockParametersUtils(input);
        TaskMapperContext taskMapperContext =
                createTaskMapperContext(workflowTask, input, taskId);
        WaitTaskMapper waitTaskMapper = new WaitTaskMapper(parametersUtils);

        List<TaskModel> mappedTasks = waitTaskMapper.getMappedTasks(taskMapperContext);

        assertEquals(1, mappedTasks.size());
        assertEquals(TaskModel.Status.IN_PROGRESS, mappedTasks.get(0).getStatus());
        assertTrue(mappedTasks.get(0).getCallbackAfterSeconds() <= 1L);
    }

    @Test
    public void testInvalidWaitConfig() {
        // Supplying both "duration" and "until" is invalid: the task fails terminally.
        WorkflowTask workflowTask = createWaitTask();
        Map<String, Object> input =
                Map.of(Wait.DURATION_INPUT, "1s", Wait.UNTIL_INPUT, "2022-12-12");
        workflowTask.setInputParameters(input);
        String taskId = new IDGenerator().generate();
        ParametersUtils parametersUtils = mockParametersUtils(input);
        TaskMapperContext taskMapperContext =
                createTaskMapperContext(workflowTask, input, taskId);
        WaitTaskMapper waitTaskMapper = new WaitTaskMapper(parametersUtils);

        List<TaskModel> mappedTasks = waitTaskMapper.getMappedTasks(taskMapperContext);

        assertEquals(1, mappedTasks.size());
        assertEquals(TaskModel.Status.FAILED_WITH_TERMINAL_ERROR, mappedTasks.get(0).getStatus());
    }
}
| 6,621 |
0 | Create_ds/conductor/core/src/test/java/com/netflix/conductor/core/execution | Create_ds/conductor/core/src/test/java/com/netflix/conductor/core/execution/mapper/JoinTaskMapperTest.java | /*
* Copyright 2022 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution.mapper;
import java.util.Arrays;
import java.util.List;
import org.junit.Test;
import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.tasks.TaskType;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.core.utils.IDGenerator;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_JOIN;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
public class JoinTaskMapperTest {

    @Test
    public void getMappedTasks() {
        // JOIN task waiting on two upstream task references.
        WorkflowTask joinWorkflowTask = new WorkflowTask();
        joinWorkflowTask.setType(TaskType.JOIN.name());
        joinWorkflowTask.setJoinOn(Arrays.asList("task1", "task2"));

        // Minimal workflow model backing the mapper context.
        WorkflowModel workflowModel = new WorkflowModel();
        workflowModel.setWorkflowDefinition(new WorkflowDef());

        TaskMapperContext context =
                TaskMapperContext.newBuilder()
                        .withWorkflowModel(workflowModel)
                        .withTaskDefinition(new TaskDef())
                        .withWorkflowTask(joinWorkflowTask)
                        .withRetryCount(0)
                        .withTaskId(new IDGenerator().generate())
                        .build();

        List<TaskModel> mappedTasks = new JoinTaskMapper().getMappedTasks(context);

        // A single JOIN system task is produced.
        assertNotNull(mappedTasks);
        assertEquals(TASK_TYPE_JOIN, mappedTasks.get(0).getTaskType());
    }
}
| 6,622 |
0 | Create_ds/conductor/core/src/test/java/com/netflix/conductor/core/execution | Create_ds/conductor/core/src/test/java/com/netflix/conductor/core/execution/mapper/SubWorkflowTaskMapperTest.java | /*
* Copyright 2022 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution.mapper;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import com.netflix.conductor.common.metadata.workflow.SubWorkflowParams;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.core.exception.TerminateWorkflowException;
import com.netflix.conductor.core.execution.DeciderService;
import com.netflix.conductor.core.utils.IDGenerator;
import com.netflix.conductor.core.utils.ParametersUtils;
import com.netflix.conductor.dao.MetadataDAO;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_SUB_WORKFLOW;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyMap;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
/**
 * Unit tests for {@code SubWorkflowTaskMapper}: verifies that a SUB_WORKFLOW workflow task
 * is mapped to exactly one scheduled {@code TaskModel}, that start delay and task-to-domain
 * mappings are propagated, and that a missing {@code SubWorkflowParams} terminates the workflow.
 */
public class SubWorkflowTaskMapperTest {

    private SubWorkflowTaskMapper subWorkflowTaskMapper;
    private ParametersUtils parametersUtils;
    private DeciderService deciderService;
    private IDGenerator idGenerator;

    @Rule public ExpectedException expectedException = ExpectedException.none();

    @Before
    public void setUp() {
        parametersUtils = mock(ParametersUtils.class);
        MetadataDAO metadataDAO = mock(MetadataDAO.class);
        subWorkflowTaskMapper = new SubWorkflowTaskMapper(parametersUtils, metadataDAO);
        deciderService = mock(DeciderService.class);
        idGenerator = new IDGenerator();
    }

    @Test
    public void getMappedTasks() {
        // Given
        WorkflowDef workflowDef = new WorkflowDef();
        WorkflowModel workflowModel = new WorkflowModel();
        workflowModel.setWorkflowDefinition(workflowDef);
        WorkflowTask workflowTask = new WorkflowTask();
        SubWorkflowParams subWorkflowParams = new SubWorkflowParams();
        subWorkflowParams.setName("Foo");
        subWorkflowParams.setVersion(2);
        workflowTask.setSubWorkflowParam(subWorkflowParams);
        workflowTask.setStartDelay(30);
        Map<String, Object> taskInput = new HashMap<>();
        // Map.of replaces the double-brace initialization anti-pattern, which creates an
        // anonymous inner class holding a reference to the enclosing test instance.
        Map<String, String> taskToDomain = Map.of("*", "unittest");
        Map<String, Object> subWorkflowParamMap = new HashMap<>();
        subWorkflowParamMap.put("name", "FooWorkFlow");
        subWorkflowParamMap.put("version", 2);
        subWorkflowParamMap.put("taskToDomain", taskToDomain);
        when(parametersUtils.getTaskInputV2(anyMap(), any(WorkflowModel.class), any(), any()))
                .thenReturn(subWorkflowParamMap);

        // When
        TaskMapperContext taskMapperContext =
                TaskMapperContext.newBuilder()
                        .withWorkflowModel(workflowModel)
                        .withWorkflowTask(workflowTask)
                        .withTaskInput(taskInput)
                        .withRetryCount(0)
                        .withTaskId(idGenerator.generate())
                        .withDeciderService(deciderService)
                        .build();
        List<TaskModel> mappedTasks = subWorkflowTaskMapper.getMappedTasks(taskMapperContext);

        // Then: exactly one SCHEDULED sub-workflow task, with the start delay carried over
        // as the callback interval and the domain mapping exposed in the task input.
        assertFalse(mappedTasks.isEmpty());
        assertEquals(1, mappedTasks.size());
        TaskModel subWorkFlowTask = mappedTasks.get(0);
        assertEquals(TaskModel.Status.SCHEDULED, subWorkFlowTask.getStatus());
        assertEquals(TASK_TYPE_SUB_WORKFLOW, subWorkFlowTask.getTaskType());
        assertEquals(30, subWorkFlowTask.getCallbackAfterSeconds());
        assertEquals(taskToDomain, subWorkFlowTask.getInputData().get("subWorkflowTaskToDomain"));
    }

    @Test
    public void testTaskToDomain() {
        // Given
        WorkflowDef workflowDef = new WorkflowDef();
        WorkflowModel workflowModel = new WorkflowModel();
        workflowModel.setWorkflowDefinition(workflowDef);
        WorkflowTask workflowTask = new WorkflowTask();
        // Map.of replaces the double-brace initialization anti-pattern (see getMappedTasks).
        Map<String, String> taskToDomain = Map.of("*", "unittest");
        SubWorkflowParams subWorkflowParams = new SubWorkflowParams();
        subWorkflowParams.setName("Foo");
        subWorkflowParams.setVersion(2);
        subWorkflowParams.setTaskToDomain(taskToDomain);
        workflowTask.setSubWorkflowParam(subWorkflowParams);
        Map<String, Object> taskInput = new HashMap<>();
        Map<String, Object> subWorkflowParamMap = new HashMap<>();
        subWorkflowParamMap.put("name", "FooWorkFlow");
        subWorkflowParamMap.put("version", 2);
        when(parametersUtils.getTaskInputV2(anyMap(), any(WorkflowModel.class), any(), any()))
                .thenReturn(subWorkflowParamMap);

        // When
        TaskMapperContext taskMapperContext =
                TaskMapperContext.newBuilder()
                        .withWorkflowModel(workflowModel)
                        .withWorkflowTask(workflowTask)
                        .withTaskInput(taskInput)
                        .withRetryCount(0)
                        .withTaskId(new IDGenerator().generate())
                        .withDeciderService(deciderService)
                        .build();
        List<TaskModel> mappedTasks = subWorkflowTaskMapper.getMappedTasks(taskMapperContext);

        // Then
        assertFalse(mappedTasks.isEmpty());
        assertEquals(1, mappedTasks.size());
        TaskModel subWorkFlowTask = mappedTasks.get(0);
        assertEquals(TaskModel.Status.SCHEDULED, subWorkFlowTask.getStatus());
        assertEquals(TASK_TYPE_SUB_WORKFLOW, subWorkFlowTask.getTaskType());
    }

    @Test
    public void getSubWorkflowParams() {
        // A task with explicit SubWorkflowParams returns them unchanged.
        WorkflowTask workflowTask = new WorkflowTask();
        SubWorkflowParams subWorkflowParams = new SubWorkflowParams();
        subWorkflowParams.setName("Foo");
        subWorkflowParams.setVersion(2);
        workflowTask.setSubWorkflowParam(subWorkflowParams);
        assertEquals(subWorkflowParams, subWorkflowTaskMapper.getSubWorkflowParams(workflowTask));
    }

    @Test
    public void getExceptionWhenNoSubWorkflowParamsPassed() {
        // A SUB_WORKFLOW task without SubWorkflowParams must terminate the workflow
        // with a descriptive message that names the offending task.
        WorkflowTask workflowTask = new WorkflowTask();
        workflowTask.setName("FooWorkFLow");
        expectedException.expect(TerminateWorkflowException.class);
        expectedException.expectMessage(
                String.format(
                        "Task %s is defined as sub-workflow and is missing subWorkflowParams. "
                                + "Please check the workflow definition",
                        workflowTask.getName()));
        subWorkflowTaskMapper.getSubWorkflowParams(workflowTask);
    }
}
| 6,623 |
0 | Create_ds/conductor/core/src/test/java/com/netflix/conductor/core/execution | Create_ds/conductor/core/src/test/java/com/netflix/conductor/core/execution/mapper/DynamicTaskMapperTest.java | /*
* Copyright 2022 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution.mapper;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.core.exception.TerminateWorkflowException;
import com.netflix.conductor.core.utils.IDGenerator;
import com.netflix.conductor.core.utils.ParametersUtils;
import com.netflix.conductor.dao.MetadataDAO;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import static org.junit.Assert.assertEquals;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyMap;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
/**
 * Unit tests for {@code DynamicTaskMapper}: resolving the dynamic task name from the task
 * input, looking up the task definition, and the terminate-workflow failure paths when the
 * name or definition is missing.
 */
public class DynamicTaskMapperTest {

    @Rule public ExpectedException expectedException = ExpectedException.none();

    private ParametersUtils parametersUtils;
    private MetadataDAO metadataDAO;
    private DynamicTaskMapper dynamicTaskMapper;

    @Before
    public void setUp() {
        parametersUtils = mock(ParametersUtils.class);
        metadataDAO = mock(MetadataDAO.class);
        dynamicTaskMapper = new DynamicTaskMapper(parametersUtils, metadataDAO);
    }

    @Test
    public void getMappedTasks() {
        WorkflowTask workflowTask = new WorkflowTask();
        workflowTask.setName("DynoTask");
        // The parameter named here must appear as a key in the resolved task input.
        workflowTask.setDynamicTaskNameParam("dynamicTaskName");
        TaskDef taskDef = new TaskDef();
        taskDef.setName("DynoTask");
        workflowTask.setTaskDefinition(taskDef);

        Map<String, Object> taskInput = new HashMap<>();
        taskInput.put("dynamicTaskName", "DynoTask");
        when(parametersUtils.getTaskInput(
                        anyMap(), any(WorkflowModel.class), any(TaskDef.class), anyString()))
                .thenReturn(taskInput);

        String taskId = new IDGenerator().generate();
        WorkflowModel workflow = new WorkflowModel();
        WorkflowDef workflowDef = new WorkflowDef();
        workflow.setWorkflowDefinition(workflowDef);
        TaskMapperContext taskMapperContext =
                TaskMapperContext.newBuilder()
                        .withWorkflowModel(workflow)
                        .withTaskDefinition(workflowTask.getTaskDefinition())
                        .withWorkflowTask(workflowTask)
                        .withTaskInput(taskInput)
                        .withRetryCount(0)
                        .withTaskId(taskId)
                        .build();
        when(metadataDAO.getTaskDef("DynoTask")).thenReturn(new TaskDef());

        List<TaskModel> mappedTasks = dynamicTaskMapper.getMappedTasks(taskMapperContext);

        assertEquals(1, mappedTasks.size());
        TaskModel dynamicTask = mappedTasks.get(0);
        assertEquals(taskId, dynamicTask.getTaskId());
    }

    @Test
    public void getDynamicTaskName() {
        // The dynamic task name is read from the input under the configured parameter key.
        Map<String, Object> taskInput = new HashMap<>();
        taskInput.put("dynamicTaskName", "DynoTask");
        String dynamicTaskName = dynamicTaskMapper.getDynamicTaskName(taskInput, "dynamicTaskName");
        assertEquals("DynoTask", dynamicTaskName);
    }

    @Test
    public void getDynamicTaskNameNotAvailable() {
        // A missing parameter key must terminate the workflow with a descriptive message.
        Map<String, Object> taskInput = new HashMap<>();
        expectedException.expect(TerminateWorkflowException.class);
        expectedException.expectMessage(
                String.format(
                        "Cannot map a dynamic task based on the parameter and input. "
                                + "Parameter= %s, input= %s",
                        "dynamicTaskName", taskInput));
        dynamicTaskMapper.getDynamicTaskName(taskInput, "dynamicTaskName");
    }

    @Test
    public void getDynamicTaskDefinition() {
        // Given: a task that carries an inline task definition
        WorkflowTask workflowTask = new WorkflowTask();
        workflowTask.setName("Foo");
        TaskDef taskDef = new TaskDef();
        taskDef.setName("Foo");
        workflowTask.setTaskDefinition(taskDef);
        when(metadataDAO.getTaskDef(any())).thenReturn(new TaskDef());

        // when: the inline definition takes precedence over the metadata store
        TaskDef dynamicTaskDefinition = dynamicTaskMapper.getDynamicTaskDefinition(workflowTask);
        // Fixed: JUnit's assertEquals takes (expected, actual); the original had them swapped.
        assertEquals(taskDef, dynamicTaskDefinition);
    }

    @Test
    public void getDynamicTaskDefinitionNull() {
        // Given: no inline definition and (by default) the mock metadata DAO returns null
        WorkflowTask workflowTask = new WorkflowTask();
        workflowTask.setName("Foo");
        expectedException.expect(TerminateWorkflowException.class);
        expectedException.expectMessage(
                String.format(
                        "Invalid task specified. Cannot find task by name %s in the task definitions",
                        workflowTask.getName()));
        dynamicTaskMapper.getDynamicTaskDefinition(workflowTask);
    }
}
| 6,624 |
0 | Create_ds/conductor/core/src/test/java/com/netflix/conductor/core/execution | Create_ds/conductor/core/src/test/java/com/netflix/conductor/core/execution/mapper/UserDefinedTaskMapperTest.java | /*
* Copyright 2022 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution.mapper;
import java.util.HashMap;
import java.util.List;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.tasks.TaskType;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.core.exception.TerminateWorkflowException;
import com.netflix.conductor.core.utils.IDGenerator;
import com.netflix.conductor.core.utils.ParametersUtils;
import com.netflix.conductor.dao.MetadataDAO;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.mock;
/**
 * Unit tests for {@code UserDefinedTaskMapper}: a USER_DEFINED task with a task definition
 * maps to a single task of the same type, while a missing definition terminates the workflow.
 */
public class UserDefinedTaskMapperTest {

    @Rule public ExpectedException expectedException = ExpectedException.none();

    private IDGenerator idGenerator;
    private UserDefinedTaskMapper userDefinedTaskMapper;

    @Before
    public void setUp() {
        ParametersUtils parametersUtils = mock(ParametersUtils.class);
        MetadataDAO metadataDAO = mock(MetadataDAO.class);
        userDefinedTaskMapper = new UserDefinedTaskMapper(parametersUtils, metadataDAO);
        idGenerator = new IDGenerator();
    }

    @Test
    public void getMappedTasks() {
        // Given: a USER_DEFINED task that carries its own task definition
        WorkflowTask userDefinedTask = new WorkflowTask();
        userDefinedTask.setName("user_task");
        userDefinedTask.setType(TaskType.USER_DEFINED.name());
        userDefinedTask.setTaskDefinition(new TaskDef("user_task"));

        WorkflowModel workflowModel = new WorkflowModel();
        workflowModel.setWorkflowDefinition(new WorkflowDef());

        String scheduledTaskId = idGenerator.generate();
        String previousAttemptTaskId = idGenerator.generate();
        TaskMapperContext context =
                TaskMapperContext.newBuilder()
                        .withWorkflowModel(workflowModel)
                        .withTaskDefinition(new TaskDef())
                        .withWorkflowTask(userDefinedTask)
                        .withTaskInput(new HashMap<>())
                        .withRetryCount(0)
                        .withRetryTaskId(previousAttemptTaskId)
                        .withTaskId(scheduledTaskId)
                        .build();

        // when
        List<TaskModel> mappedTasks = userDefinedTaskMapper.getMappedTasks(context);

        // Then: exactly one task is produced and it keeps the USER_DEFINED type
        assertEquals(1, mappedTasks.size());
        assertEquals(TaskType.USER_DEFINED.name(), mappedTasks.get(0).getTaskType());
    }

    @Test
    public void getMappedTasksException() {
        // Given: the same task but without any task definition attached
        WorkflowTask userDefinedTask = new WorkflowTask();
        userDefinedTask.setName("user_task");
        userDefinedTask.setType(TaskType.USER_DEFINED.name());

        WorkflowModel workflowModel = new WorkflowModel();
        workflowModel.setWorkflowDefinition(new WorkflowDef());

        String scheduledTaskId = idGenerator.generate();
        String previousAttemptTaskId = idGenerator.generate();
        TaskMapperContext context =
                TaskMapperContext.newBuilder()
                        .withWorkflowModel(workflowModel)
                        .withWorkflowTask(userDefinedTask)
                        .withTaskInput(new HashMap<>())
                        .withRetryCount(0)
                        .withRetryTaskId(previousAttemptTaskId)
                        .withTaskId(scheduledTaskId)
                        .build();

        // then: mapping must terminate the workflow with a message naming the task
        expectedException.expect(TerminateWorkflowException.class);
        expectedException.expectMessage(
                String.format(
                        "Invalid task specified. Cannot find task by name %s in the task definitions",
                        userDefinedTask.getName()));
        // when
        userDefinedTaskMapper.getMappedTasks(context);
    }
}
| 6,625 |
0 | Create_ds/conductor/core/src/test/java/com/netflix/conductor/core/execution | Create_ds/conductor/core/src/test/java/com/netflix/conductor/core/execution/mapper/NoopTaskMapperTest.java | /*
* Copyright 2023 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution.mapper;
import java.util.List;
import org.junit.Assert;
import org.junit.Test;
import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.tasks.TaskType;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.core.utils.IDGenerator;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
/**
 * Unit test for {@code NoopTaskMapper}: a NOOP workflow task maps to exactly one
 * task of type TASK_TYPE_NOOP.
 */
public class NoopTaskMapperTest {

    @Test
    public void getMappedTasks() {
        // Given: a minimal workflow holding a single NOOP task
        WorkflowTask noopTask = new WorkflowTask();
        noopTask.setType(TaskType.TASK_TYPE_NOOP);

        WorkflowModel workflowModel = new WorkflowModel();
        workflowModel.setWorkflowDefinition(new WorkflowDef());

        TaskMapperContext context =
                TaskMapperContext.newBuilder()
                        .withWorkflowModel(workflowModel)
                        .withTaskDefinition(new TaskDef())
                        .withWorkflowTask(noopTask)
                        .withRetryCount(0)
                        .withTaskId(new IDGenerator().generate())
                        .build();

        // When
        List<TaskModel> mappedTasks = new NoopTaskMapper().getMappedTasks(context);

        // Then: one task, type preserved
        Assert.assertNotNull(mappedTasks);
        Assert.assertEquals(1, mappedTasks.size());
        Assert.assertEquals(TaskType.TASK_TYPE_NOOP, mappedTasks.get(0).getTaskType());
    }
}
| 6,626 |
0 | Create_ds/conductor/core/src/test/java/com/netflix/conductor/core/execution | Create_ds/conductor/core/src/test/java/com/netflix/conductor/core/execution/mapper/HTTPTaskMapperTest.java | /*
* Copyright 2022 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution.mapper;
import java.util.HashMap;
import java.util.List;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.tasks.TaskType;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.core.utils.IDGenerator;
import com.netflix.conductor.core.utils.ParametersUtils;
import com.netflix.conductor.dao.MetadataDAO;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.mock;
/**
 * Unit tests for {@code HTTPTaskMapper}: an HTTP task maps to a single task of type HTTP,
 * with or without an explicit task definition in the mapper context.
 */
public class HTTPTaskMapperTest {

    @Rule public ExpectedException expectedException = ExpectedException.none();

    private HTTPTaskMapper httpTaskMapper;
    private IDGenerator idGenerator;

    @Before
    public void setUp() {
        ParametersUtils parametersUtils = mock(ParametersUtils.class);
        MetadataDAO metadataDAO = mock(MetadataDAO.class);
        httpTaskMapper = new HTTPTaskMapper(parametersUtils, metadataDAO);
        idGenerator = new IDGenerator();
    }

    @Test
    public void getMappedTasks() {
        // Given: an HTTP task with an inline task definition
        WorkflowTask httpTask = new WorkflowTask();
        httpTask.setName("http_task");
        httpTask.setType(TaskType.HTTP.name());
        httpTask.setTaskDefinition(new TaskDef("http_task"));

        WorkflowModel workflowModel = new WorkflowModel();
        workflowModel.setWorkflowDefinition(new WorkflowDef());

        String scheduledTaskId = idGenerator.generate();
        String previousAttemptTaskId = idGenerator.generate();
        TaskMapperContext context =
                TaskMapperContext.newBuilder()
                        .withWorkflowModel(workflowModel)
                        .withTaskDefinition(new TaskDef())
                        .withWorkflowTask(httpTask)
                        .withTaskInput(new HashMap<>())
                        .withRetryCount(0)
                        .withRetryTaskId(previousAttemptTaskId)
                        .withTaskId(scheduledTaskId)
                        .build();

        // when
        List<TaskModel> mappedTasks = httpTaskMapper.getMappedTasks(context);

        // Then
        assertEquals(1, mappedTasks.size());
        assertEquals(TaskType.HTTP.name(), mappedTasks.get(0).getTaskType());
    }

    @Test
    public void getMappedTasks_WithoutTaskDef() {
        // Given: the same HTTP task but no task definition in the context
        WorkflowTask httpTask = new WorkflowTask();
        httpTask.setName("http_task");
        httpTask.setType(TaskType.HTTP.name());

        WorkflowModel workflowModel = new WorkflowModel();
        workflowModel.setWorkflowDefinition(new WorkflowDef());

        String scheduledTaskId = idGenerator.generate();
        String previousAttemptTaskId = idGenerator.generate();
        TaskMapperContext context =
                TaskMapperContext.newBuilder()
                        .withWorkflowModel(workflowModel)
                        .withTaskDefinition(null)
                        .withWorkflowTask(httpTask)
                        .withTaskInput(new HashMap<>())
                        .withRetryCount(0)
                        .withRetryTaskId(previousAttemptTaskId)
                        .withTaskId(scheduledTaskId)
                        .build();

        // when: mapping still succeeds without a definition
        List<TaskModel> mappedTasks = httpTaskMapper.getMappedTasks(context);

        // Then
        assertEquals(1, mappedTasks.size());
        assertEquals(TaskType.HTTP.name(), mappedTasks.get(0).getTaskType());
    }
}
| 6,627 |
0 | Create_ds/conductor/core/src/test/java/com/netflix/conductor/core/execution | Create_ds/conductor/core/src/test/java/com/netflix/conductor/core/execution/mapper/LambdaTaskMapperTest.java | /*
* Copyright 2022 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution.mapper;
import java.util.List;
import org.junit.Before;
import org.junit.Test;
import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.tasks.TaskType;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.core.utils.IDGenerator;
import com.netflix.conductor.core.utils.ParametersUtils;
import com.netflix.conductor.dao.MetadataDAO;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.mockito.Mockito.mock;
/**
 * Unit tests for {@code LambdaTaskMapper}: a LAMBDA task carrying a script expression maps
 * to a single task of type LAMBDA, with or without an inline task definition.
 */
public class LambdaTaskMapperTest {

    private IDGenerator idGenerator;
    private ParametersUtils parametersUtils;
    private MetadataDAO metadataDAO;

    @Before
    public void setUp() {
        parametersUtils = mock(ParametersUtils.class);
        metadataDAO = mock(MetadataDAO.class);
        idGenerator = new IDGenerator();
    }

    @Test
    public void getMappedTasks() {
        WorkflowTask workflowTask = new WorkflowTask();
        workflowTask.setName("lambda_task");
        workflowTask.setType(TaskType.LAMBDA.name());
        workflowTask.setTaskDefinition(new TaskDef("lambda_task"));
        workflowTask.setScriptExpression(
                "if ($.input.a==1){return {testValue: true}} else{return {testValue: false} }");

        String taskId = idGenerator.generate();
        WorkflowDef workflowDef = new WorkflowDef();
        WorkflowModel workflow = new WorkflowModel();
        workflow.setWorkflowDefinition(workflowDef);
        TaskMapperContext taskMapperContext =
                TaskMapperContext.newBuilder()
                        .withWorkflowModel(workflow)
                        .withTaskDefinition(new TaskDef())
                        .withWorkflowTask(workflowTask)
                        .withRetryCount(0)
                        .withTaskId(taskId)
                        .build();

        List<TaskModel> mappedTasks =
                new LambdaTaskMapper(parametersUtils, metadataDAO)
                        .getMappedTasks(taskMapperContext);

        // Fixed: the null check must come before any dereference of the list; the original
        // called mappedTasks.size() first, which would NPE before assertNotNull could fail.
        assertNotNull(mappedTasks);
        assertEquals(1, mappedTasks.size());
        assertEquals(TaskType.LAMBDA.name(), mappedTasks.get(0).getTaskType());
    }

    @Test
    public void getMappedTasks_WithoutTaskDef() {
        WorkflowTask workflowTask = new WorkflowTask();
        workflowTask.setType(TaskType.LAMBDA.name());
        workflowTask.setScriptExpression(
                "if ($.input.a==1){return {testValue: true}} else{return {testValue: false} }");

        String taskId = idGenerator.generate();
        WorkflowDef workflowDef = new WorkflowDef();
        WorkflowModel workflow = new WorkflowModel();
        workflow.setWorkflowDefinition(workflowDef);
        TaskMapperContext taskMapperContext =
                TaskMapperContext.newBuilder()
                        .withWorkflowModel(workflow)
                        .withTaskDefinition(null)
                        .withWorkflowTask(workflowTask)
                        .withRetryCount(0)
                        .withTaskId(taskId)
                        .build();

        List<TaskModel> mappedTasks =
                new LambdaTaskMapper(parametersUtils, metadataDAO)
                        .getMappedTasks(taskMapperContext);

        // Fixed: null check before dereference, same as above.
        assertNotNull(mappedTasks);
        assertEquals(1, mappedTasks.size());
        assertEquals(TaskType.LAMBDA.name(), mappedTasks.get(0).getTaskType());
    }
}
| 6,628 |
0 | Create_ds/conductor/core/src/test/java/com/netflix/conductor/core/execution | Create_ds/conductor/core/src/test/java/com/netflix/conductor/core/execution/mapper/SetVariableTaskMapperTest.java | /*
* Copyright 2022 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution.mapper;
import java.util.List;
import org.junit.Assert;
import org.junit.Test;
import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.tasks.TaskType;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.core.utils.IDGenerator;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
/**
 * Unit test for {@code SetVariableTaskMapper}: a SET_VARIABLE workflow task maps to
 * exactly one task of type TASK_TYPE_SET_VARIABLE.
 */
public class SetVariableTaskMapperTest {

    @Test
    public void getMappedTasks() {
        // Given: a minimal workflow with a single SET_VARIABLE task
        WorkflowTask setVariableTask = new WorkflowTask();
        setVariableTask.setType(TaskType.TASK_TYPE_SET_VARIABLE);

        WorkflowModel workflowModel = new WorkflowModel();
        workflowModel.setWorkflowDefinition(new WorkflowDef());

        TaskMapperContext context =
                TaskMapperContext.newBuilder()
                        .withWorkflowModel(workflowModel)
                        .withTaskDefinition(new TaskDef())
                        .withWorkflowTask(setVariableTask)
                        .withRetryCount(0)
                        .withTaskId(new IDGenerator().generate())
                        .build();

        // When
        List<TaskModel> mappedTasks = new SetVariableTaskMapper().getMappedTasks(context);

        // Then: one task, type preserved
        Assert.assertNotNull(mappedTasks);
        Assert.assertEquals(1, mappedTasks.size());
        Assert.assertEquals(TaskType.TASK_TYPE_SET_VARIABLE, mappedTasks.get(0).getTaskType());
    }
}
| 6,629 |
0 | Create_ds/conductor/core/src/test/java/com/netflix/conductor/core/execution | Create_ds/conductor/core/src/test/java/com/netflix/conductor/core/execution/mapper/DoWhileTaskMapperTest.java | /*
* Copyright 2022 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution.mapper;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mockito;
import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.tasks.TaskType;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.common.utils.TaskUtils;
import com.netflix.conductor.core.execution.DeciderService;
import com.netflix.conductor.core.utils.IDGenerator;
import com.netflix.conductor.core.utils.ParametersUtils;
import com.netflix.conductor.dao.MetadataDAO;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import com.fasterxml.jackson.databind.ObjectMapper;
import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_DO_WHILE;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
/**
 * Unit tests for {@code DoWhileTaskMapper}: a DO_WHILE task maps to one DO_WHILE task with
 * evaluated input parameters; completed loop tasks are not rescheduled; iteration suffixes
 * are appended via {@code TaskUtils}.
 */
public class DoWhileTaskMapperTest {

    private TaskModel task1;
    private DeciderService deciderService;
    private WorkflowModel workflow;
    private WorkflowTask workflowTask1;
    private TaskMapperContext taskMapperContext;
    private MetadataDAO metadataDAO;
    private ParametersUtils parametersUtils;

    @Before
    public void setup() {
        WorkflowTask workflowTask = new WorkflowTask();
        workflowTask.setType(TaskType.DO_WHILE.name());
        workflowTask.setTaskReferenceName("Test");
        // "${workflow.input.foo}" must be resolved from the workflow input at mapping time.
        workflowTask.setInputParameters(Map.of("value", "${workflow.input.foo}"));

        task1 = new TaskModel();
        task1.setReferenceTaskName("task1");
        TaskModel task2 = new TaskModel();
        task2.setReferenceTaskName("task2");
        workflowTask1 = new WorkflowTask();
        workflowTask1.setTaskReferenceName("task1");
        WorkflowTask workflowTask2 = new WorkflowTask();
        workflowTask2.setTaskReferenceName("task2");
        task1.setWorkflowTask(workflowTask1);
        task2.setWorkflowTask(workflowTask2);
        workflowTask.setLoopOver(Arrays.asList(task1.getWorkflowTask(), task2.getWorkflowTask()));
        workflowTask.setLoopCondition(
                "if ($.second_task + $.first_task > 10) { false; } else { true; }");

        String taskId = new IDGenerator().generate();
        WorkflowDef workflowDef = new WorkflowDef();
        workflow = new WorkflowModel();
        workflow.setWorkflowDefinition(workflowDef);
        workflow.setInput(Map.of("foo", "bar"));

        deciderService = Mockito.mock(DeciderService.class);
        metadataDAO = Mockito.mock(MetadataDAO.class);
        taskMapperContext =
                TaskMapperContext.newBuilder()
                        .withDeciderService(deciderService)
                        .withWorkflowModel(workflow)
                        .withTaskDefinition(new TaskDef())
                        .withWorkflowTask(workflowTask)
                        .withRetryCount(0)
                        .withTaskId(taskId)
                        .build();
        parametersUtils = new ParametersUtils(new ObjectMapper());
    }

    @Test
    public void getMappedTasks() {
        Mockito.doReturn(Collections.singletonList(task1))
                .when(deciderService)
                .getTasksToBeScheduled(workflow, workflowTask1, 0);

        List<TaskModel> mappedTasks =
                new DoWhileTaskMapper(metadataDAO, parametersUtils)
                        .getMappedTasks(taskMapperContext);

        assertNotNull(mappedTasks);
        // Fixed: JUnit's assertEquals takes (expected, actual); the original had them swapped.
        assertEquals(1, mappedTasks.size());
        assertEquals(TASK_TYPE_DO_WHILE, mappedTasks.get(0).getTaskType());
        assertNotNull(mappedTasks.get(0).getInputData());
        // The "${workflow.input.foo}" expression resolves to the workflow input value.
        assertEquals(Map.of("value", "bar"), mappedTasks.get(0).getInputData());
    }

    @Test
    public void shouldNotScheduleCompletedTask() {
        // A loop task already COMPLETED must not be scheduled again; only the DO_WHILE
        // container task itself is produced.
        task1.setStatus(TaskModel.Status.COMPLETED);

        List<TaskModel> mappedTasks =
                new DoWhileTaskMapper(metadataDAO, parametersUtils)
                        .getMappedTasks(taskMapperContext);

        assertNotNull(mappedTasks);
        // Fixed: expected/actual order, as above.
        assertEquals(1, mappedTasks.size());
    }

    @Test
    public void testAppendIteration() {
        // Iteration suffix format is "<name>__<iteration>".
        assertEquals("task__1", TaskUtils.appendIteration("task", 1));
    }
}
| 6,630 |
0 | Create_ds/conductor/core/src/test/java/com/netflix/conductor/core/execution | Create_ds/conductor/core/src/test/java/com/netflix/conductor/core/execution/mapper/SwitchTaskMapperTest.java | /*
* Copyright 2022 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution.mapper;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringRunner;
import com.netflix.conductor.common.config.TestObjectMapperConfiguration;
import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.tasks.TaskType;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.core.execution.DeciderService;
import com.netflix.conductor.core.execution.evaluators.Evaluator;
import com.netflix.conductor.core.execution.evaluators.JavascriptEvaluator;
import com.netflix.conductor.core.execution.evaluators.ValueParamEvaluator;
import com.netflix.conductor.core.utils.IDGenerator;
import com.netflix.conductor.core.utils.ParametersUtils;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import com.fasterxml.jackson.databind.ObjectMapper;
import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
@ContextConfiguration(
        classes = {
            TestObjectMapperConfiguration.class,
            SwitchTaskMapperTest.TestConfiguration.class
        })
@RunWith(SpringRunner.class)
public class SwitchTaskMapperTest {

    private IDGenerator idGenerator;
    private ParametersUtils parametersUtils;
    private DeciderService deciderService;

    // Subject
    private SwitchTaskMapper switchTaskMapper;

    @Configuration
    @ComponentScan(basePackageClasses = {Evaluator.class}) // load all Evaluator beans.
    public static class TestConfiguration {}

    @Autowired private ObjectMapper objectMapper;

    // All Evaluator beans discovered by TestConfiguration, keyed by evaluator name.
    @Autowired private Map<String, Evaluator> evaluators;

    // NOTE(review): not referenced by any test in this class; retained so the class's
    // declared members stay unchanged. ExpectedException is deprecated in JUnit 4.13 —
    // consider removing or migrating to assertThrows.
    @Rule public ExpectedException expectedException = ExpectedException.none();

    Map<String, Object> ip1;
    WorkflowTask task1;
    WorkflowTask task2;
    WorkflowTask task3;

    @Before
    public void setUp() {
        parametersUtils = new ParametersUtils(objectMapper);
        idGenerator = new IDGenerator();
        ip1 = new HashMap<>();
        ip1.put("p1", "${workflow.input.param1}");
        ip1.put("p2", "${workflow.input.param2}");
        ip1.put("case", "${workflow.input.case}");
        task1 = createSimpleTask("Test1", "t1");
        task2 = createSimpleTask("Test2", "t2");
        task3 = createSimpleTask("Test3", "t3");
        deciderService = mock(DeciderService.class);
        switchTaskMapper = new SwitchTaskMapper(evaluators);
    }

    /** Creates a plain workflow task sharing the common input parameters {@link #ip1}. */
    private WorkflowTask createSimpleTask(String name, String referenceName) {
        WorkflowTask task = new WorkflowTask();
        task.setName(name);
        task.setInputParameters(ip1);
        task.setTaskReferenceName(referenceName);
        return task;
    }

    /**
     * Creates a SWITCH workflow task with {@link #task1} as the default case and the given
     * evaluator type and expression. Decision cases and input parameters are added per test.
     */
    private WorkflowTask createSwitchTask(String evaluatorType, String expression) {
        WorkflowTask switchTask = new WorkflowTask();
        switchTask.setType(TaskType.SWITCH.name());
        switchTask.setName("Switch");
        switchTask.setTaskReferenceName("switchTask");
        switchTask.setDefaultCase(Collections.singletonList(task1));
        switchTask.setEvaluatorType(evaluatorType);
        switchTask.setExpression(expression);
        return switchTask;
    }

    /** Creates a schema-v2 workflow model wrapping an otherwise-empty workflow definition. */
    private WorkflowModel createWorkflowModel() {
        WorkflowDef workflowDef = new WorkflowDef();
        workflowDef.setSchemaVersion(2);
        WorkflowModel workflowModel = new WorkflowModel();
        workflowModel.setWorkflowDefinition(workflowDef);
        return workflowModel;
    }

    /** Builds the {@link TaskMapperContext} handed to the mapper under test. */
    private TaskMapperContext createTaskMapperContext(
            WorkflowModel workflowModel, WorkflowTask switchTask, Map<String, Object> input) {
        return TaskMapperContext.newBuilder()
                .withWorkflowModel(workflowModel)
                .withWorkflowTask(switchTask)
                .withTaskInput(input)
                .withRetryCount(0)
                .withTaskId(idGenerator.generate())
                .withDeciderService(deciderService)
                .build();
    }

    /**
     * Javascript evaluator: workflow input Id=22 evaluates the expression to "even", so the
     * mapper schedules the SWITCH task followed by the task the decider returns for the
     * "even" branch (task2).
     */
    @Test
    public void getMappedTasks() {
        // Given
        WorkflowTask switchTask =
                createSwitchTask(
                        JavascriptEvaluator.NAME,
                        "if ($.Id == null) 'bad input'; else if ( ($.Id != null && $.Id % 2 == 0)) 'even'; else 'odd'; ");
        switchTask.getInputParameters().put("Id", "${workflow.input.Id}");
        Map<String, List<WorkflowTask>> decisionCases = new HashMap<>();
        decisionCases.put("even", Collections.singletonList(task2));
        decisionCases.put("odd", Collections.singletonList(task3));
        switchTask.setDecisionCases(decisionCases);
        WorkflowModel workflowModel = createWorkflowModel();
        Map<String, Object> workflowInput = new HashMap<>();
        workflowInput.put("Id", "22");
        workflowModel.setInput(workflowInput);
        Map<String, Object> input =
                parametersUtils.getTaskInput(
                        switchTask.getInputParameters(), workflowModel, null, null);
        TaskModel theTask = new TaskModel();
        theTask.setReferenceTaskName("Foo");
        theTask.setTaskId(idGenerator.generate());
        when(deciderService.getTasksToBeScheduled(workflowModel, task2, 0, null))
                .thenReturn(Collections.singletonList(theTask));
        // When
        List<TaskModel> mappedTasks =
                switchTaskMapper.getMappedTasks(
                        createTaskMapperContext(workflowModel, switchTask, input));
        // Then: the SWITCH task itself plus the scheduled "even"-branch task.
        assertEquals(2, mappedTasks.size());
        assertEquals("switchTask", mappedTasks.get(0).getReferenceTaskName());
        assertEquals("Foo", mappedTasks.get(1).getReferenceTaskName());
    }

    /**
     * Value-param evaluator: the expression "Id" resolves directly to the workflow input
     * value "even", selecting the "even" decision case (task2).
     */
    @Test
    public void getMappedTasksWithValueParamEvaluator() {
        // Given
        WorkflowTask switchTask = createSwitchTask(ValueParamEvaluator.NAME, "Id");
        switchTask.getInputParameters().put("Id", "${workflow.input.Id}");
        Map<String, List<WorkflowTask>> decisionCases = new HashMap<>();
        decisionCases.put("even", Collections.singletonList(task2));
        decisionCases.put("odd", Collections.singletonList(task3));
        switchTask.setDecisionCases(decisionCases);
        WorkflowModel workflowModel = createWorkflowModel();
        Map<String, Object> workflowInput = new HashMap<>();
        workflowInput.put("Id", "even");
        workflowModel.setInput(workflowInput);
        Map<String, Object> input =
                parametersUtils.getTaskInput(
                        switchTask.getInputParameters(), workflowModel, null, null);
        TaskModel theTask = new TaskModel();
        theTask.setReferenceTaskName("Foo");
        theTask.setTaskId(idGenerator.generate());
        when(deciderService.getTasksToBeScheduled(workflowModel, task2, 0, null))
                .thenReturn(Collections.singletonList(theTask));
        // When
        List<TaskModel> mappedTasks =
                switchTaskMapper.getMappedTasks(
                        createTaskMapperContext(workflowModel, switchTask, input));
        // Then
        assertEquals(2, mappedTasks.size());
        assertEquals("switchTask", mappedTasks.get(0).getReferenceTaskName());
        assertEquals("Foo", mappedTasks.get(1).getReferenceTaskName());
    }

    /**
     * The Javascript expression references an undefined variable and the task has no input
     * parameters, so evaluation fails: only the SWITCH task is mapped and it is FAILED.
     */
    @Test
    public void getMappedTasksWhenEvaluatorThrowsException() {
        // Given: no "Id" input parameter on the switch task, and an invalid expression.
        WorkflowTask switchTask = createSwitchTask(JavascriptEvaluator.NAME, "undefinedVariable");
        Map<String, List<WorkflowTask>> decisionCases = new HashMap<>();
        decisionCases.put("even", Collections.singletonList(task2));
        switchTask.setDecisionCases(decisionCases);
        WorkflowModel workflowModel = createWorkflowModel();
        Map<String, Object> input =
                parametersUtils.getTaskInput(
                        switchTask.getInputParameters(), workflowModel, null, null);
        // Stub the decider even though evaluation is expected to fail before it is used.
        TaskModel theTask = new TaskModel();
        theTask.setReferenceTaskName("Foo");
        theTask.setTaskId(idGenerator.generate());
        when(deciderService.getTasksToBeScheduled(workflowModel, task2, 0, null))
                .thenReturn(Collections.singletonList(theTask));
        // When
        List<TaskModel> mappedTasks =
                switchTaskMapper.getMappedTasks(
                        createTaskMapperContext(workflowModel, switchTask, input));
        // Then
        assertEquals(1, mappedTasks.size());
        assertEquals("switchTask", mappedTasks.get(0).getReferenceTaskName());
        assertEquals(TaskModel.Status.FAILED, mappedTasks.get(0).getStatus());
    }
}
| 6,631 |
0 | Create_ds/conductor/core/src/test/java/com/netflix/conductor/core/execution | Create_ds/conductor/core/src/test/java/com/netflix/conductor/core/execution/mapper/ForkJoinDynamicTaskMapperTest.java | /*
* Copyright 2022 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution.mapper;
import java.util.*;
import org.apache.commons.lang3.tuple.Pair;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.mockito.Mockito;
import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.tasks.TaskType;
import com.netflix.conductor.common.metadata.workflow.DynamicForkJoinTaskList;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.core.exception.TerminateWorkflowException;
import com.netflix.conductor.core.execution.DeciderService;
import com.netflix.conductor.core.utils.IDGenerator;
import com.netflix.conductor.core.utils.ParametersUtils;
import com.netflix.conductor.dao.MetadataDAO;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_FORK;
import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_JOIN;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyMap;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Mockito.when;
/**
 * Tests for {@code ForkJoinDynamicTaskMapper}: mapping FORK_JOIN_DYNAMIC workflow tasks into
 * the scheduled FORK / forked tasks / JOIN sequence, for both the "dynamic fork tasks param"
 * form and the legacy {@code DynamicForkJoinTaskList} form, plus the failure paths that must
 * terminate the workflow.
 */
@SuppressWarnings("unchecked")
public class ForkJoinDynamicTaskMapperTest {
    private IDGenerator idGenerator;
    private ParametersUtils parametersUtils;
    private ObjectMapper objectMapper;
    private DeciderService deciderService;
    // Subject under test.
    private ForkJoinDynamicTaskMapper forkJoinDynamicTaskMapper;
    // NOTE(review): ExpectedException is deprecated in JUnit 4.13; assertThrows is preferred.
    @Rule public ExpectedException expectedException = ExpectedException.none();
    @Before
    public void setUp() {
        // All collaborators are mocked; only the mapper itself is real.
        MetadataDAO metadataDAO = Mockito.mock(MetadataDAO.class);
        idGenerator = new IDGenerator();
        parametersUtils = Mockito.mock(ParametersUtils.class);
        objectMapper = Mockito.mock(ObjectMapper.class);
        deciderService = Mockito.mock(DeciderService.class);
        forkJoinDynamicTaskMapper =
                new ForkJoinDynamicTaskMapper(
                        idGenerator, parametersUtils, objectMapper, metadataDAO);
    }
    /**
     * Unlike {@link #getMappedTasks()}, the JOIN task is created but never added to the
     * workflow definition, so mapping the dynamic fork must throw
     * {@link TerminateWorkflowException}.
     */
    @Test
    public void getMappedTasksException() {
        WorkflowDef def = new WorkflowDef();
        def.setName("DYNAMIC_FORK_JOIN_WF");
        def.setDescription(def.getName());
        def.setVersion(1);
        def.setInputParameters(Arrays.asList("param1", "param2"));
        WorkflowModel workflowModel = new WorkflowModel();
        workflowModel.setWorkflowDefinition(def);
        WorkflowTask dynamicForkJoinToSchedule = new WorkflowTask();
        dynamicForkJoinToSchedule.setType(TaskType.FORK_JOIN_DYNAMIC.name());
        dynamicForkJoinToSchedule.setTaskReferenceName("dynamicfanouttask");
        dynamicForkJoinToSchedule.setDynamicForkTasksParam("dynamicTasks");
        dynamicForkJoinToSchedule.setDynamicForkTasksInputParamName("dynamicTasksInput");
        dynamicForkJoinToSchedule
                .getInputParameters()
                .put("dynamicTasks", "dt1.output.dynamicTasks");
        dynamicForkJoinToSchedule
                .getInputParameters()
                .put("dynamicTasksInput", "dt1.output.dynamicTasksInput");
        // Deliberately NOT added to the definition below (compare getMappedTasks()).
        WorkflowTask join = new WorkflowTask();
        join.setType(TaskType.JOIN.name());
        join.setTaskReferenceName("dynamictask_join");
        def.getTasks().add(dynamicForkJoinToSchedule);
        Map<String, Object> input1 = new HashMap<>();
        input1.put("k1", "v1");
        WorkflowTask wt2 = new WorkflowTask();
        wt2.setName("junit_task_2");
        wt2.setTaskReferenceName("xdt1");
        Map<String, Object> input2 = new HashMap<>();
        input2.put("k2", "v2");
        WorkflowTask wt3 = new WorkflowTask();
        wt3.setName("junit_task_3");
        wt3.setTaskReferenceName("xdt2");
        HashMap<String, Object> dynamicTasksInput = new HashMap<>();
        dynamicTasksInput.put("xdt1", input1);
        dynamicTasksInput.put("xdt2", input2);
        dynamicTasksInput.put("dynamicTasks", Arrays.asList(wt2, wt3));
        dynamicTasksInput.put("dynamicTasksInput", dynamicTasksInput);
        // when
        when(parametersUtils.getTaskInput(anyMap(), any(WorkflowModel.class), any(), any()))
                .thenReturn(dynamicTasksInput);
        when(objectMapper.convertValue(any(), any(TypeReference.class)))
                .thenReturn(Arrays.asList(wt2, wt3));
        TaskModel simpleTask1 = new TaskModel();
        simpleTask1.setReferenceTaskName("xdt1");
        TaskModel simpleTask2 = new TaskModel();
        simpleTask2.setReferenceTaskName("xdt2");
        when(deciderService.getTasksToBeScheduled(workflowModel, wt2, 0))
                .thenReturn(Collections.singletonList(simpleTask1));
        when(deciderService.getTasksToBeScheduled(workflowModel, wt3, 0))
                .thenReturn(Collections.singletonList(simpleTask2));
        String taskId = idGenerator.generate();
        TaskMapperContext taskMapperContext =
                TaskMapperContext.newBuilder()
                        .withWorkflowModel(workflowModel)
                        .withWorkflowTask(dynamicForkJoinToSchedule)
                        .withRetryCount(0)
                        .withTaskId(taskId)
                        .withDeciderService(deciderService)
                        .build();
        // then
        expectedException.expect(TerminateWorkflowException.class);
        forkJoinDynamicTaskMapper.getMappedTasks(taskMapperContext);
    }
    /**
     * Happy path: a dynamic fork with two forked tasks maps to FORK, the two forked tasks,
     * and a JOIN whose "joinOn" input lists both forked task reference names.
     */
    @Test
    public void getMappedTasks() {
        WorkflowDef def = new WorkflowDef();
        def.setName("DYNAMIC_FORK_JOIN_WF");
        def.setDescription(def.getName());
        def.setVersion(1);
        def.setInputParameters(Arrays.asList("param1", "param2"));
        WorkflowModel workflowModel = new WorkflowModel();
        workflowModel.setWorkflowDefinition(def);
        WorkflowTask dynamicForkJoinToSchedule = new WorkflowTask();
        dynamicForkJoinToSchedule.setType(TaskType.FORK_JOIN_DYNAMIC.name());
        dynamicForkJoinToSchedule.setTaskReferenceName("dynamicfanouttask");
        dynamicForkJoinToSchedule.setDynamicForkTasksParam("dynamicTasks");
        dynamicForkJoinToSchedule.setDynamicForkTasksInputParamName("dynamicTasksInput");
        dynamicForkJoinToSchedule
                .getInputParameters()
                .put("dynamicTasks", "dt1.output.dynamicTasks");
        dynamicForkJoinToSchedule
                .getInputParameters()
                .put("dynamicTasksInput", "dt1.output.dynamicTasksInput");
        WorkflowTask join = new WorkflowTask();
        join.setType(TaskType.JOIN.name());
        join.setTaskReferenceName("dynamictask_join");
        // Both the fork and its JOIN are part of the definition here.
        def.getTasks().add(dynamicForkJoinToSchedule);
        def.getTasks().add(join);
        Map<String, Object> input1 = new HashMap<>();
        input1.put("k1", "v1");
        WorkflowTask wt2 = new WorkflowTask();
        wt2.setName("junit_task_2");
        wt2.setTaskReferenceName("xdt1");
        Map<String, Object> input2 = new HashMap<>();
        input2.put("k2", "v2");
        WorkflowTask wt3 = new WorkflowTask();
        wt3.setName("junit_task_3");
        wt3.setTaskReferenceName("xdt2");
        HashMap<String, Object> dynamicTasksInput = new HashMap<>();
        dynamicTasksInput.put("xdt1", input1);
        dynamicTasksInput.put("xdt2", input2);
        dynamicTasksInput.put("dynamicTasks", Arrays.asList(wt2, wt3));
        dynamicTasksInput.put("dynamicTasksInput", dynamicTasksInput);
        // when
        when(parametersUtils.getTaskInput(anyMap(), any(WorkflowModel.class), any(), any()))
                .thenReturn(dynamicTasksInput);
        when(objectMapper.convertValue(any(), any(TypeReference.class)))
                .thenReturn(Arrays.asList(wt2, wt3));
        TaskModel simpleTask1 = new TaskModel();
        simpleTask1.setReferenceTaskName("xdt1");
        TaskModel simpleTask2 = new TaskModel();
        simpleTask2.setReferenceTaskName("xdt2");
        when(deciderService.getTasksToBeScheduled(workflowModel, wt2, 0))
                .thenReturn(Collections.singletonList(simpleTask1));
        when(deciderService.getTasksToBeScheduled(workflowModel, wt3, 0))
                .thenReturn(Collections.singletonList(simpleTask2));
        String taskId = idGenerator.generate();
        TaskMapperContext taskMapperContext =
                TaskMapperContext.newBuilder()
                        .withWorkflowModel(workflowModel)
                        .withWorkflowTask(dynamicForkJoinToSchedule)
                        .withRetryCount(0)
                        .withTaskId(taskId)
                        .withDeciderService(deciderService)
                        .build();
        // then: FORK, the two forked tasks, then JOIN.
        List<TaskModel> mappedTasks = forkJoinDynamicTaskMapper.getMappedTasks(taskMapperContext);
        assertEquals(4, mappedTasks.size());
        assertEquals(TASK_TYPE_FORK, mappedTasks.get(0).getTaskType());
        assertEquals(TASK_TYPE_JOIN, mappedTasks.get(3).getTaskType());
        List<String> joinTaskNames = (List<String>) mappedTasks.get(3).getInputData().get("joinOn");
        assertEquals("xdt1, xdt2", String.join(", ", joinTaskNames));
    }
    /**
     * Legacy form: the fork is described by a single {@code DynamicForkJoinTaskList}
     * parameter; the mapper must produce two workflow tasks and their two input maps.
     */
    @Test
    public void getDynamicForkJoinTasksAndInput() {
        // Given
        WorkflowTask dynamicForkJoinToSchedule = new WorkflowTask();
        dynamicForkJoinToSchedule.setType(TaskType.FORK_JOIN_DYNAMIC.name());
        dynamicForkJoinToSchedule.setTaskReferenceName("dynamicfanouttask");
        dynamicForkJoinToSchedule.setDynamicForkJoinTasksParam("dynamicTasks");
        dynamicForkJoinToSchedule
                .getInputParameters()
                .put("dynamicTasks", "dt1.output.dynamicTasks");
        dynamicForkJoinToSchedule
                .getInputParameters()
                .put("dynamicTasksInput", "dt1.output.dynamicTasksInput");
        DynamicForkJoinTaskList dtasks = new DynamicForkJoinTaskList();
        Map<String, Object> input = new HashMap<>();
        input.put("k1", "v1");
        dtasks.add("junit_task_2", null, "xdt1", input);
        HashMap<String, Object> input2 = new HashMap<>();
        input2.put("k2", "v2");
        dtasks.add("junit_task_3", null, "xdt2", input2);
        Map<String, Object> dynamicTasksInput = new HashMap<>();
        dynamicTasksInput.put("dynamicTasks", dtasks);
        // when
        when(parametersUtils.getTaskInput(
                        anyMap(), any(WorkflowModel.class), any(TaskDef.class), anyString()))
                .thenReturn(dynamicTasksInput);
        when(objectMapper.convertValue(any(), any(Class.class))).thenReturn(dtasks);
        Pair<List<WorkflowTask>, Map<String, Map<String, Object>>> dynamicForkJoinTasksAndInput =
                forkJoinDynamicTaskMapper.getDynamicForkJoinTasksAndInput(
                        dynamicForkJoinToSchedule, new WorkflowModel());
        // then: left = forked workflow tasks, right = per-task input maps.
        assertNotNull(dynamicForkJoinTasksAndInput.getLeft());
        assertEquals(2, dynamicForkJoinTasksAndInput.getLeft().size());
        assertEquals(2, dynamicForkJoinTasksAndInput.getRight().size());
    }
    /**
     * Legacy form failure: convertValue yields null for the task list, which must
     * terminate the workflow.
     */
    @Test
    public void getDynamicForkJoinTasksAndInputException() {
        // Given
        WorkflowTask dynamicForkJoinToSchedule = new WorkflowTask();
        dynamicForkJoinToSchedule.setType(TaskType.FORK_JOIN_DYNAMIC.name());
        dynamicForkJoinToSchedule.setTaskReferenceName("dynamicfanouttask");
        dynamicForkJoinToSchedule.setDynamicForkJoinTasksParam("dynamicTasks");
        dynamicForkJoinToSchedule
                .getInputParameters()
                .put("dynamicTasks", "dt1.output.dynamicTasks");
        dynamicForkJoinToSchedule
                .getInputParameters()
                .put("dynamicTasksInput", "dt1.output.dynamicTasksInput");
        DynamicForkJoinTaskList dtasks = new DynamicForkJoinTaskList();
        Map<String, Object> input = new HashMap<>();
        input.put("k1", "v1");
        dtasks.add("junit_task_2", null, "xdt1", input);
        HashMap<String, Object> input2 = new HashMap<>();
        input2.put("k2", "v2");
        dtasks.add("junit_task_3", null, "xdt2", input2);
        Map<String, Object> dynamicTasksInput = new HashMap<>();
        dynamicTasksInput.put("dynamicTasks", dtasks);
        // when: deserialization of the task list "fails" (returns null).
        when(parametersUtils.getTaskInput(
                        anyMap(), any(WorkflowModel.class), any(TaskDef.class), anyString()))
                .thenReturn(dynamicTasksInput);
        when(objectMapper.convertValue(any(), any(Class.class))).thenReturn(null);
        // then
        expectedException.expect(TerminateWorkflowException.class);
        forkJoinDynamicTaskMapper.getDynamicForkJoinTasksAndInput(
                dynamicForkJoinToSchedule, new WorkflowModel());
    }
    /** Current form: resolves the forked tasks from the dynamic-fork-tasks parameter. */
    @Test
    public void getDynamicForkTasksAndInput() {
        // Given
        WorkflowTask dynamicForkJoinToSchedule = new WorkflowTask();
        dynamicForkJoinToSchedule.setType(TaskType.FORK_JOIN_DYNAMIC.name());
        dynamicForkJoinToSchedule.setTaskReferenceName("dynamicfanouttask");
        dynamicForkJoinToSchedule.setDynamicForkTasksParam("dynamicTasks");
        dynamicForkJoinToSchedule.setDynamicForkTasksInputParamName("dynamicTasksInput");
        dynamicForkJoinToSchedule
                .getInputParameters()
                .put("dynamicTasks", "dt1.output.dynamicTasks");
        dynamicForkJoinToSchedule
                .getInputParameters()
                .put("dynamicTasksInput", "dt1.output.dynamicTasksInput");
        Map<String, Object> input1 = new HashMap<>();
        input1.put("k1", "v1");
        WorkflowTask wt2 = new WorkflowTask();
        wt2.setName("junit_task_2");
        wt2.setTaskReferenceName("xdt1");
        Map<String, Object> input2 = new HashMap<>();
        input2.put("k2", "v2");
        WorkflowTask wt3 = new WorkflowTask();
        wt3.setName("junit_task_3");
        wt3.setTaskReferenceName("xdt2");
        HashMap<String, Object> dynamicTasksInput = new HashMap<>();
        dynamicTasksInput.put("xdt1", input1);
        dynamicTasksInput.put("xdt2", input2);
        dynamicTasksInput.put("dynamicTasks", Arrays.asList(wt2, wt3));
        dynamicTasksInput.put("dynamicTasksInput", dynamicTasksInput);
        // when
        when(parametersUtils.getTaskInput(anyMap(), any(WorkflowModel.class), any(), any()))
                .thenReturn(dynamicTasksInput);
        when(objectMapper.convertValue(any(), any(TypeReference.class)))
                .thenReturn(Arrays.asList(wt2, wt3));
        Pair<List<WorkflowTask>, Map<String, Map<String, Object>>> dynamicTasks =
                forkJoinDynamicTaskMapper.getDynamicForkTasksAndInput(
                        dynamicForkJoinToSchedule, new WorkflowModel(), "dynamicTasks");
        // then
        assertNotNull(dynamicTasks.getLeft());
    }
    /**
     * Current form failure: the "dynamicTasksInput" entry is null, which must terminate
     * the workflow.
     */
    @Test
    public void getDynamicForkTasksAndInputException() {
        // Given
        WorkflowTask dynamicForkJoinToSchedule = new WorkflowTask();
        dynamicForkJoinToSchedule.setType(TaskType.FORK_JOIN_DYNAMIC.name());
        dynamicForkJoinToSchedule.setTaskReferenceName("dynamicfanouttask");
        dynamicForkJoinToSchedule.setDynamicForkTasksParam("dynamicTasks");
        dynamicForkJoinToSchedule.setDynamicForkTasksInputParamName("dynamicTasksInput");
        dynamicForkJoinToSchedule
                .getInputParameters()
                .put("dynamicTasks", "dt1.output.dynamicTasks");
        dynamicForkJoinToSchedule
                .getInputParameters()
                .put("dynamicTasksInput", "dt1.output.dynamicTasksInput");
        Map<String, Object> input1 = new HashMap<>();
        input1.put("k1", "v1");
        WorkflowTask wt2 = new WorkflowTask();
        wt2.setName("junit_task_2");
        wt2.setTaskReferenceName("xdt1");
        Map<String, Object> input2 = new HashMap<>();
        input2.put("k2", "v2");
        WorkflowTask wt3 = new WorkflowTask();
        wt3.setName("junit_task_3");
        wt3.setTaskReferenceName("xdt2");
        HashMap<String, Object> dynamicTasksInput = new HashMap<>();
        dynamicTasksInput.put("xdt1", input1);
        dynamicTasksInput.put("xdt2", input2);
        dynamicTasksInput.put("dynamicTasks", Arrays.asList(wt2, wt3));
        // The missing (null) fork input is what should trigger the failure.
        dynamicTasksInput.put("dynamicTasksInput", null);
        when(parametersUtils.getTaskInput(anyMap(), any(WorkflowModel.class), any(), any()))
                .thenReturn(dynamicTasksInput);
        when(objectMapper.convertValue(any(), any(TypeReference.class)))
                .thenReturn(Arrays.asList(wt2, wt3));
        // then
        expectedException.expect(TerminateWorkflowException.class);
        // when
        forkJoinDynamicTaskMapper.getDynamicForkTasksAndInput(
                dynamicForkJoinToSchedule, new WorkflowModel(), "dynamicTasks");
    }
    /**
     * If the decider returns no tasks to schedule for a forked task (a bad state —
     * presumably caused by a duplicate task reference name, per the test name; confirm
     * against the mapper implementation), the workflow must terminate.
     */
    @Test
    public void testDynamicTaskDuplicateTaskRefName() {
        WorkflowDef def = new WorkflowDef();
        def.setName("DYNAMIC_FORK_JOIN_WF");
        def.setDescription(def.getName());
        def.setVersion(1);
        def.setInputParameters(Arrays.asList("param1", "param2"));
        WorkflowModel workflowModel = new WorkflowModel();
        workflowModel.setWorkflowDefinition(def);
        WorkflowTask dynamicForkJoinToSchedule = new WorkflowTask();
        dynamicForkJoinToSchedule.setType(TaskType.FORK_JOIN_DYNAMIC.name());
        dynamicForkJoinToSchedule.setTaskReferenceName("dynamicfanouttask");
        dynamicForkJoinToSchedule.setDynamicForkTasksParam("dynamicTasks");
        dynamicForkJoinToSchedule.setDynamicForkTasksInputParamName("dynamicTasksInput");
        dynamicForkJoinToSchedule
                .getInputParameters()
                .put("dynamicTasks", "dt1.output.dynamicTasks");
        dynamicForkJoinToSchedule
                .getInputParameters()
                .put("dynamicTasksInput", "dt1.output.dynamicTasksInput");
        WorkflowTask join = new WorkflowTask();
        join.setType(TaskType.JOIN.name());
        join.setTaskReferenceName("dynamictask_join");
        def.getTasks().add(dynamicForkJoinToSchedule);
        def.getTasks().add(join);
        Map<String, Object> input1 = new HashMap<>();
        input1.put("k1", "v1");
        WorkflowTask wt2 = new WorkflowTask();
        wt2.setName("junit_task_2");
        wt2.setTaskReferenceName("xdt1");
        Map<String, Object> input2 = new HashMap<>();
        input2.put("k2", "v2");
        WorkflowTask wt3 = new WorkflowTask();
        wt3.setName("junit_task_3");
        wt3.setTaskReferenceName("xdt2");
        HashMap<String, Object> dynamicTasksInput = new HashMap<>();
        dynamicTasksInput.put("xdt1", input1);
        dynamicTasksInput.put("xdt2", input2);
        dynamicTasksInput.put("dynamicTasks", Arrays.asList(wt2, wt3));
        dynamicTasksInput.put("dynamicTasksInput", dynamicTasksInput);
        // dynamic
        when(parametersUtils.getTaskInput(anyMap(), any(WorkflowModel.class), any(), any()))
                .thenReturn(dynamicTasksInput);
        when(objectMapper.convertValue(any(), any(TypeReference.class)))
                .thenReturn(Arrays.asList(wt2, wt3));
        TaskModel simpleTask1 = new TaskModel();
        simpleTask1.setReferenceTaskName("xdt1");
        // Empty list, this is a bad state, workflow should terminate
        when(deciderService.getTasksToBeScheduled(workflowModel, wt2, 0))
                .thenReturn(new ArrayList<>());
        String taskId = idGenerator.generate();
        TaskMapperContext taskMapperContext =
                TaskMapperContext.newBuilder()
                        .withWorkflowModel(workflowModel)
                        .withWorkflowTask(dynamicForkJoinToSchedule)
                        .withRetryCount(0)
                        .withTaskId(taskId)
                        .withDeciderService(deciderService)
                        .build();
        expectedException.expect(TerminateWorkflowException.class);
        forkJoinDynamicTaskMapper.getMappedTasks(taskMapperContext);
    }
}
| 6,632 |
0 | Create_ds/conductor/core/src/test/java/com/netflix/conductor/core/sync | Create_ds/conductor/core/src/test/java/com/netflix/conductor/core/sync/local/LocalOnlyLockTest.java | /*
* Copyright 2020 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.sync.local;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import org.junit.After;
import org.junit.Ignore;
import org.junit.Test;
import org.springframework.boot.test.context.runner.ApplicationContextRunner;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
@Ignore
// Test always times out in CI environment
public class LocalOnlyLockTest {

    // Lock can be global since it uses global cache internally
    private final LocalOnlyLock localOnlyLock = new LocalOnlyLock();

    @After
    public void tearDown() {
        // Clean caches between tests as they are shared globally
        localOnlyLock.cache().invalidateAll();
        localOnlyLock.scheduledFutures().values().forEach(f -> f.cancel(false));
        localOnlyLock.scheduledFutures().clear();
    }

    /**
     * Acquire/release round-trip: acquiring registers a cache entry plus an expiry future;
     * releasing cancels the future; deleting evicts the cache entry.
     *
     * <p>FIX(review): JUnit's {@code assertEquals} takes {@code (expected, actual)} — the
     * original calls throughout this class had the arguments reversed, which produces
     * misleading failure messages. Boolean checks now use {@code assertTrue}/{@code
     * assertFalse}. Pass/fail behavior is unchanged.
     */
    @Test
    public void testLockUnlock() {
        final boolean acquired = localOnlyLock.acquireLock("a", 100, 10000, TimeUnit.MILLISECONDS);
        assertTrue(acquired);
        assertEquals(1, localOnlyLock.cache().estimatedSize());
        assertTrue(localOnlyLock.cache().get("a").isLocked());
        assertEquals(1, localOnlyLock.scheduledFutures().size());
        localOnlyLock.releaseLock("a");
        assertEquals(0, localOnlyLock.scheduledFutures().size());
        assertFalse(localOnlyLock.cache().get("a").isLocked());
        localOnlyLock.deleteLock("a");
        assertEquals(0, localOnlyLock.cache().estimatedSize());
    }

    /**
     * A lock held by another thread cannot be re-acquired within the timeout, while an
     * unrelated key can.
     */
    @Test(timeout = 10 * 10_000)
    public void testLockTimeout() throws InterruptedException, ExecutionException {
        final ExecutorService executor = Executors.newFixedThreadPool(1);
        // Acquire "c" on a different thread so this thread does not own it.
        executor.submit(
                        () -> {
                            localOnlyLock.acquireLock("c", 100, 1000, TimeUnit.MILLISECONDS);
                        })
                .get();
        assertTrue(localOnlyLock.acquireLock("d", 100, 1000, TimeUnit.MILLISECONDS));
        assertFalse(localOnlyLock.acquireLock("c", 100, 1000, TimeUnit.MILLISECONDS));
        assertEquals(2, localOnlyLock.scheduledFutures().size());
        // "c" must be released by its owning thread (see testReleaseFromAnotherThread).
        executor.submit(
                        () -> {
                            localOnlyLock.releaseLock("c");
                        })
                .get();
        localOnlyLock.releaseLock("d");
        assertEquals(0, localOnlyLock.scheduledFutures().size());
        // FIX(review): shut the executor down so its worker thread does not leak.
        executor.shutdown();
    }

    /**
     * Releasing a lock owned by another thread must throw {@link
     * IllegalMonitorStateException}.
     *
     * <p>NOTE(review): {@code fail()} here is statically imported from JUnit 5
     * ({@code org.junit.jupiter.api.Assertions}) while the rest of the class uses JUnit 4
     * asserts — consider unifying the imports.
     */
    @Test(timeout = 10 * 10_000)
    public void testReleaseFromAnotherThread() throws InterruptedException, ExecutionException {
        final ExecutorService executor = Executors.newFixedThreadPool(1);
        executor.submit(
                        () -> {
                            localOnlyLock.acquireLock("c", 100, 10000, TimeUnit.MILLISECONDS);
                        })
                .get();
        try {
            localOnlyLock.releaseLock("c");
        } catch (IllegalMonitorStateException e) {
            // expected
            localOnlyLock.deleteLock("c");
            return;
        } finally {
            // Always release on the owning thread so the lock is not leaked across tests.
            executor.submit(
                            () -> {
                                localOnlyLock.releaseLock("c");
                            })
                    .get();
            // FIX(review): shut the executor down so its worker thread does not leak.
            executor.shutdown();
        }
        fail();
    }

    /** After the lease expires and the lock is released, the key can be re-acquired. */
    @Test(timeout = 10 * 10_000)
    public void testLockLeaseWithRelease() throws Exception {
        localOnlyLock.acquireLock("b", 1000, 1000, TimeUnit.MILLISECONDS);
        localOnlyLock.releaseLock("b");
        // Wait for lease to run out and also call release
        Thread.sleep(2000);
        localOnlyLock.acquireLock("b");
        assertTrue(localOnlyLock.cache().get("b").isLocked());
        localOnlyLock.releaseLock("b");
    }

    /** Releasing a never-acquired key must be an idempotent no-op, not an error. */
    @Test
    public void testRelease() {
        localOnlyLock.releaseLock("x54as4d2;23'4");
        localOnlyLock.releaseLock("x54as4d2;23'4");
        assertFalse(localOnlyLock.cache().get("x54as4d2;23'4").isLocked());
    }

    /**
     * Repeated short-lease acquisitions on dead threads must not prevent a later
     * acquisition once the leases have lapsed.
     */
    @Test(timeout = 10 * 10_000)
    public void testLockLeaseTime() throws InterruptedException {
        for (int i = 0; i < 10; i++) {
            final Thread thread =
                    new Thread(
                            () -> {
                                localOnlyLock.acquireLock("a", 1000, 100, TimeUnit.MILLISECONDS);
                            });
            thread.start();
            thread.join();
        }
        localOnlyLock.acquireLock("a");
        assertTrue(localOnlyLock.cache().get("a").isLocked());
        localOnlyLock.releaseLock("a");
        localOnlyLock.deleteLock("a");
    }

    /** The Spring configuration must expose a LocalOnlyLock bean when the type is selected. */
    @Test
    public void testLockConfiguration() {
        new ApplicationContextRunner()
                .withPropertyValues("conductor.workflow-execution-lock.type=local_only")
                .withUserConfiguration(LocalOnlyLockConfiguration.class)
                .run(
                        context -> {
                            LocalOnlyLock lock = context.getBean(LocalOnlyLock.class);
                            assertNotNull(lock);
                        });
    }
}
| 6,633 |
0 | Create_ds/conductor/core/src/test/java/com/netflix/conductor/core | Create_ds/conductor/core/src/test/java/com/netflix/conductor/core/dal/ExecutionDAOFacadeTest.java | /*
* Copyright 2022 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.dal;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.UUID;
import org.apache.commons.io.IOUtils;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringRunner;
import com.netflix.conductor.common.config.TestObjectMapperConfiguration;
import com.netflix.conductor.common.metadata.events.EventExecution;
import com.netflix.conductor.common.run.SearchResult;
import com.netflix.conductor.common.run.Workflow;
import com.netflix.conductor.common.utils.ExternalPayloadStorage;
import com.netflix.conductor.core.config.ConductorProperties;
import com.netflix.conductor.core.exception.TerminateWorkflowException;
import com.netflix.conductor.core.execution.TestDeciderService;
import com.netflix.conductor.core.utils.ExternalPayloadStorageUtils;
import com.netflix.conductor.dao.*;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import com.fasterxml.jackson.databind.ObjectMapper;
import static org.junit.Assert.*;
import static org.mockito.ArgumentMatchers.*;
import static org.mockito.Mockito.*;
@ContextConfiguration(classes = {TestObjectMapperConfiguration.class})
@RunWith(SpringRunner.class)
public class ExecutionDAOFacadeTest {

    private ExecutionDAO executionDAO;
    private IndexDAO indexDAO;
    private ExecutionDAOFacade executionDAOFacade;
    private ExternalPayloadStorageUtils externalPayloadStorageUtils;

    @Autowired private ObjectMapper objectMapper;

    /**
     * Wires the facade under test with mocked DAOs. Event-execution indexing and async
     * indexing are enabled so the async index paths are exercised by the tests below.
     */
    @Before
    public void setUp() {
        executionDAO = mock(ExecutionDAO.class);
        QueueDAO queueDAO = mock(QueueDAO.class);
        indexDAO = mock(IndexDAO.class);
        externalPayloadStorageUtils = mock(ExternalPayloadStorageUtils.class);
        RateLimitingDAO rateLimitingDao = mock(RateLimitingDAO.class);
        ConcurrentExecutionLimitDAO concurrentExecutionLimitDAO =
                mock(ConcurrentExecutionLimitDAO.class);
        PollDataDAO pollDataDAO = mock(PollDataDAO.class);
        ConductorProperties properties = mock(ConductorProperties.class);
        when(properties.isEventExecutionIndexingEnabled()).thenReturn(true);
        when(properties.isAsyncIndexingEnabled()).thenReturn(true);
        executionDAOFacade =
                new ExecutionDAOFacade(
                        executionDAO,
                        queueDAO,
                        indexDAO,
                        rateLimitingDao,
                        concurrentExecutionLimitDAO,
                        pollDataDAO,
                        objectMapper,
                        properties,
                        externalPayloadStorageUtils);
    }

    /** A hit in the primary store must not consult the index. */
    @Test
    public void testGetWorkflow() throws Exception {
        when(executionDAO.getWorkflow(any(), anyBoolean())).thenReturn(new WorkflowModel());
        Workflow workflow = executionDAOFacade.getWorkflow("workflowId", true);
        assertNotNull(workflow);
        verify(indexDAO, never()).get(any(), any());
    }

    /**
     * When the primary store misses, the facade must fall back to the archived JSON
     * document stored in the index and deserialize it into a {@link WorkflowModel}.
     */
    @Test
    public void testGetWorkflowModel() throws Exception {
        // Primary-store hit: index untouched.
        when(executionDAO.getWorkflow(any(), anyBoolean())).thenReturn(new WorkflowModel());
        WorkflowModel workflowModel = executionDAOFacade.getWorkflowModel("workflowId", true);
        assertNotNull(workflowModel);
        verify(indexDAO, never()).get(any(), any());

        // Primary-store miss: fall back to the archived copy in the index.
        when(executionDAO.getWorkflow(any(), anyBoolean())).thenReturn(null);
        String jsonString;
        // try-with-resources so the classpath stream is always closed.
        try (InputStream stream =
                ExecutionDAOFacadeTest.class.getResourceAsStream("/test.json")) {
            assertNotNull("test fixture /test.json is missing from the classpath", stream);
            jsonString = new String(IOUtils.toByteArray(stream));
        }
        when(indexDAO.get(any(), any())).thenReturn(jsonString);
        workflowModel = executionDAOFacade.getWorkflowModel("workflowId", true);
        assertNotNull(workflowModel);
        verify(indexDAO, times(1)).get(any(), any());
    }

    /**
     * If the execution store supports cross-workflow search, correlation-id lookups go
     * straight to it; otherwise the facade searches the index first and then fetches
     * each workflow by id.
     */
    @Test
    public void testGetWorkflowsByCorrelationId() {
        when(executionDAO.canSearchAcrossWorkflows()).thenReturn(true);
        when(executionDAO.getWorkflowsByCorrelationId(any(), any(), anyBoolean()))
                .thenReturn(Collections.singletonList(new WorkflowModel()));
        List<Workflow> workflows =
                executionDAOFacade.getWorkflowsByCorrelationId(
                        "workflowName", "correlationId", true);
        assertNotNull(workflows);
        assertEquals(1, workflows.size());
        verify(indexDAO, never())
                .searchWorkflows(anyString(), anyString(), anyInt(), anyInt(), any());

        // Store cannot search across workflows: the index supplies candidate ids.
        when(executionDAO.canSearchAcrossWorkflows()).thenReturn(false);
        List<String> workflowIds = new ArrayList<>();
        workflowIds.add("workflowId");
        SearchResult<String> searchResult = new SearchResult<>();
        searchResult.setResults(workflowIds);
        when(indexDAO.searchWorkflows(anyString(), anyString(), anyInt(), anyInt(), any()))
                .thenReturn(searchResult);
        when(executionDAO.getWorkflow("workflowId", true)).thenReturn(new WorkflowModel());
        workflows =
                executionDAOFacade.getWorkflowsByCorrelationId(
                        "workflowName", "correlationId", true);
        assertNotNull(workflows);
        assertEquals(1, workflows.size());
    }

    /**
     * Plain removal (archiveWorkflow=false): workflow is deleted from the store and
     * asynchronously removed from the index; nothing is re-indexed.
     */
    @Test
    public void testRemoveWorkflow() {
        WorkflowModel workflow = new WorkflowModel();
        workflow.setWorkflowId("workflowId");
        workflow.setStatus(WorkflowModel.Status.COMPLETED);
        TaskModel task = new TaskModel();
        task.setTaskId("taskId");
        workflow.setTasks(Collections.singletonList(task));
        when(executionDAO.getWorkflow(anyString(), anyBoolean())).thenReturn(workflow);
        executionDAOFacade.removeWorkflow("workflowId", false);
        verify(executionDAO, times(1)).removeWorkflow(anyString());
        verify(executionDAO, never()).removeTask(anyString());
        verify(indexDAO, never()).updateWorkflow(anyString(), any(), any());
        verify(indexDAO, never()).updateTask(anyString(), anyString(), any(), any());
        verify(indexDAO, times(1)).asyncRemoveWorkflow(anyString());
        verify(indexDAO, times(1)).asyncRemoveTask(anyString(), anyString());
    }

    /**
     * Archival removal (archiveWorkflow=true): the workflow and its tasks are written to
     * the index before the workflow is deleted from the store; index entries are kept.
     * The completed.json fixture contains 15 tasks, hence the updateTask count.
     */
    @Test
    public void testArchiveWorkflow() throws Exception {
        WorkflowModel workflow;
        // try-with-resources so the classpath stream is always closed.
        try (InputStream stream =
                TestDeciderService.class.getResourceAsStream("/completed.json")) {
            assertNotNull("test fixture /completed.json is missing from the classpath", stream);
            workflow = objectMapper.readValue(stream, WorkflowModel.class);
        }
        when(executionDAO.getWorkflow(anyString(), anyBoolean())).thenReturn(workflow);
        executionDAOFacade.removeWorkflow("workflowId", true);
        verify(executionDAO, times(1)).removeWorkflow(anyString());
        verify(executionDAO, never()).removeTask(anyString());
        verify(indexDAO, times(1)).updateWorkflow(anyString(), any(), any());
        verify(indexDAO, times(15)).updateTask(anyString(), anyString(), any(), any());
        verify(indexDAO, never()).removeWorkflow(anyString());
        verify(indexDAO, never()).removeTask(anyString(), anyString());
    }

    /** Event executions are only (async-)indexed when the store accepted them. */
    @Test
    public void testAddEventExecution() {
        when(executionDAO.addEventExecution(any())).thenReturn(false);
        boolean added = executionDAOFacade.addEventExecution(new EventExecution());
        assertFalse(added);
        verify(indexDAO, never()).addEventExecution(any());

        when(executionDAO.addEventExecution(any())).thenReturn(true);
        added = executionDAOFacade.addEventExecution(new EventExecution());
        assertTrue(added);
        verify(indexDAO, times(1)).asyncAddEventExecution(any());
    }

    /** A payload-verification failure must propagate as TerminateWorkflowException. */
    @Test(expected = TerminateWorkflowException.class)
    public void testUpdateTaskThrowsTerminateWorkflowException() {
        TaskModel task = new TaskModel();
        task.setScheduledTime(1L);
        task.setSeq(1);
        task.setTaskId(UUID.randomUUID().toString());
        task.setTaskDefName("task1");
        doThrow(new TerminateWorkflowException("failed"))
                .when(externalPayloadStorageUtils)
                .verifyAndUpload(task, ExternalPayloadStorage.PayloadType.TASK_OUTPUT);
        executionDAOFacade.updateTask(task);
    }
}
| 6,634 |
0 | Create_ds/conductor/core/src/test/java/com/netflix/conductor/core | Create_ds/conductor/core/src/test/java/com/netflix/conductor/core/events/TestDefaultEventProcessor.java | /*
* Copyright 2022 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.events;
import java.util.*;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.stubbing.Answer;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import org.springframework.retry.support.RetryTemplate;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringRunner;
import com.netflix.conductor.common.config.TestObjectMapperConfiguration;
import com.netflix.conductor.common.metadata.events.EventExecution;
import com.netflix.conductor.common.metadata.events.EventHandler;
import com.netflix.conductor.common.metadata.events.EventHandler.Action;
import com.netflix.conductor.common.metadata.events.EventHandler.Action.Type;
import com.netflix.conductor.common.metadata.events.EventHandler.StartWorkflow;
import com.netflix.conductor.common.metadata.events.EventHandler.TaskDetails;
import com.netflix.conductor.core.config.ConductorCoreConfiguration;
import com.netflix.conductor.core.config.ConductorProperties;
import com.netflix.conductor.core.events.queue.Message;
import com.netflix.conductor.core.events.queue.ObservableQueue;
import com.netflix.conductor.core.exception.TransientException;
import com.netflix.conductor.core.execution.StartWorkflowInput;
import com.netflix.conductor.core.execution.WorkflowExecutor;
import com.netflix.conductor.core.execution.evaluators.Evaluator;
import com.netflix.conductor.core.execution.evaluators.JavascriptEvaluator;
import com.netflix.conductor.core.operation.StartWorkflowOperation;
import com.netflix.conductor.core.utils.ExternalPayloadStorageUtils;
import com.netflix.conductor.core.utils.JsonUtils;
import com.netflix.conductor.core.utils.ParametersUtils;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import com.netflix.conductor.service.ExecutionService;
import com.netflix.conductor.service.MetadataService;
import com.fasterxml.jackson.databind.ObjectMapper;
import static org.junit.Assert.*;
import static org.mockito.ArgumentMatchers.*;
import static org.mockito.Mockito.*;
@ContextConfiguration(
classes = {
TestObjectMapperConfiguration.class,
TestDefaultEventProcessor.TestConfiguration.class,
ConductorCoreConfiguration.class
})
@RunWith(SpringRunner.class)
public class TestDefaultEventProcessor {

    private String event;
    private ObservableQueue queue;
    private MetadataService metadataService;
    private ExecutionService executionService;
    private WorkflowExecutor workflowExecutor;
    private StartWorkflowOperation startWorkflowOperation;
    private ExternalPayloadStorageUtils externalPayloadStorageUtils;
    private SimpleActionProcessor actionProcessor;
    private ParametersUtils parametersUtils;
    private JsonUtils jsonUtils;
    private ConductorProperties properties;
    private Message message;

    @Autowired private Map<String, Evaluator> evaluators;

    @Autowired private ObjectMapper objectMapper;

    @Autowired
    private @Qualifier("onTransientErrorRetryTemplate") RetryTemplate retryTemplate;

    @Configuration
    @ComponentScan(basePackageClasses = {Evaluator.class}) // load all Evaluator beans
    public static class TestConfiguration {}

    @Before
    public void setup() {
        event = "sqs:arn:account090:sqstest1";
        String queueURI = "arn:account090:sqstest1";
        metadataService = mock(MetadataService.class);
        executionService = mock(ExecutionService.class);
        workflowExecutor = mock(WorkflowExecutor.class);
        startWorkflowOperation = mock(StartWorkflowOperation.class);
        externalPayloadStorageUtils = mock(ExternalPayloadStorageUtils.class);
        actionProcessor = mock(SimpleActionProcessor.class);
        parametersUtils = new ParametersUtils(objectMapper);
        jsonUtils = new JsonUtils(objectMapper);
        queue = mock(ObservableQueue.class);
        // SNS-style envelope whose inner "Message" attribute is itself JSON text.
        message =
                new Message(
                        "t0",
                        "{\"Type\":\"Notification\",\"MessageId\":\"7e4e6415-01e9-5caf-abaa-37fd05d446ff\",\"Message\":\"{\\n \\\"testKey1\\\": \\\"level1\\\",\\n \\\"metadata\\\": {\\n \\\"testKey2\\\": 123456 }\\n }\",\"Timestamp\":\"2018-08-10T21:22:05.029Z\",\"SignatureVersion\":\"1\"}",
                        "t0");
        when(queue.getURI()).thenReturn(queueURI);
        when(queue.getName()).thenReturn(queueURI);
        when(queue.getType()).thenReturn("sqs");
        properties = mock(ConductorProperties.class);
        when(properties.isEventMessageIndexingEnabled()).thenReturn(true);
        when(properties.getEventProcessorThreadCount()).thenReturn(2);
    }

    /** Builds a real (non-mock) SimpleActionProcessor wired to the mocked collaborators. */
    private SimpleActionProcessor realActionProcessor() {
        return new SimpleActionProcessor(
                workflowExecutor, parametersUtils, jsonUtils, startWorkflowOperation);
    }

    /** Creates the processor under test around the supplied action processor. */
    private DefaultEventProcessor createProcessor(SimpleActionProcessor processor) {
        return new DefaultEventProcessor(
                executionService,
                metadataService,
                processor,
                jsonUtils,
                properties,
                objectMapper,
                evaluators,
                retryTemplate);
    }

    /**
     * Stubs startWorkflowOperation.execute to flip the returned flag (and return
     * workflowId) when invoked with input matching the action's workflow name/version
     * and this test's event.
     */
    private AtomicBoolean expectWorkflowStarted(Action startWorkflowAction, String workflowId) {
        AtomicBoolean started = new AtomicBoolean(false);
        doAnswer(
                        (Answer<String>)
                                invocation -> {
                                    started.set(true);
                                    return workflowId;
                                })
                .when(startWorkflowOperation)
                .execute(
                        argThat(
                                argument ->
                                        startWorkflowAction
                                                        .getStart_workflow()
                                                        .getName()
                                                        .equals(argument.getName())
                                                && startWorkflowAction
                                                        .getStart_workflow()
                                                        .getVersion()
                                                        .equals(argument.getVersion())
                                                && event.equals(argument.getEvent())));
        return started;
    }

    /**
     * Builds a conditioned handler with one start_workflow action whose input uses
     * ${...} placeholders. evaluatorType may be null (default condition evaluation).
     */
    private EventHandler conditionedStartWorkflowHandler(String evaluatorType) {
        EventHandler eventHandler = new EventHandler();
        eventHandler.setName("cms_intermediate_video_ingest_handler");
        eventHandler.setActive(true);
        eventHandler.setEvent("sqs:dev_cms_asset_ingest_queue");
        if (evaluatorType != null) {
            eventHandler.setEvaluatorType(evaluatorType);
        }
        eventHandler.setCondition(
                "$.Message.testKey1 == 'level1' && $.Message.metadata.testKey2 == 123456");
        Map<String, Object> workflowInput = new LinkedHashMap<>();
        workflowInput.put("param1", "${Message.metadata.testKey2}");
        workflowInput.put("param2", "SQS-${MessageId}");
        Action startWorkflowAction = new Action();
        startWorkflowAction.setAction(Type.start_workflow);
        startWorkflowAction.setStart_workflow(new StartWorkflow());
        startWorkflowAction.getStart_workflow().setName("cms_artwork_automation");
        startWorkflowAction.getStart_workflow().setVersion(1);
        startWorkflowAction.getStart_workflow().setInput(workflowInput);
        startWorkflowAction.setExpandInlineJSON(true);
        eventHandler.getActions().add(startWorkflowAction);
        // Override to the event this test's queue actually delivers.
        eventHandler.setEvent(event);
        return eventHandler;
    }

    /** Builds a handler with a single complete_task action targeting "task_x". */
    private EventHandler completeTaskHandler() {
        EventHandler eventHandler = new EventHandler();
        eventHandler.setName(UUID.randomUUID().toString());
        eventHandler.setActive(true);
        eventHandler.setEvent(event);
        Action completeTaskAction = new Action();
        completeTaskAction.setAction(Type.complete_task);
        completeTaskAction.setComplete_task(new TaskDetails());
        completeTaskAction.getComplete_task().setTaskRefName("task_x");
        completeTaskAction.getComplete_task().setWorkflowId(UUID.randomUUID().toString());
        completeTaskAction.getComplete_task().setOutput(new HashMap<>());
        eventHandler.getActions().add(completeTaskAction);
        return eventHandler;
    }

    /**
     * Happy path: a handler with start_workflow and complete_task actions executes both,
     * the message is acked and never nacked or republished.
     */
    @Test
    public void testEventProcessor() {
        // setup event handler with two actions
        EventHandler eventHandler = new EventHandler();
        eventHandler.setName(UUID.randomUUID().toString());
        eventHandler.setActive(true);
        Map<String, String> taskToDomain = new HashMap<>();
        taskToDomain.put("*", "dev");
        Action startWorkflowAction = new Action();
        startWorkflowAction.setAction(Type.start_workflow);
        startWorkflowAction.setStart_workflow(new StartWorkflow());
        startWorkflowAction.getStart_workflow().setName("workflow_x");
        startWorkflowAction.getStart_workflow().setVersion(1);
        startWorkflowAction.getStart_workflow().setTaskToDomain(taskToDomain);
        eventHandler.getActions().add(startWorkflowAction);
        Action completeTaskAction = new Action();
        completeTaskAction.setAction(Type.complete_task);
        completeTaskAction.setComplete_task(new TaskDetails());
        completeTaskAction.getComplete_task().setTaskRefName("task_x");
        completeTaskAction.getComplete_task().setWorkflowId(UUID.randomUUID().toString());
        completeTaskAction.getComplete_task().setOutput(new HashMap<>());
        eventHandler.getActions().add(completeTaskAction);
        eventHandler.setEvent(event);

        when(metadataService.getEventHandlersForEvent(event, true))
                .thenReturn(Collections.singletonList(eventHandler));
        when(executionService.addEventExecution(any())).thenReturn(true);
        when(queue.rePublishIfNoAck()).thenReturn(false);

        AtomicBoolean started =
                expectWorkflowStarted(startWorkflowAction, UUID.randomUUID().toString());

        AtomicBoolean completed = new AtomicBoolean(false);
        doAnswer(
                        (Answer<String>)
                                invocation -> {
                                    completed.set(true);
                                    return null;
                                })
                .when(workflowExecutor)
                .updateTask(any());

        TaskModel task = new TaskModel();
        task.setReferenceTaskName(completeTaskAction.getComplete_task().getTaskRefName());
        WorkflowModel workflow = new WorkflowModel();
        workflow.setTasks(Collections.singletonList(task));
        when(workflowExecutor.getWorkflow(
                        completeTaskAction.getComplete_task().getWorkflowId(), true))
                .thenReturn(workflow);
        doNothing().when(externalPayloadStorageUtils).verifyAndUpload(any(), any());

        DefaultEventProcessor eventProcessor = createProcessor(realActionProcessor());
        eventProcessor.handle(queue, message);
        assertTrue(started.get());
        assertTrue(completed.get());
        verify(queue, atMost(1)).ack(any());
        verify(queue, never()).nack(any());
        verify(queue, never()).publish(any());
    }

    /** A handler whose condition matches the payload starts the workflow. */
    @Test
    public void testEventHandlerWithCondition() {
        EventHandler eventHandler = conditionedStartWorkflowHandler(null);
        when(metadataService.getEventHandlersForEvent(event, true))
                .thenReturn(Collections.singletonList(eventHandler));
        when(executionService.addEventExecution(any())).thenReturn(true);
        when(queue.rePublishIfNoAck()).thenReturn(false);

        AtomicBoolean started =
                expectWorkflowStarted(
                        eventHandler.getActions().get(0), UUID.randomUUID().toString());

        DefaultEventProcessor eventProcessor = createProcessor(realActionProcessor());
        eventProcessor.handle(queue, message);
        assertTrue(started.get());
    }

    /** Same as above, but the condition is evaluated by the Javascript evaluator. */
    @Test
    public void testEventHandlerWithConditionEvaluator() {
        EventHandler eventHandler = conditionedStartWorkflowHandler(JavascriptEvaluator.NAME);
        when(metadataService.getEventHandlersForEvent(event, true))
                .thenReturn(Collections.singletonList(eventHandler));
        when(executionService.addEventExecution(any())).thenReturn(true);
        when(queue.rePublishIfNoAck()).thenReturn(false);

        AtomicBoolean started =
                expectWorkflowStarted(
                        eventHandler.getActions().get(0), UUID.randomUUID().toString());

        DefaultEventProcessor eventProcessor = createProcessor(realActionProcessor());
        eventProcessor.handle(queue, message);
        assertTrue(started.get());
    }

    /** A retriable (transient) failure: message is republished, never acked/nacked. */
    @Test
    public void testEventProcessorWithRetriableError() {
        EventHandler eventHandler = completeTaskHandler();
        when(queue.rePublishIfNoAck()).thenReturn(false);
        when(metadataService.getEventHandlersForEvent(event, true))
                .thenReturn(Collections.singletonList(eventHandler));
        when(executionService.addEventExecution(any())).thenReturn(true);
        when(actionProcessor.execute(any(), any(), any(), any()))
                .thenThrow(new TransientException("some retriable error"));

        DefaultEventProcessor eventProcessor = createProcessor(actionProcessor);
        eventProcessor.handle(queue, message);
        verify(queue, never()).ack(any());
        verify(queue, never()).nack(any());
        verify(queue, atLeastOnce()).publish(any());
    }

    /** A non-retriable failure: message is acked (dropped), never republished. */
    @Test
    public void testEventProcessorWithNonRetriableError() {
        EventHandler eventHandler = completeTaskHandler();
        when(metadataService.getEventHandlersForEvent(event, true))
                .thenReturn(Collections.singletonList(eventHandler));
        when(executionService.addEventExecution(any())).thenReturn(true);
        when(actionProcessor.execute(any(), any(), any(), any()))
                .thenThrow(new IllegalArgumentException("some non-retriable error"));

        DefaultEventProcessor eventProcessor = createProcessor(actionProcessor);
        eventProcessor.handle(queue, message);
        verify(queue, atMost(1)).ack(any());
        verify(queue, never()).publish(any());
    }

    /**
     * An action that blows up with UnsupportedOperationException is executed once and
     * the event execution is marked FAILED with the exception recorded in its output.
     * Note: the Action deliberately has no action type set ("invalid action").
     */
    @Test
    public void testExecuteInvalidAction() {
        AtomicInteger executeInvoked = new AtomicInteger(0);
        doAnswer(
                        (Answer<Map<String, Object>>)
                                invocation -> {
                                    executeInvoked.incrementAndGet();
                                    throw new UnsupportedOperationException("error");
                                })
                .when(actionProcessor)
                .execute(any(), any(), any(), any());

        DefaultEventProcessor eventProcessor = createProcessor(actionProcessor);
        EventExecution eventExecution = new EventExecution("id", "messageId");
        eventExecution.setName("handler");
        eventExecution.setStatus(EventExecution.Status.IN_PROGRESS);
        eventExecution.setEvent("event");
        Action action = new Action();
        eventExecution.setAction(Type.start_workflow);
        eventProcessor.execute(eventExecution, action, "payload");
        assertEquals(1, executeInvoked.get());
        assertEquals(EventExecution.Status.FAILED, eventExecution.getStatus());
        assertNotNull(eventExecution.getOutput().get("exception"));
    }

    /** Non-retriable exceptions are not retried: exactly one invocation, then FAILED. */
    @Test
    public void testExecuteNonRetriableException() {
        AtomicInteger executeInvoked = new AtomicInteger(0);
        doAnswer(
                        (Answer<Map<String, Object>>)
                                invocation -> {
                                    executeInvoked.incrementAndGet();
                                    throw new IllegalArgumentException(
                                            "some non-retriable error");
                                })
                .when(actionProcessor)
                .execute(any(), any(), any(), any());

        DefaultEventProcessor eventProcessor = createProcessor(actionProcessor);
        EventExecution eventExecution = new EventExecution("id", "messageId");
        eventExecution.setStatus(EventExecution.Status.IN_PROGRESS);
        eventExecution.setEvent("event");
        eventExecution.setName("handler");
        Action action = new Action();
        action.setAction(Type.start_workflow);
        eventExecution.setAction(Type.start_workflow);
        eventProcessor.execute(eventExecution, action, "payload");
        assertEquals(1, executeInvoked.get());
        assertEquals(EventExecution.Status.FAILED, eventExecution.getStatus());
        assertNotNull(eventExecution.getOutput().get("exception"));
    }

    /**
     * Transient exceptions are retried by the retry template: three invocations in
     * total, and no exception is recorded in the execution output.
     */
    @Test
    public void testExecuteTransientException() {
        AtomicInteger executeInvoked = new AtomicInteger(0);
        doAnswer(
                        (Answer<Map<String, Object>>)
                                invocation -> {
                                    executeInvoked.incrementAndGet();
                                    throw new TransientException("some retriable error");
                                })
                .when(actionProcessor)
                .execute(any(), any(), any(), any());

        DefaultEventProcessor eventProcessor = createProcessor(actionProcessor);
        EventExecution eventExecution = new EventExecution("id", "messageId");
        eventExecution.setStatus(EventExecution.Status.IN_PROGRESS);
        eventExecution.setEvent("event");
        Action action = new Action();
        action.setAction(Type.start_workflow);
        eventProcessor.execute(eventExecution, action, "payload");
        assertEquals(3, executeInvoked.get());
        assertNull(eventExecution.getOutput().get("exception"));
    }
}
| 6,635 |
0 | Create_ds/conductor/core/src/test/java/com/netflix/conductor/core | Create_ds/conductor/core/src/test/java/com/netflix/conductor/core/events/MockQueueProvider.java | /*
* Copyright 2020 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.events;
import org.springframework.lang.NonNull;
import com.netflix.conductor.core.events.queue.ObservableQueue;
public class MockQueueProvider implements EventQueueProvider {

    /** Queue type handed to every {@link MockObservableQueue} this provider creates. */
    private final String type;

    public MockQueueProvider(String type) {
        this.type = type;
    }

    @Override
    public String getQueueType() {
        // The provider itself is always registered under the "mock" type.
        return "mock";
    }

    @Override
    @NonNull
    public ObservableQueue getQueue(String queueURI) {
        // The mock reuses the URI as the queue name.
        String queueName = queueURI;
        return new MockObservableQueue(queueURI, queueName, type);
    }
}
| 6,636 |
0 | Create_ds/conductor/core/src/test/java/com/netflix/conductor/core | Create_ds/conductor/core/src/test/java/com/netflix/conductor/core/events/MockObservableQueue.java | /*
* Copyright 2020 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.events;
import java.util.Comparator;
import java.util.List;
import java.util.Set;
import java.util.TreeSet;
import java.util.stream.Collectors;
import com.netflix.conductor.core.events.queue.Message;
import com.netflix.conductor.core.events.queue.ObservableQueue;
import rx.Observable;
/** In-memory {@link ObservableQueue} used by event-processing tests. Not thread-safe. */
public class MockObservableQueue implements ObservableQueue {

    private final String uri;
    private final String name;
    private final String type;
    // Ordered by message id so observe() yields a deterministic sequence.
    private final Set<Message> messages = new TreeSet<>(Comparator.comparing(Message::getId));

    public MockObservableQueue(String uri, String name, String type) {
        this.uri = uri;
        this.name = name;
        this.type = type;
    }

    @Override
    public Observable<Message> observe() {
        return Observable.from(messages);
    }

    public String getType() {
        return type;
    }

    @Override
    public String getName() {
        return name;
    }

    @Override
    public String getURI() {
        return uri;
    }

    /** Removes the given messages from the queue and returns their ids. */
    @Override
    public List<String> ack(List<Message> msgs) {
        List<String> ackedIds =
                msgs.stream().map(Message::getId).collect(Collectors.toList());
        messages.removeAll(msgs);
        return ackedIds;
    }

    @Override
    public void publish(List<Message> messages) {
        this.messages.addAll(messages);
    }

    /** No-op: the mock has no redelivery timer. */
    @Override
    public void setUnackTimeout(Message message, long unackTimeout) {}

    @Override
    public long size() {
        return messages.size();
    }

    @Override
    public String toString() {
        return String.format("MockObservableQueue [uri=%s, name=%s, type=%s]", uri, name, type);
    }

    @Override
    public void start() {}

    @Override
    public void stop() {}

    /** The mock never reports itself as running. */
    @Override
    public boolean isRunning() {
        return false;
    }
}
| 6,637 |
0 | Create_ds/conductor/core/src/test/java/com/netflix/conductor/core | Create_ds/conductor/core/src/test/java/com/netflix/conductor/core/events/TestSimpleActionProcessor.java | /*
* Copyright 2022 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.events;
import java.util.HashMap;
import java.util.Map;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringRunner;
import com.netflix.conductor.common.config.TestObjectMapperConfiguration;
import com.netflix.conductor.common.metadata.events.EventHandler.Action;
import com.netflix.conductor.common.metadata.events.EventHandler.Action.Type;
import com.netflix.conductor.common.metadata.events.EventHandler.StartWorkflow;
import com.netflix.conductor.common.metadata.events.EventHandler.TaskDetails;
import com.netflix.conductor.common.metadata.tasks.TaskResult;
import com.netflix.conductor.common.metadata.tasks.TaskResult.Status;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.core.execution.StartWorkflowInput;
import com.netflix.conductor.core.execution.WorkflowExecutor;
import com.netflix.conductor.core.operation.StartWorkflowOperation;
import com.netflix.conductor.core.utils.ExternalPayloadStorageUtils;
import com.netflix.conductor.core.utils.JsonUtils;
import com.netflix.conductor.core.utils.ParametersUtils;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import com.fasterxml.jackson.databind.ObjectMapper;
import static org.junit.Assert.*;
import static org.mockito.ArgumentMatchers.*;
import static org.mockito.Mockito.*;
@ContextConfiguration(classes = {TestObjectMapperConfiguration.class})
@RunWith(SpringRunner.class)
public class TestSimpleActionProcessor {
private WorkflowExecutor workflowExecutor;
private ExternalPayloadStorageUtils externalPayloadStorageUtils;
private SimpleActionProcessor actionProcessor;
private StartWorkflowOperation startWorkflowOperation;
@Autowired private ObjectMapper objectMapper;
@Before
public void setup() {
externalPayloadStorageUtils = mock(ExternalPayloadStorageUtils.class);
workflowExecutor = mock(WorkflowExecutor.class);
startWorkflowOperation = mock(StartWorkflowOperation.class);
actionProcessor =
new SimpleActionProcessor(
workflowExecutor,
new ParametersUtils(objectMapper),
new JsonUtils(objectMapper),
startWorkflowOperation);
}
@SuppressWarnings({"unchecked", "rawtypes"})
@Test
public void testStartWorkflow_correlationId() throws Exception {
StartWorkflow startWorkflow = new StartWorkflow();
startWorkflow.setName("testWorkflow");
startWorkflow.getInput().put("testInput", "${testId}");
startWorkflow.setCorrelationId("${correlationId}");
Map<String, String> taskToDomain = new HashMap<>();
taskToDomain.put("*", "dev");
startWorkflow.setTaskToDomain(taskToDomain);
Action action = new Action();
action.setAction(Type.start_workflow);
action.setStart_workflow(startWorkflow);
Object payload =
objectMapper.readValue(
"{\"correlationId\":\"test-id\", \"testId\":\"test_1\"}", Object.class);
WorkflowDef workflowDef = new WorkflowDef();
workflowDef.setName("testWorkflow");
workflowDef.setVersion(1);
when(startWorkflowOperation.execute(any())).thenReturn("workflow_1");
Map<String, Object> output =
actionProcessor.execute(action, payload, "testEvent", "testMessage");
assertNotNull(output);
assertEquals("workflow_1", output.get("workflowId"));
ArgumentCaptor<StartWorkflowInput> startWorkflowInputArgumentCaptor =
ArgumentCaptor.forClass(StartWorkflowInput.class);
verify(startWorkflowOperation).execute(startWorkflowInputArgumentCaptor.capture());
StartWorkflowInput capturedValue = startWorkflowInputArgumentCaptor.getValue();
assertEquals("test_1", capturedValue.getWorkflowInput().get("testInput"));
assertEquals("test-id", capturedValue.getCorrelationId());
assertEquals(
"testMessage", capturedValue.getWorkflowInput().get("conductor.event.messageId"));
assertEquals("testEvent", capturedValue.getWorkflowInput().get("conductor.event.name"));
assertEquals(taskToDomain, capturedValue.getTaskToDomain());
}
@SuppressWarnings({"unchecked", "rawtypes"})
@Test
public void testStartWorkflow() throws Exception {
StartWorkflow startWorkflow = new StartWorkflow();
startWorkflow.setName("testWorkflow");
startWorkflow.getInput().put("testInput", "${testId}");
Map<String, String> taskToDomain = new HashMap<>();
taskToDomain.put("*", "dev");
startWorkflow.setTaskToDomain(taskToDomain);
Action action = new Action();
action.setAction(Type.start_workflow);
action.setStart_workflow(startWorkflow);
Object payload = objectMapper.readValue("{\"testId\":\"test_1\"}", Object.class);
WorkflowDef workflowDef = new WorkflowDef();
workflowDef.setName("testWorkflow");
workflowDef.setVersion(1);
when(startWorkflowOperation.execute(any())).thenReturn("workflow_1");
Map<String, Object> output =
actionProcessor.execute(action, payload, "testEvent", "testMessage");
assertNotNull(output);
assertEquals("workflow_1", output.get("workflowId"));
ArgumentCaptor<StartWorkflowInput> startWorkflowInputArgumentCaptor =
ArgumentCaptor.forClass(StartWorkflowInput.class);
verify(startWorkflowOperation).execute(startWorkflowInputArgumentCaptor.capture());
StartWorkflowInput capturedArgument = startWorkflowInputArgumentCaptor.getValue();
assertEquals("test_1", capturedArgument.getWorkflowInput().get("testInput"));
assertNull(capturedArgument.getCorrelationId());
assertEquals(
"testMessage",
capturedArgument.getWorkflowInput().get("conductor.event.messageId"));
assertEquals("testEvent", capturedArgument.getWorkflowInput().get("conductor.event.name"));
assertEquals(taskToDomain, capturedArgument.getTaskToDomain());
}
    /**
     * Verifies the complete_task action resolved by workflowId + taskRefName: output
     * expressions are evaluated against the event payload and the matched task is
     * completed via {@code updateTask}.
     */
    @Test
    public void testCompleteTask() throws Exception {
        TaskDetails taskDetails = new TaskDetails();
        taskDetails.setWorkflowId("${workflowId}");
        taskDetails.setTaskRefName("testTask");
        // someNEKey is intentionally absent from the payload below
        taskDetails.getOutput().put("someNEKey", "${Message.someNEKey}");
        taskDetails.getOutput().put("someKey", "${Message.someKey}");
        taskDetails.getOutput().put("someNullKey", "${Message.someNullKey}");
        Action action = new Action();
        action.setAction(Type.complete_task);
        action.setComplete_task(taskDetails);
        String payloadJson =
                "{\"workflowId\":\"workflow_1\",\"Message\":{\"someKey\":\"someData\",\"someNullKey\":null}}";
        Object payload = objectMapper.readValue(payloadJson, Object.class);
        TaskModel task = new TaskModel();
        task.setReferenceTaskName("testTask");
        WorkflowModel workflow = new WorkflowModel();
        workflow.getTasks().add(task);
        when(workflowExecutor.getWorkflow(eq("workflow_1"), anyBoolean())).thenReturn(workflow);
        doNothing().when(externalPayloadStorageUtils).verifyAndUpload(any(), any());
        actionProcessor.execute(action, payload, "testEvent", "testMessage");
        ArgumentCaptor<TaskResult> argumentCaptor = ArgumentCaptor.forClass(TaskResult.class);
        verify(workflowExecutor).updateTask(argumentCaptor.capture());
        assertEquals(Status.COMPLETED, argumentCaptor.getValue().getStatus());
        assertEquals(
                "testMessage",
                argumentCaptor.getValue().getOutputData().get("conductor.event.messageId"));
        assertEquals(
                "testEvent", argumentCaptor.getValue().getOutputData().get("conductor.event.name"));
        assertEquals("workflow_1", argumentCaptor.getValue().getOutputData().get("workflowId"));
        assertEquals("testTask", argumentCaptor.getValue().getOutputData().get("taskRefName"));
        assertEquals("someData", argumentCaptor.getValue().getOutputData().get("someKey"));
        // Assert values not in message are evaluated to null
        assertTrue("testTask", argumentCaptor.getValue().getOutputData().containsKey("someNEKey"));
        // Assert null values from message are kept
        assertTrue(
                "testTask", argumentCaptor.getValue().getOutputData().containsKey("someNullKey"));
        assertNull("testTask", argumentCaptor.getValue().getOutputData().get("someNullKey"));
    }
    /**
     * Same as {@link #testCompleteTask()} but the workflow task is a loop-over iteration
     * (reference name suffixed with "__1"); the action should still match it by the bare
     * taskRefName from the payload.
     */
    @Test
    public void testCompleteLoopOverTask() throws Exception {
        TaskDetails taskDetails = new TaskDetails();
        taskDetails.setWorkflowId("${workflowId}");
        taskDetails.setTaskRefName("testTask");
        // someNEKey is intentionally absent from the payload below
        taskDetails.getOutput().put("someNEKey", "${Message.someNEKey}");
        taskDetails.getOutput().put("someKey", "${Message.someKey}");
        taskDetails.getOutput().put("someNullKey", "${Message.someNullKey}");
        Action action = new Action();
        action.setAction(Type.complete_task);
        action.setComplete_task(taskDetails);
        String payloadJson =
                "{\"workflowId\":\"workflow_1\", \"taskRefName\":\"testTask\", \"Message\":{\"someKey\":\"someData\",\"someNullKey\":null}}";
        Object payload = objectMapper.readValue(payloadJson, Object.class);
        TaskModel task = new TaskModel();
        // iteration 1 of a DO_WHILE loop: stored reference name carries the "__1" suffix
        task.setIteration(1);
        task.setReferenceTaskName("testTask__1");
        WorkflowModel workflow = new WorkflowModel();
        workflow.getTasks().add(task);
        when(workflowExecutor.getWorkflow(eq("workflow_1"), anyBoolean())).thenReturn(workflow);
        doNothing().when(externalPayloadStorageUtils).verifyAndUpload(any(), any());
        actionProcessor.execute(action, payload, "testEvent", "testMessage");
        ArgumentCaptor<TaskResult> argumentCaptor = ArgumentCaptor.forClass(TaskResult.class);
        verify(workflowExecutor).updateTask(argumentCaptor.capture());
        assertEquals(Status.COMPLETED, argumentCaptor.getValue().getStatus());
        assertEquals(
                "testMessage",
                argumentCaptor.getValue().getOutputData().get("conductor.event.messageId"));
        assertEquals(
                "testEvent", argumentCaptor.getValue().getOutputData().get("conductor.event.name"));
        assertEquals("workflow_1", argumentCaptor.getValue().getOutputData().get("workflowId"));
        assertEquals("testTask", argumentCaptor.getValue().getOutputData().get("taskRefName"));
        assertEquals("someData", argumentCaptor.getValue().getOutputData().get("someKey"));
        // Assert values not in message are evaluated to null
        assertTrue("testTask", argumentCaptor.getValue().getOutputData().containsKey("someNEKey"));
        // Assert null values from message are kept
        assertTrue(
                "testTask", argumentCaptor.getValue().getOutputData().containsKey("someNullKey"));
        assertNull("testTask", argumentCaptor.getValue().getOutputData().get("someNullKey"));
    }
    /**
     * Verifies the complete_task action when the task is resolved directly by taskId
     * (instead of workflowId + taskRefName); the task is completed via {@code updateTask}.
     */
    @Test
    public void testCompleteTaskByTaskId() throws Exception {
        TaskDetails taskDetails = new TaskDetails();
        taskDetails.setWorkflowId("${workflowId}");
        taskDetails.setTaskId("${taskId}");
        Action action = new Action();
        action.setAction(Type.complete_task);
        action.setComplete_task(taskDetails);
        Object payload =
                objectMapper.readValue(
                        "{\"workflowId\":\"workflow_1\", \"taskId\":\"task_1\"}", Object.class);
        TaskModel task = new TaskModel();
        task.setTaskId("task_1");
        task.setReferenceTaskName("testTask");
        // lookup goes through getTask(taskId), not getWorkflow
        when(workflowExecutor.getTask(eq("task_1"))).thenReturn(task);
        doNothing().when(externalPayloadStorageUtils).verifyAndUpload(any(), any());
        actionProcessor.execute(action, payload, "testEvent", "testMessage");
        ArgumentCaptor<TaskResult> argumentCaptor = ArgumentCaptor.forClass(TaskResult.class);
        verify(workflowExecutor).updateTask(argumentCaptor.capture());
        assertEquals(Status.COMPLETED, argumentCaptor.getValue().getStatus());
        assertEquals(
                "testMessage",
                argumentCaptor.getValue().getOutputData().get("conductor.event.messageId"));
        assertEquals(
                "testEvent", argumentCaptor.getValue().getOutputData().get("conductor.event.name"));
        assertEquals("workflow_1", argumentCaptor.getValue().getOutputData().get("workflowId"));
        assertEquals("task_1", argumentCaptor.getValue().getOutputData().get("taskId"));
    }
}
/*
* Copyright 2020 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.events;
import java.util.HashMap;
import java.util.Map;
import org.junit.Test;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
public class TestScriptEval {

    /** Verifies boolean evaluation of payload-bound expressions via {@link ScriptEvaluator}. */
    @Test
    public void testScript() throws Exception {
        Map<String, Object> app = new HashMap<>();
        app.put("name", "conductor");
        app.put("version", 2.0);
        app.put("license", "Apache 2.0");

        Map<String, Object> payload = new HashMap<>();
        payload.put("app", app);
        payload.put("author", "Netflix");
        payload.put("oss", true);

        // Expressions that hold for the payload above.
        assertTrue(ScriptEvaluator.evalBool("$.app.name == 'conductor'", payload));
        assertTrue(ScriptEvaluator.evalBool("$.oss", payload));
        // Expressions that do not hold ($.version is undefined at the top level;
        // the version lives under $.app).
        assertFalse(ScriptEvaluator.evalBool("$.version > 3", payload));
        assertFalse(ScriptEvaluator.evalBool("$.author == 'me'", payload));
    }
}
/*
* Copyright 2022 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.metadata;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import javax.validation.ConstraintViolationException;
import org.junit.After;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.boot.test.context.TestConfiguration;
import org.springframework.context.annotation.Bean;
import org.springframework.test.context.junit4.SpringRunner;
import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.tasks.TaskType;
import com.netflix.conductor.common.metadata.workflow.SubWorkflowParams;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.core.exception.NotFoundException;
import com.netflix.conductor.core.exception.TerminateWorkflowException;
import com.netflix.conductor.dao.MetadataDAO;
import static com.netflix.conductor.TestUtils.getConstraintViolationMessages;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.reset;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoInteractions;
import static org.mockito.Mockito.verifyNoMoreInteractions;
import static org.mockito.Mockito.when;
@SuppressWarnings("SpringJavaAutowiredMembersInspection")
@RunWith(SpringRunner.class)
@EnableAutoConfiguration
public class MetadataMapperServiceTest {

    /** Spring test wiring: a mocked MetadataDAO backing a real MetadataMapperService. */
    @TestConfiguration
    static class TestMetadataMapperServiceConfiguration {

        @Bean
        public MetadataDAO metadataDAO() {
            return mock(MetadataDAO.class);
        }

        @Bean
        public MetadataMapperService metadataMapperService(MetadataDAO metadataDAO) {
            return new MetadataMapperService(metadataDAO);
        }
    }

    @Autowired private MetadataDAO metadataDAO;

    @Autowired private MetadataMapperService metadataMapperService;

    @After
    public void cleanUp() {
        // The mocked DAO is shared across tests; drop recorded interactions/stubs.
        reset(metadataDAO);
    }

    /** A SIMPLE task without an embedded definition is populated from the DAO. */
    @Test
    public void testMetadataPopulationOnSimpleTask() {
        String nameTaskDefinition = "task1";
        TaskDef taskDefinition = createTaskDefinition(nameTaskDefinition);
        WorkflowTask workflowTask = createWorkflowTask(nameTaskDefinition);
        when(metadataDAO.getTaskDef(nameTaskDefinition)).thenReturn(taskDefinition);
        WorkflowDef workflowDefinition = createWorkflowDefinition("testMetadataPopulation");
        workflowDefinition.setTasks(List.of(workflowTask));
        metadataMapperService.populateTaskDefinitions(workflowDefinition);
        assertEquals(1, workflowDefinition.getTasks().size());
        WorkflowTask populatedWorkflowTask = workflowDefinition.getTasks().get(0);
        assertNotNull(populatedWorkflowTask.getTaskDefinition());
        verify(metadataDAO).getTaskDef(nameTaskDefinition);
    }

    /** A task with an embedded definition must not trigger a DAO lookup. */
    @Test
    public void testNoMetadataPopulationOnEmbeddedTaskDefinition() {
        String nameTaskDefinition = "task2";
        TaskDef taskDefinition = createTaskDefinition(nameTaskDefinition);
        WorkflowTask workflowTask = createWorkflowTask(nameTaskDefinition);
        workflowTask.setTaskDefinition(taskDefinition);
        WorkflowDef workflowDefinition = createWorkflowDefinition("testMetadataPopulation");
        workflowDefinition.setTasks(List.of(workflowTask));
        metadataMapperService.populateTaskDefinitions(workflowDefinition);
        assertEquals(1, workflowDefinition.getTasks().size());
        WorkflowTask populatedWorkflowTask = workflowDefinition.getTasks().get(0);
        assertNotNull(populatedWorkflowTask.getTaskDefinition());
        verifyNoInteractions(metadataDAO);
    }

    /** Only tasks missing an embedded definition are looked up in the DAO. */
    @Test
    public void testMetadataPopulationOnlyOnNecessaryWorkflowTasks() {
        String nameTaskDefinition1 = "task4";
        TaskDef taskDefinition = createTaskDefinition(nameTaskDefinition1);
        WorkflowTask workflowTask1 = createWorkflowTask(nameTaskDefinition1);
        workflowTask1.setTaskDefinition(taskDefinition);
        String nameTaskDefinition2 = "task5";
        WorkflowTask workflowTask2 = createWorkflowTask(nameTaskDefinition2);
        WorkflowDef workflowDefinition = createWorkflowDefinition("testMetadataPopulation");
        workflowDefinition.setTasks(List.of(workflowTask1, workflowTask2));
        when(metadataDAO.getTaskDef(nameTaskDefinition2)).thenReturn(taskDefinition);
        metadataMapperService.populateTaskDefinitions(workflowDefinition);
        assertEquals(2, workflowDefinition.getTasks().size());
        List<WorkflowTask> workflowTasks = workflowDefinition.getTasks();
        assertNotNull(workflowTasks.get(0).getTaskDefinition());
        assertNotNull(workflowTasks.get(1).getTaskDefinition());
        verify(metadataDAO).getTaskDef(nameTaskDefinition2);
        verifyNoMoreInteractions(metadataDAO);
    }

    /** Missing task definitions are defaulted instead of raising NotFoundException. */
    @Test
    public void testMetadataPopulationMissingDefinitions() {
        String nameTaskDefinition1 = "task4";
        WorkflowTask workflowTask1 = createWorkflowTask(nameTaskDefinition1);
        String nameTaskDefinition2 = "task5";
        WorkflowTask workflowTask2 = createWorkflowTask(nameTaskDefinition2);
        TaskDef taskDefinition = createTaskDefinition(nameTaskDefinition1);
        WorkflowDef workflowDefinition = createWorkflowDefinition("testMetadataPopulation");
        workflowDefinition.setTasks(List.of(workflowTask1, workflowTask2));
        when(metadataDAO.getTaskDef(nameTaskDefinition1)).thenReturn(taskDefinition);
        when(metadataDAO.getTaskDef(nameTaskDefinition2)).thenReturn(null);
        try {
            metadataMapperService.populateTaskDefinitions(workflowDefinition);
        } catch (NotFoundException nfe) {
            fail("Missing TaskDefinitions are not defaulted");
        }
    }

    /**
     * A SUB_WORKFLOW task without an explicit version is populated with the latest
     * version of the referenced workflow definition.
     */
    @Test
    public void testVersionPopulationForSubworkflowTaskIfVersionIsNotAvailable() {
        String nameTaskDefinition = "taskSubworkflow6";
        String workflowDefinitionName = "subworkflow";
        int version = 3;
        // FIX: previously passed the literal "workflowDefinitionName" instead of the variable,
        // so the stubbed sub-workflow definition carried the wrong name.
        WorkflowDef subWorkflowDefinition = createWorkflowDefinition(workflowDefinitionName);
        subWorkflowDefinition.setVersion(version);
        WorkflowTask workflowTask = createWorkflowTask(nameTaskDefinition);
        workflowTask.setWorkflowTaskType(TaskType.SUB_WORKFLOW);
        SubWorkflowParams subWorkflowParams = new SubWorkflowParams();
        subWorkflowParams.setName(workflowDefinitionName);
        workflowTask.setSubWorkflowParam(subWorkflowParams);
        WorkflowDef workflowDefinition = createWorkflowDefinition("testMetadataPopulation");
        workflowDefinition.setTasks(List.of(workflowTask));
        when(metadataDAO.getLatestWorkflowDef(workflowDefinitionName))
                .thenReturn(Optional.of(subWorkflowDefinition));
        metadataMapperService.populateTaskDefinitions(workflowDefinition);
        assertEquals(1, workflowDefinition.getTasks().size());
        List<WorkflowTask> workflowTasks = workflowDefinition.getTasks();
        SubWorkflowParams params = workflowTasks.get(0).getSubWorkflowParam();
        assertEquals(workflowDefinitionName, params.getName());
        assertEquals(version, params.getVersion().intValue());
        verify(metadataDAO).getLatestWorkflowDef(workflowDefinitionName);
        verify(metadataDAO).getTaskDef(nameTaskDefinition);
        verifyNoMoreInteractions(metadataDAO);
    }

    /** An explicitly versioned SUB_WORKFLOW task must not trigger a latest-version lookup. */
    @Test
    public void testNoVersionPopulationForSubworkflowTaskIfAvailable() {
        String nameTaskDefinition = "taskSubworkflow7";
        String workflowDefinitionName = "subworkflow";
        Integer version = 2;
        WorkflowTask workflowTask = createWorkflowTask(nameTaskDefinition);
        workflowTask.setWorkflowTaskType(TaskType.SUB_WORKFLOW);
        SubWorkflowParams subWorkflowParams = new SubWorkflowParams();
        subWorkflowParams.setName(workflowDefinitionName);
        subWorkflowParams.setVersion(version);
        workflowTask.setSubWorkflowParam(subWorkflowParams);
        WorkflowDef workflowDefinition = createWorkflowDefinition("testMetadataPopulation");
        workflowDefinition.setTasks(List.of(workflowTask));
        metadataMapperService.populateTaskDefinitions(workflowDefinition);
        assertEquals(1, workflowDefinition.getTasks().size());
        List<WorkflowTask> workflowTasks = workflowDefinition.getTasks();
        SubWorkflowParams params = workflowTasks.get(0).getSubWorkflowParam();
        assertEquals(workflowDefinitionName, params.getName());
        assertEquals(version, params.getVersion());
        verify(metadataDAO).getTaskDef(nameTaskDefinition);
        verifyNoMoreInteractions(metadataDAO);
    }

    /** Population fails with TerminateWorkflowException when the sub-workflow def is absent. */
    @Test(expected = TerminateWorkflowException.class)
    public void testExceptionWhenWorkflowDefinitionNotAvailable() {
        String nameTaskDefinition = "taskSubworkflow8";
        String workflowDefinitionName = "subworkflow";
        WorkflowTask workflowTask = createWorkflowTask(nameTaskDefinition);
        workflowTask.setWorkflowTaskType(TaskType.SUB_WORKFLOW);
        SubWorkflowParams subWorkflowParams = new SubWorkflowParams();
        subWorkflowParams.setName(workflowDefinitionName);
        workflowTask.setSubWorkflowParam(subWorkflowParams);
        WorkflowDef workflowDefinition = createWorkflowDefinition("testMetadataPopulation");
        workflowDefinition.setTasks(List.of(workflowTask));
        when(metadataDAO.getLatestWorkflowDef(workflowDefinitionName)).thenReturn(Optional.empty());
        // Throws TerminateWorkflowException (satisfies the @Test(expected=...) contract).
        // NOTE: a trailing verify() call was removed here — it was unreachable after the throw.
        metadataMapperService.populateTaskDefinitions(workflowDefinition);
    }

    @Test(expected = IllegalArgumentException.class)
    public void testLookupWorkflowDefinition() {
        try {
            String workflowName = "test";
            when(metadataDAO.getWorkflowDef(workflowName, 0))
                    .thenReturn(Optional.of(new WorkflowDef()));
            Optional<WorkflowDef> optionalWorkflowDef =
                    metadataMapperService.lookupWorkflowDefinition(workflowName, 0);
            assertTrue(optionalWorkflowDef.isPresent());
            // a null name must be rejected (IllegalArgumentException expected by the @Test)
            metadataMapperService.lookupWorkflowDefinition(null, 0);
        } catch (ConstraintViolationException ex) {
            Assert.assertEquals(1, ex.getConstraintViolations().size());
            Set<String> messages = getConstraintViolationMessages(ex.getConstraintViolations());
            assertTrue(messages.contains("WorkflowIds list cannot be null."));
        }
    }

    @Test(expected = IllegalArgumentException.class)
    public void testLookupLatestWorkflowDefinition() {
        String workflowName = "test";
        when(metadataDAO.getLatestWorkflowDef(workflowName))
                .thenReturn(Optional.of(new WorkflowDef()));
        Optional<WorkflowDef> optionalWorkflowDef =
                metadataMapperService.lookupLatestWorkflowDefinition(workflowName);
        assertTrue(optionalWorkflowDef.isPresent());
        // a null name must be rejected (IllegalArgumentException expected by the @Test)
        metadataMapperService.lookupLatestWorkflowDefinition(null);
    }

    @Test
    public void testShouldNotPopulateTaskDefinition() {
        // a blank task name means there is nothing to look up
        WorkflowTask workflowTask = createWorkflowTask("");
        assertFalse(metadataMapperService.shouldPopulateTaskDefinition(workflowTask));
    }

    @Test
    public void testShouldPopulateTaskDefinition() {
        WorkflowTask workflowTask = createWorkflowTask("test");
        assertTrue(metadataMapperService.shouldPopulateTaskDefinition(workflowTask));
    }

    /** A missing DAO definition for a SIMPLE task is still defaulted on the workflow task. */
    @Test
    public void testMetadataPopulationOnSimpleTaskDefMissing() {
        String nameTaskDefinition = "task1";
        WorkflowTask workflowTask = createWorkflowTask(nameTaskDefinition);
        when(metadataDAO.getTaskDef(nameTaskDefinition)).thenReturn(null);
        WorkflowDef workflowDefinition = createWorkflowDefinition("testMetadataPopulation");
        workflowDefinition.setTasks(List.of(workflowTask));
        metadataMapperService.populateTaskDefinitions(workflowDefinition);
        assertEquals(1, workflowDefinition.getTasks().size());
        WorkflowTask populatedWorkflowTask = workflowDefinition.getTasks().get(0);
        assertNotNull(populatedWorkflowTask.getTaskDefinition());
    }

    /** Builds a minimal workflow definition with the given name. */
    private WorkflowDef createWorkflowDefinition(String name) {
        WorkflowDef workflowDefinition = new WorkflowDef();
        workflowDefinition.setName(name);
        return workflowDefinition;
    }

    /** Builds a SIMPLE workflow task with the given name and no embedded definition. */
    private WorkflowTask createWorkflowTask(String name) {
        WorkflowTask workflowTask = new WorkflowTask();
        workflowTask.setName(name);
        workflowTask.setType(TaskType.SIMPLE.name());
        return workflowTask;
    }

    /** Builds a bare task definition with the given name. */
    private TaskDef createTaskDefinition(String name) {
        return new TaskDef(name);
    }
}
/*
* Copyright 2022 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.dao;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import java.util.stream.Collectors;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import static org.junit.Assert.*;
public abstract class ExecutionDAOTest {
protected abstract ExecutionDAO getExecutionDAO();
    /**
     * DAO used for concurrency-limit checks; by default the execution DAO itself is
     * expected to implement {@code ConcurrentExecutionLimitDAO}. Subclasses may override.
     */
    protected ConcurrentExecutionLimitDAO getConcurrentExecutionLimitDAO() {
        return (ConcurrentExecutionLimitDAO) getExecutionDAO();
    }
@Rule public ExpectedException expectedException = ExpectedException.none();
@Test
public void testTaskExceedsLimit() {
TaskDef taskDefinition = new TaskDef();
taskDefinition.setName("task1");
taskDefinition.setConcurrentExecLimit(1);
WorkflowTask workflowTask = new WorkflowTask();
workflowTask.setName("task1");
workflowTask.setTaskDefinition(taskDefinition);
workflowTask.setTaskDefinition(taskDefinition);
List<TaskModel> tasks = new LinkedList<>();
for (int i = 0; i < 15; i++) {
TaskModel task = new TaskModel();
task.setScheduledTime(1L);
task.setSeq(i + 1);
task.setTaskId("t_" + i);
task.setWorkflowInstanceId("workflow_" + i);
task.setReferenceTaskName("task1");
task.setTaskDefName("task1");
tasks.add(task);
task.setStatus(TaskModel.Status.SCHEDULED);
task.setWorkflowTask(workflowTask);
}
getExecutionDAO().createTasks(tasks);
assertFalse(getConcurrentExecutionLimitDAO().exceedsLimit(tasks.get(0)));
tasks.get(0).setStatus(TaskModel.Status.IN_PROGRESS);
getExecutionDAO().updateTask(tasks.get(0));
for (TaskModel task : tasks) {
assertTrue(getConcurrentExecutionLimitDAO().exceedsLimit(task));
}
}
@Test
public void testCreateTaskException() {
TaskModel task = new TaskModel();
task.setScheduledTime(1L);
task.setSeq(1);
task.setTaskId(UUID.randomUUID().toString());
task.setTaskDefName("task1");
expectedException.expect(IllegalArgumentException.class);
expectedException.expectMessage("Workflow instance id cannot be null");
getExecutionDAO().createTasks(List.of(task));
task.setWorkflowInstanceId(UUID.randomUUID().toString());
expectedException.expect(IllegalArgumentException.class);
expectedException.expectMessage("Task reference name cannot be null");
getExecutionDAO().createTasks(List.of(task));
}
    /** createTasks must reject a task that has a workflow id but no task reference name. */
    @Test
    public void testCreateTaskException2() {
        TaskModel task = new TaskModel();
        task.setScheduledTime(1L);
        task.setSeq(1);
        task.setTaskId(UUID.randomUUID().toString());
        task.setTaskDefName("task1");
        task.setWorkflowInstanceId(UUID.randomUUID().toString());
        expectedException.expect(IllegalArgumentException.class);
        expectedException.expectMessage("Task reference name cannot be null");
        getExecutionDAO().createTasks(Collections.singletonList(task));
    }
    /**
     * createTasks must de-duplicate by (workflowId, refName, retryCount): a retried task
     * is accepted, an exact duplicate is silently dropped.
     */
    @Test
    public void testTaskCreateDups() {
        List<TaskModel> tasks = new LinkedList<>();
        String workflowId = UUID.randomUUID().toString();
        for (int i = 0; i < 3; i++) {
            TaskModel task = new TaskModel();
            task.setScheduledTime(1L);
            task.setSeq(i + 1);
            task.setTaskId(workflowId + "_t" + i);
            task.setReferenceTaskName("t" + i);
            task.setRetryCount(0);
            task.setWorkflowInstanceId(workflowId);
            task.setTaskDefName("task" + i);
            task.setStatus(TaskModel.Status.IN_PROGRESS);
            tasks.add(task);
        }
        // Let's insert a retried task
        TaskModel task = new TaskModel();
        task.setScheduledTime(1L);
        task.setSeq(1);
        task.setTaskId(workflowId + "_t" + 2);
        task.setReferenceTaskName("t" + 2);
        task.setRetryCount(1);
        task.setWorkflowInstanceId(workflowId);
        task.setTaskDefName("task" + 2);
        task.setStatus(TaskModel.Status.IN_PROGRESS);
        tasks.add(task);
        // Duplicate task!
        task = new TaskModel();
        task.setScheduledTime(1L);
        task.setSeq(1);
        task.setTaskId(workflowId + "_t" + 1);
        task.setReferenceTaskName("t" + 1);
        task.setRetryCount(0);
        task.setWorkflowInstanceId(workflowId);
        task.setTaskDefName("task" + 1);
        task.setStatus(TaskModel.Status.IN_PROGRESS);
        tasks.add(task);
        List<TaskModel> created = getExecutionDAO().createTasks(tasks);
        assertEquals(tasks.size() - 1, created.size()); // 1 less
        // compare by refName.retryCount identity, ignoring insertion order
        Set<String> srcIds =
                tasks.stream()
                        .map(t -> t.getReferenceTaskName() + "." + t.getRetryCount())
                        .collect(Collectors.toSet());
        Set<String> createdIds =
                created.stream()
                        .map(t -> t.getReferenceTaskName() + "." + t.getRetryCount())
                        .collect(Collectors.toSet());
        assertEquals(srcIds, createdIds);
        List<TaskModel> pending = getExecutionDAO().getPendingTasksByWorkflow("task0", workflowId);
        assertNotNull(pending);
        assertEquals(1, pending.size());
        assertTrue(EqualsBuilder.reflectionEquals(tasks.get(0), pending.get(0)));
        List<TaskModel> found = getExecutionDAO().getTasks(tasks.get(0).getTaskDefName(), null, 1);
        assertNotNull(found);
        assertEquals(1, found.size());
        assertTrue(EqualsBuilder.reflectionEquals(tasks.get(0), found.get(0)));
    }
    /**
     * Exercises the task CRUD cycle: create, list pending by type, fetch by id,
     * update output/status, bulk fetch, and remove.
     */
    @Test
    public void testTaskOps() {
        List<TaskModel> tasks = new LinkedList<>();
        String workflowId = UUID.randomUUID().toString();
        for (int i = 0; i < 3; i++) {
            TaskModel task = new TaskModel();
            task.setScheduledTime(1L);
            task.setSeq(1);
            task.setTaskId(workflowId + "_t" + i);
            task.setReferenceTaskName("testTaskOps" + i);
            task.setRetryCount(0);
            task.setWorkflowInstanceId(workflowId);
            task.setTaskDefName("testTaskOps" + i);
            task.setStatus(TaskModel.Status.IN_PROGRESS);
            tasks.add(task);
        }
        // tasks with the same def names but in a different workflow ("x" + workflowId)
        for (int i = 0; i < 3; i++) {
            TaskModel task = new TaskModel();
            task.setScheduledTime(1L);
            task.setSeq(1);
            task.setTaskId("x" + workflowId + "_t" + i);
            task.setReferenceTaskName("testTaskOps" + i);
            task.setRetryCount(0);
            task.setWorkflowInstanceId("x" + workflowId);
            task.setTaskDefName("testTaskOps" + i);
            task.setStatus(TaskModel.Status.IN_PROGRESS);
            getExecutionDAO().createTasks(Collections.singletonList(task));
        }
        List<TaskModel> created = getExecutionDAO().createTasks(tasks);
        assertEquals(tasks.size(), created.size());
        // one pending task per workflow for this task type => 2
        List<TaskModel> pending =
                getExecutionDAO().getPendingTasksForTaskType(tasks.get(0).getTaskDefName());
        assertNotNull(pending);
        assertEquals(2, pending.size());
        // Pending list can come in any order. finding the one we are looking for and then
        // comparing
        TaskModel matching =
                pending.stream()
                        .filter(task -> task.getTaskId().equals(tasks.get(0).getTaskId()))
                        .findAny()
                        .get();
        assertTrue(EqualsBuilder.reflectionEquals(matching, tasks.get(0)));
        for (int i = 0; i < 3; i++) {
            TaskModel found = getExecutionDAO().getTask(workflowId + "_t" + i);
            assertNotNull(found);
            found.addOutput("updated", true);
            found.setStatus(TaskModel.Status.COMPLETED);
            getExecutionDAO().updateTask(found);
        }
        List<String> taskIds =
                tasks.stream().map(TaskModel::getTaskId).collect(Collectors.toList());
        List<TaskModel> found = getExecutionDAO().getTasks(taskIds);
        assertEquals(taskIds.size(), found.size());
        found.forEach(
                task -> {
                    assertTrue(task.getOutputData().containsKey("updated"));
                    assertEquals(true, task.getOutputData().get("updated"));
                    boolean removed = getExecutionDAO().removeTask(task.getTaskId());
                    assertTrue(removed);
                });
        // removed tasks must no longer be retrievable
        found = getExecutionDAO().getTasks(taskIds);
        assertTrue(found.isEmpty());
    }
    /**
     * Pending workflow count tracks creations and removals from the pending set.
     */
    @Test
    public void testPending() {
        WorkflowDef def = new WorkflowDef();
        def.setName("pending_count_test");
        WorkflowModel workflow = createTestWorkflow();
        workflow.setWorkflowDefinition(def);
        List<String> workflowIds = generateWorkflows(workflow, 10);
        long count = getExecutionDAO().getPendingWorkflowCount(def.getName());
        assertEquals(10, count);
        for (int i = 0; i < 10; i++) {
            getExecutionDAO().removeFromPendingWorkflow(def.getName(), workflowIds.get(i));
        }
        count = getExecutionDAO().getPendingWorkflowCount(def.getName());
        assertEquals(0, count);
    }
    /**
     * End-to-end workflow lifecycle against the DAO: create workflow and tasks, fetch
     * with/without tasks, update input, and verify the running/pending/by-type indexes
     * track status transitions (FAILED -> RUNNING -> COMPLETED).
     */
    @Test
    public void complexExecutionTest() {
        WorkflowModel workflow = createTestWorkflow();
        int numTasks = workflow.getTasks().size();
        String workflowId = getExecutionDAO().createWorkflow(workflow);
        assertEquals(workflow.getWorkflowId(), workflowId);
        List<TaskModel> created = getExecutionDAO().createTasks(workflow.getTasks());
        assertEquals(workflow.getTasks().size(), created.size());
        WorkflowModel workflowWithTasks =
                getExecutionDAO().getWorkflow(workflow.getWorkflowId(), true);
        assertEquals(workflowId, workflowWithTasks.getWorkflowId());
        assertEquals(numTasks, workflowWithTasks.getTasks().size());
        // includeTasks=false must return the workflow without its task list
        WorkflowModel found = getExecutionDAO().getWorkflow(workflowId, false);
        assertTrue(found.getTasks().isEmpty());
        workflow.getTasks().clear();
        assertEquals(workflow, found);
        workflow.getInput().put("updated", true);
        getExecutionDAO().updateWorkflow(workflow);
        found = getExecutionDAO().getWorkflow(workflowId);
        assertNotNull(found);
        assertTrue(found.getInput().containsKey("updated"));
        assertEquals(true, found.getInput().get("updated"));
        // workflow was created in FAILED state, so it is not in the running index yet
        List<String> running =
                getExecutionDAO()
                        .getRunningWorkflowIds(
                                workflow.getWorkflowName(), workflow.getWorkflowVersion());
        assertNotNull(running);
        assertTrue(running.isEmpty());
        workflow.setStatus(WorkflowModel.Status.RUNNING);
        getExecutionDAO().updateWorkflow(workflow);
        running =
                getExecutionDAO()
                        .getRunningWorkflowIds(
                                workflow.getWorkflowName(), workflow.getWorkflowVersion());
        assertNotNull(running);
        assertEquals(1, running.size());
        assertEquals(workflow.getWorkflowId(), running.get(0));
        List<WorkflowModel> pending =
                getExecutionDAO()
                        .getPendingWorkflowsByType(
                                workflow.getWorkflowName(), workflow.getWorkflowVersion());
        assertNotNull(pending);
        assertEquals(1, pending.size());
        assertEquals(3, pending.get(0).getTasks().size());
        pending.get(0).getTasks().clear();
        assertEquals(workflow, pending.get(0));
        // completing the workflow must remove it from the running index
        workflow.setStatus(WorkflowModel.Status.COMPLETED);
        getExecutionDAO().updateWorkflow(workflow);
        running =
                getExecutionDAO()
                        .getRunningWorkflowIds(
                                workflow.getWorkflowName(), workflow.getWorkflowVersion());
        assertNotNull(running);
        assertTrue(running.isEmpty());
        // by-type lookup is keyed on createTime: a future window is empty,
        // a window around createTime finds the workflow
        List<WorkflowModel> bytime =
                getExecutionDAO()
                        .getWorkflowsByType(
                                workflow.getWorkflowName(),
                                System.currentTimeMillis(),
                                System.currentTimeMillis() + 100);
        assertNotNull(bytime);
        assertTrue(bytime.isEmpty());
        bytime =
                getExecutionDAO()
                        .getWorkflowsByType(
                                workflow.getWorkflowName(),
                                workflow.getCreateTime() - 10,
                                workflow.getCreateTime() + 10);
        assertNotNull(bytime);
        assertEquals(1, bytime.size());
    }
/**
 * Builds a fully-populated workflow model for the DAO tests in this class: a
 * versioned definition, input/output maps, audit fields, FAILED status and three
 * scheduled tasks attached to the workflow's generated id.
 *
 * @return a new {@link WorkflowModel} in FAILED status with three tasks
 */
protected WorkflowModel createTestWorkflow() {
    WorkflowDef def = new WorkflowDef();
    def.setName("Junit Workflow");
    def.setVersion(3);
    def.setSchemaVersion(2);

    WorkflowModel workflow = new WorkflowModel();
    workflow.setWorkflowDefinition(def);
    workflow.setCorrelationId("correlationX");
    workflow.setCreatedBy("junit_tester");
    workflow.setEndTime(200L);

    Map<String, Object> input = new HashMap<>();
    input.put("param1", "param1 value");
    input.put("param2", 100);
    workflow.setInput(input);

    Map<String, Object> output = new HashMap<>();
    // NOTE: "ouput1" spelling is intentional legacy test data — do not "fix" the key.
    output.put("ouput1", "output 1 value");
    output.put("op2", 300);
    workflow.setOutput(output);

    workflow.setOwnerApp("workflow");
    workflow.setParentWorkflowId("parentWorkflowId");
    workflow.setParentWorkflowTaskId("parentWFTaskId");
    workflow.setReasonForIncompletion("missing recipe");
    workflow.setReRunFromWorkflowId("re-run from id1");
    workflow.setCreateTime(90L);
    workflow.setStatus(WorkflowModel.Status.FAILED);
    workflow.setWorkflowId(UUID.randomUUID().toString());

    // The three tasks differ only in sequence, reference/definition names and
    // scheduled time, so they are built by a shared helper.
    List<TaskModel> tasks = new LinkedList<>();
    tasks.add(newTask(workflow, 1, 1L, "t1", "task1"));
    tasks.add(newTask(workflow, 2, 2L, "t2", "task2"));
    tasks.add(newTask(workflow, 3, 2L, "t3", "task3"));
    workflow.setTasks(tasks);

    workflow.setUpdatedBy("junit_tester");
    workflow.setUpdatedTime(800L);
    return workflow;
}

/**
 * Creates a task belonging to {@code workflow} with a random task id and the given
 * sequence number, scheduled time, reference name and task-definition name.
 */
private static TaskModel newTask(
        WorkflowModel workflow, int seq, long scheduledTime, String refName, String defName) {
    TaskModel task = new TaskModel();
    task.setScheduledTime(scheduledTime);
    task.setSeq(seq);
    task.setTaskId(UUID.randomUUID().toString());
    task.setReferenceTaskName(refName);
    task.setWorkflowInstanceId(workflow.getWorkflowId());
    task.setTaskDefName(defName);
    return task;
}
/**
 * Persists {@code count} copies of the given workflow template, each with a fresh
 * random workflow id, correlation id {@code "corr001"} and RUNNING status.
 *
 * @param base template workflow; it is mutated in place before each create
 * @param count number of workflows to create
 * @return the ids of the created workflows, in creation order
 */
protected List<String> generateWorkflows(WorkflowModel base, int count) {
    List<String> createdIds = new ArrayList<>(count);
    int remaining = count;
    while (remaining-- > 0) {
        String freshId = UUID.randomUUID().toString();
        base.setWorkflowId(freshId);
        base.setCorrelationId("corr001");
        base.setStatus(WorkflowModel.Status.RUNNING);
        getExecutionDAO().createWorkflow(base);
        createdIds.add(freshId);
    }
    return createdIds;
}
}
/*
* Copyright 2020 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.dao;
import java.util.List;
import org.junit.Test;
import com.netflix.conductor.common.metadata.tasks.PollData;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
/**
 * Contract tests for {@link PollDataDAO} implementations. Concrete DAO test suites
 * subclass this and supply the implementation under test via {@link #getPollDataDAO()}.
 */
public abstract class PollDataDAOTest {

    /** @return the {@link PollDataDAO} implementation under test */
    protected abstract PollDataDAO getPollDataDAO();

    @Test
    public void testPollData() {
        // Poll without a domain: the stored record must keep a null domain.
        getPollDataDAO().updateLastPollData("taskDef", null, "workerId1");
        PollData pollData = getPollDataDAO().getPollData("taskDef", null);
        assertNotNull(pollData);
        assertTrue(pollData.getLastPollTime() > 0);
        // JUnit convention: expected value first, actual second.
        assertEquals("taskDef", pollData.getQueueName());
        assertNull(pollData.getDomain());
        assertEquals("workerId1", pollData.getWorkerId());

        // Poll with an explicit domain: stored separately from the domain-less entry.
        getPollDataDAO().updateLastPollData("taskDef", "domain1", "workerId1");
        pollData = getPollDataDAO().getPollData("taskDef", "domain1");
        assertNotNull(pollData);
        assertTrue(pollData.getLastPollTime() > 0);
        assertEquals("taskDef", pollData.getQueueName());
        assertEquals("domain1", pollData.getDomain());
        assertEquals("workerId1", pollData.getWorkerId());

        // Both entries (null domain and "domain1") are returned for the task.
        List<PollData> pData = getPollDataDAO().getPollData("taskDef");
        assertEquals(2, pData.size());

        // An unknown domain yields no poll data.
        pollData = getPollDataDAO().getPollData("taskDef", "domain2");
        assertNull(pollData);
    }
}
/*
* Copyright 2020 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.validations;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.validation.ConstraintViolation;
import javax.validation.Validation;
import javax.validation.Validator;
import javax.validation.ValidatorFactory;
import javax.validation.executable.ExecutableValidator;
import org.apache.bval.jsr.ApacheValidationProvider;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.mockito.Mockito;
import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.tasks.TaskType;
import com.netflix.conductor.common.metadata.workflow.SubWorkflowParams;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.core.execution.tasks.Terminate;
import com.netflix.conductor.dao.MetadataDAO;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Mockito.when;
/**
 * Verifies the per-task-type validation constraints applied to {@link WorkflowTask}
 * definitions (EVENT, DYNAMIC, DECISION, DO_WHILE, WAIT, FORK_JOIN, FORK_JOIN_DYNAMIC,
 * HTTP, SUB_WORKFLOW, TERMINATE, KAFKA_PUBLISH and JSON_JQ_TRANSFORM).
 *
 * <p>A single Apache BVal validator is built once for the class; the metadata DAO is
 * mocked per test so each case can control the TaskDef returned to the validator.
 */
public class WorkflowTaskTypeConstraintTest {

    private static Validator validator;
    private static ValidatorFactory validatorFactory;

    private MetadataDAO mockMetadataDao;

    @BeforeClass
    public static void init() {
        validatorFactory =
                Validation.byProvider(ApacheValidationProvider.class)
                        .configure()
                        .buildValidatorFactory();
        validator = validatorFactory.getValidator();
    }

    @AfterClass
    public static void close() {
        validatorFactory.close();
    }

    @Before
    public void setUp() {
        mockMetadataDao = Mockito.mock(MetadataDAO.class);
        ValidationContext.initialize(mockMetadataDao);
    }

    @Test
    public void testWorkflowTaskMissingReferenceName() {
        WorkflowTask workflowTask = createSampleWorkflowTask();
        workflowTask.setDynamicForkTasksParam("taskList");
        workflowTask.setDynamicForkTasksInputParamName("ForkTaskInputParam");
        workflowTask.setTaskReferenceName(null);

        Set<ConstraintViolation<Object>> result = validator.validate(workflowTask);
        assertEquals(1, result.size());
        // Expected value first, actual second (JUnit convention).
        assertEquals(
                "WorkflowTask taskReferenceName name cannot be empty or null",
                result.iterator().next().getMessage());
    }

    @Test
    public void testWorkflowTaskTestSetType() throws NoSuchMethodException {
        WorkflowTask workflowTask = createSampleWorkflowTask();
        Method method = WorkflowTask.class.getMethod("setType", String.class);
        Object[] parameterValues = {""};

        ExecutableValidator executableValidator = validator.forExecutables();
        Set<ConstraintViolation<Object>> result =
                executableValidator.validateParameters(workflowTask, method, parameterValues);
        assertEquals(1, result.size());
        assertEquals(
                "WorkTask type cannot be null or empty", result.iterator().next().getMessage());
    }

    @Test
    public void testWorkflowTaskTypeEvent() {
        WorkflowTask workflowTask = createSampleWorkflowTask();
        workflowTask.setType("EVENT");

        when(mockMetadataDao.getTaskDef(anyString())).thenReturn(new TaskDef());

        Set<ConstraintViolation<WorkflowTask>> result = validator.validate(workflowTask);
        assertEquals(1, result.size());
        assertEquals(
                "sink field is required for taskType: EVENT taskName: encode",
                result.iterator().next().getMessage());
    }

    @Test
    public void testWorkflowTaskTypeDynamic() {
        WorkflowTask workflowTask = createSampleWorkflowTask();
        workflowTask.setType("DYNAMIC");

        when(mockMetadataDao.getTaskDef(anyString())).thenReturn(new TaskDef());

        Set<ConstraintViolation<WorkflowTask>> result = validator.validate(workflowTask);
        assertEquals(1, result.size());
        assertEquals(
                "dynamicTaskNameParam field is required for taskType: DYNAMIC taskName: encode",
                result.iterator().next().getMessage());
    }

    @Test
    public void testWorkflowTaskTypeDecision() {
        WorkflowTask workflowTask = createSampleWorkflowTask();
        workflowTask.setType("DECISION");

        when(mockMetadataDao.getTaskDef(anyString())).thenReturn(new TaskDef());

        Set<ConstraintViolation<WorkflowTask>> result = validator.validate(workflowTask);
        assertEquals(2, result.size());

        List<String> validationErrors = new ArrayList<>();
        result.forEach(e -> validationErrors.add(e.getMessage()));
        assertTrue(
                validationErrors.contains(
                        "decisionCases should have atleast one task for taskType: DECISION taskName: encode"));
        assertTrue(
                validationErrors.contains(
                        "caseValueParam or caseExpression field is required for taskType: DECISION taskName: encode"));
    }

    @Test
    public void testWorkflowTaskTypeDoWhile() {
        WorkflowTask workflowTask = createSampleWorkflowTask();
        workflowTask.setType("DO_WHILE");

        when(mockMetadataDao.getTaskDef(anyString())).thenReturn(new TaskDef());

        Set<ConstraintViolation<WorkflowTask>> result = validator.validate(workflowTask);
        assertEquals(2, result.size());

        List<String> validationErrors = new ArrayList<>();
        result.forEach(e -> validationErrors.add(e.getMessage()));
        assertTrue(
                validationErrors.contains(
                        "loopExpression field is required for taskType: DO_WHILE taskName: encode"));
        assertTrue(
                validationErrors.contains(
                        "loopover field is required for taskType: DO_WHILE taskName: encode"));
    }

    @Test
    public void testWorkflowTaskTypeWait() {
        WorkflowTask workflowTask = createSampleWorkflowTask();
        workflowTask.setType("WAIT");

        // A WAIT task with no duration/until input is valid.
        Set<ConstraintViolation<WorkflowTask>> result = validator.validate(workflowTask);
        assertEquals(0, result.size());

        // Supplying both 'duration' and 'until' is rejected.
        workflowTask.setInputParameters(Map.of("duration", "10s", "until", "2022-04-16"));
        when(mockMetadataDao.getTaskDef(anyString())).thenReturn(new TaskDef());
        result = validator.validate(workflowTask);
        assertEquals(1, result.size());

        List<String> validationErrors = new ArrayList<>();
        result.forEach(e -> validationErrors.add(e.getMessage()));
        assertTrue(
                validationErrors.contains(
                        "Both 'duration' and 'until' specified. Please provide only one input"));
    }

    @Test
    public void testWorkflowTaskTypeDecisionWithCaseParam() {
        WorkflowTask workflowTask = createSampleWorkflowTask();
        workflowTask.setType("DECISION");
        workflowTask.setCaseExpression("$.valueCheck == null ? 'true': 'false'");

        when(mockMetadataDao.getTaskDef(anyString())).thenReturn(new TaskDef());

        Set<ConstraintViolation<WorkflowTask>> result = validator.validate(workflowTask);
        assertEquals(1, result.size());

        List<String> validationErrors = new ArrayList<>();
        result.forEach(e -> validationErrors.add(e.getMessage()));
        assertTrue(
                validationErrors.contains(
                        "decisionCases should have atleast one task for taskType: DECISION taskName: encode"));
    }

    @Test
    public void testWorkflowTaskTypeForJoinDynamic() {
        WorkflowTask workflowTask = createSampleWorkflowTask();
        workflowTask.setType("FORK_JOIN_DYNAMIC");

        when(mockMetadataDao.getTaskDef(anyString())).thenReturn(new TaskDef());

        Set<ConstraintViolation<WorkflowTask>> result = validator.validate(workflowTask);
        assertEquals(2, result.size());

        List<String> validationErrors = new ArrayList<>();
        result.forEach(e -> validationErrors.add(e.getMessage()));
        assertTrue(
                validationErrors.contains(
                        "dynamicForkTasksInputParamName field is required for taskType: FORK_JOIN_DYNAMIC taskName: encode"));
        assertTrue(
                validationErrors.contains(
                        "dynamicForkTasksParam field is required for taskType: FORK_JOIN_DYNAMIC taskName: encode"));
    }

    @Test
    public void testWorkflowTaskTypeForJoinDynamicLegacy() {
        // The legacy single-parameter form (dynamicForkJoinTasksParam) is still accepted.
        WorkflowTask workflowTask = createSampleWorkflowTask();
        workflowTask.setType("FORK_JOIN_DYNAMIC");
        workflowTask.setDynamicForkJoinTasksParam("taskList");

        when(mockMetadataDao.getTaskDef(anyString())).thenReturn(new TaskDef());

        Set<ConstraintViolation<WorkflowTask>> result = validator.validate(workflowTask);
        assertEquals(0, result.size());
    }

    @Test
    public void testWorkflowTaskTypeForJoinDynamicWithForJoinTaskParam() {
        // Mixing the legacy parameter with the new-style parameters is rejected.
        WorkflowTask workflowTask = createSampleWorkflowTask();
        workflowTask.setType("FORK_JOIN_DYNAMIC");
        workflowTask.setDynamicForkJoinTasksParam("taskList");
        workflowTask.setDynamicForkTasksInputParamName("ForkTaskInputParam");

        when(mockMetadataDao.getTaskDef(anyString())).thenReturn(new TaskDef());

        Set<ConstraintViolation<WorkflowTask>> result = validator.validate(workflowTask);
        assertEquals(1, result.size());

        List<String> validationErrors = new ArrayList<>();
        result.forEach(e -> validationErrors.add(e.getMessage()));
        assertTrue(
                validationErrors.contains(
                        "dynamicForkJoinTasksParam or combination of dynamicForkTasksInputParamName and dynamicForkTasksParam cam be used for taskType: FORK_JOIN_DYNAMIC taskName: encode"));
    }

    @Test
    public void testWorkflowTaskTypeForJoinDynamicValid() {
        WorkflowTask workflowTask = createSampleWorkflowTask();
        workflowTask.setType("FORK_JOIN_DYNAMIC");
        workflowTask.setDynamicForkTasksParam("ForkTasksParam");
        workflowTask.setDynamicForkTasksInputParamName("ForkTaskInputParam");

        when(mockMetadataDao.getTaskDef(anyString())).thenReturn(new TaskDef());

        Set<ConstraintViolation<WorkflowTask>> result = validator.validate(workflowTask);
        assertEquals(0, result.size());
    }

    @Test
    public void testWorkflowTaskTypeForJoinDynamicWithForJoinTaskParamAndInputTaskParam() {
        WorkflowTask workflowTask = createSampleWorkflowTask();
        workflowTask.setType("FORK_JOIN_DYNAMIC");
        workflowTask.setDynamicForkJoinTasksParam("taskList");
        workflowTask.setDynamicForkTasksInputParamName("ForkTaskInputParam");
        workflowTask.setDynamicForkTasksParam("ForkTasksParam");

        when(mockMetadataDao.getTaskDef(anyString())).thenReturn(new TaskDef());

        Set<ConstraintViolation<WorkflowTask>> result = validator.validate(workflowTask);
        assertEquals(1, result.size());

        List<String> validationErrors = new ArrayList<>();
        result.forEach(e -> validationErrors.add(e.getMessage()));
        assertTrue(
                validationErrors.contains(
                        "dynamicForkJoinTasksParam or combination of dynamicForkTasksInputParamName and dynamicForkTasksParam cam be used for taskType: FORK_JOIN_DYNAMIC taskName: encode"));
    }

    @Test
    public void testWorkflowTaskTypeHTTP() {
        WorkflowTask workflowTask = createSampleWorkflowTask();
        workflowTask.setType("HTTP");
        workflowTask.getInputParameters().put("http_request", "http://www.netflix.com");

        when(mockMetadataDao.getTaskDef(anyString())).thenReturn(new TaskDef());

        Set<ConstraintViolation<WorkflowTask>> result = validator.validate(workflowTask);
        assertEquals(0, result.size());
    }

    @Test
    public void testWorkflowTaskTypeHTTPWithHttpParamMissing() {
        WorkflowTask workflowTask = createSampleWorkflowTask();
        workflowTask.setType("HTTP");

        when(mockMetadataDao.getTaskDef(anyString())).thenReturn(new TaskDef());

        Set<ConstraintViolation<WorkflowTask>> result = validator.validate(workflowTask);
        assertEquals(1, result.size());

        List<String> validationErrors = new ArrayList<>();
        result.forEach(e -> validationErrors.add(e.getMessage()));
        assertTrue(
                validationErrors.contains(
                        "inputParameters.http_request field is required for taskType: HTTP taskName: encode"));
    }

    @Test
    public void testWorkflowTaskTypeHTTPWithHttpParamInTaskDef() {
        // http_request supplied via the TaskDef input template satisfies the constraint.
        WorkflowTask workflowTask = createSampleWorkflowTask();
        workflowTask.setType("HTTP");

        TaskDef taskDef = new TaskDef();
        taskDef.setName("encode");
        taskDef.getInputTemplate().put("http_request", "http://www.netflix.com");

        when(mockMetadataDao.getTaskDef(anyString())).thenReturn(taskDef);

        Set<ConstraintViolation<WorkflowTask>> result = validator.validate(workflowTask);
        assertEquals(0, result.size());
    }

    @Test
    public void testWorkflowTaskTypeHTTPWithHttpParamInTaskDefAndWorkflowTask() {
        WorkflowTask workflowTask = createSampleWorkflowTask();
        workflowTask.setType("HTTP");
        workflowTask.getInputParameters().put("http_request", "http://www.netflix.com");

        TaskDef taskDef = new TaskDef();
        taskDef.setName("encode");
        taskDef.getInputTemplate().put("http_request", "http://www.netflix.com");

        when(mockMetadataDao.getTaskDef(anyString())).thenReturn(taskDef);

        Set<ConstraintViolation<WorkflowTask>> result = validator.validate(workflowTask);
        assertEquals(0, result.size());
    }

    @Test
    public void testWorkflowTaskTypeFork() {
        WorkflowTask workflowTask = createSampleWorkflowTask();
        workflowTask.setType("FORK_JOIN");

        when(mockMetadataDao.getTaskDef(anyString())).thenReturn(new TaskDef());

        Set<ConstraintViolation<WorkflowTask>> result = validator.validate(workflowTask);
        assertEquals(1, result.size());

        List<String> validationErrors = new ArrayList<>();
        result.forEach(e -> validationErrors.add(e.getMessage()));
        assertTrue(
                validationErrors.contains(
                        "forkTasks should have atleast one task for taskType: FORK_JOIN taskName: encode"));
    }

    @Test
    public void testWorkflowTaskTypeSubworkflowMissingSubworkflowParam() {
        WorkflowTask workflowTask = createSampleWorkflowTask();
        workflowTask.setType("SUB_WORKFLOW");

        Set<ConstraintViolation<WorkflowTask>> result = validator.validate(workflowTask);
        assertEquals(1, result.size());

        List<String> validationErrors = new ArrayList<>();
        result.forEach(e -> validationErrors.add(e.getMessage()));
        assertTrue(
                validationErrors.contains(
                        "subWorkflowParam field is required for taskType: SUB_WORKFLOW taskName: encode"));
    }

    @Test
    public void testWorkflowTaskTypeSubworkflow() {
        WorkflowTask workflowTask = createSampleWorkflowTask();
        workflowTask.setType("SUB_WORKFLOW");

        // A SubWorkflowParams without a name violates both the not-null and
        // not-empty constraints on the name field.
        SubWorkflowParams subWorkflowTask = new SubWorkflowParams();
        workflowTask.setSubWorkflowParam(subWorkflowTask);

        Set<ConstraintViolation<WorkflowTask>> result = validator.validate(workflowTask);
        assertEquals(2, result.size());

        List<String> validationErrors = new ArrayList<>();
        result.forEach(e -> validationErrors.add(e.getMessage()));
        assertTrue(validationErrors.contains("SubWorkflowParams name cannot be null"));
        assertTrue(validationErrors.contains("SubWorkflowParams name cannot be empty"));
    }

    @Test
    public void testWorkflowTaskTypeTerminateWithoutTerminationStatus() {
        WorkflowTask workflowTask = createSampleWorkflowTask();
        workflowTask.setType(TaskType.TASK_TYPE_TERMINATE);
        workflowTask.setName("terminate_task");
        workflowTask.setInputParameters(
                Collections.singletonMap(
                        Terminate.getTerminationWorkflowOutputParameter(), "blah"));

        List<String> validationErrors = getErrorMessages(workflowTask);
        assertEquals(1, validationErrors.size());
        assertEquals(
                "terminate task must have an terminationStatus parameter and must be set to COMPLETED or FAILED, taskName: terminate_task",
                validationErrors.get(0));
    }

    @Test
    public void testWorkflowTaskTypeTerminateWithInvalidStatus() {
        WorkflowTask workflowTask = createSampleWorkflowTask();
        workflowTask.setType(TaskType.TASK_TYPE_TERMINATE);
        workflowTask.setName("terminate_task");
        workflowTask.setInputParameters(
                Collections.singletonMap(Terminate.getTerminationStatusParameter(), "blah"));

        List<String> validationErrors = getErrorMessages(workflowTask);
        assertEquals(1, validationErrors.size());
        assertEquals(
                "terminate task must have an terminationStatus parameter and must be set to COMPLETED or FAILED, taskName: terminate_task",
                validationErrors.get(0));
    }

    @Test
    public void testWorkflowTaskTypeTerminateOptional() {
        WorkflowTask workflowTask = createSampleWorkflowTask();
        workflowTask.setType(TaskType.TASK_TYPE_TERMINATE);
        workflowTask.setName("terminate_task");
        workflowTask.setInputParameters(
                Collections.singletonMap(Terminate.getTerminationStatusParameter(), "COMPLETED"));
        workflowTask.setOptional(true);

        List<String> validationErrors = getErrorMessages(workflowTask);
        assertEquals(1, validationErrors.size());
        assertEquals(
                "terminate task cannot be optional, taskName: terminate_task",
                validationErrors.get(0));
    }

    @Test
    public void testWorkflowTaskTypeTerminateValid() {
        WorkflowTask workflowTask = createSampleWorkflowTask();
        workflowTask.setType(TaskType.TASK_TYPE_TERMINATE);
        workflowTask.setName("terminate_task");
        workflowTask.setInputParameters(
                Collections.singletonMap(Terminate.getTerminationStatusParameter(), "COMPLETED"));

        List<String> validationErrors = getErrorMessages(workflowTask);
        assertEquals(0, validationErrors.size());
    }

    @Test
    public void testWorkflowTaskTypeKafkaPublish() {
        WorkflowTask workflowTask = createSampleWorkflowTask();
        workflowTask.setType("KAFKA_PUBLISH");
        workflowTask.getInputParameters().put("kafka_request", "testInput");

        when(mockMetadataDao.getTaskDef(anyString())).thenReturn(new TaskDef());

        Set<ConstraintViolation<WorkflowTask>> result = validator.validate(workflowTask);
        assertEquals(0, result.size());
    }

    @Test
    public void testWorkflowTaskTypeKafkaPublishWithRequestParamMissing() {
        WorkflowTask workflowTask = createSampleWorkflowTask();
        workflowTask.setType("KAFKA_PUBLISH");

        when(mockMetadataDao.getTaskDef(anyString())).thenReturn(new TaskDef());

        Set<ConstraintViolation<WorkflowTask>> result = validator.validate(workflowTask);
        assertEquals(1, result.size());

        List<String> validationErrors = new ArrayList<>();
        result.forEach(e -> validationErrors.add(e.getMessage()));
        assertTrue(
                validationErrors.contains(
                        "inputParameters.kafka_request field is required for taskType: KAFKA_PUBLISH taskName: encode"));
    }

    @Test
    public void testWorkflowTaskTypeKafkaPublishWithKafkaParamInTaskDef() {
        // kafka_request supplied via the TaskDef input template satisfies the constraint.
        WorkflowTask workflowTask = createSampleWorkflowTask();
        workflowTask.setType("KAFKA_PUBLISH");

        TaskDef taskDef = new TaskDef();
        taskDef.setName("encode");
        taskDef.getInputTemplate().put("kafka_request", "test_kafka_request");

        when(mockMetadataDao.getTaskDef(anyString())).thenReturn(taskDef);

        Set<ConstraintViolation<WorkflowTask>> result = validator.validate(workflowTask);
        assertEquals(0, result.size());
    }

    @Test
    public void testWorkflowTaskTypeKafkaPublishWithRequestParamInTaskDefAndWorkflowTask() {
        WorkflowTask workflowTask = createSampleWorkflowTask();
        workflowTask.setType("KAFKA_PUBLISH");
        workflowTask.getInputParameters().put("kafka_request", "http://www.netflix.com");

        TaskDef taskDef = new TaskDef();
        taskDef.setName("encode");
        taskDef.getInputTemplate().put("kafka_request", "test Kafka Request");

        when(mockMetadataDao.getTaskDef(anyString())).thenReturn(taskDef);

        Set<ConstraintViolation<WorkflowTask>> result = validator.validate(workflowTask);
        assertEquals(0, result.size());
    }

    @Test
    public void testWorkflowTaskTypeJSONJQTransform() {
        WorkflowTask workflowTask = createSampleWorkflowTask();
        workflowTask.setType("JSON_JQ_TRANSFORM");
        workflowTask.getInputParameters().put("queryExpression", ".");

        when(mockMetadataDao.getTaskDef(anyString())).thenReturn(new TaskDef());

        Set<ConstraintViolation<WorkflowTask>> result = validator.validate(workflowTask);
        assertEquals(0, result.size());
    }

    @Test
    public void testWorkflowTaskTypeJSONJQTransformWithQueryParamMissing() {
        WorkflowTask workflowTask = createSampleWorkflowTask();
        workflowTask.setType("JSON_JQ_TRANSFORM");

        when(mockMetadataDao.getTaskDef(anyString())).thenReturn(new TaskDef());

        Set<ConstraintViolation<WorkflowTask>> result = validator.validate(workflowTask);
        assertEquals(1, result.size());

        List<String> validationErrors = new ArrayList<>();
        result.forEach(e -> validationErrors.add(e.getMessage()));
        assertTrue(
                validationErrors.contains(
                        "inputParameters.queryExpression field is required for taskType: JSON_JQ_TRANSFORM taskName: encode"));
    }

    @Test
    public void testWorkflowTaskTypeJSONJQTransformWithQueryParamInTaskDef() {
        // queryExpression supplied via the TaskDef input template satisfies the constraint.
        WorkflowTask workflowTask = createSampleWorkflowTask();
        workflowTask.setType("JSON_JQ_TRANSFORM");

        TaskDef taskDef = new TaskDef();
        taskDef.setName("encode");
        taskDef.getInputTemplate().put("queryExpression", ".");

        when(mockMetadataDao.getTaskDef(anyString())).thenReturn(taskDef);

        Set<ConstraintViolation<WorkflowTask>> result = validator.validate(workflowTask);
        assertEquals(0, result.size());
    }

    /** Validates the task and returns the violation messages as a list. */
    private List<String> getErrorMessages(WorkflowTask workflowTask) {
        Set<ConstraintViolation<WorkflowTask>> result = validator.validate(workflowTask);
        List<String> validationErrors = new ArrayList<>();
        result.forEach(e -> validationErrors.add(e.getMessage()));
        return validationErrors;
    }

    /**
     * Builds a minimal valid-looking task named "encode" of type FORK_JOIN_DYNAMIC;
     * individual tests override the type and parameters as needed.
     */
    private WorkflowTask createSampleWorkflowTask() {
        WorkflowTask workflowTask = new WorkflowTask();
        workflowTask.setName("encode");
        workflowTask.setTaskReferenceName("encode");
        workflowTask.setType("FORK_JOIN_DYNAMIC");
        Map<String, Object> inputParam = new HashMap<>();
        inputParam.put("fileLocation", "${workflow.input.fileLocation}");
        workflowTask.setInputParameters(inputParam);
        return workflowTask;
    }
}
/*
* Copyright 2020 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.validations;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.validation.ConstraintViolation;
import javax.validation.Validation;
import javax.validation.Validator;
import javax.validation.ValidatorFactory;
import org.apache.bval.jsr.ApacheValidationProvider;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.mockito.Mockito;
import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.tasks.TaskType;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.dao.MetadataDAO;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Mockito.when;
/**
 * Verifies workflow-definition level constraints: that task input parameter
 * expressions reference tasks that exist in the definition, that TaskDef required
 * fields are enforced, and that task reference names are unique per definition.
 */
public class WorkflowDefConstraintTest {

    private static Validator validator;
    private static ValidatorFactory validatorFactory;

    private MetadataDAO mockMetadataDao;

    @BeforeClass
    public static void init() {
        validatorFactory =
                Validation.byProvider(ApacheValidationProvider.class)
                        .configure()
                        .buildValidatorFactory();
        validator = validatorFactory.getValidator();
    }

    @AfterClass
    public static void close() {
        validatorFactory.close();
    }

    @Before
    public void setUp() {
        mockMetadataDao = Mockito.mock(MetadataDAO.class);
        when(mockMetadataDao.getTaskDef(anyString())).thenReturn(new TaskDef());
        ValidationContext.initialize(mockMetadataDao);
    }

    @Test
    public void testWorkflowTaskName() {
        TaskDef taskDef = new TaskDef(); // name is null
        // Uses the default validation provider, kept method-local so the shared
        // class-level validator is not affected.
        ValidatorFactory factory = Validation.buildDefaultValidatorFactory();
        Validator defaultValidator = factory.getValidator();
        Set<ConstraintViolation<Object>> result = defaultValidator.validate(taskDef);
        assertEquals(2, result.size());
    }

    @Test
    public void testWorkflowTaskSimple() {
        WorkflowDef workflowDef = new WorkflowDef();
        workflowDef.setName("sampleWorkflow");
        workflowDef.setDescription("Sample workflow def");
        workflowDef.setOwnerEmail("sample@test.com");
        workflowDef.setVersion(2);

        WorkflowTask workflowTask_1 = new WorkflowTask();
        workflowTask_1.setName("task_1");
        workflowTask_1.setTaskReferenceName("task_1");
        workflowTask_1.setType(TaskType.TASK_TYPE_SIMPLE);

        // ${workflow.input.*} is always a valid reference.
        Map<String, Object> inputParam = new HashMap<>();
        inputParam.put("fileLocation", "${workflow.input.fileLocation}");
        workflowTask_1.setInputParameters(inputParam);

        List<WorkflowTask> tasks = new ArrayList<>();
        tasks.add(workflowTask_1);
        workflowDef.setTasks(tasks);

        Set<ConstraintViolation<WorkflowDef>> result = validator.validate(workflowDef);
        assertEquals(0, result.size());
    }

    /** Tests that an input parameter referencing an unknown task is rejected. */
    @Test
    public void testWorkflowTaskInvalidInputParam() {
        WorkflowDef workflowDef = new WorkflowDef();
        workflowDef.setName("sampleWorkflow");
        workflowDef.setDescription("Sample workflow def");
        workflowDef.setOwnerEmail("sample@test.com");
        workflowDef.setVersion(2);

        WorkflowTask workflowTask_1 = new WorkflowTask();
        workflowTask_1.setName("task_1");
        workflowTask_1.setTaskReferenceName("task_1");
        workflowTask_1.setType(TaskType.TASK_TYPE_SIMPLE);

        // "work" is not a task reference name defined in this workflow.
        Map<String, Object> inputParam = new HashMap<>();
        inputParam.put("fileLocation", "${work.input.fileLocation}");
        workflowTask_1.setInputParameters(inputParam);

        List<WorkflowTask> tasks = new ArrayList<>();
        tasks.add(workflowTask_1);
        workflowDef.setTasks(tasks);

        // Method-local validator: do NOT reassign the shared static field, which
        // would leak the default provider into subsequent tests.
        ValidatorFactory factory = Validation.buildDefaultValidatorFactory();
        Validator defaultValidator = factory.getValidator();

        when(mockMetadataDao.getTaskDef("work1")).thenReturn(new TaskDef());

        Set<ConstraintViolation<WorkflowDef>> result = defaultValidator.validate(workflowDef);
        assertEquals(1, result.size());
        // Expected value first, actual second (JUnit convention).
        assertEquals(
                "taskReferenceName: work for given task: task_1 input value: fileLocation of input parameter: ${work.input.fileLocation} is not defined in workflow definition.",
                result.iterator().next().getMessage());
    }

    @Test
    public void testWorkflowTaskReferenceNameNotUnique() {
        WorkflowDef workflowDef = new WorkflowDef();
        workflowDef.setName("sampleWorkflow");
        workflowDef.setDescription("Sample workflow def");
        workflowDef.setOwnerEmail("sample@test.com");
        workflowDef.setVersion(2);

        WorkflowTask workflowTask_1 = new WorkflowTask();
        workflowTask_1.setName("task_1");
        workflowTask_1.setTaskReferenceName("task_1");
        workflowTask_1.setType(TaskType.TASK_TYPE_SIMPLE);

        // Both tasks reference "task_2", which is never defined, and share the
        // reference name "task_1".
        Map<String, Object> inputParam = new HashMap<>();
        inputParam.put("fileLocation", "${task_2.input.fileLocation}");
        workflowTask_1.setInputParameters(inputParam);

        WorkflowTask workflowTask_2 = new WorkflowTask();
        workflowTask_2.setName("task_2");
        workflowTask_2.setTaskReferenceName("task_1");
        workflowTask_2.setType(TaskType.TASK_TYPE_SIMPLE);
        workflowTask_2.setInputParameters(inputParam);

        List<WorkflowTask> tasks = new ArrayList<>();
        tasks.add(workflowTask_1);
        tasks.add(workflowTask_2);
        workflowDef.setTasks(tasks);

        // Method-local validator: do NOT reassign the shared static field.
        ValidatorFactory factory = Validation.buildDefaultValidatorFactory();
        Validator defaultValidator = factory.getValidator();

        when(mockMetadataDao.getTaskDef(anyString())).thenReturn(new TaskDef());

        Set<ConstraintViolation<WorkflowDef>> result = defaultValidator.validate(workflowDef);
        assertEquals(3, result.size());

        List<String> validationErrors = new ArrayList<>();
        result.forEach(e -> validationErrors.add(e.getMessage()));
        assertTrue(
                validationErrors.contains(
                        "taskReferenceName: task_2 for given task: task_2 input value: fileLocation of input parameter: ${task_2.input.fileLocation} is not defined in workflow definition."));
        assertTrue(
                validationErrors.contains(
                        "taskReferenceName: task_2 for given task: task_1 input value: fileLocation of input parameter: ${task_2.input.fileLocation} is not defined in workflow definition."));
        assertTrue(
                validationErrors.contains(
                        "taskReferenceName: task_1 should be unique across tasks for a given workflowDefinition: sampleWorkflow"));
    }
}
/*
* Copyright 2020 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.service;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import javax.validation.ConstraintViolationException;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.boot.test.context.TestConfiguration;
import org.springframework.context.annotation.Bean;
import org.springframework.test.context.junit4.SpringRunner;
import com.netflix.conductor.core.execution.WorkflowExecutor;
import static com.netflix.conductor.TestUtils.getConstraintViolationMessages;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
@SuppressWarnings("SpringJavaAutowiredMembersInspection")
@RunWith(SpringRunner.class)
@EnableAutoConfiguration
public class WorkflowBulkServiceTest {

    /** Minimal Spring context: a mocked executor wired into the real bulk-service impl. */
    @TestConfiguration
    static class TestWorkflowBulkConfiguration {

        @Bean
        WorkflowExecutor workflowExecutor() {
            return mock(WorkflowExecutor.class);
        }

        @Bean
        public WorkflowBulkService workflowBulkService(WorkflowExecutor workflowExecutor) {
            return new WorkflowBulkServiceImpl(workflowExecutor);
        }
    }

    @Autowired private WorkflowExecutor workflowExecutor;

    @Autowired private WorkflowBulkService workflowBulkService;

    /** A null workflow-id list must be rejected with a single constraint violation. */
    @Test(expected = ConstraintViolationException.class)
    public void testPauseWorkflowNull() {
        try {
            workflowBulkService.pauseWorkflow(null);
        } catch (ConstraintViolationException ex) {
            assertEquals(1, ex.getConstraintViolations().size());
            Set<String> messages = getConstraintViolationMessages(ex.getConstraintViolations());
            assertTrue(messages.contains("WorkflowIds list cannot be null."));
            throw ex;
        }
    }

    /** Lists larger than the 1000-workflow bulk limit must be rejected. */
    @Test(expected = ConstraintViolationException.class)
    public void testPauseWorkflowWithInvalidListSize() {
        try {
            // 1002 entries, i.e. two past the documented limit of 1000.
            // (Capacity hint matches the number of elements actually added.)
            List<String> list = new ArrayList<>(1002);
            for (int i = 0; i < 1002; i++) {
                list.add("test");
            }
            workflowBulkService.pauseWorkflow(list);
        } catch (ConstraintViolationException ex) {
            assertEquals(1, ex.getConstraintViolations().size());
            Set<String> messages = getConstraintViolationMessages(ex.getConstraintViolations());
            assertTrue(
                    messages.contains(
                            "Cannot process more than 1000 workflows. Please use multiple requests."));
            throw ex;
        }
    }

    /** A null workflow-id list must be rejected for resume as well. */
    @Test(expected = ConstraintViolationException.class)
    public void testResumeWorkflowNull() {
        try {
            workflowBulkService.resumeWorkflow(null);
        } catch (ConstraintViolationException ex) {
            assertEquals(1, ex.getConstraintViolations().size());
            Set<String> messages = getConstraintViolationMessages(ex.getConstraintViolations());
            assertTrue(messages.contains("WorkflowIds list cannot be null."));
            throw ex;
        }
    }

    /** A null workflow-id list must be rejected for restart. */
    @Test(expected = ConstraintViolationException.class)
    public void testRestartWorkflowNull() {
        try {
            workflowBulkService.restart(null, false);
        } catch (ConstraintViolationException ex) {
            assertEquals(1, ex.getConstraintViolations().size());
            Set<String> messages = getConstraintViolationMessages(ex.getConstraintViolations());
            assertTrue(messages.contains("WorkflowIds list cannot be null."));
            throw ex;
        }
    }

    /** A null workflow-id list must be rejected for retry. */
    @Test(expected = ConstraintViolationException.class)
    public void testRetryWorkflowNull() {
        try {
            workflowBulkService.retry(null);
        } catch (ConstraintViolationException ex) {
            assertEquals(1, ex.getConstraintViolations().size());
            Set<String> messages = getConstraintViolationMessages(ex.getConstraintViolations());
            assertTrue(messages.contains("WorkflowIds list cannot be null."));
            throw ex;
        }
    }

    /** A valid retry request is forwarded to the executor with resumeSubworkflowTasks=false. */
    @Test
    public void testRetryWorkflowSuccessful() {
        // When
        workflowBulkService.retry(Collections.singletonList("anyId"));
        // Then
        verify(workflowExecutor).retry("anyId", false);
    }

    /** A null workflow-id list must be rejected for terminate. */
    @Test(expected = ConstraintViolationException.class)
    public void testTerminateNull() {
        try {
            workflowBulkService.terminate(null, null);
        } catch (ConstraintViolationException ex) {
            assertEquals(1, ex.getConstraintViolations().size());
            Set<String> messages = getConstraintViolationMessages(ex.getConstraintViolations());
            assertTrue(messages.contains("WorkflowIds list cannot be null."));
            throw ex;
        }
    }
}
| 6,645 |
0 | Create_ds/conductor/core/src/test/java/com/netflix/conductor | Create_ds/conductor/core/src/test/java/com/netflix/conductor/service/WorkflowServiceTest.java | /*
* Copyright 2021 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.service;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.validation.ConstraintViolationException;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.boot.test.context.TestConfiguration;
import org.springframework.context.annotation.Bean;
import org.springframework.test.context.junit4.SpringRunner;
import com.netflix.conductor.common.metadata.workflow.RerunWorkflowRequest;
import com.netflix.conductor.common.metadata.workflow.SkipTaskRequest;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.run.SearchResult;
import com.netflix.conductor.common.run.Workflow;
import com.netflix.conductor.common.run.WorkflowSummary;
import com.netflix.conductor.core.exception.NotFoundException;
import com.netflix.conductor.core.execution.WorkflowExecutor;
import com.netflix.conductor.core.operation.StartWorkflowOperation;
import static com.netflix.conductor.TestUtils.getConstraintViolationMessages;
import static org.junit.Assert.*;
import static org.mockito.ArgumentMatchers.*;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
@SuppressWarnings("SpringJavaAutowiredMembersInspection")
@RunWith(SpringRunner.class)
@EnableAutoConfiguration
public class WorkflowServiceTest {

    /** Minimal Spring context: all collaborators mocked, real WorkflowServiceImpl under test. */
    @TestConfiguration
    static class TestWorkflowConfiguration {

        @Bean
        public WorkflowExecutor workflowExecutor() {
            return mock(WorkflowExecutor.class);
        }

        @Bean
        public StartWorkflowOperation startWorkflowOperation() {
            return mock(StartWorkflowOperation.class);
        }

        @Bean
        public ExecutionService executionService() {
            return mock(ExecutionService.class);
        }

        @Bean
        public MetadataService metadataService() {
            return mock(MetadataServiceImpl.class);
        }

        @Bean
        public WorkflowService workflowService(
                WorkflowExecutor workflowExecutor,
                ExecutionService executionService,
                MetadataService metadataService,
                StartWorkflowOperation startWorkflowOperation) {
            return new WorkflowServiceImpl(
                    workflowExecutor, executionService, metadataService, startWorkflowOperation);
        }
    }

    @Autowired private WorkflowExecutor workflowExecutor;

    @Autowired private ExecutionService executionService;

    @Autowired private MetadataService metadataService;

    @Autowired private WorkflowService workflowService;

    /** A null StartWorkflowRequest must be rejected with a single constraint violation. */
    @Test(expected = ConstraintViolationException.class)
    public void testStartWorkflowNull() {
        try {
            workflowService.startWorkflow(null);
        } catch (ConstraintViolationException ex) {
            assertEquals(1, ex.getConstraintViolations().size());
            Set<String> messages = getConstraintViolationMessages(ex.getConstraintViolations());
            assertTrue(messages.contains("StartWorkflowRequest cannot be null"));
            throw ex;
        }
    }

    /** An empty workflow name must be rejected. */
    @Test(expected = ConstraintViolationException.class)
    public void testGetWorkflowsNoName() {
        try {
            workflowService.getWorkflows("", "c123", true, true);
        } catch (ConstraintViolationException ex) {
            assertEquals(1, ex.getConstraintViolations().size());
            Set<String> messages = getConstraintViolationMessages(ex.getConstraintViolations());
            assertTrue(messages.contains("Workflow name cannot be null or empty"));
            throw ex;
        }
    }

    // Renamed from testGetWorklfowsSingleCorrelationId ("Worklfows" typo).
    @Test
    public void testGetWorkflowsSingleCorrelationId() {
        Workflow workflow = new Workflow();
        workflow.setCorrelationId("c123");
        List<Workflow> workflowArrayList = Collections.singletonList(workflow);
        when(executionService.getWorkflowInstances(
                        anyString(), anyString(), anyBoolean(), anyBoolean()))
                .thenReturn(workflowArrayList);
        assertEquals(workflowArrayList, workflowService.getWorkflows("test", "c123", true, true));
    }

    // Renamed from testGetWorklfowsMultipleCorrelationId ("Worklfows" typo).
    @Test
    public void testGetWorkflowsMultipleCorrelationId() {
        Workflow workflow = new Workflow();
        workflow.setCorrelationId("c123");
        List<Workflow> workflowArrayList = Collections.singletonList(workflow);
        List<String> correlationIdList = Collections.singletonList("c123");
        Map<String, List<Workflow>> workflowMap = new HashMap<>();
        workflowMap.put("c123", workflowArrayList);
        when(executionService.getWorkflowInstances(
                        anyString(), anyString(), anyBoolean(), anyBoolean()))
                .thenReturn(workflowArrayList);
        assertEquals(
                workflowMap, workflowService.getWorkflows("test", true, true, correlationIdList));
    }

    /** getExecutionStatus passes the ExecutionService result straight through. */
    @Test
    public void testGetExecutionStatus() {
        Workflow workflow = new Workflow();
        workflow.setCorrelationId("c123");
        when(executionService.getExecutionStatus(anyString(), anyBoolean())).thenReturn(workflow);
        assertEquals(workflow, workflowService.getExecutionStatus("w123", true));
    }

    /** An empty workflow id must be rejected. */
    @Test(expected = ConstraintViolationException.class)
    public void testGetExecutionStatusNoWorkflowId() {
        try {
            workflowService.getExecutionStatus("", true);
        } catch (ConstraintViolationException ex) {
            assertEquals(1, ex.getConstraintViolations().size());
            Set<String> messages = getConstraintViolationMessages(ex.getConstraintViolations());
            assertTrue(messages.contains("WorkflowId cannot be null or empty."));
            throw ex;
        }
    }

    /** A missing workflow (null from ExecutionService) is surfaced as NotFoundException. */
    @Test(expected = NotFoundException.class)
    public void testNotFoundExceptionGetExecutionStatus() {
        when(executionService.getExecutionStatus(anyString(), anyBoolean())).thenReturn(null);
        workflowService.getExecutionStatus("w123", true);
    }

    /** deleteWorkflow(archive=false) delegates to removeWorkflow with archive=false. */
    @Test
    public void testDeleteWorkflow() {
        workflowService.deleteWorkflow("w123", false);
        verify(executionService, times(1)).removeWorkflow(anyString(), eq(false));
    }

    @Test(expected = ConstraintViolationException.class)
    public void testInvalidDeleteWorkflow() {
        try {
            workflowService.deleteWorkflow(null, false);
        } catch (ConstraintViolationException ex) {
            assertEquals(1, ex.getConstraintViolations().size());
            Set<String> messages = getConstraintViolationMessages(ex.getConstraintViolations());
            assertTrue(messages.contains("WorkflowId cannot be null or empty."));
            throw ex;
        }
    }

    /** deleteWorkflow(archive=true) delegates to removeWorkflow with archive=true. */
    @Test
    public void testArchiveWorkflow() {
        workflowService.deleteWorkflow("w123", true);
        verify(executionService, times(1)).removeWorkflow(anyString(), eq(true));
    }

    @Test(expected = ConstraintViolationException.class)
    public void testInvalidArchiveWorkflow() {
        try {
            workflowService.deleteWorkflow(null, true);
        } catch (ConstraintViolationException ex) {
            assertEquals(1, ex.getConstraintViolations().size());
            Set<String> messages = getConstraintViolationMessages(ex.getConstraintViolations());
            assertTrue(messages.contains("WorkflowId cannot be null or empty."));
            throw ex;
        }
    }

    @Test(expected = ConstraintViolationException.class)
    public void testInvalidPauseWorkflow() {
        try {
            workflowService.pauseWorkflow(null);
        } catch (ConstraintViolationException ex) {
            assertEquals(1, ex.getConstraintViolations().size());
            Set<String> messages = getConstraintViolationMessages(ex.getConstraintViolations());
            assertTrue(messages.contains("WorkflowId cannot be null or empty."));
            throw ex;
        }
    }

    @Test(expected = ConstraintViolationException.class)
    public void testInvalidResumeWorkflow() {
        try {
            workflowService.resumeWorkflow(null);
        } catch (ConstraintViolationException ex) {
            assertEquals(1, ex.getConstraintViolations().size());
            Set<String> messages = getConstraintViolationMessages(ex.getConstraintViolations());
            assertTrue(messages.contains("WorkflowId cannot be null or empty."));
            throw ex;
        }
    }

    /** Both the workflow id and the task reference name are validated (two violations). */
    @Test(expected = ConstraintViolationException.class)
    public void testInvalidSkipTaskFromWorkflow() {
        try {
            SkipTaskRequest skipTaskRequest = new SkipTaskRequest();
            workflowService.skipTaskFromWorkflow(null, null, skipTaskRequest);
        } catch (ConstraintViolationException ex) {
            assertEquals(2, ex.getConstraintViolations().size());
            Set<String> messages = getConstraintViolationMessages(ex.getConstraintViolations());
            assertTrue(messages.contains("WorkflowId name cannot be null or empty."));
            assertTrue(messages.contains("TaskReferenceName cannot be null or empty."));
            throw ex;
        }
    }

    @Test(expected = ConstraintViolationException.class)
    public void testInvalidWorkflowNameGetRunningWorkflows() {
        try {
            workflowService.getRunningWorkflows(null, 123, null, null);
        } catch (ConstraintViolationException ex) {
            assertEquals(1, ex.getConstraintViolations().size());
            Set<String> messages = getConstraintViolationMessages(ex.getConstraintViolations());
            assertTrue(messages.contains("Workflow name cannot be null or empty."));
            throw ex;
        }
    }

    /** When a time window is supplied, the time-bounded executor overload is used. */
    @Test
    public void testGetRunningWorkflowsTime() {
        workflowService.getRunningWorkflows("test", 1, 100L, 120L);
        verify(workflowExecutor, times(1))
                .getWorkflows(anyString(), anyInt(), anyLong(), anyLong());
    }

    /** Without a time window, only the running-workflow-ids lookup is used. */
    @Test
    public void testGetRunningWorkflows() {
        workflowService.getRunningWorkflows("test", 1, null, null);
        verify(workflowExecutor, times(1)).getRunningWorkflowIds(anyString(), anyInt());
    }

    @Test
    public void testDecideWorkflow() {
        workflowService.decideWorkflow("test");
        verify(workflowExecutor, times(1)).decide(anyString());
    }

    @Test
    public void testPauseWorkflow() {
        workflowService.pauseWorkflow("test");
        verify(workflowExecutor, times(1)).pauseWorkflow(anyString());
    }

    @Test
    public void testResumeWorkflow() {
        workflowService.resumeWorkflow("test");
        verify(workflowExecutor, times(1)).resumeWorkflow(anyString());
    }

    @Test
    public void testSkipTaskFromWorkflow() {
        workflowService.skipTaskFromWorkflow("test", "testTask", null);
        verify(workflowExecutor, times(1)).skipTaskFromWorkflow(anyString(), anyString(), isNull());
    }

    @Test
    public void testRerunWorkflow() {
        RerunWorkflowRequest request = new RerunWorkflowRequest();
        workflowService.rerunWorkflow("test", request);
        verify(workflowExecutor, times(1)).rerun(any(RerunWorkflowRequest.class));
    }

    /** Null id and null request produce two distinct violations. */
    @Test(expected = ConstraintViolationException.class)
    public void testRerunWorkflowNull() {
        try {
            workflowService.rerunWorkflow(null, null);
        } catch (ConstraintViolationException ex) {
            assertEquals(2, ex.getConstraintViolations().size());
            Set<String> messages = getConstraintViolationMessages(ex.getConstraintViolations());
            assertTrue(messages.contains("WorkflowId cannot be null or empty."));
            assertTrue(messages.contains("RerunWorkflowRequest cannot be null."));
            throw ex;
        }
    }

    @Test(expected = ConstraintViolationException.class)
    public void testRestartWorkflowNull() {
        try {
            workflowService.restartWorkflow(null, false);
        } catch (ConstraintViolationException ex) {
            assertEquals(1, ex.getConstraintViolations().size());
            Set<String> messages = getConstraintViolationMessages(ex.getConstraintViolations());
            assertTrue(messages.contains("WorkflowId cannot be null or empty."));
            throw ex;
        }
    }

    @Test(expected = ConstraintViolationException.class)
    public void testRetryWorkflowNull() {
        try {
            workflowService.retryWorkflow(null, false);
        } catch (ConstraintViolationException ex) {
            assertEquals(1, ex.getConstraintViolations().size());
            Set<String> messages = getConstraintViolationMessages(ex.getConstraintViolations());
            assertTrue(messages.contains("WorkflowId cannot be null or empty."));
            throw ex;
        }
    }

    @Test(expected = ConstraintViolationException.class)
    public void testResetWorkflowNull() {
        try {
            workflowService.resetWorkflow(null);
        } catch (ConstraintViolationException ex) {
            assertEquals(1, ex.getConstraintViolations().size());
            Set<String> messages = getConstraintViolationMessages(ex.getConstraintViolations());
            assertTrue(messages.contains("WorkflowId cannot be null or empty."));
            throw ex;
        }
    }

    @Test(expected = ConstraintViolationException.class)
    public void testTerminateWorkflowNull() {
        try {
            workflowService.terminateWorkflow(null, null);
        } catch (ConstraintViolationException ex) {
            assertEquals(1, ex.getConstraintViolations().size());
            Set<String> messages = getConstraintViolationMessages(ex.getConstraintViolations());
            assertTrue(messages.contains("WorkflowId cannot be null or empty."));
            throw ex;
        }
    }

    /** rerunWorkflow returns whatever workflow id the executor produced. */
    @Test
    public void testRerunWorkflowReturnWorkflowId() {
        RerunWorkflowRequest request = new RerunWorkflowRequest();
        String workflowId = "w123";
        when(workflowExecutor.rerun(any(RerunWorkflowRequest.class))).thenReturn(workflowId);
        assertEquals(workflowId, workflowService.rerunWorkflow("test", request));
    }

    @Test
    public void testRestartWorkflow() {
        workflowService.restartWorkflow("w123", false);
        verify(workflowExecutor, times(1)).restart(anyString(), anyBoolean());
    }

    @Test
    public void testRetryWorkflow() {
        workflowService.retryWorkflow("w123", false);
        verify(workflowExecutor, times(1)).retry(anyString(), anyBoolean());
    }

    @Test
    public void testResetWorkflow() {
        workflowService.resetWorkflow("w123");
        verify(workflowExecutor, times(1)).resetCallbacksForWorkflow(anyString());
    }

    @Test
    public void testTerminateWorkflow() {
        workflowService.terminateWorkflow("w123", "test");
        verify(workflowExecutor, times(1)).terminateWorkflow(anyString(), anyString());
    }

    /** Both the single-sort-string and sort-list overloads hit the same search call. */
    @Test
    public void testSearchWorkflows() {
        Workflow workflow = new Workflow();
        WorkflowDef def = new WorkflowDef();
        def.setName("name");
        def.setVersion(1);
        workflow.setWorkflowDefinition(def);
        workflow.setCorrelationId("c123");
        WorkflowSummary workflowSummary = new WorkflowSummary(workflow);
        List<WorkflowSummary> listOfWorkflowSummary = Collections.singletonList(workflowSummary);
        SearchResult<WorkflowSummary> searchResult = new SearchResult<>(100, listOfWorkflowSummary);
        when(executionService.search("*", "*", 0, 100, Collections.singletonList("asc")))
                .thenReturn(searchResult);
        assertEquals(searchResult, workflowService.searchWorkflows(0, 100, "asc", "*", "*"));
        assertEquals(
                searchResult,
                workflowService.searchWorkflows(
                        0, 100, Collections.singletonList("asc"), "*", "*"));
    }

    @Test
    public void testSearchWorkflowsV2() {
        Workflow workflow = new Workflow();
        workflow.setCorrelationId("c123");
        List<Workflow> listOfWorkflow = Collections.singletonList(workflow);
        SearchResult<Workflow> searchResult = new SearchResult<>(1, listOfWorkflow);
        when(executionService.searchV2("*", "*", 0, 100, Collections.singletonList("asc")))
                .thenReturn(searchResult);
        assertEquals(searchResult, workflowService.searchWorkflowsV2(0, 100, "asc", "*", "*"));
        assertEquals(
                searchResult,
                workflowService.searchWorkflowsV2(
                        0, 100, Collections.singletonList("asc"), "*", "*"));
    }

    /** Page sizes over 5000 must be rejected. */
    @Test
    public void testInvalidSizeSearchWorkflows() {
        ConstraintViolationException ex =
                assertThrows(
                        ConstraintViolationException.class,
                        () -> workflowService.searchWorkflows(0, 6000, "asc", "*", "*"));
        assertEquals(1, ex.getConstraintViolations().size());
        Set<String> messages = getConstraintViolationMessages(ex.getConstraintViolations());
        assertTrue(
                messages.contains(
                        "Cannot return more than 5000 workflows. Please use pagination."));
    }

    @Test
    public void testInvalidSizeSearchWorkflowsV2() {
        ConstraintViolationException ex =
                assertThrows(
                        ConstraintViolationException.class,
                        () -> workflowService.searchWorkflowsV2(0, 6000, "asc", "*", "*"));
        assertEquals(1, ex.getConstraintViolations().size());
        Set<String> messages = getConstraintViolationMessages(ex.getConstraintViolations());
        assertTrue(
                messages.contains(
                        "Cannot return more than 5000 workflows. Please use pagination."));
    }

    @Test
    public void testSearchWorkflowsByTasks() {
        Workflow workflow = new Workflow();
        WorkflowDef def = new WorkflowDef();
        def.setName("name");
        def.setVersion(1);
        workflow.setWorkflowDefinition(def);
        workflow.setCorrelationId("c123");
        WorkflowSummary workflowSummary = new WorkflowSummary(workflow);
        List<WorkflowSummary> listOfWorkflowSummary = Collections.singletonList(workflowSummary);
        SearchResult<WorkflowSummary> searchResult = new SearchResult<>(100, listOfWorkflowSummary);
        when(executionService.searchWorkflowByTasks(
                        "*", "*", 0, 100, Collections.singletonList("asc")))
                .thenReturn(searchResult);
        assertEquals(searchResult, workflowService.searchWorkflowsByTasks(0, 100, "asc", "*", "*"));
        assertEquals(
                searchResult,
                workflowService.searchWorkflowsByTasks(
                        0, 100, Collections.singletonList("asc"), "*", "*"));
    }

    @Test
    public void testSearchWorkflowsByTasksV2() {
        Workflow workflow = new Workflow();
        workflow.setCorrelationId("c123");
        List<Workflow> listOfWorkflow = Collections.singletonList(workflow);
        SearchResult<Workflow> searchResult = new SearchResult<>(1, listOfWorkflow);
        when(executionService.searchWorkflowByTasksV2(
                        "*", "*", 0, 100, Collections.singletonList("asc")))
                .thenReturn(searchResult);
        assertEquals(
                searchResult, workflowService.searchWorkflowsByTasksV2(0, 100, "asc", "*", "*"));
        assertEquals(
                searchResult,
                workflowService.searchWorkflowsByTasksV2(
                        0, 100, Collections.singletonList("asc"), "*", "*"));
    }
}
| 6,646 |
0 | Create_ds/conductor/core/src/test/java/com/netflix/conductor | Create_ds/conductor/core/src/test/java/com/netflix/conductor/service/EventServiceTest.java | /*
* Copyright 2020 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.service;
import java.util.Set;
import javax.validation.ConstraintViolationException;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.boot.test.context.TestConfiguration;
import org.springframework.context.annotation.Bean;
import org.springframework.test.context.junit4.SpringRunner;
import com.netflix.conductor.core.events.EventQueues;
import static com.netflix.conductor.TestUtils.getConstraintViolationMessages;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.Mockito.mock;
@SuppressWarnings("SpringJavaAutowiredMembersInspection")
@RunWith(SpringRunner.class)
@EnableAutoConfiguration
public class EventServiceTest {

    /** Supplies an EventServiceImpl backed entirely by mocked collaborators. */
    @TestConfiguration
    static class TestEventConfiguration {
        @Bean
        public EventService eventService() {
            MetadataService metadataService = mock(MetadataService.class);
            EventQueues eventQueues = mock(EventQueues.class);
            return new EventServiceImpl(metadataService, eventQueues);
        }
    }

    @Autowired private EventService eventService;

    /** Checks that exactly one violation was raised and that it carries the expected message. */
    private static void assertSingleViolation(
            ConstraintViolationException ex, String expectedMessage) {
        assertEquals(1, ex.getConstraintViolations().size());
        Set<String> violationMessages =
                getConstraintViolationMessages(ex.getConstraintViolations());
        assertTrue(violationMessages.contains(expectedMessage));
    }

    /** A null EventHandler must be rejected by addEventHandler. */
    @Test(expected = ConstraintViolationException.class)
    public void testAddEventHandler() {
        try {
            eventService.addEventHandler(null);
        } catch (ConstraintViolationException ex) {
            assertSingleViolation(ex, "EventHandler cannot be null.");
            throw ex;
        }
        fail("eventService.addEventHandler did not throw ConstraintViolationException !");
    }

    /** A null EventHandler must be rejected by updateEventHandler. */
    @Test(expected = ConstraintViolationException.class)
    public void testUpdateEventHandler() {
        try {
            eventService.updateEventHandler(null);
        } catch (ConstraintViolationException ex) {
            assertSingleViolation(ex, "EventHandler cannot be null.");
            throw ex;
        }
        fail("eventService.updateEventHandler did not throw ConstraintViolationException !");
    }

    /** A null handler name must be rejected by removeEventHandlerStatus. */
    @Test(expected = ConstraintViolationException.class)
    public void testRemoveEventHandlerStatus() {
        try {
            eventService.removeEventHandlerStatus(null);
        } catch (ConstraintViolationException ex) {
            assertSingleViolation(ex, "EventHandler name cannot be null or empty.");
            throw ex;
        }
        fail("eventService.removeEventHandlerStatus did not throw ConstraintViolationException !");
    }

    /** A null event must be rejected by getEventHandlersForEvent. */
    @Test(expected = ConstraintViolationException.class)
    public void testGetEventHandlersForEvent() {
        try {
            eventService.getEventHandlersForEvent(null, false);
        } catch (ConstraintViolationException ex) {
            assertSingleViolation(ex, "Event cannot be null or empty.");
            throw ex;
        }
        fail("eventService.getEventHandlersForEvent did not throw ConstraintViolationException !");
    }
}
| 6,647 |
0 | Create_ds/conductor/core/src/test/java/com/netflix/conductor | Create_ds/conductor/core/src/test/java/com/netflix/conductor/service/TaskServiceTest.java | /*
* Copyright 2020 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.service;
import java.util.List;
import java.util.Set;
import javax.validation.ConstraintViolationException;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.boot.test.context.TestConfiguration;
import org.springframework.context.annotation.Bean;
import org.springframework.test.context.junit4.SpringRunner;
import com.netflix.conductor.common.metadata.tasks.Task;
import com.netflix.conductor.common.metadata.tasks.TaskResult;
import com.netflix.conductor.common.run.SearchResult;
import com.netflix.conductor.common.run.TaskSummary;
import com.netflix.conductor.dao.QueueDAO;
import static com.netflix.conductor.TestUtils.getConstraintViolationMessages;
import static org.junit.Assert.*;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
@SuppressWarnings("SpringJavaAutowiredMembersInspection")
@RunWith(SpringRunner.class)
@EnableAutoConfiguration
public class TaskServiceTest {
@TestConfiguration
static class TestTaskConfiguration {
@Bean
public ExecutionService executionService() {
return mock(ExecutionService.class);
}
@Bean
public TaskService taskService(ExecutionService executionService) {
QueueDAO queueDAO = mock(QueueDAO.class);
return new TaskServiceImpl(executionService, queueDAO);
}
}
@Autowired private TaskService taskService;
@Autowired private ExecutionService executionService;
@Test(expected = ConstraintViolationException.class)
public void testPoll() {
try {
taskService.poll(null, null, null);
} catch (ConstraintViolationException ex) {
assertEquals(1, ex.getConstraintViolations().size());
Set<String> messages = getConstraintViolationMessages(ex.getConstraintViolations());
assertTrue(messages.contains("TaskType cannot be null or empty."));
throw ex;
}
}
@Test(expected = ConstraintViolationException.class)
public void testBatchPoll() {
try {
taskService.batchPoll(null, null, null, null, null);
} catch (ConstraintViolationException ex) {
assertEquals(1, ex.getConstraintViolations().size());
Set<String> messages = getConstraintViolationMessages(ex.getConstraintViolations());
assertTrue(messages.contains("TaskType cannot be null or empty."));
throw ex;
}
}
@Test(expected = ConstraintViolationException.class)
public void testGetTasks() {
try {
taskService.getTasks(null, null, null);
} catch (ConstraintViolationException ex) {
assertEquals(1, ex.getConstraintViolations().size());
Set<String> messages = getConstraintViolationMessages(ex.getConstraintViolations());
assertTrue(messages.contains("TaskType cannot be null or empty."));
throw ex;
}
}
@Test(expected = ConstraintViolationException.class)
public void testGetPendingTaskForWorkflow() {
try {
taskService.getPendingTaskForWorkflow(null, null);
} catch (ConstraintViolationException ex) {
assertEquals(2, ex.getConstraintViolations().size());
Set<String> messages = getConstraintViolationMessages(ex.getConstraintViolations());
assertTrue(messages.contains("WorkflowId cannot be null or empty."));
assertTrue(messages.contains("TaskReferenceName cannot be null or empty."));
throw ex;
}
}
@Test(expected = ConstraintViolationException.class)
public void testUpdateTask() {
try {
taskService.updateTask(null);
} catch (ConstraintViolationException ex) {
assertEquals(1, ex.getConstraintViolations().size());
Set<String> messages = getConstraintViolationMessages(ex.getConstraintViolations());
assertTrue(messages.contains("TaskResult cannot be null or empty."));
throw ex;
}
}
@Test(expected = ConstraintViolationException.class)
public void testUpdateTaskInValid() {
try {
TaskResult taskResult = new TaskResult();
taskService.updateTask(taskResult);
} catch (ConstraintViolationException ex) {
assertEquals(2, ex.getConstraintViolations().size());
Set<String> messages = getConstraintViolationMessages(ex.getConstraintViolations());
assertTrue(messages.contains("Workflow Id cannot be null or empty"));
assertTrue(messages.contains("Task ID cannot be null or empty"));
throw ex;
}
}
@Test(expected = ConstraintViolationException.class)
public void testAckTaskReceived() {
try {
taskService.ackTaskReceived(null, null);
} catch (ConstraintViolationException ex) {
assertEquals(1, ex.getConstraintViolations().size());
Set<String> messages = getConstraintViolationMessages(ex.getConstraintViolations());
assertTrue(messages.contains("TaskId cannot be null or empty."));
throw ex;
}
}
@Test
public void testAckTaskReceivedMissingWorkerId() {
String ack = taskService.ackTaskReceived("abc", null);
assertNotNull(ack);
}
@Test(expected = ConstraintViolationException.class)
public void testLog() {
try {
taskService.log(null, null);
} catch (ConstraintViolationException ex) {
assertEquals(1, ex.getConstraintViolations().size());
Set<String> messages = getConstraintViolationMessages(ex.getConstraintViolations());
assertTrue(messages.contains("TaskId cannot be null or empty."));
throw ex;
}
}
@Test(expected = ConstraintViolationException.class)
public void testGetTaskLogs() {
try {
taskService.getTaskLogs(null);
} catch (ConstraintViolationException ex) {
assertEquals(1, ex.getConstraintViolations().size());
Set<String> messages = getConstraintViolationMessages(ex.getConstraintViolations());
assertTrue(messages.contains("TaskId cannot be null or empty."));
throw ex;
}
}
@Test(expected = ConstraintViolationException.class)
public void testGetTask() {
try {
taskService.getTask(null);
} catch (ConstraintViolationException ex) {
assertEquals(1, ex.getConstraintViolations().size());
Set<String> messages = getConstraintViolationMessages(ex.getConstraintViolations());
assertTrue(messages.contains("TaskId cannot be null or empty."));
throw ex;
}
}
    // Both the task id and the task type are mandatory, so passing null for both
    // must surface two distinct violations in one exception.
    @Test(expected = ConstraintViolationException.class)
    public void testRemoveTaskFromQueue() {
        try {
            taskService.removeTaskFromQueue(null, null);
        } catch (ConstraintViolationException ex) {
            assertEquals(2, ex.getConstraintViolations().size());
            Set<String> messages = getConstraintViolationMessages(ex.getConstraintViolations());
            assertTrue(messages.contains("TaskId cannot be null or empty."));
            assertTrue(messages.contains("TaskType cannot be null or empty."));
            throw ex;
        }
    }
    // Poll data requires a task type; null must fail with a single violation.
    @Test(expected = ConstraintViolationException.class)
    public void testGetPollData() {
        try {
            taskService.getPollData(null);
        } catch (ConstraintViolationException ex) {
            assertEquals(1, ex.getConstraintViolations().size());
            Set<String> messages = getConstraintViolationMessages(ex.getConstraintViolations());
            assertTrue(messages.contains("TaskType cannot be null or empty."));
            throw ex;
        }
    }
    // Requeueing requires a task type; null must fail with a single violation.
    @Test(expected = ConstraintViolationException.class)
    public void testRequeuePendingTask() {
        try {
            taskService.requeuePendingTask(null);
        } catch (ConstraintViolationException ex) {
            assertEquals(1, ex.getConstraintViolations().size());
            Set<String> messages = getConstraintViolationMessages(ex.getConstraintViolations());
            assertTrue(messages.contains("TaskType cannot be null or empty."));
            throw ex;
        }
    }
    // search() must pass its arguments through to executionService.getSearchTasks
    // (note the parameter-order translation: start/size/sort/freeText/query) and
    // return the delegate's result untouched.
    @Test
    public void testSearch() {
        SearchResult<TaskSummary> searchResult =
                new SearchResult<>(2, List.of(mock(TaskSummary.class), mock(TaskSummary.class)));
        when(executionService.getSearchTasks("query", "*", 0, 2, "Sort")).thenReturn(searchResult);
        assertEquals(searchResult, taskService.search(0, 2, "Sort", "*", "query"));
    }
    // Same pass-through contract as testSearch, for the V2 variant that returns
    // full Task objects instead of summaries.
    @Test
    public void testSearchV2() {
        SearchResult<Task> searchResult =
                new SearchResult<>(2, List.of(mock(Task.class), mock(Task.class)));
        when(executionService.getSearchTasksV2("query", "*", 0, 2, "Sort"))
                .thenReturn(searchResult);
        assertEquals(searchResult, taskService.searchV2(0, 2, "Sort", "*", "query"));
    }
}
| 6,648 |
0 | Create_ds/conductor/core/src/test/java/com/netflix/conductor | Create_ds/conductor/core/src/test/java/com/netflix/conductor/service/MetadataServiceTest.java | /*
* Copyright 2020 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.service;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.validation.ConstraintViolationException;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.boot.test.context.TestConfiguration;
import org.springframework.context.annotation.Bean;
import org.springframework.test.context.junit4.SpringRunner;
import com.netflix.conductor.common.metadata.events.EventHandler;
import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowDefSummary;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.core.config.ConductorProperties;
import com.netflix.conductor.core.exception.NotFoundException;
import com.netflix.conductor.dao.EventHandlerDAO;
import com.netflix.conductor.dao.MetadataDAO;
import static com.netflix.conductor.TestUtils.getConstraintViolationMessages;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
@SuppressWarnings("SpringJavaAutowiredMembersInspection")
@RunWith(SpringRunner.class)
@EnableAutoConfiguration
public class MetadataServiceTest {
    // Supplies mock DAO/properties beans and a real MetadataServiceImpl wired on
    // top of them.
    // NOTE(review): these mocks are plain singleton @Beans (not @MockBean), so
    // Mockito stubbings and recorded invocations are NEVER reset between test
    // methods — verify(times(...)) calls in this class are order-sensitive.
    @TestConfiguration
    static class TestMetadataConfiguration {
        @Bean
        public MetadataDAO metadataDAO() {
            return mock(MetadataDAO.class);
        }
        @Bean
        public ConductorProperties properties() {
            // ownerEmail is made mandatory so the validation tests below can
            // assert the "ownerEmail cannot be empty" violation.
            ConductorProperties properties = mock(ConductorProperties.class);
            when(properties.isOwnerEmailMandatory()).thenReturn(true);
            return properties;
        }
        @Bean
        public MetadataService metadataService(
                MetadataDAO metadataDAO, ConductorProperties properties) {
            EventHandlerDAO eventHandlerDAO = mock(EventHandlerDAO.class);
            when(metadataDAO.getAllWorkflowDefs()).thenReturn(mockWorkflowDefs());
            return new MetadataServiceImpl(metadataDAO, eventHandlerDAO, properties);
        }
        private List<WorkflowDef> mockWorkflowDefs() {
            // Returns list of workflowDefs in reverse version order.
            // (testWorkflowNamesAndVersions relies on the service re-sorting
            // these into ascending version order.)
            List<WorkflowDef> retval = new ArrayList<>();
            for (int i = 5; i > 0; i--) {
                WorkflowDef def = new WorkflowDef();
                def.setCreateTime(new Date().getTime());
                def.setVersion(i);
                def.setName("test_workflow_def");
                retval.add(def);
            }
            return retval;
        }
    }
@Autowired private MetadataDAO metadataDAO;
@Autowired private MetadataService metadataService;
    // A blank TaskDef must fail with both the missing-name and missing-ownerEmail
    // violations; fail(...) guards against the call silently succeeding.
    @Test(expected = ConstraintViolationException.class)
    public void testRegisterTaskDefNoName() {
        TaskDef taskDef = new TaskDef();
        try {
            metadataService.registerTaskDef(Collections.singletonList(taskDef));
        } catch (ConstraintViolationException ex) {
            assertEquals(2, ex.getConstraintViolations().size());
            Set<String> messages = getConstraintViolationMessages(ex.getConstraintViolations());
            assertTrue(messages.contains("TaskDef name cannot be null or empty"));
            assertTrue(messages.contains("ownerEmail cannot be empty"));
            throw ex;
        }
        fail("metadataService.registerTaskDef did not throw ConstraintViolationException !");
    }
    // A null task-def list must be rejected with a single violation.
    @Test(expected = ConstraintViolationException.class)
    public void testRegisterTaskDefNull() {
        try {
            metadataService.registerTaskDef(null);
        } catch (ConstraintViolationException ex) {
            assertEquals(1, ex.getConstraintViolations().size());
            Set<String> messages = getConstraintViolationMessages(ex.getConstraintViolations());
            assertTrue(messages.contains("TaskDefList cannot be empty or null"));
            throw ex;
        }
        fail("metadataService.registerTaskDef did not throw ConstraintViolationException !");
    }
    // responseTimeoutSeconds has a minimum of 1; zero must be rejected even when
    // name and ownerEmail are valid.
    @Test(expected = ConstraintViolationException.class)
    public void testRegisterTaskDefNoResponseTimeout() {
        try {
            TaskDef taskDef = new TaskDef();
            taskDef.setName("somename");
            taskDef.setOwnerEmail("sample@test.com");
            taskDef.setResponseTimeoutSeconds(0);
            metadataService.registerTaskDef(Collections.singletonList(taskDef));
        } catch (ConstraintViolationException ex) {
            assertEquals(1, ex.getConstraintViolations().size());
            Set<String> messages = getConstraintViolationMessages(ex.getConstraintViolations());
            assertTrue(
                    messages.contains(
                            "TaskDef responseTimeoutSeconds: 0 should be minimum 1 second"));
            throw ex;
        }
        fail("metadataService.registerTaskDef did not throw ConstraintViolationException !");
    }
    // Updating a blank TaskDef must fail the same two validations as registering one.
    @Test(expected = ConstraintViolationException.class)
    public void testUpdateTaskDefNameNull() {
        try {
            TaskDef taskDef = new TaskDef();
            metadataService.updateTaskDef(taskDef);
        } catch (ConstraintViolationException ex) {
            assertEquals(2, ex.getConstraintViolations().size());
            Set<String> messages = getConstraintViolationMessages(ex.getConstraintViolations());
            assertTrue(messages.contains("TaskDef name cannot be null or empty"));
            assertTrue(messages.contains("ownerEmail cannot be empty"));
            throw ex;
        }
        fail("metadataService.updateTaskDef did not throw ConstraintViolationException !");
    }
    // A null TaskDef must be rejected with a single violation.
    @Test(expected = ConstraintViolationException.class)
    public void testUpdateTaskDefNull() {
        try {
            metadataService.updateTaskDef(null);
        } catch (ConstraintViolationException ex) {
            assertEquals(1, ex.getConstraintViolations().size());
            Set<String> messages = getConstraintViolationMessages(ex.getConstraintViolations());
            assertTrue(messages.contains("TaskDef cannot be null"));
            throw ex;
        }
        fail("metadataService.updateTaskDef did not throw ConstraintViolationException !");
    }
    // Updating a task def that the DAO does not know about must raise NotFoundException.
    @Test(expected = NotFoundException.class)
    public void testUpdateTaskDefNotExisting() {
        TaskDef taskDef = new TaskDef();
        taskDef.setName("test");
        taskDef.setOwnerEmail("sample@test.com");
        when(metadataDAO.getTaskDef(any())).thenReturn(null);
        metadataService.updateTaskDef(taskDef);
    }
    // NOTE(review): this is a byte-for-byte duplicate of testUpdateTaskDefNotExisting.
    // Despite its name, no DAO exception is simulated — the stub returns null rather
    // than throwing. Either make the DAO throw (and expect the translated exception)
    // or remove this duplicate; TODO confirm the original intent.
    @Test(expected = NotFoundException.class)
    public void testUpdateTaskDefDaoException() {
        TaskDef taskDef = new TaskDef();
        taskDef.setName("test");
        taskDef.setOwnerEmail("sample@test.com");
        when(metadataDAO.getTaskDef(any())).thenReturn(null);
        metadataService.updateTaskDef(taskDef);
    }
    // Happy path: a fully valid task def is persisted exactly once via the DAO.
    @Test
    public void testRegisterTaskDef() {
        TaskDef taskDef = new TaskDef();
        taskDef.setName("somename");
        taskDef.setOwnerEmail("sample@test.com");
        taskDef.setResponseTimeoutSeconds(60 * 60);
        metadataService.registerTaskDef(Collections.singletonList(taskDef));
        verify(metadataDAO, times(1)).createTaskDef(any(TaskDef.class));
    }
    // A null workflow-def list must be rejected with a single violation.
    @Test(expected = ConstraintViolationException.class)
    public void testUpdateWorkflowDefNull() {
        try {
            List<WorkflowDef> workflowDefList = null;
            metadataService.updateWorkflowDef(workflowDefList);
        } catch (ConstraintViolationException ex) {
            assertEquals(1, ex.getConstraintViolations().size());
            Set<String> messages = getConstraintViolationMessages(ex.getConstraintViolations());
            assertTrue(messages.contains("WorkflowDef list name cannot be null or empty"));
            throw ex;
        }
        fail("metadataService.updateWorkflowDef did not throw ConstraintViolationException !");
    }
    // An empty list is distinguished from a null one and gets its own message.
    @Test(expected = ConstraintViolationException.class)
    public void testUpdateWorkflowDefEmptyList() {
        try {
            List<WorkflowDef> workflowDefList = new ArrayList<>();
            metadataService.updateWorkflowDef(workflowDefList);
        } catch (ConstraintViolationException ex) {
            assertEquals(1, ex.getConstraintViolations().size());
            Set<String> messages = getConstraintViolationMessages(ex.getConstraintViolations());
            assertTrue(messages.contains("WorkflowDefList is empty"));
            throw ex;
        }
        fail("metadataService.updateWorkflowDef did not throw ConstraintViolationException !");
    }
    // A null element inside the list must be caught by element-level validation.
    @Test(expected = ConstraintViolationException.class)
    public void testUpdateWorkflowDefWithNullWorkflowDef() {
        try {
            List<WorkflowDef> workflowDefList = new ArrayList<>();
            workflowDefList.add(null);
            metadataService.updateWorkflowDef(workflowDefList);
        } catch (ConstraintViolationException ex) {
            assertEquals(1, ex.getConstraintViolations().size());
            Set<String> messages = getConstraintViolationMessages(ex.getConstraintViolations());
            assertTrue(messages.contains("WorkflowDef cannot be null"));
            throw ex;
        }
        fail("metadataService.updateWorkflowDef did not throw ConstraintViolationException !");
    }
    // A workflow def with null name/ownerEmail and no tasks must report all three
    // violations at once.
    @Test(expected = ConstraintViolationException.class)
    public void testUpdateWorkflowDefWithEmptyWorkflowDefName() {
        try {
            List<WorkflowDef> workflowDefList = new ArrayList<>();
            WorkflowDef workflowDef = new WorkflowDef();
            workflowDef.setName(null);
            workflowDef.setOwnerEmail(null);
            workflowDefList.add(workflowDef);
            metadataService.updateWorkflowDef(workflowDefList);
        } catch (ConstraintViolationException ex) {
            assertEquals(3, ex.getConstraintViolations().size());
            Set<String> messages = getConstraintViolationMessages(ex.getConstraintViolations());
            assertTrue(messages.contains("WorkflowDef name cannot be null or empty"));
            assertTrue(messages.contains("WorkflowTask list cannot be empty"));
            assertTrue(messages.contains("ownerEmail cannot be empty"));
            throw ex;
        }
        fail("metadataService.updateWorkflowDef did not throw ConstraintViolationException !");
    }
    // Happy path: a valid workflow def (one task whose TaskDef resolves via the
    // stubbed DAO) is updated exactly once.
    @Test
    public void testUpdateWorkflowDef() {
        WorkflowDef workflowDef = new WorkflowDef();
        workflowDef.setName("somename");
        workflowDef.setOwnerEmail("sample@test.com");
        List<WorkflowTask> tasks = new ArrayList<>();
        WorkflowTask workflowTask = new WorkflowTask();
        workflowTask.setTaskReferenceName("hello");
        workflowTask.setName("hello");
        tasks.add(workflowTask);
        workflowDef.setTasks(tasks);
        when(metadataDAO.getTaskDef(any())).thenReturn(new TaskDef());
        metadataService.updateWorkflowDef(Collections.singletonList(workflowDef));
        verify(metadataDAO, times(1)).updateWorkflowDef(workflowDef);
    }
    // Registering a blank workflow def must fail name, task-list and ownerEmail checks.
    @Test(expected = ConstraintViolationException.class)
    public void testRegisterWorkflowDefNoName() {
        try {
            WorkflowDef workflowDef = new WorkflowDef();
            metadataService.registerWorkflowDef(workflowDef);
        } catch (ConstraintViolationException ex) {
            assertEquals(3, ex.getConstraintViolations().size());
            Set<String> messages = getConstraintViolationMessages(ex.getConstraintViolations());
            assertTrue(messages.contains("WorkflowDef name cannot be null or empty"));
            assertTrue(messages.contains("WorkflowTask list cannot be empty"));
            assertTrue(messages.contains("ownerEmail cannot be empty"));
            throw ex;
        }
        fail("metadataService.registerWorkflowDef did not throw ConstraintViolationException !");
    }
    // validateWorkflowDef must apply the same constraints as register, without persisting.
    @Test(expected = ConstraintViolationException.class)
    public void testValidateWorkflowDefNoName() {
        try {
            WorkflowDef workflowDef = new WorkflowDef();
            metadataService.validateWorkflowDef(workflowDef);
        } catch (ConstraintViolationException ex) {
            assertEquals(3, ex.getConstraintViolations().size());
            Set<String> messages = getConstraintViolationMessages(ex.getConstraintViolations());
            assertTrue(messages.contains("WorkflowDef name cannot be null or empty"));
            assertTrue(messages.contains("WorkflowTask list cannot be empty"));
            assertTrue(messages.contains("ownerEmail cannot be empty"));
            throw ex;
        }
        fail("metadataService.validateWorkflowDef did not throw ConstraintViolationException !");
    }
    // A ':' in the workflow name and a malformed email ("inavlid-email" is
    // deliberately not an address) must each produce a violation.
    @Test(expected = ConstraintViolationException.class)
    public void testRegisterWorkflowDefInvalidName() {
        try {
            WorkflowDef workflowDef = new WorkflowDef();
            workflowDef.setName("invalid:name");
            workflowDef.setOwnerEmail("inavlid-email");
            metadataService.registerWorkflowDef(workflowDef);
        } catch (ConstraintViolationException ex) {
            assertEquals(3, ex.getConstraintViolations().size());
            Set<String> messages = getConstraintViolationMessages(ex.getConstraintViolations());
            assertTrue(messages.contains("WorkflowTask list cannot be empty"));
            assertTrue(
                    messages.contains(
                            "Workflow name cannot contain the following set of characters: ':'"));
            assertTrue(messages.contains("ownerEmail should be valid email address"));
            throw ex;
        }
        fail("metadataService.registerWorkflowDef did not throw ConstraintViolationException !");
    }
    // Same invalid-name/invalid-email expectations as the register variant, applied
    // to the validate-only entry point.
    @Test(expected = ConstraintViolationException.class)
    public void testValidateWorkflowDefInvalidName() {
        try {
            WorkflowDef workflowDef = new WorkflowDef();
            workflowDef.setName("invalid:name");
            workflowDef.setOwnerEmail("inavlid-email");
            metadataService.validateWorkflowDef(workflowDef);
        } catch (ConstraintViolationException ex) {
            assertEquals(3, ex.getConstraintViolations().size());
            Set<String> messages = getConstraintViolationMessages(ex.getConstraintViolations());
            assertTrue(messages.contains("WorkflowTask list cannot be empty"));
            assertTrue(
                    messages.contains(
                            "Workflow name cannot contain the following set of characters: ':'"));
            assertTrue(messages.contains("ownerEmail should be valid email address"));
            throw ex;
        }
        fail("metadataService.validateWorkflowDef did not throw ConstraintViolationException !");
    }
    // Happy path: a valid def is persisted once and its schema version is preserved.
    @Test
    public void testRegisterWorkflowDef() {
        WorkflowDef workflowDef = new WorkflowDef();
        workflowDef.setName("somename");
        workflowDef.setSchemaVersion(2);
        workflowDef.setOwnerEmail("sample@test.com");
        List<WorkflowTask> tasks = new ArrayList<>();
        WorkflowTask workflowTask = new WorkflowTask();
        workflowTask.setTaskReferenceName("hello");
        workflowTask.setName("hello");
        tasks.add(workflowTask);
        workflowDef.setTasks(tasks);
        when(metadataDAO.getTaskDef(any())).thenReturn(new TaskDef());
        metadataService.registerWorkflowDef(workflowDef);
        verify(metadataDAO, times(1)).createWorkflowDef(workflowDef);
        assertEquals(2, workflowDef.getSchemaVersion());
    }
    // NOTE(review): this looks like a copy-paste of testRegisterWorkflowDef.
    // validateWorkflowDef should only validate, yet the test verifies that
    // createWorkflowDef was invoked once. Because the metadataDAO mock bean is a
    // shared singleton that is never reset, this verification can only be
    // satisfied by an invocation leaked from testRegisterWorkflowDef, making the
    // test order-dependent. Confirm intent and either drop the verify or assert
    // the DAO was NOT written to.
    @Test
    public void testValidateWorkflowDef() {
        WorkflowDef workflowDef = new WorkflowDef();
        workflowDef.setName("somename");
        workflowDef.setSchemaVersion(2);
        workflowDef.setOwnerEmail("sample@test.com");
        List<WorkflowTask> tasks = new ArrayList<>();
        WorkflowTask workflowTask = new WorkflowTask();
        workflowTask.setTaskReferenceName("hello");
        workflowTask.setName("hello");
        tasks.add(workflowTask);
        workflowDef.setTasks(tasks);
        when(metadataDAO.getTaskDef(any())).thenReturn(new TaskDef());
        metadataService.validateWorkflowDef(workflowDef);
        verify(metadataDAO, times(1)).createWorkflowDef(workflowDef);
        assertEquals(2, workflowDef.getSchemaVersion());
    }
    // Empty name plus null version must produce two violations.
    @Test(expected = ConstraintViolationException.class)
    public void testUnregisterWorkflowDefNoName() {
        try {
            metadataService.unregisterWorkflowDef("", null);
        } catch (ConstraintViolationException ex) {
            assertEquals(2, ex.getConstraintViolations().size());
            Set<String> messages = getConstraintViolationMessages(ex.getConstraintViolations());
            assertTrue(messages.contains("Workflow name cannot be null or empty"));
            assertTrue(messages.contains("Version cannot be null"));
            throw ex;
        }
        fail("metadataService.unregisterWorkflowDef did not throw ConstraintViolationException !");
    }
@Test
public void testUnregisterWorkflowDef() {
metadataService.unregisterWorkflowDef("somename", 111);
verify(metadataDAO, times(1)).removeWorkflowDef("somename", 111);
}
    // A null event handler must be rejected with a single violation.
    @Test(expected = ConstraintViolationException.class)
    public void testValidateEventNull() {
        try {
            metadataService.addEventHandler(null);
        } catch (ConstraintViolationException ex) {
            assertEquals(1, ex.getConstraintViolations().size());
            Set<String> messages = getConstraintViolationMessages(ex.getConstraintViolations());
            assertTrue(messages.contains("EventHandler cannot be null"));
            throw ex;
        }
        fail("metadataService.addEventHandler did not throw ConstraintViolationException !");
    }
    // An empty EventHandler must report the missing name, event and actions together.
    @Test(expected = ConstraintViolationException.class)
    public void testValidateEventNoEvent() {
        try {
            EventHandler eventHandler = new EventHandler();
            metadataService.addEventHandler(eventHandler);
        } catch (ConstraintViolationException ex) {
            assertEquals(3, ex.getConstraintViolations().size());
            Set<String> messages = getConstraintViolationMessages(ex.getConstraintViolations());
            assertTrue(messages.contains("Missing event handler name"));
            assertTrue(messages.contains("Missing event location"));
            assertTrue(
                    messages.contains("No actions specified. Please specify at-least one action"));
            throw ex;
        }
        fail("metadataService.addEventHandler did not throw ConstraintViolationException !");
    }
    // The DAO stub (see TestMetadataConfiguration) returns versions 5..1; this
    // asserts the service re-sorts them into ascending version order per name.
    @Test
    public void testWorkflowNamesAndVersions() {
        Map<String, ? extends Iterable<WorkflowDefSummary>> namesAndVersions =
                metadataService.getWorkflowNamesAndVersions();
        Iterator<WorkflowDefSummary> versions =
                namesAndVersions.get("test_workflow_def").iterator();
        for (int i = 1; i <= 5; i++) {
            WorkflowDefSummary ver = versions.next();
            assertEquals(i, ver.getVersion());
            assertNotNull(ver.getCreateTime());
            assertEquals("test_workflow_def", ver.getName());
        }
    }
}
| 6,649 |
0 | Create_ds/conductor/core/src/test/java/com/netflix/conductor | Create_ds/conductor/core/src/test/java/com/netflix/conductor/service/ExecutionServiceTest.java | /*
* Copyright 2022 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.service;
import java.time.Duration;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.springframework.test.context.junit4.SpringRunner;
import com.netflix.conductor.common.metadata.tasks.Task;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.run.SearchResult;
import com.netflix.conductor.common.run.TaskSummary;
import com.netflix.conductor.common.run.Workflow;
import com.netflix.conductor.common.run.WorkflowSummary;
import com.netflix.conductor.common.utils.ExternalPayloadStorage;
import com.netflix.conductor.core.config.ConductorProperties;
import com.netflix.conductor.core.dal.ExecutionDAOFacade;
import com.netflix.conductor.core.execution.WorkflowExecutor;
import com.netflix.conductor.core.execution.tasks.SystemTaskRegistry;
import com.netflix.conductor.dao.QueueDAO;
import static junit.framework.TestCase.assertEquals;
import static org.mockito.Mockito.when;
@RunWith(SpringRunner.class)
public class ExecutionServiceTest {
@Mock private WorkflowExecutor workflowExecutor;
@Mock private ExecutionDAOFacade executionDAOFacade;
@Mock private QueueDAO queueDAO;
@Mock private ConductorProperties conductorProperties;
@Mock private ExternalPayloadStorage externalPayloadStorage;
@Mock private SystemTaskRegistry systemTaskRegistry;
private ExecutionService executionService;
private Workflow workflow1;
private Workflow workflow2;
private Task taskWorkflow1;
private Task taskWorkflow2;
private final List<String> sort = Collections.singletonList("Sort");
    // Builds a fresh ExecutionService over the mocked collaborators and two
    // paired fixtures: workflow wf1 with task1, workflow wf2 with task2.
    @Before
    public void setup() {
        // The service postpones task execution based on this property; stub it
        // before construction since the constructor reads it.
        when(conductorProperties.getTaskExecutionPostponeDuration())
                .thenReturn(Duration.ofSeconds(60));
        executionService =
                new ExecutionService(
                        workflowExecutor,
                        executionDAOFacade,
                        queueDAO,
                        conductorProperties,
                        externalPayloadStorage,
                        systemTaskRegistry);
        WorkflowDef workflowDef = new WorkflowDef();
        workflow1 = new Workflow();
        workflow1.setWorkflowId("wf1");
        workflow1.setWorkflowDefinition(workflowDef);
        workflow2 = new Workflow();
        workflow2.setWorkflowId("wf2");
        workflow2.setWorkflowDefinition(workflowDef);
        taskWorkflow1 = new Task();
        taskWorkflow1.setTaskId("task1");
        taskWorkflow1.setWorkflowInstanceId("wf1");
        taskWorkflow2 = new Task();
        taskWorkflow2.setTaskId("task2");
        taskWorkflow2.setWorkflowInstanceId("wf2");
    }
    // search() returns the summaries from the facade, preserving order and total hits.
    @Test
    public void workflowSearchTest() {
        when(executionDAOFacade.searchWorkflowSummary("query", "*", 0, 2, sort))
                .thenReturn(
                        new SearchResult<>(
                                2,
                                Arrays.asList(
                                        new WorkflowSummary(workflow1),
                                        new WorkflowSummary(workflow2))));
        when(executionDAOFacade.getWorkflow(workflow1.getWorkflowId(), false))
                .thenReturn(workflow1);
        when(executionDAOFacade.getWorkflow(workflow2.getWorkflowId(), false))
                .thenReturn(workflow2);
        SearchResult<WorkflowSummary> searchResult =
                executionService.search("query", "*", 0, 2, sort);
        assertEquals(2, searchResult.getTotalHits());
        assertEquals(2, searchResult.getResults().size());
        assertEquals(workflow1.getWorkflowId(), searchResult.getResults().get(0).getWorkflowId());
        assertEquals(workflow2.getWorkflowId(), searchResult.getResults().get(1).getWorkflowId());
    }
    // searchV2 hydrates each returned workflow id into a full Workflow via the facade.
    @Test
    public void workflowSearchV2Test() {
        when(executionDAOFacade.searchWorkflows("query", "*", 0, 2, sort))
                .thenReturn(
                        new SearchResult<>(
                                2,
                                Arrays.asList(
                                        workflow1.getWorkflowId(), workflow2.getWorkflowId())));
        when(executionDAOFacade.getWorkflow(workflow1.getWorkflowId(), false))
                .thenReturn(workflow1);
        when(executionDAOFacade.getWorkflow(workflow2.getWorkflowId(), false))
                .thenReturn(workflow2);
        SearchResult<Workflow> searchResult = executionService.searchV2("query", "*", 0, 2, sort);
        assertEquals(2, searchResult.getTotalHits());
        assertEquals(Arrays.asList(workflow1, workflow2), searchResult.getResults());
    }
    // A workflow whose hydration throws is silently dropped from the V2 results;
    // note the total-hit count also shrinks to the surviving entries.
    @Test
    public void workflowSearchV2ExceptionTest() {
        when(executionDAOFacade.searchWorkflows("query", "*", 0, 2, sort))
                .thenReturn(
                        new SearchResult<>(
                                2,
                                Arrays.asList(
                                        workflow1.getWorkflowId(), workflow2.getWorkflowId())));
        when(executionDAOFacade.getWorkflow(workflow1.getWorkflowId(), false))
                .thenReturn(workflow1);
        when(executionDAOFacade.getWorkflow(workflow2.getWorkflowId(), false))
                .thenThrow(new RuntimeException());
        SearchResult<Workflow> searchResult = executionService.searchV2("query", "*", 0, 2, sort);
        assertEquals(1, searchResult.getTotalHits());
        assertEquals(Collections.singletonList(workflow1), searchResult.getResults());
    }
    // Task-summary hits are mapped to their owning workflows via getWorkflow(id, false).
    @Test
    public void workflowSearchByTasksTest() {
        when(executionDAOFacade.searchTaskSummary("query", "*", 0, 2, sort))
                .thenReturn(
                        new SearchResult<>(
                                2,
                                Arrays.asList(
                                        new TaskSummary(taskWorkflow1),
                                        new TaskSummary(taskWorkflow2))));
        when(executionDAOFacade.getWorkflow(workflow1.getWorkflowId(), false))
                .thenReturn(workflow1);
        when(executionDAOFacade.getWorkflow(workflow2.getWorkflowId(), false))
                .thenReturn(workflow2);
        SearchResult<WorkflowSummary> searchResult =
                executionService.searchWorkflowByTasks("query", "*", 0, 2, sort);
        assertEquals(2, searchResult.getTotalHits());
        assertEquals(2, searchResult.getResults().size());
        assertEquals(workflow1.getWorkflowId(), searchResult.getResults().get(0).getWorkflowId());
        assertEquals(workflow2.getWorkflowId(), searchResult.getResults().get(1).getWorkflowId());
    }
@Test
public void workflowSearchByTasksExceptionTest() {
when(executionDAOFacade.searchTaskSummary("query", "*", 0, 2, sort))
.thenReturn(
new SearchResult<>(
2,
Arrays.asList(
new TaskSummary(taskWorkflow1),
new TaskSummary(taskWorkflow2))));
when(executionDAOFacade.getWorkflow(workflow1.getWorkflowId(), false))
.thenReturn(workflow1);
when(executionDAOFacade.getTask(workflow2.getWorkflowId()))
.thenThrow(new RuntimeException());
SearchResult<WorkflowSummary> searchResult =
executionService.searchWorkflowByTasks("query", "*", 0, 2, sort);
assertEquals(1, searchResult.getTotalHits());
assertEquals(1, searchResult.getResults().size());
assertEquals(workflow1.getWorkflowId(), searchResult.getResults().get(0).getWorkflowId());
}
    // V2 path resolves task id -> Task -> workflowInstanceId -> Workflow.
    @Test
    public void workflowSearchByTasksV2Test() {
        when(executionDAOFacade.searchTasks("query", "*", 0, 2, sort))
                .thenReturn(
                        new SearchResult<>(
                                2,
                                Arrays.asList(
                                        taskWorkflow1.getTaskId(), taskWorkflow2.getTaskId())));
        when(executionDAOFacade.getTask(taskWorkflow1.getTaskId())).thenReturn(taskWorkflow1);
        when(executionDAOFacade.getTask(taskWorkflow2.getTaskId())).thenReturn(taskWorkflow2);
        when(executionDAOFacade.getWorkflow(workflow1.getWorkflowId(), false))
                .thenReturn(workflow1);
        when(executionDAOFacade.getWorkflow(workflow2.getWorkflowId(), false))
                .thenReturn(workflow2);
        SearchResult<Workflow> searchResult =
                executionService.searchWorkflowByTasksV2("query", "*", 0, 2, sort);
        assertEquals(2, searchResult.getTotalHits());
        assertEquals(Arrays.asList(workflow1, workflow2), searchResult.getResults());
    }
    // A task whose lookup throws is dropped before the workflow is resolved, so
    // only wf1 survives in the V2 results.
    @Test
    public void workflowSearchByTasksV2ExceptionTest() {
        when(executionDAOFacade.searchTasks("query", "*", 0, 2, sort))
                .thenReturn(
                        new SearchResult<>(
                                2,
                                Arrays.asList(
                                        taskWorkflow1.getTaskId(), taskWorkflow2.getTaskId())));
        when(executionDAOFacade.getTask(taskWorkflow1.getTaskId())).thenReturn(taskWorkflow1);
        when(executionDAOFacade.getTask(taskWorkflow2.getTaskId()))
                .thenThrow(new RuntimeException());
        when(executionDAOFacade.getWorkflow(workflow1.getWorkflowId(), false))
                .thenReturn(workflow1);
        SearchResult<Workflow> searchResult =
                executionService.searchWorkflowByTasksV2("query", "*", 0, 2, sort);
        assertEquals(1, searchResult.getTotalHits());
        assertEquals(Collections.singletonList(workflow1), searchResult.getResults());
    }
    // Task summaries from the facade pass through unmodified.
    // NOTE(review): method name breaks the lowerCamelCase test-name convention
    // used by the rest of this class (same for the two tests below).
    @Test
    public void TaskSearchTest() {
        List<TaskSummary> taskList =
                Arrays.asList(new TaskSummary(taskWorkflow1), new TaskSummary(taskWorkflow2));
        when(executionDAOFacade.searchTaskSummary("query", "*", 0, 2, sort))
                .thenReturn(new SearchResult<>(2, taskList));
        SearchResult<TaskSummary> searchResult =
                executionService.getSearchTasks("query", "*", 0, 2, "Sort");
        assertEquals(2, searchResult.getTotalHits());
        assertEquals(2, searchResult.getResults().size());
        assertEquals(taskWorkflow1.getTaskId(), searchResult.getResults().get(0).getTaskId());
        assertEquals(taskWorkflow2.getTaskId(), searchResult.getResults().get(1).getTaskId());
    }
    // V2 task search hydrates each task id into a full Task via the facade.
    @Test
    public void TaskSearchV2Test() {
        when(executionDAOFacade.searchTasks("query", "*", 0, 2, sort))
                .thenReturn(
                        new SearchResult<>(
                                2,
                                Arrays.asList(
                                        taskWorkflow1.getTaskId(), taskWorkflow2.getTaskId())));
        when(executionDAOFacade.getTask(taskWorkflow1.getTaskId())).thenReturn(taskWorkflow1);
        when(executionDAOFacade.getTask(taskWorkflow2.getTaskId())).thenReturn(taskWorkflow2);
        SearchResult<Task> searchResult =
                executionService.getSearchTasksV2("query", "*", 0, 2, "Sort");
        assertEquals(2, searchResult.getTotalHits());
        assertEquals(Arrays.asList(taskWorkflow1, taskWorkflow2), searchResult.getResults());
    }
    // A task whose hydration throws is dropped from the V2 results.
    @Test
    public void TaskSearchV2ExceptionTest() {
        when(executionDAOFacade.searchTasks("query", "*", 0, 2, sort))
                .thenReturn(
                        new SearchResult<>(
                                2,
                                Arrays.asList(
                                        taskWorkflow1.getTaskId(), taskWorkflow2.getTaskId())));
        when(executionDAOFacade.getTask(taskWorkflow1.getTaskId())).thenReturn(taskWorkflow1);
        when(executionDAOFacade.getTask(taskWorkflow2.getTaskId()))
                .thenThrow(new RuntimeException());
        SearchResult<Task> searchResult =
                executionService.getSearchTasksV2("query", "*", 0, 2, "Sort");
        assertEquals(1, searchResult.getTotalHits());
        assertEquals(Collections.singletonList(taskWorkflow1), searchResult.getResults());
    }
}
| 6,650 |
0 | Create_ds/conductor/core/src/main/java/com/netflix/conductor | Create_ds/conductor/core/src/main/java/com/netflix/conductor/metrics/Monitors.java | /*
* Copyright 2022 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.metrics;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;
import org.apache.commons.lang3.StringUtils;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import com.netflix.spectator.api.Counter;
import com.netflix.spectator.api.DistributionSummary;
import com.netflix.spectator.api.Gauge;
import com.netflix.spectator.api.Id;
import com.netflix.spectator.api.Registry;
import com.netflix.spectator.api.Spectator;
import com.netflix.spectator.api.Timer;
import com.netflix.spectator.api.histogram.PercentileTimer;
public class Monitors {
private static final Registry registry = Spectator.globalRegistry();
public static final String NO_DOMAIN = "NO_DOMAIN";
private static final Map<String, Map<Map<String, String>, Counter>> counters =
new ConcurrentHashMap<>();
private static final Map<String, Map<Map<String, String>, PercentileTimer>> timers =
new ConcurrentHashMap<>();
private static final Map<String, Map<Map<String, String>, Gauge>> gauges =
new ConcurrentHashMap<>();
private static final Map<String, Map<Map<String, String>, DistributionSummary>>
distributionSummaries = new ConcurrentHashMap<>();
public static final String classQualifier = "WorkflowMonitor";
private Monitors() {}
    /**
     * Increment a counter that is used to measure the rate at which some event is occurring.
     * Consider a simple queue, counters would be used to measure things like the rate at which
     * items are being inserted and removed.
     *
     * @param className source class name, folded into the metric's tag set
     * @param name metric name
     * @param additionalTags extra tag data merged into the tags (presumably alternating
     *     key/value pairs — toMap(...) is not visible here; confirm)
     */
    private static void counter(String className, String name, String... additionalTags) {
        getCounter(className, name, additionalTags).increment();
    }
/**
* Set a gauge is a handle to get the current value. Typical examples for gauges would be the
* size of a queue or number of threads in the running state. Since gauges are sampled, there is
* no information about what might have occurred between samples.
*
* @param className
* @param name
* @param measurement
* @param additionalTags
*/
private static void gauge(
String className, String name, long measurement, String... additionalTags) {
getGauge(className, name, additionalTags).set(measurement);
}
/**
* Records a value for an event as a distribution summary. Unlike a gauge, this is sampled
* multiple times during a minute or everytime a new value is recorded.
*
* @param className
* @param name
* @param additionalTags
*/
private static void distributionSummary(
String className, String name, long value, String... additionalTags) {
getDistributionSummary(className, name, additionalTags).record(value);
}
private static Timer getTimer(String className, String name, String... additionalTags) {
Map<String, String> tags = toMap(className, additionalTags);
return timers.computeIfAbsent(name, s -> new ConcurrentHashMap<>())
.computeIfAbsent(
tags,
t -> {
Id id = registry.createId(name, tags);
return PercentileTimer.get(registry, id);
});
}
private static Counter getCounter(String className, String name, String... additionalTags) {
Map<String, String> tags = toMap(className, additionalTags);
return counters.computeIfAbsent(name, s -> new ConcurrentHashMap<>())
.computeIfAbsent(
tags,
t -> {
Id id = registry.createId(name, tags);
return registry.counter(id);
});
}
private static Gauge getGauge(String className, String name, String... additionalTags) {
Map<String, String> tags = toMap(className, additionalTags);
return gauges.computeIfAbsent(name, s -> new ConcurrentHashMap<>())
.computeIfAbsent(
tags,
t -> {
Id id = registry.createId(name, tags);
return registry.gauge(id);
});
}
private static DistributionSummary getDistributionSummary(
String className, String name, String... additionalTags) {
Map<String, String> tags = toMap(className, additionalTags);
return distributionSummaries
.computeIfAbsent(name, s -> new ConcurrentHashMap<>())
.computeIfAbsent(
tags,
t -> {
Id id = registry.createId(name, tags);
return registry.distributionSummary(id);
});
}
private static Map<String, String> toMap(String className, String... additionalTags) {
Map<String, String> tags = new HashMap<>();
tags.put("class", className);
for (int j = 0; j < additionalTags.length - 1; j++) {
String tk = additionalTags[j];
String tv = "" + additionalTags[j + 1];
if (!tv.isEmpty()) {
tags.put(tk, tv);
}
j++;
}
return tags;
}
/**
* @param className Name of the class
* @param methodName Method name
*/
public static void error(String className, String methodName) {
getCounter(className, "workflow_server_error", "methodName", methodName).increment();
}
public static void recordGauge(String name, long count) {
gauge(classQualifier, name, count);
}
public static void recordCounter(String name, long count, String... additionalTags) {
getCounter(classQualifier, name, additionalTags).increment(count);
}
public static void recordQueueWaitTime(String taskType, long queueWaitTime) {
getTimer(classQualifier, "task_queue_wait", "taskType", taskType)
.record(queueWaitTime, TimeUnit.MILLISECONDS);
}
public static void recordTaskExecutionTime(
String taskType, long duration, boolean includesRetries, TaskModel.Status status) {
getTimer(
classQualifier,
"task_execution",
"taskType",
taskType,
"includeRetries",
"" + includesRetries,
"status",
status.name())
.record(duration, TimeUnit.MILLISECONDS);
}
public static void recordWorkflowDecisionTime(long duration) {
getTimer(classQualifier, "workflow_decision").record(duration, TimeUnit.MILLISECONDS);
}
public static void recordTaskPollError(String taskType, String exception) {
recordTaskPollError(taskType, NO_DOMAIN, exception);
}
public static void recordTaskPollError(String taskType, String domain, String exception) {
counter(
classQualifier,
"task_poll_error",
"taskType",
taskType,
"domain",
domain,
"exception",
exception);
}
public static void recordTaskPoll(String taskType) {
counter(classQualifier, "task_poll", "taskType", taskType);
}
public static void recordTaskPollCount(String taskType, int count) {
recordTaskPollCount(taskType, NO_DOMAIN, count);
}
public static void recordTaskPollCount(String taskType, String domain, int count) {
getCounter(classQualifier, "task_poll_count", "taskType", taskType, "domain", domain)
.increment(count);
}
public static void recordQueueDepth(String taskType, long size, String ownerApp) {
gauge(
classQualifier,
"task_queue_depth",
size,
"taskType",
taskType,
"ownerApp",
StringUtils.defaultIfBlank(ownerApp, "unknown"));
}
public static void recordTaskInProgress(String taskType, long size, String ownerApp) {
gauge(
classQualifier,
"task_in_progress",
size,
"taskType",
taskType,
"ownerApp",
StringUtils.defaultIfBlank(ownerApp, "unknown"));
}
public static void recordRunningWorkflows(long count, String name, String ownerApp) {
gauge(
classQualifier,
"workflow_running",
count,
"workflowName",
name,
"ownerApp",
StringUtils.defaultIfBlank(ownerApp, "unknown"));
}
public static void recordNumTasksInWorkflow(long count, String name, String version) {
distributionSummary(
classQualifier,
"tasks_in_workflow",
count,
"workflowName",
name,
"version",
version);
}
public static void recordTaskTimeout(String taskType) {
counter(classQualifier, "task_timeout", "taskType", taskType);
}
public static void recordTaskResponseTimeout(String taskType) {
counter(classQualifier, "task_response_timeout", "taskType", taskType);
}
public static void recordTaskPendingTime(String taskType, String workflowType, long duration) {
gauge(
classQualifier,
"task_pending_time",
duration,
"workflowName",
workflowType,
"taskType",
taskType);
}
public static void recordWorkflowTermination(
String workflowType, WorkflowModel.Status status, String ownerApp) {
counter(
classQualifier,
"workflow_failure",
"workflowName",
workflowType,
"status",
status.name(),
"ownerApp",
StringUtils.defaultIfBlank(ownerApp, "unknown"));
}
public static void recordWorkflowStartSuccess(
String workflowType, String version, String ownerApp) {
counter(
classQualifier,
"workflow_start_success",
"workflowName",
workflowType,
"version",
version,
"ownerApp",
StringUtils.defaultIfBlank(ownerApp, "unknown"));
}
public static void recordWorkflowStartError(String workflowType, String ownerApp) {
counter(
classQualifier,
"workflow_start_error",
"workflowName",
workflowType,
"ownerApp",
StringUtils.defaultIfBlank(ownerApp, "unknown"));
}
public static void recordUpdateConflict(
String taskType, String workflowType, WorkflowModel.Status status) {
counter(
classQualifier,
"task_update_conflict",
"workflowName",
workflowType,
"taskType",
taskType,
"workflowStatus",
status.name());
}
public static void recordUpdateConflict(
String taskType, String workflowType, TaskModel.Status status) {
counter(
classQualifier,
"task_update_conflict",
"workflowName",
workflowType,
"taskType",
taskType,
"taskStatus",
status.name());
}
public static void recordTaskUpdateError(String taskType, String workflowType) {
counter(
classQualifier,
"task_update_error",
"workflowName",
workflowType,
"taskType",
taskType);
}
public static void recordTaskExtendLeaseError(String taskType, String workflowType) {
counter(
classQualifier,
"task_extendLease_error",
"workflowName",
workflowType,
"taskType",
taskType);
}
public static void recordTaskQueueOpError(String taskType, String workflowType) {
counter(
classQualifier,
"task_queue_op_error",
"workflowName",
workflowType,
"taskType",
taskType);
}
public static void recordWorkflowCompletion(
String workflowType, long duration, String ownerApp) {
getTimer(
classQualifier,
"workflow_execution",
"workflowName",
workflowType,
"ownerApp",
StringUtils.defaultIfBlank(ownerApp, "unknown"))
.record(duration, TimeUnit.MILLISECONDS);
}
public static void recordUnackTime(String workflowType, long duration) {
getTimer(classQualifier, "workflow_unack", "workflowName", workflowType)
.record(duration, TimeUnit.MILLISECONDS);
}
public static void recordTaskRateLimited(String taskDefName, int limit) {
gauge(classQualifier, "task_rate_limited", limit, "taskType", taskDefName);
}
public static void recordTaskConcurrentExecutionLimited(String taskDefName, int limit) {
gauge(classQualifier, "task_concurrent_execution_limited", limit, "taskType", taskDefName);
}
public static void recordEventQueueMessagesProcessed(
String queueType, String queueName, int count) {
getCounter(
classQualifier,
"event_queue_messages_processed",
"queueType",
queueType,
"queueName",
queueName)
.increment(count);
}
public static void recordObservableQMessageReceivedErrors(String queueType) {
counter(classQualifier, "observable_queue_error", "queueType", queueType);
}
public static void recordEventQueueMessagesHandled(String queueType, String queueName) {
counter(
classQualifier,
"event_queue_messages_handled",
"queueType",
queueType,
"queueName",
queueName);
}
public static void recordEventQueueMessagesError(String queueType, String queueName) {
counter(
classQualifier,
"event_queue_messages_error",
"queueType",
queueType,
"queueName",
queueName);
}
public static void recordEventExecutionSuccess(String event, String handler, String action) {
counter(
classQualifier,
"event_execution_success",
"event",
event,
"handler",
handler,
"action",
action);
}
public static void recordEventExecutionError(
String event, String handler, String action, String exceptionClazz) {
counter(
classQualifier,
"event_execution_error",
"event",
event,
"handler",
handler,
"action",
action,
"exception",
exceptionClazz);
}
public static void recordEventActionError(String action, String entityName, String event) {
counter(
classQualifier,
"event_action_error",
"action",
action,
"entityName",
entityName,
"event",
event);
}
public static void recordDaoRequests(
String dao, String action, String taskType, String workflowType) {
counter(
classQualifier,
"dao_requests",
"dao",
dao,
"action",
action,
"taskType",
StringUtils.defaultIfBlank(taskType, "unknown"),
"workflowType",
StringUtils.defaultIfBlank(workflowType, "unknown"));
}
public static void recordDaoEventRequests(String dao, String action, String event) {
counter(classQualifier, "dao_event_requests", "dao", dao, "action", action, "event", event);
}
public static void recordDaoPayloadSize(
String dao, String action, String taskType, String workflowType, int size) {
gauge(
classQualifier,
"dao_payload_size",
size,
"dao",
dao,
"action",
action,
"taskType",
StringUtils.defaultIfBlank(taskType, "unknown"),
"workflowType",
StringUtils.defaultIfBlank(workflowType, "unknown"));
}
public static void recordExternalPayloadStorageUsage(
String name, String operation, String payloadType) {
counter(
classQualifier,
"external_payload_storage_usage",
"name",
name,
"operation",
operation,
"payloadType",
payloadType);
}
public static void recordDaoError(String dao, String action) {
counter(classQualifier, "dao_errors", "dao", dao, "action", action);
}
public static void recordAckTaskError(String taskType) {
counter(classQualifier, "task_ack_error", "taskType", taskType);
}
public static void recordESIndexTime(String action, String docType, long val) {
getTimer(Monitors.classQualifier, action, "docType", docType)
.record(val, TimeUnit.MILLISECONDS);
}
public static void recordWorkerQueueSize(String queueType, int val) {
gauge(Monitors.classQualifier, "indexing_worker_queue", val, "queueType", queueType);
}
public static void recordDiscardedIndexingCount(String queueType) {
counter(Monitors.classQualifier, "discarded_index_count", "queueType", queueType);
}
public static void recordAcquireLockUnsuccessful() {
counter(classQualifier, "acquire_lock_unsuccessful");
}
public static void recordAcquireLockFailure(String exceptionClassName) {
counter(classQualifier, "acquire_lock_failure", "exceptionType", exceptionClassName);
}
public static void recordWorkflowArchived(String workflowType, WorkflowModel.Status status) {
counter(
classQualifier,
"workflow_archived",
"workflowName",
workflowType,
"workflowStatus",
status.name());
}
public static void recordArchivalDelayQueueSize(int val) {
gauge(classQualifier, "workflow_archival_delay_queue_size", val);
}
public static void recordDiscardedArchivalCount() {
counter(classQualifier, "discarded_archival_count");
}
public static void recordSystemTaskWorkerPollingLimited(String queueName) {
counter(classQualifier, "system_task_worker_polling_limited", "queueName", queueName);
}
public static void recordEventQueuePollSize(String queueType, int val) {
gauge(Monitors.classQualifier, "event_queue_poll", val, "queueType", queueType);
}
public static void recordQueueMessageRepushFromRepairService(String queueName) {
counter(classQualifier, "queue_message_repushed", "queueName", queueName);
}
public static void recordTaskExecLogSize(int val) {
gauge(classQualifier, "task_exec_log_size", val);
}
}
| 6,651 |
0 | Create_ds/conductor/core/src/main/java/com/netflix/conductor | Create_ds/conductor/core/src/main/java/com/netflix/conductor/metrics/WorkflowMonitor.java | /*
* Copyright 2022 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.metrics;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Set;
import java.util.stream.Collectors;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;
import com.netflix.conductor.annotations.VisibleForTesting;
import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.core.dal.ExecutionDAOFacade;
import com.netflix.conductor.core.execution.tasks.WorkflowSystemTask;
import com.netflix.conductor.dao.QueueDAO;
import com.netflix.conductor.service.MetadataService;
import static com.netflix.conductor.core.execution.tasks.SystemTaskRegistry.ASYNC_SYSTEM_TASKS_QUALIFIER;
@Component
@ConditionalOnProperty(
        name = "conductor.workflow-monitor.enabled",
        havingValue = "true",
        matchIfMissing = true)
public class WorkflowMonitor {

    private static final Logger LOGGER = LoggerFactory.getLogger(WorkflowMonitor.class);

    private final MetadataService metadataService;
    private final QueueDAO queueDAO;
    private final ExecutionDAOFacade executionDAOFacade;
    private final int metadataRefreshInterval;
    private final Set<WorkflowSystemTask> asyncSystemTasks;

    // Metadata is cached between runs and refreshed every `metadataRefreshInterval` invocations.
    private List<TaskDef> taskDefs;
    private List<WorkflowDef> workflowDefs;
    private int refreshCounter = 0;

    public WorkflowMonitor(
            MetadataService metadataService,
            QueueDAO queueDAO,
            ExecutionDAOFacade executionDAOFacade,
            @Value("${conductor.workflow-monitor.metadata-refresh-interval:10}")
                    int metadataRefreshInterval,
            @Qualifier(ASYNC_SYSTEM_TASKS_QUALIFIER) Set<WorkflowSystemTask> asyncSystemTasks) {
        this.metadataService = metadataService;
        this.queueDAO = queueDAO;
        this.executionDAOFacade = executionDAOFacade;
        this.metadataRefreshInterval = metadataRefreshInterval;
        this.asyncSystemTasks = asyncSystemTasks;
        LOGGER.info("{} initialized.", WorkflowMonitor.class.getSimpleName());
    }

    /** Periodically publishes pending-workflow, queue-depth and in-progress-task metrics. */
    @Scheduled(
            initialDelayString = "${conductor.workflow-monitor.stats.initial-delay:120000}",
            fixedDelayString = "${conductor.workflow-monitor.stats.delay:60000}")
    public void reportMetrics() {
        try {
            if (refreshCounter <= 0) {
                workflowDefs = metadataService.getWorkflowDefs();
                taskDefs = new ArrayList<>(metadataService.getTaskDefs());
                refreshCounter = metadataRefreshInterval;
            }

            for (Map.Entry<String, String> entry :
                    getPendingWorkflowToOwnerAppMap(workflowDefs).entrySet()) {
                String workflowName = entry.getKey();
                long pendingCount = executionDAOFacade.getPendingWorkflowCount(workflowName);
                Monitors.recordRunningWorkflows(pendingCount, workflowName, entry.getValue());
            }

            for (TaskDef taskDef : taskDefs) {
                long queueSize = queueDAO.getSize(taskDef.getName());
                long inProgressCount =
                        executionDAOFacade.getInProgressTaskCount(taskDef.getName());
                Monitors.recordQueueDepth(taskDef.getName(), queueSize, taskDef.getOwnerApp());
                if (taskDef.concurrencyLimit() > 0) {
                    Monitors.recordTaskInProgress(
                            taskDef.getName(), inProgressCount, taskDef.getOwnerApp());
                }
            }

            for (WorkflowSystemTask systemTask : asyncSystemTasks) {
                long queueSize = queueDAO.getSize(systemTask.getTaskType());
                long inProgressCount =
                        executionDAOFacade.getInProgressTaskCount(systemTask.getTaskType());
                Monitors.recordQueueDepth(systemTask.getTaskType(), queueSize, "system");
                Monitors.recordTaskInProgress(systemTask.getTaskType(), inProgressCount, "system");
            }

            refreshCounter--;
        } catch (Exception e) {
            LOGGER.error("Error while publishing scheduled metrics", e);
        }
    }

    /**
     * Pending workflow data does not contain information about version. We only need the owner app
     * and workflow name, and we only need to query for the workflow once.
     */
    @VisibleForTesting
    Map<String, String> getPendingWorkflowToOwnerAppMap(List<WorkflowDef> workflowDefs) {
        // Keep only the highest-versioned definition per workflow name (first wins on ties,
        // matching the previous stream-based reduction).
        Map<String, WorkflowDef> latestByName = new HashMap<>();
        for (WorkflowDef def : workflowDefs) {
            latestByName.merge(
                    def.getName(),
                    def,
                    (current, candidate) ->
                            candidate.getVersion() > current.getVersion() ? candidate : current);
        }
        Map<String, String> workflowNameToOwnerMap = new HashMap<>();
        latestByName.forEach((name, def) -> workflowNameToOwnerMap.put(name, def.getOwnerApp()));
        return workflowNameToOwnerMap;
    }
}
| 6,652 |
0 | Create_ds/conductor/core/src/main/java/com/netflix/conductor | Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/WorkflowContext.java | /*
* Copyright 2022 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core;
/** Store the authentication context, app or username or both */
/** Store the authentication context, app or username or both */
public class WorkflowContext {

    // Must be an actual InheritableThreadLocal so child threads inherit the caller's context.
    // BUGFIX: InheritableThreadLocal.withInitial(...) statically resolves to
    // ThreadLocal.withInitial and returns a plain, NON-inheritable ThreadLocal, so the
    // context was silently not propagated to spawned threads.
    public static final ThreadLocal<WorkflowContext> THREAD_LOCAL =
            new InheritableThreadLocal<>() {
                @Override
                protected WorkflowContext initialValue() {
                    return new WorkflowContext("", "");
                }
            };

    private final String clientApp;
    private final String userName;

    /**
     * Creates a context with only a client application; the user name is left unset.
     *
     * @param clientApp the client application identifier
     */
    public WorkflowContext(String clientApp) {
        this.clientApp = clientApp;
        this.userName = null;
    }

    /**
     * Creates a context with both a client application and a user name.
     *
     * @param clientApp the client application identifier
     * @param userName the authenticated user name
     */
    public WorkflowContext(String clientApp, String userName) {
        this.clientApp = clientApp;
        this.userName = userName;
    }

    /** Returns the context bound to the current thread (an empty context if none was set). */
    public static WorkflowContext get() {
        return THREAD_LOCAL.get();
    }

    /** Binds the given context to the current thread. */
    public static void set(WorkflowContext ctx) {
        THREAD_LOCAL.set(ctx);
    }

    /** Removes the current thread's context; a subsequent get() returns the empty context. */
    public static void unset() {
        THREAD_LOCAL.remove();
    }

    /**
     * @return the clientApp
     */
    public String getClientApp() {
        return clientApp;
    }

    /**
     * @return the username, or {@code null} if only a client app was provided
     */
    public String getUserName() {
        return userName;
    }
}
| 6,653 |
0 | Create_ds/conductor/core/src/main/java/com/netflix/conductor | Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/LifecycleAwareComponent.java | /*
* Copyright 2022 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.SmartLifecycle;
/**
 * Base class for components that participate in the Spring lifecycle. Maintains a running flag,
 * logs start/stop transitions, and lets subclasses hook into them via {@link #doStart()} and
 * {@link #doStop()}.
 */
public abstract class LifecycleAwareComponent implements SmartLifecycle {

    private static final Logger LOGGER = LoggerFactory.getLogger(LifecycleAwareComponent.class);

    // Visible across threads so isRunning() reflects the latest transition.
    private volatile boolean active = false;

    @Override
    public final void start() {
        // Flip the flag first so isRunning() is true inside the doStart() hook.
        active = true;
        LOGGER.info("{} started.", getClass().getSimpleName());
        doStart();
    }

    @Override
    public final void stop() {
        // Flip the flag first so isRunning() is false inside the doStop() hook.
        active = false;
        LOGGER.info("{} stopped.", getClass().getSimpleName());
        doStop();
    }

    @Override
    public final boolean isRunning() {
        return active;
    }

    /** Optional hook invoked after the component transitions to running. */
    public void doStart() {}

    /** Optional hook invoked after the component transitions to stopped. */
    public void doStop() {}
}
| 6,654 |
0 | Create_ds/conductor/core/src/main/java/com/netflix/conductor/core | Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/operation/WorkflowOperation.java | /*
* Copyright 2022 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.operation;
/**
 * A single workflow-related operation that transforms an input of type {@code T} into a result of
 * type {@code R}.
 *
 * @param <T> the type of the operation's input
 * @param <R> the type of the operation's result
 */
@FunctionalInterface
public interface WorkflowOperation<T, R> {

    /**
     * Executes this operation.
     *
     * @param input the operation input
     * @return the operation result
     */
    R execute(T input);
}
| 6,655 |
0 | Create_ds/conductor/core/src/main/java/com/netflix/conductor/core | Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/operation/StartWorkflowOperation.java | /*
* Copyright 2022 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.operation;
import java.util.Map;
import java.util.Optional;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.ApplicationEventPublisher;
import org.springframework.context.event.EventListener;
import org.springframework.stereotype.Component;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.core.WorkflowContext;
import com.netflix.conductor.core.dal.ExecutionDAOFacade;
import com.netflix.conductor.core.event.WorkflowCreationEvent;
import com.netflix.conductor.core.event.WorkflowEvaluationEvent;
import com.netflix.conductor.core.exception.TransientException;
import com.netflix.conductor.core.execution.StartWorkflowInput;
import com.netflix.conductor.core.metadata.MetadataMapperService;
import com.netflix.conductor.core.utils.IDGenerator;
import com.netflix.conductor.core.utils.ParametersUtils;
import com.netflix.conductor.metrics.Monitors;
import com.netflix.conductor.model.WorkflowModel;
import com.netflix.conductor.service.ExecutionLockService;
@Component
public class StartWorkflowOperation implements WorkflowOperation<StartWorkflowInput, String> {

    private static final Logger LOGGER = LoggerFactory.getLogger(StartWorkflowOperation.class);

    private final MetadataMapperService metadataMapperService;
    private final IDGenerator idGenerator;
    private final ParametersUtils parametersUtils;
    private final ExecutionDAOFacade executionDAOFacade;
    private final ExecutionLockService executionLockService;
    private final ApplicationEventPublisher eventPublisher;

    public StartWorkflowOperation(
            MetadataMapperService metadataMapperService,
            IDGenerator idGenerator,
            ParametersUtils parametersUtils,
            ExecutionDAOFacade executionDAOFacade,
            ExecutionLockService executionLockService,
            ApplicationEventPublisher eventPublisher) {
        this.metadataMapperService = metadataMapperService;
        this.idGenerator = idGenerator;
        this.parametersUtils = parametersUtils;
        this.executionDAOFacade = executionDAOFacade;
        this.executionLockService = executionLockService;
        this.eventPublisher = eventPublisher;
    }

    /**
     * Starts a new workflow execution for the given input.
     *
     * @param input the start request (definition or name/version, inputs, correlation data)
     * @return the id of the newly created workflow
     */
    @Override
    public String execute(StartWorkflowInput input) {
        return startWorkflow(input);
    }

    /** Handles asynchronous workflow-creation events by starting the requested workflow. */
    @EventListener(WorkflowCreationEvent.class)
    public void handleWorkflowCreationEvent(WorkflowCreationEvent workflowCreationEvent) {
        startWorkflow(workflowCreationEvent.getStartWorkflowInput());
    }

    /**
     * Resolves the workflow definition, validates the input, persists the new workflow and
     * publishes an evaluation event. On failure the partially-created workflow is removed.
     *
     * @return the id of the created workflow
     */
    private String startWorkflow(StartWorkflowInput input) {
        WorkflowDef workflowDefinition;

        // Resolve the definition from metadata if the request did not embed one.
        if (input.getWorkflowDefinition() == null) {
            workflowDefinition =
                    metadataMapperService.lookupForWorkflowDefinition(
                            input.getName(), input.getVersion());
        } else {
            workflowDefinition = input.getWorkflowDefinition();
        }

        workflowDefinition = metadataMapperService.populateTaskDefinitions(workflowDefinition);

        // perform validations
        Map<String, Object> workflowInput = input.getWorkflowInput();
        String externalInputPayloadStoragePath = input.getExternalInputPayloadStoragePath();
        validateWorkflow(workflowDefinition, workflowInput, externalInputPayloadStoragePath);

        // Generate ID if it's not present
        String workflowId =
                Optional.ofNullable(input.getWorkflowId()).orElseGet(idGenerator::generate);

        // Persist the Workflow
        WorkflowModel workflow = new WorkflowModel();
        workflow.setWorkflowId(workflowId);
        workflow.setCorrelationId(input.getCorrelationId());
        workflow.setPriority(input.getPriority() == null ? 0 : input.getPriority());
        workflow.setWorkflowDefinition(workflowDefinition);
        workflow.setStatus(WorkflowModel.Status.RUNNING);
        workflow.setParentWorkflowId(input.getParentWorkflowId());
        workflow.setParentWorkflowTaskId(input.getParentWorkflowTaskId());
        workflow.setOwnerApp(WorkflowContext.get().getClientApp());
        workflow.setCreateTime(System.currentTimeMillis());
        workflow.setUpdatedBy(null);
        workflow.setUpdatedTime(null);
        workflow.setEvent(input.getEvent());
        workflow.setTaskToDomain(input.getTaskToDomain());
        workflow.setVariables(workflowDefinition.getVariables());

        // Either inline input (parsed against the definition) or an external storage reference.
        if (workflowInput != null && !workflowInput.isEmpty()) {
            Map<String, Object> parsedInput =
                    parametersUtils.getWorkflowInput(workflowDefinition, workflowInput);
            workflow.setInput(parsedInput);
        } else {
            workflow.setExternalInputPayloadStoragePath(externalInputPayloadStoragePath);
        }

        try {
            createAndEvaluate(workflow);
            Monitors.recordWorkflowStartSuccess(
                    workflow.getWorkflowName(),
                    String.valueOf(workflow.getWorkflowVersion()),
                    workflow.getOwnerApp());
            return workflowId;
        } catch (Exception e) {
            Monitors.recordWorkflowStartError(
                    workflowDefinition.getName(), WorkflowContext.get().getClientApp());
            LOGGER.error("Unable to start workflow: {}", workflowDefinition.getName(), e);

            // It's possible the remove workflow call hits an exception as well, in that case we
            // want to log both errors to help diagnosis.
            try {
                executionDAOFacade.removeWorkflow(workflowId, false);
            } catch (Exception rwe) {
                LOGGER.error("Could not remove the workflowId: " + workflowId, rwe);
            }
            throw e;
        }
    }

    /*
     * Acquire and hold the lock till the workflow creation action is completed (in primary and secondary datastores).
     * This is to ensure that workflow creation action precedes any other action on a given workflow.
     */
    private void createAndEvaluate(WorkflowModel workflow) {
        if (!executionLockService.acquireLock(workflow.getWorkflowId())) {
            // BUGFIX: the message previously contained a literal "{}" placeholder (exception
            // messages are not logger format strings); include the workflow id explicitly.
            throw new TransientException(
                    "Error acquiring lock when creating workflow: " + workflow.getWorkflowId());
        }
        try {
            executionDAOFacade.createWorkflow(workflow);
            LOGGER.debug(
                    "A new instance of workflow: {} created with id: {}",
                    workflow.getWorkflowName(),
                    workflow.getWorkflowId());
            executionDAOFacade.populateWorkflowAndTaskPayloadData(workflow);
            eventPublisher.publishEvent(new WorkflowEvaluationEvent(workflow));
        } finally {
            executionLockService.releaseLock(workflow.getWorkflowId());
        }
    }

    /**
     * Performs validations for starting a workflow
     *
     * @throws IllegalArgumentException if the validation fails.
     */
    private void validateWorkflow(
            WorkflowDef workflowDef,
            Map<String, Object> workflowInput,
            String externalStoragePath) {
        // Check if the input to the workflow is not null
        if (workflowInput == null && StringUtils.isBlank(externalStoragePath)) {
            LOGGER.error("The input for the workflow '{}' cannot be NULL", workflowDef.getName());
            Monitors.recordWorkflowStartError(
                    workflowDef.getName(), WorkflowContext.get().getClientApp());
            throw new IllegalArgumentException("NULL input passed when starting workflow");
        }
    }
}
| 6,656 |
0 | Create_ds/conductor/core/src/main/java/com/netflix/conductor/core | Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/reconciliation/WorkflowReconciler.java | /*
* Copyright 2022 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.reconciliation;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;
import com.netflix.conductor.core.LifecycleAwareComponent;
import com.netflix.conductor.core.config.ConductorProperties;
import com.netflix.conductor.dao.QueueDAO;
import com.netflix.conductor.metrics.Monitors;
import static com.netflix.conductor.core.utils.Utils.DECIDER_QUEUE;
/**
* Periodically polls all running workflows in the system and evaluates them for timeouts and/or
* maintain consistency.
*/
@Component
@ConditionalOnProperty(
        name = "conductor.workflow-reconciler.enabled",
        havingValue = "true",
        matchIfMissing = true)
public class WorkflowReconciler extends LifecycleAwareComponent {

    private static final Logger LOGGER = LoggerFactory.getLogger(WorkflowReconciler.class);

    private final WorkflowSweeper workflowSweeper;
    private final QueueDAO queueDAO;
    private final int sweeperThreadCount;
    private final int sweeperWorkflowPollTimeout;

    public WorkflowReconciler(
            WorkflowSweeper workflowSweeper, QueueDAO queueDAO, ConductorProperties properties) {
        this.workflowSweeper = workflowSweeper;
        this.queueDAO = queueDAO;
        this.sweeperThreadCount = properties.getSweeperThreadCount();
        this.sweeperWorkflowPollTimeout =
                (int) properties.getSweeperWorkflowPollTimeout().toMillis();
        LOGGER.info(
                "WorkflowReconciler initialized with {} sweeper threads",
                properties.getSweeperThreadCount());
    }

    /** Pops a batch of workflow ids from the decider queue and sweeps each one to completion. */
    @Scheduled(
            fixedDelayString = "${conductor.sweep-frequency.millis:500}",
            initialDelayString = "${conductor.sweep-frequency.millis:500}")
    public void pollAndSweep() {
        try {
            if (!isRunning()) {
                LOGGER.debug("Component stopped, skip workflow sweep");
                return;
            }
            List<String> workflowIds =
                    queueDAO.pop(DECIDER_QUEUE, sweeperThreadCount, sweeperWorkflowPollTimeout);
            if (workflowIds != null) {
                // wait for all workflow ids to be "swept"
                CompletableFuture<?>[] pendingSweeps =
                        workflowIds.stream()
                                .map(workflowSweeper::sweepAsync)
                                .toArray(CompletableFuture[]::new);
                CompletableFuture.allOf(pendingSweeps).get();
                LOGGER.debug(
                        "Sweeper processed {} from the decider queue",
                        String.join(",", workflowIds));
            }
            // NOTE: Disabling the sweeper implicitly disables this metric.
            recordQueueDepth();
        } catch (Exception e) {
            Monitors.error(WorkflowReconciler.class.getSimpleName(), "poll");
            LOGGER.error("Error when polling for workflows", e);
            if (e instanceof InterruptedException) {
                // Restore interrupted state...
                Thread.currentThread().interrupt();
            }
        }
    }

    /** Publishes the current decider-queue depth as a gauge. */
    private void recordQueueDepth() {
        Monitors.recordGauge(DECIDER_QUEUE, queueDAO.getSize(DECIDER_QUEUE));
    }
}
/*
* Copyright 2022 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.reconciliation;
import java.util.Optional;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.Predicate;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.stereotype.Service;
import com.netflix.conductor.annotations.VisibleForTesting;
import com.netflix.conductor.common.metadata.tasks.TaskType;
import com.netflix.conductor.core.config.ConductorProperties;
import com.netflix.conductor.core.exception.NotFoundException;
import com.netflix.conductor.core.execution.tasks.SystemTaskRegistry;
import com.netflix.conductor.core.execution.tasks.WorkflowSystemTask;
import com.netflix.conductor.core.utils.QueueUtils;
import com.netflix.conductor.core.utils.Utils;
import com.netflix.conductor.dao.ExecutionDAO;
import com.netflix.conductor.dao.QueueDAO;
import com.netflix.conductor.metrics.Monitors;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
/**
* A helper service that tries to keep ExecutionDAO and QueueDAO in sync, based on the task or
* workflow state.
*
* <p>This service expects that the underlying Queueing layer implements {@link
* QueueDAO#containsMessage(String, String)} method. This can be controlled with <code>
* conductor.workflow-repair-service.enabled</code> property.
*/
@Service
@ConditionalOnProperty(name = "conductor.workflow-repair-service.enabled", havingValue = "true")
public class WorkflowRepairService {

    private static final Logger LOGGER = LoggerFactory.getLogger(WorkflowRepairService.class);

    private final ExecutionDAO executionDAO;
    private final QueueDAO queueDAO;
    private final ConductorProperties properties;
    // final for consistency with the other collaborators; never reassigned after construction
    private final SystemTaskRegistry systemTaskRegistry;

    /*
    For system task -> Verify the task isAsync() and not isAsyncComplete() or isAsyncComplete() in SCHEDULED state,
    and in SCHEDULED or IN_PROGRESS state. (Example: SUB_WORKFLOW tasks in SCHEDULED state)
    For simple task -> Verify the task is in SCHEDULED state.
    */
    private final Predicate<TaskModel> isTaskRepairable =
            task -> {
                if (systemTaskRegistry.isSystemTask(task.getTaskType())) { // If system task
                    WorkflowSystemTask workflowSystemTask =
                            systemTaskRegistry.get(task.getTaskType());
                    return workflowSystemTask.isAsync()
                            && (!workflowSystemTask.isAsyncComplete(task)
                                    || (workflowSystemTask.isAsyncComplete(task)
                                            && task.getStatus() == TaskModel.Status.SCHEDULED))
                            && (task.getStatus() == TaskModel.Status.IN_PROGRESS
                                    || task.getStatus() == TaskModel.Status.SCHEDULED);
                } else { // Else if simple task
                    return task.getStatus() == TaskModel.Status.SCHEDULED;
                }
            };

    public WorkflowRepairService(
            ExecutionDAO executionDAO,
            QueueDAO queueDAO,
            ConductorProperties properties,
            SystemTaskRegistry systemTaskRegistry) {
        this.executionDAO = executionDAO;
        this.queueDAO = queueDAO;
        this.properties = properties;
        this.systemTaskRegistry = systemTaskRegistry;
        LOGGER.info("WorkflowRepairService Initialized");
    }

    /**
     * Verify and repair if the workflowId exists in deciderQueue, and then if each scheduled task
     * has relevant message in the queue.
     *
     * @param workflowId the id of the workflow to verify
     * @param includeTasks whether the workflow's tasks should also be verified and repaired
     * @return true - if the workflow or any of its tasks was queued for repair
     */
    public boolean verifyAndRepairWorkflow(String workflowId, boolean includeTasks) {
        WorkflowModel workflow = executionDAO.getWorkflow(workflowId, includeTasks);
        AtomicBoolean repaired = new AtomicBoolean(false);
        repaired.set(verifyAndRepairDeciderQueue(workflow));
        if (includeTasks) {
            // OR-accumulate so a repaired task (or the decider-queue repair above) is not
            // masked by later tasks that needed no repair. Note: verifyAndRepairTask must be
            // the left operand so it is invoked for every task.
            workflow.getTasks()
                    .forEach(task -> repaired.set(verifyAndRepairTask(task) || repaired.get()));
        }
        return repaired.get();
    }

    /**
     * Verify and repair tasks in a workflow.
     *
     * @param workflowId the id of the workflow whose tasks are verified
     * @throws NotFoundException if no workflow exists for the given id
     */
    public void verifyAndRepairWorkflowTasks(String workflowId) {
        WorkflowModel workflow =
                Optional.ofNullable(executionDAO.getWorkflow(workflowId, true))
                        .orElseThrow(
                                () ->
                                        new NotFoundException(
                                                "Could not find workflow: " + workflowId));
        verifyAndRepairWorkflowTasks(workflow);
    }

    /** Verify and repair tasks in a workflow, then repair its parent workflow if one exists. */
    public void verifyAndRepairWorkflowTasks(WorkflowModel workflow) {
        workflow.getTasks().forEach(this::verifyAndRepairTask);
        // repair the parent workflow if needed
        verifyAndRepairWorkflow(workflow.getParentWorkflowId());
    }

    /**
     * Verify and fix if Workflow decider queue contains this workflowId.
     *
     * @return true - if the workflow was queued for repair
     */
    private boolean verifyAndRepairDeciderQueue(WorkflowModel workflow) {
        if (!workflow.getStatus().isTerminal()) {
            return verifyAndRepairWorkflow(workflow.getWorkflowId());
        }
        return false;
    }

    /**
     * Verify if ExecutionDAO and QueueDAO agree for the provided task.
     *
     * @param task the task to be repaired
     * @return true - if the task was queued for repair
     */
    @VisibleForTesting
    boolean verifyAndRepairTask(TaskModel task) {
        if (isTaskRepairable.test(task)) {
            // Ensure QueueDAO contains this taskId
            String taskQueueName = QueueUtils.getQueueName(task);
            if (!queueDAO.containsMessage(taskQueueName, task.getTaskId())) {
                queueDAO.push(taskQueueName, task.getTaskId(), task.getCallbackAfterSeconds());
                LOGGER.info(
                        "Task {} in workflow {} re-queued for repairs",
                        task.getTaskId(),
                        task.getWorkflowInstanceId());
                Monitors.recordQueueMessageRepushFromRepairService(task.getTaskDefName());
                return true;
            }
        } else if (task.getTaskType().equals(TaskType.TASK_TYPE_SUB_WORKFLOW)
                && task.getStatus() == TaskModel.Status.IN_PROGRESS) {
            // An in-progress SUB_WORKFLOW whose child workflow already reached a terminal state
            // is stuck; propagate the child's terminal status onto the task.
            WorkflowModel subWorkflow = executionDAO.getWorkflow(task.getSubWorkflowId(), false);
            if (subWorkflow.getStatus().isTerminal()) {
                LOGGER.info(
                        "Repairing sub workflow task {} for sub workflow {} in workflow {}",
                        task.getTaskId(),
                        task.getSubWorkflowId(),
                        task.getWorkflowInstanceId());
                repairSubWorkflowTask(task, subWorkflow);
                return true;
            }
        }
        return false;
    }

    /**
     * Re-queue the workflow id onto the decider queue if it is missing from it.
     *
     * @return true - if the workflow was queued for repair
     */
    private boolean verifyAndRepairWorkflow(String workflowId) {
        if (StringUtils.isNotEmpty(workflowId)) {
            String queueName = Utils.DECIDER_QUEUE;
            if (!queueDAO.containsMessage(queueName, workflowId)) {
                queueDAO.push(
                        queueName, workflowId, properties.getWorkflowOffsetTimeout().getSeconds());
                LOGGER.info("Workflow {} re-queued for repairs", workflowId);
                Monitors.recordQueueMessageRepushFromRepairService(queueName);
                return true;
            }
            return false;
        }
        return false;
    }

    /** Maps the terminal status of the sub-workflow onto its SUB_WORKFLOW task and persists it. */
    private void repairSubWorkflowTask(TaskModel task, WorkflowModel subWorkflow) {
        switch (subWorkflow.getStatus()) {
            case COMPLETED:
                task.setStatus(TaskModel.Status.COMPLETED);
                break;
            case FAILED:
                task.setStatus(TaskModel.Status.FAILED);
                break;
            case TERMINATED:
                task.setStatus(TaskModel.Status.CANCELED);
                break;
            case TIMED_OUT:
                task.setStatus(TaskModel.Status.TIMED_OUT);
                break;
        }
        task.addOutput(subWorkflow.getOutput());
        executionDAO.updateTask(task);
    }
}
/*
* Copyright 2022 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.reconciliation;
import java.time.Instant;
import java.util.Optional;
import java.util.Random;
import java.util.concurrent.CompletableFuture;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Component;
import com.netflix.conductor.annotations.VisibleForTesting;
import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.tasks.TaskType;
import com.netflix.conductor.core.WorkflowContext;
import com.netflix.conductor.core.config.ConductorProperties;
import com.netflix.conductor.core.dal.ExecutionDAOFacade;
import com.netflix.conductor.core.exception.NotFoundException;
import com.netflix.conductor.core.execution.WorkflowExecutor;
import com.netflix.conductor.dao.QueueDAO;
import com.netflix.conductor.metrics.Monitors;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.TaskModel.Status;
import com.netflix.conductor.model.WorkflowModel;
import static com.netflix.conductor.core.config.SchedulerConfiguration.SWEEPER_EXECUTOR_NAME;
import static com.netflix.conductor.core.utils.Utils.DECIDER_QUEUE;
@Component
public class WorkflowSweeper {

    private static final Logger LOGGER = LoggerFactory.getLogger(WorkflowSweeper.class);
    private static final String CLASS_NAME = WorkflowSweeper.class.getSimpleName();

    private final ConductorProperties properties;
    private final WorkflowExecutor workflowExecutor;
    private final WorkflowRepairService workflowRepairService;
    private final QueueDAO queueDAO;
    private final ExecutionDAOFacade executionDAOFacade;

    @Autowired
    public WorkflowSweeper(
            WorkflowExecutor workflowExecutor,
            Optional<WorkflowRepairService> workflowRepairService,
            ConductorProperties properties,
            QueueDAO queueDAO,
            ExecutionDAOFacade executionDAOFacade) {
        this.properties = properties;
        this.queueDAO = queueDAO;
        this.workflowExecutor = workflowExecutor;
        this.executionDAOFacade = executionDAOFacade;
        // The repair service is optional; null means repair is disabled.
        this.workflowRepairService = workflowRepairService.orElse(null);
        LOGGER.info("WorkflowSweeper initialized.");
    }

    /** Asynchronous wrapper around {@link #sweep(String)}, run on the sweeper executor. */
    @Async(SWEEPER_EXECUTOR_NAME)
    public CompletableFuture<Void> sweepAsync(String workflowId) {
        sweep(workflowId);
        return CompletableFuture.completedFuture(null);
    }

    /**
     * Sweeps a single workflow: optionally repairs its tasks, runs the decider under lock,
     * removes terminal workflows from the decider queue, and postpones the next sweep of
     * non-terminal workflows by setting an unack timeout.
     *
     * @param workflowId the id of the workflow to sweep
     */
    public void sweep(String workflowId) {
        WorkflowModel workflow = null;
        try {
            WorkflowContext workflowContext = new WorkflowContext(properties.getAppId());
            WorkflowContext.set(workflowContext);
            LOGGER.debug("Running sweeper for workflow {}", workflowId);
            workflow = executionDAOFacade.getWorkflowModel(workflowId, true);
            if (workflowRepairService != null) {
                // Verify and repair tasks in the workflow.
                workflowRepairService.verifyAndRepairWorkflowTasks(workflow);
            }
            workflow = workflowExecutor.decideWithLock(workflow);
            if (workflow != null && workflow.getStatus().isTerminal()) {
                // Terminal workflows no longer need sweeping.
                queueDAO.remove(DECIDER_QUEUE, workflowId);
                return;
            }
        } catch (NotFoundException nfe) {
            queueDAO.remove(DECIDER_QUEUE, workflowId);
            LOGGER.info(
                    "Workflow NOT found for id:{}. Removed it from decider queue", workflowId, nfe);
            return;
        } catch (Exception e) {
            Monitors.error(CLASS_NAME, "sweep");
            LOGGER.error("Error running sweep for " + workflowId, e);
        }
        long workflowOffsetTimeout =
                workflowOffsetWithJitter(properties.getWorkflowOffsetTimeout().getSeconds());
        if (workflow != null) {
            long startTime = Instant.now().toEpochMilli();
            unack(workflow, workflowOffsetTimeout);
            long endTime = Instant.now().toEpochMilli();
            Monitors.recordUnackTime(workflow.getWorkflowName(), endTime - startTime);
        } else {
            LOGGER.warn(
                    "Workflow with {} id can not be found. Attempting to unack using the id",
                    workflowId);
            queueDAO.setUnackTimeout(DECIDER_QUEUE, workflowId, workflowOffsetTimeout * 1000);
        }
    }

    /**
     * Postpones the next sweep of the workflow. The postpone duration is derived from the first
     * IN_PROGRESS or SCHEDULED task found (wait timeout, response timeout, poll timeout or
     * workflow timeout, whichever applies), falling back to the supplied offset timeout.
     *
     * @param workflowModel the workflow being swept
     * @param workflowOffsetTimeout fallback postpone duration in seconds (already jittered)
     */
    @VisibleForTesting
    void unack(WorkflowModel workflowModel, long workflowOffsetTimeout) {
        long postponeDurationSeconds = 0;
        for (TaskModel taskModel : workflowModel.getTasks()) {
            if (taskModel.getStatus() == Status.IN_PROGRESS) {
                if (taskModel.getTaskType().equals(TaskType.TASK_TYPE_WAIT)) {
                    if (taskModel.getWaitTimeout() == 0) {
                        postponeDurationSeconds = workflowOffsetTimeout;
                    } else {
                        // Postpone until the wait timeout elapses (never negative).
                        long deltaInSeconds =
                                (taskModel.getWaitTimeout() - System.currentTimeMillis()) / 1000;
                        postponeDurationSeconds = (deltaInSeconds > 0) ? deltaInSeconds : 0;
                    }
                } else if (taskModel.getTaskType().equals(TaskType.TASK_TYPE_HUMAN)) {
                    postponeDurationSeconds = workflowOffsetTimeout;
                } else {
                    postponeDurationSeconds =
                            (taskModel.getResponseTimeoutSeconds() != 0)
                                    ? taskModel.getResponseTimeoutSeconds() + 1
                                    : workflowOffsetTimeout;
                }
                break;
            } else if (taskModel.getStatus() == Status.SCHEDULED) {
                Optional<TaskDef> taskDefinition = taskModel.getTaskDefinition();
                if (taskDefinition.isPresent()) {
                    TaskDef taskDef = taskDefinition.get();
                    if (taskDef.getPollTimeoutSeconds() != null
                            && taskDef.getPollTimeoutSeconds() != 0) {
                        postponeDurationSeconds = taskDef.getPollTimeoutSeconds() + 1;
                    } else {
                        postponeDurationSeconds =
                                (workflowModel.getWorkflowDefinition().getTimeoutSeconds() != 0)
                                        ? workflowModel.getWorkflowDefinition().getTimeoutSeconds()
                                                + 1
                                        : workflowOffsetTimeout;
                    }
                } else {
                    postponeDurationSeconds =
                            (workflowModel.getWorkflowDefinition().getTimeoutSeconds() != 0)
                                    ? workflowModel.getWorkflowDefinition().getTimeoutSeconds() + 1
                                    : workflowOffsetTimeout;
                }
                break;
            }
        }
        queueDAO.setUnackTimeout(
                DECIDER_QUEUE, workflowModel.getWorkflowId(), postponeDurationSeconds * 1000);
    }

    /**
     * jitter will be +- (1/3) workflowOffsetTimeout for example, if workflowOffsetTimeout is 45
     * seconds, this function returns values between [30-60] seconds
     *
     * @param workflowOffsetTimeout base offset timeout in seconds
     * @return the base timeout plus a uniformly distributed jitter in [-range, +range]
     */
    @VisibleForTesting
    long workflowOffsetWithJitter(long workflowOffsetTimeout) {
        long range = workflowOffsetTimeout / 3;
        // ThreadLocalRandom avoids allocating (and seeding) a new Random on every sweep and is
        // contention-free across sweeper threads. nextLong's bound is exclusive, hence range + 1.
        long jitter =
                java.util.concurrent.ThreadLocalRandom.current().nextLong(-range, range + 1);
        return workflowOffsetTimeout + jitter;
    }
}
/*
* Copyright 2023 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.listener;
import com.netflix.conductor.model.TaskModel;
/**
* Listener for the Task status change. All methods have default implementation so that
* Implementation can choose to override a subset of interested Task statuses.
*/
public interface TaskStatusListener {

    /** Invoked when a task transitions to the SCHEDULED status. */
    default void onTaskScheduled(TaskModel task) {}

    /** Invoked when a task transitions to the IN_PROGRESS status. */
    default void onTaskInProgress(TaskModel task) {}

    /** Invoked when a task transitions to the CANCELED status. */
    default void onTaskCanceled(TaskModel task) {}

    /** Invoked when a task transitions to the FAILED status. */
    default void onTaskFailed(TaskModel task) {}

    /** Invoked when a task transitions to the FAILED_WITH_TERMINAL_ERROR status. */
    default void onTaskFailedWithTerminalError(TaskModel task) {}

    /** Invoked when a task transitions to the COMPLETED status. */
    default void onTaskCompleted(TaskModel task) {}

    /** Invoked when a task transitions to the COMPLETED_WITH_ERRORS status. */
    default void onTaskCompletedWithErrors(TaskModel task) {}

    /** Invoked when a task transitions to the TIMED_OUT status. */
    default void onTaskTimedOut(TaskModel task) {}

    /** Invoked when a task transitions to the SKIPPED status. */
    default void onTaskSkipped(TaskModel task) {}
}
/*
* Copyright 2022 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.listener;
import com.netflix.conductor.model.WorkflowModel;
/** Listener for the completed and terminated workflows */
public interface WorkflowStatusListener {

    /**
     * Invokes {@link #onWorkflowCompleted(WorkflowModel)} only if the workflow's definition has
     * the status listener enabled.
     */
    default void onWorkflowCompletedIfEnabled(WorkflowModel workflow) {
        if (workflow.getWorkflowDefinition().isWorkflowStatusListenerEnabled()) {
            onWorkflowCompleted(workflow);
        }
    }

    /**
     * Invokes {@link #onWorkflowTerminated(WorkflowModel)} only if the workflow's definition has
     * the status listener enabled.
     */
    default void onWorkflowTerminatedIfEnabled(WorkflowModel workflow) {
        if (workflow.getWorkflowDefinition().isWorkflowStatusListenerEnabled()) {
            onWorkflowTerminated(workflow);
        }
    }

    /**
     * Invokes {@link #onWorkflowFinalized(WorkflowModel)} only if the workflow's definition has
     * the status listener enabled.
     */
    default void onWorkflowFinalizedIfEnabled(WorkflowModel workflow) {
        if (workflow.getWorkflowDefinition().isWorkflowStatusListenerEnabled()) {
            onWorkflowFinalized(workflow);
        }
    }

    /** Invoked when a workflow completes. */
    void onWorkflowCompleted(WorkflowModel workflow);

    /** Invoked when a workflow is terminated. */
    void onWorkflowTerminated(WorkflowModel workflow);

    /** Invoked when a workflow is finalized; no-op by default. */
    default void onWorkflowFinalized(WorkflowModel workflow) {}
}
/*
* Copyright 2023 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.listener;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.netflix.conductor.model.TaskModel;
/** No-op {@link TaskStatusListener} implementation that only logs each transition at debug level. */
public class TaskStatusListenerStub implements TaskStatusListener {

    private static final Logger LOGGER = LoggerFactory.getLogger(TaskStatusListenerStub.class);

    @Override
    public void onTaskScheduled(TaskModel task) {
        LOGGER.debug("Task {} is scheduled", task.getTaskId());
    }

    @Override
    public void onTaskInProgress(TaskModel task) {
        LOGGER.debug("Task {} is in-progress", task.getTaskId());
    }

    @Override
    public void onTaskCompleted(TaskModel task) {
        LOGGER.debug("Task {} is completed", task.getTaskId());
    }

    @Override
    public void onTaskCompletedWithErrors(TaskModel task) {
        LOGGER.debug("Task {} is completed with errors", task.getTaskId());
    }

    @Override
    public void onTaskFailed(TaskModel task) {
        LOGGER.debug("Task {} is failed", task.getTaskId());
    }

    @Override
    public void onTaskFailedWithTerminalError(TaskModel task) {
        LOGGER.debug("Task {} is failed with terminal error", task.getTaskId());
    }

    @Override
    public void onTaskTimedOut(TaskModel task) {
        LOGGER.debug("Task {} is timed out", task.getTaskId());
    }

    @Override
    public void onTaskCanceled(TaskModel task) {
        LOGGER.debug("Task {} is canceled", task.getTaskId());
    }

    @Override
    public void onTaskSkipped(TaskModel task) {
        LOGGER.debug("Task {} is skipped", task.getTaskId());
    }
}
/*
* Copyright 2022 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.listener;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.netflix.conductor.model.WorkflowModel;
/**
 * Stub listener default implementation. No-op apart from logging each workflow status change at
 * debug level; used when no real status listener is configured.
 */
public class WorkflowStatusListenerStub implements WorkflowStatusListener {

    private static final Logger LOGGER = LoggerFactory.getLogger(WorkflowStatusListenerStub.class);

    /** Logs workflow completion; intentionally performs no other action. */
    @Override
    public void onWorkflowCompleted(WorkflowModel workflow) {
        LOGGER.debug("Workflow {} is completed", workflow.getWorkflowId());
    }

    /** Logs workflow termination; intentionally performs no other action. */
    @Override
    public void onWorkflowTerminated(WorkflowModel workflow) {
        LOGGER.debug("Workflow {} is terminated", workflow.getWorkflowId());
    }

    /** Logs workflow finalization; intentionally performs no other action. */
    @Override
    public void onWorkflowFinalized(WorkflowModel workflow) {
        LOGGER.debug("Workflow {} is finalized", workflow.getWorkflowId());
    }
}
/*
* Copyright 2021 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.config;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ThreadFactory;
import java.util.stream.Collectors;
import org.apache.commons.lang3.concurrent.BasicThreadFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.retry.support.RetryTemplate;
import com.netflix.conductor.common.utils.ExternalPayloadStorage;
import com.netflix.conductor.core.events.EventQueueProvider;
import com.netflix.conductor.core.exception.TransientException;
import com.netflix.conductor.core.execution.mapper.TaskMapper;
import com.netflix.conductor.core.execution.tasks.WorkflowSystemTask;
import com.netflix.conductor.core.listener.TaskStatusListener;
import com.netflix.conductor.core.listener.TaskStatusListenerStub;
import com.netflix.conductor.core.listener.WorkflowStatusListener;
import com.netflix.conductor.core.listener.WorkflowStatusListenerStub;
import com.netflix.conductor.core.storage.DummyPayloadStorage;
import com.netflix.conductor.core.sync.Lock;
import com.netflix.conductor.core.sync.noop.NoopLock;
import static com.netflix.conductor.core.events.EventQueues.EVENT_QUEUE_PROVIDERS_QUALIFIER;
import static com.netflix.conductor.core.execution.tasks.SystemTaskRegistry.ASYNC_SYSTEM_TASKS_QUALIFIER;
import static java.util.function.Function.identity;
/** Core Spring configuration wiring Conductor's default (fallback) beans. */
@Configuration(proxyBeanMethods = false)
@EnableConfigurationProperties(ConductorProperties.class)
public class ConductorCoreConfiguration {

    private static final Logger LOGGER = LoggerFactory.getLogger(ConductorCoreConfiguration.class);

    /** No-op workflow execution lock, used unless a real lock type is configured. */
    @ConditionalOnProperty(
            name = "conductor.workflow-execution-lock.type",
            havingValue = "noop_lock",
            matchIfMissing = true)
    @Bean
    public Lock provideLock() {
        return new NoopLock();
    }

    /** Dummy external payload storage, used unless a real storage type is configured. */
    @ConditionalOnProperty(
            name = "conductor.external-payload-storage.type",
            havingValue = "dummy",
            matchIfMissing = true)
    @Bean
    public ExternalPayloadStorage dummyExternalPayloadStorage() {
        LOGGER.info("Initialized dummy payload storage!");
        return new DummyPayloadStorage();
    }

    /** Stub workflow status listener, used unless a real listener type is configured. */
    @ConditionalOnProperty(
            name = "conductor.workflow-status-listener.type",
            havingValue = "stub",
            matchIfMissing = true)
    @Bean
    public WorkflowStatusListener workflowStatusListener() {
        return new WorkflowStatusListenerStub();
    }

    /** Stub task status listener, used unless a real listener type is configured. */
    @ConditionalOnProperty(
            name = "conductor.task-status-listener.type",
            havingValue = "stub",
            matchIfMissing = true)
    @Bean
    public TaskStatusListener taskStatusListener() {
        return new TaskStatusListenerStub();
    }

    /**
     * Fixed-size daemon worker pool sized by
     * {@code conductor.app.executor-service-max-thread-count}.
     */
    @Bean
    public ExecutorService executorService(ConductorProperties conductorProperties) {
        ThreadFactory threadFactory =
                new BasicThreadFactory.Builder()
                        .namingPattern("conductor-worker-%d")
                        .daemon(true)
                        .build();
        return Executors.newFixedThreadPool(
                conductorProperties.getExecutorServiceMaxThreadCount(), threadFactory);
    }

    /**
     * Index of all registered {@link TaskMapper}s keyed by task type. Note: Collectors.toMap
     * throws if two mappers declare the same task type.
     */
    @Bean
    @Qualifier("taskMappersByTaskType")
    public Map<String, TaskMapper> getTaskMappers(List<TaskMapper> taskMappers) {
        return taskMappers.stream().collect(Collectors.toMap(TaskMapper::getTaskType, identity()));
    }

    /** Immutable subset of the registered system tasks that execute asynchronously. */
    @Bean
    @Qualifier(ASYNC_SYSTEM_TASKS_QUALIFIER)
    public Set<WorkflowSystemTask> asyncSystemTasks(Set<WorkflowSystemTask> allSystemTasks) {
        return allSystemTasks.stream()
                .filter(WorkflowSystemTask::isAsync)
                .collect(Collectors.toUnmodifiableSet());
    }

    /** Index of all registered {@link EventQueueProvider}s keyed by queue type. */
    @Bean
    @Qualifier(EVENT_QUEUE_PROVIDERS_QUALIFIER)
    public Map<String, EventQueueProvider> getEventQueueProviders(
            List<EventQueueProvider> eventQueueProviders) {
        return eventQueueProviders.stream()
                .collect(Collectors.toMap(EventQueueProvider::getQueueType, identity()));
    }

    /** Retry template: up to 3 attempts with no backoff, only on {@link TransientException}. */
    @Bean
    public RetryTemplate onTransientErrorRetryTemplate() {
        return RetryTemplate.builder()
                .retryOn(TransientException.class)
                .maxAttempts(3)
                .noBackoff()
                .build();
    }
}
/*
* Copyright 2021 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.config;
import java.time.Duration;
import java.time.temporal.ChronoUnit;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.boot.convert.DataSizeUnit;
import org.springframework.boot.convert.DurationUnit;
import org.springframework.util.unit.DataSize;
import org.springframework.util.unit.DataUnit;
@ConfigurationProperties("conductor.app")
public class ConductorProperties {
/**
* Name of the stack within which the app is running. e.g. devint, testintg, staging, prod etc.
*/
private String stack = "test";
/** The id with the app has been registered. */
private String appId = "conductor";
/** The maximum number of threads to be allocated to the executor service threadpool. */
private int executorServiceMaxThreadCount = 50;
/** The timeout duration to set when a workflow is pushed to the decider queue. */
@DurationUnit(ChronoUnit.SECONDS)
private Duration workflowOffsetTimeout = Duration.ofSeconds(30);
/** The number of threads to use to do background sweep on active workflows. */
private int sweeperThreadCount = Runtime.getRuntime().availableProcessors() * 2;
/** The timeout (in milliseconds) for the polling of workflows to be swept. */
private Duration sweeperWorkflowPollTimeout = Duration.ofMillis(2000);
/** The number of threads to configure the threadpool in the event processor. */
private int eventProcessorThreadCount = 2;
/** Used to enable/disable the indexing of messages within event payloads. */
private boolean eventMessageIndexingEnabled = true;
/** Used to enable/disable the indexing of event execution results. */
private boolean eventExecutionIndexingEnabled = true;
/** Used to enable/disable the workflow execution lock. */
private boolean workflowExecutionLockEnabled = false;
/** The time (in milliseconds) for which the lock is leased for. */
private Duration lockLeaseTime = Duration.ofMillis(60000);
/**
* The time (in milliseconds) for which the thread will block in an attempt to acquire the lock.
*/
    private Duration lockTimeToTry = Duration.ofMillis(500);

    /**
     * The time (in seconds) that is used to consider if a worker is actively polling for a task.
     */
    @DurationUnit(ChronoUnit.SECONDS)
    private Duration activeWorkerLastPollTimeout = Duration.ofSeconds(10);

    /**
     * The time (in seconds) for which a task execution will be postponed if being rate limited or
     * concurrent execution limited.
     */
    @DurationUnit(ChronoUnit.SECONDS)
    private Duration taskExecutionPostponeDuration = Duration.ofSeconds(60);

    /** Used to enable/disable the indexing of task execution logs. */
    private boolean taskExecLogIndexingEnabled = true;

    /** Used to enable/disable asynchronous indexing to elasticsearch. */
    private boolean asyncIndexingEnabled = false;

    /** The number of threads to be used within the threadpool for system task workers. */
    private int systemTaskWorkerThreadCount = Runtime.getRuntime().availableProcessors() * 2;

    /**
     * The interval (in seconds) after which a system task will be checked by the system task worker
     * for completion.
     */
    @DurationUnit(ChronoUnit.SECONDS)
    private Duration systemTaskWorkerCallbackDuration = Duration.ofSeconds(30);

    /**
     * The interval (in milliseconds) at which system task queues will be polled by the system task
     * workers.
     */
    // NOTE(review): no @DurationUnit here — Spring binds bare numeric values for Duration as
    // milliseconds by default, which matches the documented unit. Confirm before adding the
    // annotation.
    private Duration systemTaskWorkerPollInterval = Duration.ofMillis(50);

    /** The namespace for the system task workers to provide instance level isolation. */
    private String systemTaskWorkerExecutionNamespace = "";

    /**
     * The number of threads to be used within the threadpool for system task workers in each
     * isolation group.
     */
    private int isolatedSystemTaskWorkerThreadCount = 1;

    /**
     * The duration of workflow execution which qualifies a workflow as a short-running workflow
     * when async indexing to elasticsearch is enabled.
     */
    @DurationUnit(ChronoUnit.SECONDS)
    private Duration asyncUpdateShortRunningWorkflowDuration = Duration.ofSeconds(30);

    /**
     * The delay with which short-running workflows will be updated in the elasticsearch index when
     * async indexing is enabled.
     */
    @DurationUnit(ChronoUnit.SECONDS)
    private Duration asyncUpdateDelay = Duration.ofSeconds(60);

    /**
     * Used to control the validation for owner email field as mandatory within workflow and task
     * definitions.
     */
    private boolean ownerEmailMandatory = true;

    /**
     * The number of threads to be used in the scheduler for polling events from multiple event
     * queues. By default, a thread count equal to the number of CPU cores is chosen.
     */
    private int eventQueueSchedulerPollThreadCount = Runtime.getRuntime().availableProcessors();

    /** The time interval (in milliseconds) at which the default event queues will be polled. */
    private Duration eventQueuePollInterval = Duration.ofMillis(100);

    /** The number of messages to be polled from a default event queue in a single operation. */
    private int eventQueuePollCount = 10;

    /** The timeout (in milliseconds) for the poll operation on the default event queue. */
    private Duration eventQueueLongPollTimeout = Duration.ofMillis(1000);

    /**
     * The threshold of the workflow input payload size in KB beyond which the payload will be
     * stored in {@link com.netflix.conductor.common.utils.ExternalPayloadStorage}.
     */
    @DataSizeUnit(DataUnit.KILOBYTES)
    private DataSize workflowInputPayloadSizeThreshold = DataSize.ofKilobytes(5120L);

    /**
     * The maximum threshold of the workflow input payload size in KB beyond which input will be
     * rejected and the workflow will be marked as FAILED.
     */
    @DataSizeUnit(DataUnit.KILOBYTES)
    private DataSize maxWorkflowInputPayloadSizeThreshold = DataSize.ofKilobytes(10240L);

    /**
     * The threshold of the workflow output payload size in KB beyond which the payload will be
     * stored in {@link com.netflix.conductor.common.utils.ExternalPayloadStorage}.
     */
    @DataSizeUnit(DataUnit.KILOBYTES)
    private DataSize workflowOutputPayloadSizeThreshold = DataSize.ofKilobytes(5120L);

    /**
     * The maximum threshold of the workflow output payload size in KB beyond which output will be
     * rejected and the workflow will be marked as FAILED.
     */
    @DataSizeUnit(DataUnit.KILOBYTES)
    private DataSize maxWorkflowOutputPayloadSizeThreshold = DataSize.ofKilobytes(10240L);

    /**
     * The threshold of the task input payload size in KB beyond which the payload will be stored in
     * {@link com.netflix.conductor.common.utils.ExternalPayloadStorage}.
     */
    @DataSizeUnit(DataUnit.KILOBYTES)
    private DataSize taskInputPayloadSizeThreshold = DataSize.ofKilobytes(3072L);

    /**
     * The maximum threshold of the task input payload size in KB beyond which the task input will
     * be rejected and the task will be marked as FAILED_WITH_TERMINAL_ERROR.
     */
    @DataSizeUnit(DataUnit.KILOBYTES)
    private DataSize maxTaskInputPayloadSizeThreshold = DataSize.ofKilobytes(10240L);

    /**
     * The threshold of the task output payload size in KB beyond which the payload will be stored
     * in {@link com.netflix.conductor.common.utils.ExternalPayloadStorage}.
     */
    @DataSizeUnit(DataUnit.KILOBYTES)
    private DataSize taskOutputPayloadSizeThreshold = DataSize.ofKilobytes(3072L);

    /**
     * The maximum threshold of the task output payload size in KB beyond which the task output will
     * be rejected and the task will be marked as FAILED_WITH_TERMINAL_ERROR.
     */
    @DataSizeUnit(DataUnit.KILOBYTES)
    private DataSize maxTaskOutputPayloadSizeThreshold = DataSize.ofKilobytes(10240L);

    /**
     * The maximum threshold of the workflow variables payload size in KB beyond which the task
     * changes will be rejected and the task will be marked as FAILED_WITH_TERMINAL_ERROR.
     */
    @DataSizeUnit(DataUnit.KILOBYTES)
    private DataSize maxWorkflowVariablesPayloadSizeThreshold = DataSize.ofKilobytes(256L);

    /** Used to limit the size of task execution logs. */
    private int taskExecLogSizeLimit = 10;
    // ------------------------------------------------------------------
    // Bean-style accessors for the configuration fields declared above.
    // These are required by Spring's @ConfigurationProperties binding;
    // all documentation for each property lives on the backing field.
    // ------------------------------------------------------------------

    public String getStack() {
        return stack;
    }

    public void setStack(String stack) {
        this.stack = stack;
    }

    public String getAppId() {
        return appId;
    }

    public void setAppId(String appId) {
        this.appId = appId;
    }

    public int getExecutorServiceMaxThreadCount() {
        return executorServiceMaxThreadCount;
    }

    public void setExecutorServiceMaxThreadCount(int executorServiceMaxThreadCount) {
        this.executorServiceMaxThreadCount = executorServiceMaxThreadCount;
    }

    public Duration getWorkflowOffsetTimeout() {
        return workflowOffsetTimeout;
    }

    public void setWorkflowOffsetTimeout(Duration workflowOffsetTimeout) {
        this.workflowOffsetTimeout = workflowOffsetTimeout;
    }

    public int getSweeperThreadCount() {
        return sweeperThreadCount;
    }

    public void setSweeperThreadCount(int sweeperThreadCount) {
        this.sweeperThreadCount = sweeperThreadCount;
    }

    public Duration getSweeperWorkflowPollTimeout() {
        return sweeperWorkflowPollTimeout;
    }

    public void setSweeperWorkflowPollTimeout(Duration sweeperWorkflowPollTimeout) {
        this.sweeperWorkflowPollTimeout = sweeperWorkflowPollTimeout;
    }

    public int getEventProcessorThreadCount() {
        return eventProcessorThreadCount;
    }

    public void setEventProcessorThreadCount(int eventProcessorThreadCount) {
        this.eventProcessorThreadCount = eventProcessorThreadCount;
    }

    public boolean isEventMessageIndexingEnabled() {
        return eventMessageIndexingEnabled;
    }

    public void setEventMessageIndexingEnabled(boolean eventMessageIndexingEnabled) {
        this.eventMessageIndexingEnabled = eventMessageIndexingEnabled;
    }

    public boolean isEventExecutionIndexingEnabled() {
        return eventExecutionIndexingEnabled;
    }

    public void setEventExecutionIndexingEnabled(boolean eventExecutionIndexingEnabled) {
        this.eventExecutionIndexingEnabled = eventExecutionIndexingEnabled;
    }

    public boolean isWorkflowExecutionLockEnabled() {
        return workflowExecutionLockEnabled;
    }

    public void setWorkflowExecutionLockEnabled(boolean workflowExecutionLockEnabled) {
        this.workflowExecutionLockEnabled = workflowExecutionLockEnabled;
    }

    public Duration getLockLeaseTime() {
        return lockLeaseTime;
    }

    public void setLockLeaseTime(Duration lockLeaseTime) {
        this.lockLeaseTime = lockLeaseTime;
    }

    public Duration getLockTimeToTry() {
        return lockTimeToTry;
    }

    public void setLockTimeToTry(Duration lockTimeToTry) {
        this.lockTimeToTry = lockTimeToTry;
    }

    public Duration getActiveWorkerLastPollTimeout() {
        return activeWorkerLastPollTimeout;
    }

    public void setActiveWorkerLastPollTimeout(Duration activeWorkerLastPollTimeout) {
        this.activeWorkerLastPollTimeout = activeWorkerLastPollTimeout;
    }

    public Duration getTaskExecutionPostponeDuration() {
        return taskExecutionPostponeDuration;
    }

    public void setTaskExecutionPostponeDuration(Duration taskExecutionPostponeDuration) {
        this.taskExecutionPostponeDuration = taskExecutionPostponeDuration;
    }

    public boolean isTaskExecLogIndexingEnabled() {
        return taskExecLogIndexingEnabled;
    }

    public void setTaskExecLogIndexingEnabled(boolean taskExecLogIndexingEnabled) {
        this.taskExecLogIndexingEnabled = taskExecLogIndexingEnabled;
    }

    public boolean isAsyncIndexingEnabled() {
        return asyncIndexingEnabled;
    }

    public void setAsyncIndexingEnabled(boolean asyncIndexingEnabled) {
        this.asyncIndexingEnabled = asyncIndexingEnabled;
    }

    public int getSystemTaskWorkerThreadCount() {
        return systemTaskWorkerThreadCount;
    }

    public void setSystemTaskWorkerThreadCount(int systemTaskWorkerThreadCount) {
        this.systemTaskWorkerThreadCount = systemTaskWorkerThreadCount;
    }

    public Duration getSystemTaskWorkerCallbackDuration() {
        return systemTaskWorkerCallbackDuration;
    }

    public void setSystemTaskWorkerCallbackDuration(Duration systemTaskWorkerCallbackDuration) {
        this.systemTaskWorkerCallbackDuration = systemTaskWorkerCallbackDuration;
    }

    public Duration getSystemTaskWorkerPollInterval() {
        return systemTaskWorkerPollInterval;
    }

    public void setSystemTaskWorkerPollInterval(Duration systemTaskWorkerPollInterval) {
        this.systemTaskWorkerPollInterval = systemTaskWorkerPollInterval;
    }

    public String getSystemTaskWorkerExecutionNamespace() {
        return systemTaskWorkerExecutionNamespace;
    }

    public void setSystemTaskWorkerExecutionNamespace(String systemTaskWorkerExecutionNamespace) {
        this.systemTaskWorkerExecutionNamespace = systemTaskWorkerExecutionNamespace;
    }

    public int getIsolatedSystemTaskWorkerThreadCount() {
        return isolatedSystemTaskWorkerThreadCount;
    }

    public void setIsolatedSystemTaskWorkerThreadCount(int isolatedSystemTaskWorkerThreadCount) {
        this.isolatedSystemTaskWorkerThreadCount = isolatedSystemTaskWorkerThreadCount;
    }

    public Duration getAsyncUpdateShortRunningWorkflowDuration() {
        return asyncUpdateShortRunningWorkflowDuration;
    }

    public void setAsyncUpdateShortRunningWorkflowDuration(
            Duration asyncUpdateShortRunningWorkflowDuration) {
        this.asyncUpdateShortRunningWorkflowDuration = asyncUpdateShortRunningWorkflowDuration;
    }

    public Duration getAsyncUpdateDelay() {
        return asyncUpdateDelay;
    }

    public void setAsyncUpdateDelay(Duration asyncUpdateDelay) {
        this.asyncUpdateDelay = asyncUpdateDelay;
    }

    public boolean isOwnerEmailMandatory() {
        return ownerEmailMandatory;
    }

    public void setOwnerEmailMandatory(boolean ownerEmailMandatory) {
        this.ownerEmailMandatory = ownerEmailMandatory;
    }

    public int getEventQueueSchedulerPollThreadCount() {
        return eventQueueSchedulerPollThreadCount;
    }

    public void setEventQueueSchedulerPollThreadCount(int eventQueueSchedulerPollThreadCount) {
        this.eventQueueSchedulerPollThreadCount = eventQueueSchedulerPollThreadCount;
    }

    public Duration getEventQueuePollInterval() {
        return eventQueuePollInterval;
    }

    public void setEventQueuePollInterval(Duration eventQueuePollInterval) {
        this.eventQueuePollInterval = eventQueuePollInterval;
    }

    public int getEventQueuePollCount() {
        return eventQueuePollCount;
    }

    public void setEventQueuePollCount(int eventQueuePollCount) {
        this.eventQueuePollCount = eventQueuePollCount;
    }

    public Duration getEventQueueLongPollTimeout() {
        return eventQueueLongPollTimeout;
    }

    public void setEventQueueLongPollTimeout(Duration eventQueueLongPollTimeout) {
        this.eventQueueLongPollTimeout = eventQueueLongPollTimeout;
    }

    public DataSize getWorkflowInputPayloadSizeThreshold() {
        return workflowInputPayloadSizeThreshold;
    }

    public void setWorkflowInputPayloadSizeThreshold(DataSize workflowInputPayloadSizeThreshold) {
        this.workflowInputPayloadSizeThreshold = workflowInputPayloadSizeThreshold;
    }

    public DataSize getMaxWorkflowInputPayloadSizeThreshold() {
        return maxWorkflowInputPayloadSizeThreshold;
    }

    public void setMaxWorkflowInputPayloadSizeThreshold(
            DataSize maxWorkflowInputPayloadSizeThreshold) {
        this.maxWorkflowInputPayloadSizeThreshold = maxWorkflowInputPayloadSizeThreshold;
    }

    public DataSize getWorkflowOutputPayloadSizeThreshold() {
        return workflowOutputPayloadSizeThreshold;
    }

    public void setWorkflowOutputPayloadSizeThreshold(DataSize workflowOutputPayloadSizeThreshold) {
        this.workflowOutputPayloadSizeThreshold = workflowOutputPayloadSizeThreshold;
    }

    public DataSize getMaxWorkflowOutputPayloadSizeThreshold() {
        return maxWorkflowOutputPayloadSizeThreshold;
    }

    public void setMaxWorkflowOutputPayloadSizeThreshold(
            DataSize maxWorkflowOutputPayloadSizeThreshold) {
        this.maxWorkflowOutputPayloadSizeThreshold = maxWorkflowOutputPayloadSizeThreshold;
    }

    public DataSize getTaskInputPayloadSizeThreshold() {
        return taskInputPayloadSizeThreshold;
    }

    public void setTaskInputPayloadSizeThreshold(DataSize taskInputPayloadSizeThreshold) {
        this.taskInputPayloadSizeThreshold = taskInputPayloadSizeThreshold;
    }

    public DataSize getMaxTaskInputPayloadSizeThreshold() {
        return maxTaskInputPayloadSizeThreshold;
    }

    public void setMaxTaskInputPayloadSizeThreshold(DataSize maxTaskInputPayloadSizeThreshold) {
        this.maxTaskInputPayloadSizeThreshold = maxTaskInputPayloadSizeThreshold;
    }

    public DataSize getTaskOutputPayloadSizeThreshold() {
        return taskOutputPayloadSizeThreshold;
    }

    public void setTaskOutputPayloadSizeThreshold(DataSize taskOutputPayloadSizeThreshold) {
        this.taskOutputPayloadSizeThreshold = taskOutputPayloadSizeThreshold;
    }

    public DataSize getMaxTaskOutputPayloadSizeThreshold() {
        return maxTaskOutputPayloadSizeThreshold;
    }

    public void setMaxTaskOutputPayloadSizeThreshold(DataSize maxTaskOutputPayloadSizeThreshold) {
        this.maxTaskOutputPayloadSizeThreshold = maxTaskOutputPayloadSizeThreshold;
    }

    public DataSize getMaxWorkflowVariablesPayloadSizeThreshold() {
        return maxWorkflowVariablesPayloadSizeThreshold;
    }

    public void setMaxWorkflowVariablesPayloadSizeThreshold(
            DataSize maxWorkflowVariablesPayloadSizeThreshold) {
        this.maxWorkflowVariablesPayloadSizeThreshold = maxWorkflowVariablesPayloadSizeThreshold;
    }

    public int getTaskExecLogSizeLimit() {
        return taskExecLogSizeLimit;
    }

    public void setTaskExecLogSizeLimit(int taskExecLogSizeLimit) {
        this.taskExecLogSizeLimit = taskExecLogSizeLimit;
    }
    /**
     * @return all JVM system properties, keyed by property name.
     *     <p>NOTE(review): despite the javadoc historically saying "all the configurations", this
     *     method returns {@link System#getProperties()}, not the conductor.app.* fields of this
     *     class — confirm callers expect system properties before changing the behavior.
     */
    public Map<String, Object> getAll() {
        Map<String, Object> map = new HashMap<>();
        Properties props = System.getProperties();
        props.forEach((key, value) -> map.put(key.toString(), value));
        return map;
    }
}
| 6,665 |
0 | Create_ds/conductor/core/src/main/java/com/netflix/conductor/core | Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/config/SchedulerConfiguration.java | /*
* Copyright 2021 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.config;
import java.util.concurrent.Executor;
import java.util.concurrent.Executors;
import java.util.concurrent.ThreadFactory;
import org.apache.commons.lang3.concurrent.BasicThreadFactory;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.scheduling.annotation.EnableAsync;
import org.springframework.scheduling.annotation.EnableScheduling;
import org.springframework.scheduling.annotation.SchedulingConfigurer;
import org.springframework.scheduling.concurrent.ThreadPoolTaskScheduler;
import org.springframework.scheduling.config.ScheduledTaskRegistrar;
import rx.Scheduler;
import rx.schedulers.Schedulers;
@Configuration(proxyBeanMethods = false)
@EnableScheduling
@EnableAsync
public class SchedulerConfiguration implements SchedulingConfigurer {

    public static final String SWEEPER_EXECUTOR_NAME = "WorkflowSweeperExecutor";

    /**
     * Used by some {@link com.netflix.conductor.core.events.queue.ObservableQueue} implementations.
     *
     * @see com.netflix.conductor.core.events.queue.ConductorObservableQueue
     */
    @Bean
    public Scheduler scheduler(ConductorProperties properties) {
        // Back the rx scheduler with a fixed-size pool whose size comes from configuration.
        ThreadFactory pollThreadFactory =
                new BasicThreadFactory.Builder()
                        .namingPattern("event-queue-poll-scheduler-thread-%d")
                        .build();
        return Schedulers.from(
                Executors.newFixedThreadPool(
                        properties.getEventQueueSchedulerPollThreadCount(), pollThreadFactory));
    }

    /**
     * Fixed-size executor that runs workflow sweeper iterations.
     *
     * @throws IllegalStateException if the configured thread count is not positive
     */
    @Bean(SWEEPER_EXECUTOR_NAME)
    public Executor sweeperExecutor(ConductorProperties properties) {
        int threadCount = properties.getSweeperThreadCount();
        if (threadCount <= 0) {
            throw new IllegalStateException(
                    "conductor.app.sweeper-thread-count must be greater than 0.");
        }
        ThreadFactory sweeperThreadFactory =
                new BasicThreadFactory.Builder().namingPattern("sweeper-thread-%d").build();
        return Executors.newFixedThreadPool(threadCount, sweeperThreadFactory);
    }

    /** Registers the shared scheduler used for all {@code @Scheduled} jobs. */
    @Override
    public void configureTasks(ScheduledTaskRegistrar taskRegistrar) {
        ThreadPoolTaskScheduler taskScheduler = new ThreadPoolTaskScheduler();
        taskScheduler.setPoolSize(3); // equal to the number of scheduled jobs
        taskScheduler.setThreadNamePrefix("scheduled-task-pool-");
        taskScheduler.initialize();
        taskRegistrar.setTaskScheduler(taskScheduler);
    }
}
| 6,666 |
0 | Create_ds/conductor/core/src/main/java/com/netflix/conductor/core | Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/utils/IDGenerator.java | /*
* Copyright 2022 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.utils;
import java.util.UUID;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.stereotype.Component;
/**
 * ID Generator used by Conductor.
 *
 * <p>Note on overriding the ID Generator: the default ID generator uses UUID v4 as the ID format.
 * By overriding this class it is possible to use a different scheme for ID generation. However,
 * this is not normal and should only be done after very careful consideration.
 *
 * <p>Please note, if you use Cassandra persistence, the schema uses UUID as the column type and the
 * IDs have to be valid UUIDs supported by Cassandra.
 */
// Fix: the class-level Javadoc previously sat BETWEEN the annotations and the class declaration,
// where the javadoc tool does not pick it up; doc comments must precede the annotated declaration.
@Component
@ConditionalOnProperty(
        name = "conductor.id.generator",
        havingValue = "default",
        matchIfMissing = true)
public class IDGenerator {

    public IDGenerator() {}

    /** @return a freshly generated random (version 4) UUID string */
    public String generate() {
        return UUID.randomUUID().toString();
    }
}
| 6,667 |
0 | Create_ds/conductor/core/src/main/java/com/netflix/conductor/core | Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/utils/ExternalPayloadStorageUtils.java | /*
* Copyright 2022 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.utils;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Map;
import org.apache.commons.io.IOUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
import com.netflix.conductor.annotations.VisibleForTesting;
import com.netflix.conductor.common.run.ExternalStorageLocation;
import com.netflix.conductor.common.utils.ExternalPayloadStorage;
import com.netflix.conductor.common.utils.ExternalPayloadStorage.PayloadType;
import com.netflix.conductor.core.config.ConductorProperties;
import com.netflix.conductor.core.exception.NonTransientException;
import com.netflix.conductor.core.exception.TerminateWorkflowException;
import com.netflix.conductor.core.exception.TransientException;
import com.netflix.conductor.metrics.Monitors;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import com.fasterxml.jackson.databind.ObjectMapper;
/**
 * Provides utility functions to upload and download payloads to {@link ExternalPayloadStorage},
 * enforcing the size thresholds configured in {@link ConductorProperties}.
 */
@Component
public class ExternalPayloadStorageUtils {

    private static final Logger LOGGER = LoggerFactory.getLogger(ExternalPayloadStorageUtils.class);

    private final ExternalPayloadStorage externalPayloadStorage;
    private final ConductorProperties properties;
    private final ObjectMapper objectMapper;

    public ExternalPayloadStorageUtils(
            ExternalPayloadStorage externalPayloadStorage,
            ConductorProperties properties,
            ObjectMapper objectMapper) {
        this.externalPayloadStorage = externalPayloadStorage;
        this.properties = properties;
        this.objectMapper = objectMapper;
    }

    /**
     * Download the payload from the given path.
     *
     * @param path the relative path of the payload in the {@link ExternalPayloadStorage}
     * @return the payload object
     * @throws NonTransientException in case of JSON parsing errors or download errors
     */
    @SuppressWarnings("unchecked")
    public Map<String, Object> downloadPayload(String path) {
        try (InputStream inputStream = externalPayloadStorage.download(path)) {
            return objectMapper.readValue(
                    IOUtils.toString(inputStream, StandardCharsets.UTF_8), Map.class);
        } catch (TransientException te) {
            // Transient storage failures are rethrown unchanged so callers may retry.
            throw te;
        } catch (Exception e) {
            LOGGER.error("Unable to download payload from external storage path: {}", path, e);
            throw new NonTransientException(
                    "Unable to download payload from external storage path: " + path, e);
        }
    }

    /**
     * Verify the payload size and upload to external storage if necessary.
     *
     * @param entity the task or workflow for which the payload is to be verified and uploaded
     * @param payloadType the {@link PayloadType} of the payload
     * @param <T> {@link TaskModel} or {@link WorkflowModel}
     * @throws NonTransientException in case of JSON parsing errors or upload errors
     * @throws TerminateWorkflowException if the payload size is bigger than permissible limit as
     *     per {@link ConductorProperties}
     */
    public <T> void verifyAndUpload(T entity, PayloadType payloadType) {
        // Nothing to do when the relevant payload is empty.
        if (!shouldUpload(entity, payloadType)) return;
        long threshold = 0L;
        long maxThreshold = 0L;
        Map<String, Object> payload = new HashMap<>();
        String workflowId = "";
        // Select the externalization threshold, hard limit, payload map and owning workflow id
        // based on which of the four payload types is being verified.
        switch (payloadType) {
            case TASK_INPUT:
                threshold = properties.getTaskInputPayloadSizeThreshold().toKilobytes();
                maxThreshold = properties.getMaxTaskInputPayloadSizeThreshold().toKilobytes();
                payload = ((TaskModel) entity).getInputData();
                workflowId = ((TaskModel) entity).getWorkflowInstanceId();
                break;
            case TASK_OUTPUT:
                threshold = properties.getTaskOutputPayloadSizeThreshold().toKilobytes();
                maxThreshold = properties.getMaxTaskOutputPayloadSizeThreshold().toKilobytes();
                payload = ((TaskModel) entity).getOutputData();
                workflowId = ((TaskModel) entity).getWorkflowInstanceId();
                break;
            case WORKFLOW_INPUT:
                threshold = properties.getWorkflowInputPayloadSizeThreshold().toKilobytes();
                maxThreshold = properties.getMaxWorkflowInputPayloadSizeThreshold().toKilobytes();
                payload = ((WorkflowModel) entity).getInput();
                workflowId = ((WorkflowModel) entity).getWorkflowId();
                break;
            case WORKFLOW_OUTPUT:
                threshold = properties.getWorkflowOutputPayloadSizeThreshold().toKilobytes();
                maxThreshold = properties.getMaxWorkflowOutputPayloadSizeThreshold().toKilobytes();
                payload = ((WorkflowModel) entity).getOutput();
                workflowId = ((WorkflowModel) entity).getWorkflowId();
                break;
        }
        try (ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream()) {
            // Size is measured on the serialized JSON form, not the in-memory map.
            objectMapper.writeValue(byteArrayOutputStream, payload);
            byte[] payloadBytes = byteArrayOutputStream.toByteArray();
            long payloadSize = payloadBytes.length;
            final long maxThresholdInBytes = maxThreshold * 1024;
            if (payloadSize > maxThresholdInBytes) {
                // Over the hard limit: fail the task or terminate the workflow.
                if (entity instanceof TaskModel) {
                    String errorMsg =
                            String.format(
                                    "The payload size: %d of task: %s in workflow: %s is greater than the permissible limit: %d bytes",
                                    payloadSize,
                                    ((TaskModel) entity).getTaskId(),
                                    ((TaskModel) entity).getWorkflowInstanceId(),
                                    maxThresholdInBytes);
                    failTask(((TaskModel) entity), payloadType, errorMsg);
                } else {
                    String errorMsg =
                            String.format(
                                    "The payload size: %d of workflow: %s is greater than the permissible limit: %d bytes",
                                    payloadSize,
                                    ((WorkflowModel) entity).getWorkflowId(),
                                    maxThresholdInBytes);
                    failWorkflow(((WorkflowModel) entity), payloadType, errorMsg);
                }
            } else if (payloadSize > threshold * 1024) {
                // Over the soft threshold: move the payload to external storage and keep only
                // the storage path on the entity.
                String externalInputPayloadStoragePath, externalOutputPayloadStoragePath;
                switch (payloadType) {
                    case TASK_INPUT:
                        externalInputPayloadStoragePath =
                                uploadHelper(payloadBytes, payloadSize, PayloadType.TASK_INPUT);
                        ((TaskModel) entity).externalizeInput(externalInputPayloadStoragePath);
                        Monitors.recordExternalPayloadStorageUsage(
                                ((TaskModel) entity).getTaskDefName(),
                                ExternalPayloadStorage.Operation.WRITE.toString(),
                                PayloadType.TASK_INPUT.toString());
                        break;
                    case TASK_OUTPUT:
                        externalOutputPayloadStoragePath =
                                uploadHelper(payloadBytes, payloadSize, PayloadType.TASK_OUTPUT);
                        ((TaskModel) entity).externalizeOutput(externalOutputPayloadStoragePath);
                        Monitors.recordExternalPayloadStorageUsage(
                                ((TaskModel) entity).getTaskDefName(),
                                ExternalPayloadStorage.Operation.WRITE.toString(),
                                PayloadType.TASK_OUTPUT.toString());
                        break;
                    case WORKFLOW_INPUT:
                        externalInputPayloadStoragePath =
                                uploadHelper(payloadBytes, payloadSize, PayloadType.WORKFLOW_INPUT);
                        ((WorkflowModel) entity).externalizeInput(externalInputPayloadStoragePath);
                        Monitors.recordExternalPayloadStorageUsage(
                                ((WorkflowModel) entity).getWorkflowName(),
                                ExternalPayloadStorage.Operation.WRITE.toString(),
                                PayloadType.WORKFLOW_INPUT.toString());
                        break;
                    case WORKFLOW_OUTPUT:
                        externalOutputPayloadStoragePath =
                                uploadHelper(
                                        payloadBytes, payloadSize, PayloadType.WORKFLOW_OUTPUT);
                        ((WorkflowModel) entity)
                                .externalizeOutput(externalOutputPayloadStoragePath);
                        Monitors.recordExternalPayloadStorageUsage(
                                ((WorkflowModel) entity).getWorkflowName(),
                                ExternalPayloadStorage.Operation.WRITE.toString(),
                                PayloadType.WORKFLOW_OUTPUT.toString());
                        break;
                }
            }
        } catch (TransientException | TerminateWorkflowException te) {
            throw te;
        } catch (Exception e) {
            LOGGER.error(
                    "Unable to upload payload to external storage for workflow: {}", workflowId, e);
            throw new NonTransientException(
                    "Unable to upload payload to external storage for workflow: " + workflowId, e);
        }
    }

    /**
     * Uploads the serialized payload bytes and returns the storage path where they were written.
     */
    @VisibleForTesting
    String uploadHelper(
            byte[] payloadBytes, long payloadSize, ExternalPayloadStorage.PayloadType payloadType) {
        ExternalStorageLocation location =
                externalPayloadStorage.getLocation(
                        ExternalPayloadStorage.Operation.WRITE, payloadType, "", payloadBytes);
        externalPayloadStorage.upload(
                location.getPath(), new ByteArrayInputStream(payloadBytes), payloadSize);
        return location.getPath();
    }

    /**
     * Marks the task FAILED_WITH_TERMINAL_ERROR and clears the oversized payload so it is not
     * persisted.
     */
    @VisibleForTesting
    void failTask(TaskModel task, PayloadType payloadType, String errorMsg) {
        LOGGER.error(errorMsg);
        task.setReasonForIncompletion(errorMsg);
        task.setStatus(TaskModel.Status.FAILED_WITH_TERMINAL_ERROR);
        if (payloadType == PayloadType.TASK_INPUT) {
            task.setInputData(new HashMap<>());
        } else {
            task.setOutputData(new HashMap<>());
        }
    }

    /**
     * Clears the oversized payload and terminates the workflow by throwing
     * {@link TerminateWorkflowException}.
     */
    @VisibleForTesting
    void failWorkflow(WorkflowModel workflow, PayloadType payloadType, String errorMsg) {
        LOGGER.error(errorMsg);
        if (payloadType == PayloadType.WORKFLOW_INPUT) {
            workflow.setInput(new HashMap<>());
        } else {
            workflow.setOutput(new HashMap<>());
        }
        throw new TerminateWorkflowException(errorMsg);
    }

    /** @return true only when the payload selected by {@code payloadType} is non-empty. */
    @VisibleForTesting
    <T> boolean shouldUpload(T entity, PayloadType payloadType) {
        if (entity instanceof TaskModel) {
            TaskModel taskModel = (TaskModel) entity;
            if (payloadType == PayloadType.TASK_INPUT) {
                return !taskModel.getRawInputData().isEmpty();
            } else {
                return !taskModel.getRawOutputData().isEmpty();
            }
        } else {
            WorkflowModel workflowModel = (WorkflowModel) entity;
            if (payloadType == PayloadType.WORKFLOW_INPUT) {
                return !workflowModel.getRawInput().isEmpty();
            } else {
                return !workflowModel.getRawOutput().isEmpty();
            }
        }
    }
}
| 6,668 |
0 | Create_ds/conductor/core/src/main/java/com/netflix/conductor/core | Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/utils/ParametersUtils.java | /*
* Copyright 2022 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.utils;
import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Objects;
import java.util.regex.Pattern;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.utils.EnvUtils;
import com.netflix.conductor.common.utils.TaskUtils;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.jayway.jsonpath.Configuration;
import com.jayway.jsonpath.DocumentContext;
import com.jayway.jsonpath.JsonPath;
import com.jayway.jsonpath.Option;
/** Used to parse and resolve the JSONPath bindings in the workflow and task definitions. */
@Component
public class ParametersUtils {
    private static final Logger LOGGER = LoggerFactory.getLogger(ParametersUtils.class);

    // NOTE(review): appears to match "${...}" placeholder expressions with balanced nested
    // braces while skipping escaped "$${...}" occurrences — confirm against callers before
    // modifying this regex.
    private static final Pattern PATTERN =
            Pattern.compile(
                    "(?=(?<!\\$)\\$\\{)(?:(?=.*?\\{(?!.*?\\1)(.*\\}(?!.*\\2).*))(?=.*?\\}(?!.*?\\2)(.*)).)+?.*?(?=\\1)[^{]*(?=\\2$)",
                    Pattern.DOTALL);

    private final ObjectMapper objectMapper;
    // Jackson target type used for generic JSON-object conversions.
    private final TypeReference<Map<String, Object>> map = new TypeReference<>() {};

    public ParametersUtils(ObjectMapper objectMapper) {
        this.objectMapper = objectMapper;
    }
public Map<String, Object> getTaskInput(
Map<String, Object> inputParams,
WorkflowModel workflow,
TaskDef taskDefinition,
String taskId) {
if (workflow.getWorkflowDefinition().getSchemaVersion() > 1) {
return getTaskInputV2(inputParams, workflow, taskId, taskDefinition);
}
return getTaskInputV1(workflow, inputParams);
}
    /**
     * Resolves the input for a task in a schema-version-2 workflow by evaluating JSONPath
     * expressions against a document built from the workflow state (workflow attributes plus
     * every executed task, keyed by reference name).
     *
     * @param input raw input parameter map from the workflow/task definition (may be null)
     * @param workflow current workflow execution, used to build the JSONPath document
     * @param taskId id of the task being resolved; forwarded for system-parameter resolution
     * @param taskDefinition task definition whose inputTemplate supplies defaults (may be null)
     * @return a new map with every expression replaced by its resolved value
     */
    public Map<String, Object> getTaskInputV2(
            Map<String, Object> input,
            WorkflowModel workflow,
            String taskId,
            TaskDef taskDefinition) {
        Map<String, Object> inputParams;
        if (input != null) {
            // Deep-copy so resolution never mutates the caller's map.
            inputParams = clone(input);
        } else {
            inputParams = new HashMap<>();
        }
        if (taskDefinition != null && taskDefinition.getInputTemplate() != null) {
            // Template values act as defaults: applied only for keys not already present.
            clone(taskDefinition.getInputTemplate()).forEach(inputParams::putIfAbsent);
        }
        Map<String, Map<String, Object>> inputMap = new HashMap<>();
        // Workflow-level attributes are exposed under the "workflow" JSONPath root.
        Map<String, Object> workflowParams = new HashMap<>();
        workflowParams.put("input", workflow.getInput());
        workflowParams.put("output", workflow.getOutput());
        workflowParams.put("status", workflow.getStatus());
        workflowParams.put("workflowId", workflow.getWorkflowId());
        workflowParams.put("parentWorkflowId", workflow.getParentWorkflowId());
        workflowParams.put("parentWorkflowTaskId", workflow.getParentWorkflowTaskId());
        workflowParams.put("workflowType", workflow.getWorkflowName());
        workflowParams.put("version", workflow.getWorkflowVersion());
        workflowParams.put("correlationId", workflow.getCorrelationId());
        workflowParams.put("reasonForIncompletion", workflow.getReasonForIncompletion());
        workflowParams.put("schemaVersion", workflow.getWorkflowDefinition().getSchemaVersion());
        workflowParams.put("variables", workflow.getVariables());
        inputMap.put("workflow", workflowParams);
        // For new workflow being started the list of tasks will be empty
        workflow.getTasks().stream()
                .map(TaskModel::getReferenceTaskName)
                .map(workflow::getTaskByRefName)
                .forEach(
                        task -> {
                            // Each executed task is exposed under its reference name.
                            Map<String, Object> taskParams = new HashMap<>();
                            taskParams.put("input", task.getInputData());
                            taskParams.put("output", task.getOutputData());
                            taskParams.put("taskType", task.getTaskType());
                            if (task.getStatus() != null) {
                                taskParams.put("status", task.getStatus().toString());
                            }
                            taskParams.put("referenceTaskName", task.getReferenceTaskName());
                            taskParams.put("retryCount", task.getRetryCount());
                            taskParams.put("correlationId", task.getCorrelationId());
                            taskParams.put("pollCount", task.getPollCount());
                            taskParams.put("taskDefName", task.getTaskDefName());
                            taskParams.put("scheduledTime", task.getScheduledTime());
                            taskParams.put("startTime", task.getStartTime());
                            taskParams.put("endTime", task.getEndTime());
                            taskParams.put("workflowInstanceId", task.getWorkflowInstanceId());
                            taskParams.put("taskId", task.getTaskId());
                            taskParams.put(
                                    "reasonForIncompletion", task.getReasonForIncompletion());
                            taskParams.put("callbackAfterSeconds", task.getCallbackAfterSeconds());
                            taskParams.put("workerId", task.getWorkerId());
                            taskParams.put("iteration", task.getIteration());
                            // Loop-over tasks are registered under their base reference name
                            // (iteration suffix stripped) so expressions remain stable.
                            inputMap.put(
                                    task.isLoopOverTask()
                                            ? TaskUtils.removeIterationFromTaskRefName(
                                                    task.getReferenceTaskName())
                                            : task.getReferenceTaskName(),
                                    taskParams);
                        });
        // SUPPRESS_EXCEPTIONS: unresolvable paths evaluate to null instead of throwing.
        Configuration option =
                Configuration.defaultConfiguration().addOptions(Option.SUPPRESS_EXCEPTIONS);
        DocumentContext documentContext = JsonPath.parse(inputMap, option);
        Map<String, Object> replacedTaskInput = replace(inputParams, documentContext, taskId);
        if (taskDefinition != null && taskDefinition.getInputTemplate() != null) {
            // If input for a given key resolves to null, try replacing it with one from
            // inputTemplate, if it exists.
            replacedTaskInput.replaceAll(
                    (key, value) ->
                            (value == null) ? taskDefinition.getInputTemplate().get(key) : value);
        }
        return replacedTaskInput;
    }
// deep clone using json - POJO
private Map<String, Object> clone(Map<String, Object> inputTemplate) {
try {
byte[] bytes = objectMapper.writeValueAsBytes(inputTemplate);
return objectMapper.readValue(bytes, map);
} catch (IOException e) {
throw new RuntimeException("Unable to clone input params", e);
}
}
public Map<String, Object> replace(Map<String, Object> input, Object json) {
Object doc;
if (json instanceof String) {
doc = JsonPath.parse(json.toString());
} else {
doc = json;
}
Configuration option =
Configuration.defaultConfiguration().addOptions(Option.SUPPRESS_EXCEPTIONS);
DocumentContext documentContext = JsonPath.parse(doc, option);
return replace(input, documentContext, null);
}
public Object replace(String paramString) {
Configuration option =
Configuration.defaultConfiguration().addOptions(Option.SUPPRESS_EXCEPTIONS);
DocumentContext documentContext = JsonPath.parse(Collections.emptyMap(), option);
return replaceVariables(paramString, documentContext, null);
}
@SuppressWarnings("unchecked")
private Map<String, Object> replace(
Map<String, Object> input, DocumentContext documentContext, String taskId) {
Map<String, Object> result = new HashMap<>();
for (Entry<String, Object> e : input.entrySet()) {
Object newValue;
Object value = e.getValue();
if (value instanceof String) {
newValue = replaceVariables(value.toString(), documentContext, taskId);
} else if (value instanceof Map) {
// recursive call
newValue = replace((Map<String, Object>) value, documentContext, taskId);
} else if (value instanceof List) {
newValue = replaceList((List<?>) value, taskId, documentContext);
} else {
newValue = value;
}
result.put(e.getKey(), newValue);
}
return result;
}
@SuppressWarnings("unchecked")
private Object replaceList(List<?> values, String taskId, DocumentContext io) {
List<Object> replacedList = new LinkedList<>();
for (Object listVal : values) {
if (listVal instanceof String) {
Object replaced = replaceVariables(listVal.toString(), io, taskId);
replacedList.add(replaced);
} else if (listVal instanceof Map) {
Object replaced = replace((Map<String, Object>) listVal, io, taskId);
replacedList.add(replaced);
} else if (listVal instanceof List) {
Object replaced = replaceList((List<?>) listVal, taskId, io);
replacedList.add(replaced);
} else {
replacedList.add(listVal);
}
}
return replacedList;
}
private Object replaceVariables(
String paramString, DocumentContext documentContext, String taskId) {
return replaceVariables(paramString, documentContext, taskId, 0);
}
    /**
     * Replaces every expression found by {@code PATTERN} in {@code paramString} with its
     * resolved value. Nested expressions are resolved innermost-first via recursion; {@code
     * depth} tracks the nesting level.
     *
     * @param paramString string possibly containing one or more expressions
     * @param documentContext JSONPath document to resolve paths against
     * @param taskId id used when resolving system/environment parameters
     * @param depth current recursion depth; 0 for the outermost call
     * @return the resolved object itself when the whole string is a single expression at depth
     *     0 (preserving its native type), otherwise a string with all expressions substituted
     */
    private Object replaceVariables(
            String paramString, DocumentContext documentContext, String taskId, int depth) {
        var matcher = PATTERN.matcher(paramString);
        var replacements = new LinkedList<Replacement>();
        while (matcher.find()) {
            var start = matcher.start();
            var end = matcher.end();
            var match = paramString.substring(start, end);
            // Strip the surrounding "${" and "}" to obtain the inner expression.
            String paramPath = match.substring(2, match.length() - 1);
            // Resolve any nested expressions inside the path first.
            paramPath = replaceVariables(paramPath, documentContext, taskId, depth + 1).toString();
            // if the paramPath is blank, meaning no value in between ${ and }
            // like ${}, ${ } etc, set the value to empty string
            if (StringUtils.isBlank(paramPath)) {
                replacements.add(new Replacement("", start, end));
                continue;
            }
            if (EnvUtils.isEnvironmentVariable(paramPath)) {
                String sysValue = EnvUtils.getSystemParametersValue(paramPath, taskId);
                if (sysValue != null) {
                    replacements.add(new Replacement(sysValue, start, end));
                }
            } else {
                try {
                    replacements.add(new Replacement(documentContext.read(paramPath), start, end));
                } catch (Exception e) {
                    LOGGER.warn(
                            "Error reading documentContext for paramPath: {}. Exception: {}",
                            paramPath,
                            e);
                    // Unresolvable paths resolve to null (rendered as "null" when embedded
                    // inside a larger string).
                    replacements.add(new Replacement(null, start, end));
                }
            }
        }
        // A single expression spanning the entire string at the outermost depth keeps its
        // native type (map/list/number/...) instead of being stringified.
        if (replacements.size() == 1
                && replacements.getFirst().getStartIndex() == 0
                && replacements.getFirst().getEndIndex() == paramString.length()
                && depth == 0) {
            return replacements.get(0).getReplacement();
        }
        Collections.sort(replacements);
        var builder = new StringBuilder(paramString);
        // Apply substitutions right-to-left so earlier indices stay valid as the string grows
        // or shrinks.
        for (int i = replacements.size() - 1; i >= 0; i--) {
            var replacement = replacements.get(i);
            builder.replace(
                    replacement.getStartIndex(),
                    replacement.getEndIndex(),
                    Objects.toString(replacement.getReplacement()));
        }
        // "$${" is the escape sequence for a literal "${".
        return builder.toString().replaceAll("\\$\\$\\{", "\\${");
    }
@Deprecated
// Workflow schema version 1 is deprecated and new workflows should be using version 2
private Map<String, Object> getTaskInputV1(
WorkflowModel workflow, Map<String, Object> inputParams) {
Map<String, Object> input = new HashMap<>();
if (inputParams == null) {
return input;
}
Map<String, Object> workflowInput = workflow.getInput();
inputParams.forEach(
(paramName, value) -> {
String paramPath = "" + value;
String[] paramPathComponents = paramPath.split("\\.");
Utils.checkArgument(
paramPathComponents.length == 3,
"Invalid input expression for "
+ paramName
+ ", paramPathComponents.size="
+ paramPathComponents.length
+ ", expression="
+ paramPath);
String source = paramPathComponents[0]; // workflow, or task reference name
String type = paramPathComponents[1]; // input/output
String name = paramPathComponents[2]; // name of the parameter
if ("workflow".equals(source)) {
input.put(paramName, workflowInput.get(name));
} else {
TaskModel task = workflow.getTaskByRefName(source);
if (task != null) {
if ("input".equals(type)) {
input.put(paramName, task.getInputData().get(name));
} else {
input.put(paramName, task.getOutputData().get(name));
}
}
}
});
return input;
}
public Map<String, Object> getWorkflowInput(
WorkflowDef workflowDef, Map<String, Object> inputParams) {
if (workflowDef != null && workflowDef.getInputTemplate() != null) {
clone(workflowDef.getInputTemplate()).forEach(inputParams::putIfAbsent);
}
return inputParams;
}
private static class Replacement implements Comparable<Replacement> {
private final int startIndex;
private final int endIndex;
private final Object replacement;
public Replacement(Object replacement, int startIndex, int endIndex) {
this.replacement = replacement;
this.startIndex = startIndex;
this.endIndex = endIndex;
}
public Object getReplacement() {
return replacement;
}
public int getStartIndex() {
return startIndex;
}
public int getEndIndex() {
return endIndex;
}
@Override
public int compareTo(Replacement o) {
return Long.compare(startIndex, o.startIndex);
}
}
}
| 6,669 |
0 | Create_ds/conductor/core/src/main/java/com/netflix/conductor/core | Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/utils/Utils.java | /*
* Copyright 2022 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.utils;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.*;
import org.apache.commons.lang3.StringUtils;
import com.netflix.conductor.core.exception.TransientException;
public class Utils {

    public static final String DECIDER_QUEUE = "_deciderQueue";

    /** Static utility holder; not meant to be instantiated. */
    private Utils() {}

    /**
     * ID of the server. Can be host name, IP address or any other meaningful identifier
     *
     * @return canonical host name resolved for the instance, "unknown" if resolution fails
     */
    public static String getServerId() {
        try {
            return InetAddress.getLocalHost().getHostName();
        } catch (UnknownHostException e) {
            return "unknown";
        }
    }

    /**
     * Split string with "|" as delimiter.
     *
     * @param inputStr Input string
     * @return mutable list of tokens; empty when the input is blank
     */
    public static List<String> convertStringToList(String inputStr) {
        if (StringUtils.isBlank(inputStr)) {
            return new ArrayList<>();
        }
        // Wrap in ArrayList: Arrays.asList returns a fixed-size view, which previously made
        // the returned list mutable or not depending on whether the input was blank.
        return new ArrayList<>(Arrays.asList(inputStr.split("\\|")));
    }

    /**
     * Ensures the truth of an condition involving one or more parameters to the calling method.
     *
     * @param condition a boolean expression
     * @param errorMessage The exception message use if the input condition is not valid
     * @throws IllegalArgumentException if input condition is not valid.
     */
    public static void checkArgument(boolean condition, String errorMessage) {
        if (!condition) {
            throw new IllegalArgumentException(errorMessage);
        }
    }

    /**
     * This method checks if the object is null or empty.
     *
     * @param object input of type {@link Object}.
     * @param errorMessage The exception message use if the object is empty or null.
     * @throws NullPointerException if input object is not valid.
     */
    public static void checkNotNull(Object object, String errorMessage) {
        if (object == null) {
            throw new NullPointerException(errorMessage);
        }
    }

    /**
     * Used to determine if the exception is thrown due to a transient failure and the operation is
     * expected to succeed upon retrying.
     *
     * @param throwable the exception that is thrown
     * @return true - if the exception is a transient failure (a null throwable is treated as
     *     transient, preserving the original behavior)
     *     <p>false - if the exception is non-transient
     */
    public static boolean isTransientException(Throwable throwable) {
        return throwable == null || throwable instanceof TransientException;
    }
}
| 6,670 |
0 | Create_ds/conductor/core/src/main/java/com/netflix/conductor/core | Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/utils/DateTimeUtils.java | /*
* Copyright 2022 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.utils;
import java.text.ParseException;
import java.time.Duration;
import java.util.Date;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.lang3.time.DateUtils;
public class DateTimeUtils {

    private static final String[] patterns =
            new String[] {"yyyy-MM-dd HH:mm", "yyyy-MM-dd HH:mm z", "yyyy-MM-dd"};

    // Compiled once: the previous implementation recompiled this regex on every call.
    private static final Pattern DURATION_PATTERN =
            Pattern.compile(
                    "\\s*(?:(\\d+)\\s*(?:days?|d))?"
                            + "\\s*(?:(\\d+)\\s*(?:hours?|hrs?|h))?"
                            + "\\s*(?:(\\d+)\\s*(?:minutes?|mins?|m))?"
                            + "\\s*(?:(\\d+)\\s*(?:seconds?|secs?|s))?"
                            + "\\s*",
                    Pattern.CASE_INSENSITIVE);

    /** Static utility holder; not meant to be instantiated. */
    private DateTimeUtils() {}

    /**
     * Parses a human-readable duration such as {@code "1d 2h 3m 4s"} (each component optional).
     *
     * @param text the duration string
     * @return the equivalent {@link Duration}
     * @throws IllegalArgumentException if the text does not match the expected format
     */
    public static Duration parseDuration(String text) {
        Matcher m = DURATION_PATTERN.matcher(text);
        if (!m.matches()) {
            throw new IllegalArgumentException("Not valid duration: " + text);
        }
        long days = (m.start(1) == -1 ? 0 : Long.parseLong(m.group(1)));
        long hours = (m.start(2) == -1 ? 0 : Long.parseLong(m.group(2)));
        long mins = (m.start(3) == -1 ? 0 : Long.parseLong(m.group(3)));
        long secs = (m.start(4) == -1 ? 0 : Long.parseLong(m.group(4)));
        // Long arithmetic throughout: the old (days * 86400) int product could overflow.
        return Duration.ofSeconds(days * 86400L + (hours * 60L + mins) * 60L + secs);
    }

    /**
     * Parses a date string using the supported patterns.
     *
     * @param date the date string
     * @return the parsed {@link Date}
     * @throws ParseException if no pattern matches
     */
    public static Date parseDate(String date) throws ParseException {
        return DateUtils.parseDate(date, patterns);
    }
}
| 6,671 |
0 | Create_ds/conductor/core/src/main/java/com/netflix/conductor/core | Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/utils/QueueUtils.java | /*
* Copyright 2022 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.utils;
import org.apache.commons.lang3.StringUtils;
import com.netflix.conductor.common.metadata.tasks.Task;
import com.netflix.conductor.model.TaskModel;
public class QueueUtils {

    public static final String DOMAIN_SEPARATOR = ":";
    private static final String ISOLATION_SEPARATOR = "-";
    private static final String EXECUTION_NAME_SPACE_SEPARATOR = "@";

    /** Builds the queue name for a {@link TaskModel} from its type/domain/isolation/namespace. */
    public static String getQueueName(TaskModel taskModel) {
        return getQueueName(
                taskModel.getTaskType(),
                taskModel.getDomain(),
                taskModel.getIsolationGroupId(),
                taskModel.getExecutionNameSpace());
    }

    /** Builds the queue name for a {@link Task} from its type/domain/isolation/namespace. */
    public static String getQueueName(Task task) {
        return getQueueName(
                task.getTaskType(),
                task.getDomain(),
                task.getIsolationGroupId(),
                task.getExecutionNameSpace());
    }

    /**
     * Creates a queue name string using <code>taskType</code>, <code>domain</code>, <code>
     * isolationGroupId</code> and <code>executionNamespace</code>.
     *
     * @return domain:taskType@executionNameSpace-isolationGroupId.
     */
    public static String getQueueName(
            String taskType, String domain, String isolationGroupId, String executionNamespace) {
        String queueName = (domain == null) ? taskType : domain + DOMAIN_SEPARATOR + taskType;
        if (executionNamespace != null) {
            queueName += EXECUTION_NAME_SPACE_SEPARATOR + executionNamespace;
        }
        if (isolationGroupId != null) {
            queueName += ISOLATION_SEPARATOR + isolationGroupId;
        }
        return queueName;
    }

    /** Strips the leading {@code domain:} prefix (everything up to the first separator). */
    public static String getQueueNameWithoutDomain(String queueName) {
        int afterDomain = queueName.indexOf(DOMAIN_SEPARATOR) + 1;
        return queueName.substring(afterDomain);
    }

    /** Extracts the execution namespace portion of a queue name, or "" when absent. */
    public static String getExecutionNameSpace(String queueName) {
        boolean hasNamespace = StringUtils.contains(queueName, EXECUTION_NAME_SPACE_SEPARATOR);
        if (hasNamespace && StringUtils.contains(queueName, ISOLATION_SEPARATOR)) {
            return StringUtils.substringBetween(
                    queueName, EXECUTION_NAME_SPACE_SEPARATOR, ISOLATION_SEPARATOR);
        }
        if (hasNamespace) {
            return StringUtils.substringAfter(queueName, EXECUTION_NAME_SPACE_SEPARATOR);
        }
        return StringUtils.EMPTY;
    }

    /** Returns true when the queue name carries a non-blank isolation group suffix. */
    public static boolean isIsolatedQueue(String queue) {
        return StringUtils.isNotBlank(getIsolationGroup(queue));
    }

    private static String getIsolationGroup(String queue) {
        return StringUtils.substringAfter(queue, QueueUtils.ISOLATION_SEPARATOR);
    }

    /** Extracts the bare task type from a queue name, dropping domain/namespace/isolation. */
    public static String getTaskType(String queue) {
        if (StringUtils.isBlank(queue)) {
            return StringUtils.EMPTY;
        }
        int domainSeparatorIndex = StringUtils.indexOf(queue, DOMAIN_SEPARATOR);
        int startIndex = (domainSeparatorIndex == -1) ? 0 : domainSeparatorIndex + 1;
        // The task type ends at the namespace marker, else at the last isolation marker,
        // else at the end of the string.
        int endIndex = StringUtils.indexOf(queue, EXECUTION_NAME_SPACE_SEPARATOR);
        if (endIndex == -1) {
            endIndex = StringUtils.lastIndexOf(queue, ISOLATION_SEPARATOR);
        }
        if (endIndex == -1) {
            endIndex = queue.length();
        }
        return StringUtils.substring(queue, startIndex, endIndex);
    }
}
| 6,672 |
0 | Create_ds/conductor/core/src/main/java/com/netflix/conductor/core | Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/utils/JsonUtils.java | /*
* Copyright 2022 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.utils;
import java.util.List;
import java.util.Map;
import org.springframework.stereotype.Component;
import com.fasterxml.jackson.databind.ObjectMapper;
/** This class contains utility functions for parsing/expanding JSON. */
@SuppressWarnings("unchecked")
@Component
public class JsonUtils {

    private final ObjectMapper objectMapper;

    public JsonUtils(ObjectMapper objectMapper) {
        this.objectMapper = objectMapper;
    }

    /**
     * Expands a JSON object into a java object
     *
     * @param input the object to be expanded
     * @return the expanded object containing java types like {@link Map} and {@link List}
     */
    public Object expand(Object input) {
        if (input instanceof List) {
            expandList((List<Object>) input);
            return input;
        } else if (input instanceof Map) {
            expandMap((Map<String, Object>) input);
            return input;
        } else if (input instanceof String) {
            return getJson((String) input);
        } else {
            return input;
        }
    }

    /** Expands each element in place; nested maps/lists are recursed into. */
    private void expandList(List<Object> input) {
        // Fix: iterate by index so the expanded value is actually written back into the list.
        // The previous for-each loop reassigned the loop variable, silently discarding the
        // expanded result (compare expandMap, which uses entry.setValue).
        // NOTE(review): assumes callers pass mutable lists, as expandMap already assumed
        // mutable maps — confirm against call sites.
        for (int i = 0; i < input.size(); i++) {
            Object value = input.get(i);
            if (value instanceof String) {
                if (isJsonString(value.toString())) {
                    input.set(i, getJson(value.toString()));
                }
            } else if (value instanceof Map) {
                expandMap((Map<String, Object>) value);
            } else if (value instanceof List) {
                expandList((List<Object>) value);
            }
        }
    }

    /** Expands each map value in place; nested maps/lists are recursed into. */
    private void expandMap(Map<String, Object> input) {
        for (Map.Entry<String, Object> entry : input.entrySet()) {
            Object value = entry.getValue();
            if (value instanceof String) {
                if (isJsonString(value.toString())) {
                    entry.setValue(getJson(value.toString()));
                }
            } else if (value instanceof Map) {
                expandMap((Map<String, Object>) value);
            } else if (value instanceof List) {
                expandList((List<Object>) value);
            }
        }
    }

    /**
     * Used to obtain a JSONified object from a string
     *
     * @param jsonAsString the json object represented in string form
     * @return the JSONified object representation if the input is a valid json string if the input
     *     is not a valid json string, it will be returned as-is and no exception is thrown
     */
    private Object getJson(String jsonAsString) {
        try {
            return objectMapper.readValue(jsonAsString, Object.class);
        } catch (Exception e) {
            return jsonAsString;
        }
    }

    // Cheap structural sniff: JSON objects/arrays start with '{' or '['.
    private boolean isJsonString(String jsonAsString) {
        jsonAsString = jsonAsString.trim();
        return jsonAsString.startsWith("{") || jsonAsString.startsWith("[");
    }
}
| 6,673 |
0 | Create_ds/conductor/core/src/main/java/com/netflix/conductor/core | Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/utils/SemaphoreUtil.java | /*
* Copyright 2022 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.utils;
import java.util.concurrent.Semaphore;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** A class wrapping a semaphore which holds the number of permits available for processing. */
public class SemaphoreUtil {

    private static final Logger LOGGER = LoggerFactory.getLogger(SemaphoreUtil.class);

    // Backing permit pool; one permit corresponds to one processing slot.
    private final Semaphore permits;

    public SemaphoreUtil(int numSlots) {
        LOGGER.debug("Semaphore util initialized with {} permits", numSlots);
        permits = new Semaphore(numSlots);
    }

    /**
     * Signals if processing is allowed based on whether the requested number of permits can be
     * acquired.
     *
     * @param numSlots the number of permits to acquire
     * @return {@code true} when the permits were acquired, {@code false} otherwise
     */
    public boolean acquireSlots(int numSlots) {
        final boolean acquired = permits.tryAcquire(numSlots);
        LOGGER.trace("Trying to acquire {} permit: {}", numSlots, acquired);
        return acquired;
    }

    /** Signals that processing is complete and the given number of permits can be released. */
    public void completeProcessing(int numSlots) {
        LOGGER.trace("Completed execution; releasing permit");
        permits.release(numSlots);
    }

    /**
     * Gets the number of slots currently available for processing.
     *
     * @return number of available permits
     */
    public int availableSlots() {
        final int available = permits.availablePermits();
        LOGGER.trace("Number of available permits: {}", available);
        return available;
    }
}
| 6,674 |
0 | Create_ds/conductor/core/src/main/java/com/netflix/conductor/core | Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/storage/DummyPayloadStorage.java | /*
* Copyright 2022 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.storage;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.util.UUID;
import org.apache.commons.io.IOUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.netflix.conductor.common.run.ExternalStorageLocation;
import com.netflix.conductor.common.utils.ExternalPayloadStorage;
import com.fasterxml.jackson.databind.ObjectMapper;
/**
 * A dummy implementation of {@link ExternalPayloadStorage} used when no external payload is
 * configured
 */
public class DummyPayloadStorage implements ExternalPayloadStorage {

    private static final Logger LOGGER = LoggerFactory.getLogger(DummyPayloadStorage.class);

    private ObjectMapper objectMapper;
    // Payloads live in a temp directory local to this JVM instance.
    private File payloadDir;

    public DummyPayloadStorage() {
        try {
            this.objectMapper = new ObjectMapper();
            this.payloadDir = Files.createTempDirectory("payloads").toFile();
            LOGGER.info(
                    "{} initialized in directory: {}",
                    this.getClass().getSimpleName(),
                    payloadDir.getAbsolutePath());
        } catch (IOException ioException) {
            LOGGER.error(
                    "Exception encountered while creating payloads directory : {}",
                    ioException.getMessage());
        }
    }

    /** Returns a location whose path is the given prefix plus a random JSON file name. */
    @Override
    public ExternalStorageLocation getLocation(
            Operation operation, PayloadType payloadType, String path) {
        ExternalStorageLocation location = new ExternalStorageLocation();
        location.setPath(path + UUID.randomUUID() + ".json");
        return location;
    }

    /** Writes the payload stream to a file under the payload directory; closes both streams. */
    @Override
    public void upload(String path, InputStream payload, long payloadSize) {
        File file = new File(payloadDir, path);
        String filePath = file.getAbsolutePath();
        try {
            if (!file.exists() && file.createNewFile()) {
                LOGGER.debug("Created file: {}", filePath);
            }
            // Fix: try-with-resources closes the output stream; it was previously leaked.
            try (FileOutputStream outputStream = new FileOutputStream(file)) {
                IOUtils.copy(payload, outputStream);
            }
            LOGGER.debug("Written to {}", filePath);
        } catch (IOException e) {
            // just handle this exception here and return empty map so that test will fail in case
            // this exception is thrown
            LOGGER.error("Error writing to {}", filePath);
        } finally {
            try {
                if (payload != null) {
                    payload.close();
                }
            } catch (IOException e) {
                LOGGER.warn("Unable to close input stream when writing to file");
            }
        }
    }

    /** Opens the stored payload for reading; returns null when the file cannot be read. */
    @Override
    public InputStream download(String path) {
        try {
            LOGGER.debug("Reading from {}", path);
            return new FileInputStream(new File(payloadDir, path));
        } catch (IOException e) {
            LOGGER.error("Error reading {}", path, e);
            return null;
        }
    }
}
| 6,675 |
0 | Create_ds/conductor/core/src/main/java/com/netflix/conductor/core | Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution/AsyncSystemTaskExecutor.java | /*
* Copyright 2022 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
import com.netflix.conductor.core.config.ConductorProperties;
import com.netflix.conductor.core.dal.ExecutionDAOFacade;
import com.netflix.conductor.core.execution.tasks.WorkflowSystemTask;
import com.netflix.conductor.core.utils.QueueUtils;
import com.netflix.conductor.dao.MetadataDAO;
import com.netflix.conductor.dao.QueueDAO;
import com.netflix.conductor.metrics.Monitors;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
/**
 * Executes async {@link WorkflowSystemTask}s that were placed on their task-type queue, and
 * persists the result of every execution attempt.
 */
@Component
public class AsyncSystemTaskExecutor {

    private final ExecutionDAOFacade executionDAOFacade;
    private final QueueDAO queueDAO;
    private final MetadataDAO metadataDAO;
    // Seconds to postpone a task when it is rate/concurrency limited.
    private final long queueTaskMessagePostponeSecs;
    // Default callback delay (seconds) for tasks that remain in progress.
    private final long systemTaskCallbackTime;
    private final WorkflowExecutor workflowExecutor;

    private static final Logger LOGGER = LoggerFactory.getLogger(AsyncSystemTaskExecutor.class);

    public AsyncSystemTaskExecutor(
            ExecutionDAOFacade executionDAOFacade,
            QueueDAO queueDAO,
            MetadataDAO metadataDAO,
            ConductorProperties conductorProperties,
            WorkflowExecutor workflowExecutor) {
        this.executionDAOFacade = executionDAOFacade;
        this.queueDAO = queueDAO;
        this.metadataDAO = metadataDAO;
        this.workflowExecutor = workflowExecutor;
        this.systemTaskCallbackTime =
                conductorProperties.getSystemTaskWorkerCallbackDuration().getSeconds();
        this.queueTaskMessagePostponeSecs =
                conductorProperties.getTaskExecutionPostponeDuration().getSeconds();
    }

    /**
     * Executes and persists the results of an async {@link WorkflowSystemTask}.
     *
     * @param systemTask The {@link WorkflowSystemTask} to be executed.
     * @param taskId The id of the {@link TaskModel} object.
     */
    public void execute(WorkflowSystemTask systemTask, String taskId) {
        TaskModel task = loadTaskQuietly(taskId);
        if (task == null) {
            // Task no longer exists: remove its stale queue message so it is not re-polled.
            LOGGER.error("TaskId: {} could not be found while executing {}", taskId, systemTask);
            try {
                LOGGER.debug(
                        "Cleaning up dead task from queue message: taskQueue={}, taskId={}",
                        systemTask.getTaskType(),
                        taskId);
                queueDAO.remove(systemTask.getTaskType(), taskId);
            } catch (Exception e) {
                LOGGER.error(
                        "Failed to remove dead task from queue message: taskQueue={}, taskId={}",
                        systemTask.getTaskType(),
                        taskId);
            }
            return;
        }
        LOGGER.debug("Task: {} fetched from execution DAO for taskId: {}", task, taskId);
        String queueName = QueueUtils.getQueueName(task);
        if (task.getStatus().isTerminal()) {
            // Tune the SystemTaskWorkerCoordinator's queues - if the queue size is very big this
            // can happen!
            LOGGER.info("Task {}/{} was already completed.", task.getTaskType(), task.getTaskId());
            queueDAO.remove(queueName, task.getTaskId());
            return;
        }
        if (task.getStatus().equals(TaskModel.Status.SCHEDULED)) {
            // Enforce concurrency and rate limits before first execution; limited tasks are
            // postponed on the queue rather than executed.
            if (executionDAOFacade.exceedsInProgressLimit(task)) {
                LOGGER.warn(
                        "Concurrent Execution limited for {}:{}", taskId, task.getTaskDefName());
                postponeQuietly(queueName, task);
                return;
            }
            if (task.getRateLimitPerFrequency() > 0
                    && executionDAOFacade.exceedsRateLimitPerFrequency(
                            task, metadataDAO.getTaskDef(task.getTaskDefName()))) {
                LOGGER.warn(
                        "RateLimit Execution limited for {}:{}, limit:{}",
                        taskId,
                        task.getTaskDefName(),
                        task.getRateLimitPerFrequency());
                postponeQuietly(queueName, task);
                return;
            }
        }
        boolean hasTaskExecutionCompleted = false;
        boolean shouldRemoveTaskFromQueue = false;
        String workflowId = task.getWorkflowInstanceId();
        // if we are here the Task object is updated and needs to be persisted regardless of an
        // exception
        try {
            WorkflowModel workflow =
                    executionDAOFacade.getWorkflowModel(
                            workflowId, systemTask.isTaskRetrievalRequired());
            if (workflow.getStatus().isTerminal()) {
                // The owning workflow already finished; cancel the task instead of running it.
                LOGGER.info(
                        "Workflow {} has been completed for {}/{}",
                        workflow.toShortString(),
                        systemTask,
                        task.getTaskId());
                if (!task.getStatus().isTerminal()) {
                    task.setStatus(TaskModel.Status.CANCELED);
                    task.setReasonForIncompletion(
                            String.format(
                                    "Workflow is in %s state", workflow.getStatus().toString()));
                }
                shouldRemoveTaskFromQueue = true;
                return;
            }
            LOGGER.debug(
                    "Executing {}/{} in {} state",
                    task.getTaskType(),
                    task.getTaskId(),
                    task.getStatus());
            boolean isTaskAsyncComplete = systemTask.isAsyncComplete(task);
            // asyncComplete tasks are completed externally, so polling does not count for them
            // once started.
            if (task.getStatus() == TaskModel.Status.SCHEDULED || !isTaskAsyncComplete) {
                task.incrementPollCount();
            }
            if (task.getStatus() == TaskModel.Status.SCHEDULED) {
                // First execution: record start time and invoke start().
                task.setStartTime(System.currentTimeMillis());
                Monitors.recordQueueWaitTime(task.getTaskType(), task.getQueueWaitTime());
                systemTask.start(workflow, task, workflowExecutor);
            } else if (task.getStatus() == TaskModel.Status.IN_PROGRESS) {
                systemTask.execute(workflow, task, workflowExecutor);
            }
            // Update message in Task queue based on Task status
            // Remove asyncComplete system tasks from the queue that are not in SCHEDULED state
            if (isTaskAsyncComplete && task.getStatus() != TaskModel.Status.SCHEDULED) {
                shouldRemoveTaskFromQueue = true;
                hasTaskExecutionCompleted = true;
            } else if (task.getStatus().isTerminal()) {
                task.setEndTime(System.currentTimeMillis());
                shouldRemoveTaskFromQueue = true;
                hasTaskExecutionCompleted = true;
            } else {
                // Still running: postpone the queue message until the next evaluation.
                task.setCallbackAfterSeconds(systemTaskCallbackTime);
                systemTask
                        .getEvaluationOffset(task, systemTaskCallbackTime)
                        .ifPresentOrElse(
                                task::setCallbackAfterSeconds,
                                () -> task.setCallbackAfterSeconds(systemTaskCallbackTime));
                queueDAO.postpone(
                        queueName,
                        task.getTaskId(),
                        task.getWorkflowPriority(),
                        task.getCallbackAfterSeconds());
                LOGGER.debug("{} postponed in queue: {}", task, queueName);
            }
            LOGGER.debug(
                    "Finished execution of {}/{}-{}",
                    systemTask,
                    task.getTaskId(),
                    task.getStatus());
        } catch (Exception e) {
            Monitors.error(AsyncSystemTaskExecutor.class.getSimpleName(), "executeSystemTask");
            LOGGER.error("Error executing system task - {}, with id: {}", systemTask, taskId, e);
        } finally {
            // Persist whatever state the task reached, even after an exception.
            executionDAOFacade.updateTask(task);
            if (shouldRemoveTaskFromQueue) {
                queueDAO.remove(queueName, task.getTaskId());
                LOGGER.debug("{} removed from queue: {}", task, queueName);
            }
            // if the current task execution has completed, then the workflow needs to be evaluated
            if (hasTaskExecutionCompleted) {
                workflowExecutor.decide(workflowId);
            }
        }
    }

    // Best-effort postpone; failures are logged and swallowed so the caller can return.
    private void postponeQuietly(String queueName, TaskModel task) {
        try {
            queueDAO.postpone(
                    queueName,
                    task.getTaskId(),
                    task.getWorkflowPriority(),
                    queueTaskMessagePostponeSecs);
        } catch (Exception e) {
            LOGGER.error("Error postponing task: {} in queue: {}", task.getTaskId(), queueName);
        }
    }

    // Returns the task, or null when the lookup fails for any reason.
    private TaskModel loadTaskQuietly(String taskId) {
        try {
            return executionDAOFacade.getTaskModel(taskId);
        } catch (Exception e) {
            return null;
        }
    }
}
| 6,676 |
0 | Create_ds/conductor/core/src/main/java/com/netflix/conductor/core | Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution/StartWorkflowInput.java | /*
* Copyright 2022 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution;
import java.util.Map;
import java.util.Objects;
import com.netflix.conductor.common.metadata.workflow.StartWorkflowRequest;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
/**
 * Mutable value holder aggregating everything the workflow executor needs to start a workflow:
 * either a name/version pair or an inline {@link WorkflowDef}, the workflow input payload,
 * correlation/priority metadata, and (for sub-workflows) the parent workflow/task identifiers.
 *
 * <p>{@link #equals(Object)} and {@link #hashCode()} consider all fields.
 */
public class StartWorkflowInput {

    private String name;
    private Integer version;
    private WorkflowDef workflowDefinition;
    private Map<String, Object> workflowInput;
    private String externalInputPayloadStoragePath;
    private String correlationId;
    private Integer priority;
    private String parentWorkflowId;
    private String parentWorkflowTaskId;
    private String event;
    private Map<String, String> taskToDomain;
    private String workflowId;
    private String triggeringWorkflowId;

    /** Creates an empty input; callers populate fields through the setters. */
    public StartWorkflowInput() {}

    /**
     * Copies the relevant fields from an API-level start request.
     *
     * @param startWorkflowRequest the incoming start-workflow request
     */
    public StartWorkflowInput(StartWorkflowRequest startWorkflowRequest) {
        this.name = startWorkflowRequest.getName();
        this.version = startWorkflowRequest.getVersion();
        this.workflowDefinition = startWorkflowRequest.getWorkflowDef();
        this.correlationId = startWorkflowRequest.getCorrelationId();
        this.priority = startWorkflowRequest.getPriority();
        this.workflowInput = startWorkflowRequest.getInput();
        this.externalInputPayloadStoragePath =
                startWorkflowRequest.getExternalInputPayloadStoragePath();
        this.taskToDomain = startWorkflowRequest.getTaskToDomain();
    }

    public String getName() { return name; }

    public void setName(String name) { this.name = name; }

    public Integer getVersion() { return version; }

    public void setVersion(Integer version) { this.version = version; }

    public WorkflowDef getWorkflowDefinition() { return workflowDefinition; }

    public void setWorkflowDefinition(WorkflowDef workflowDefinition) {
        this.workflowDefinition = workflowDefinition;
    }

    public Map<String, Object> getWorkflowInput() { return workflowInput; }

    public void setWorkflowInput(Map<String, Object> workflowInput) {
        this.workflowInput = workflowInput;
    }

    public String getExternalInputPayloadStoragePath() { return externalInputPayloadStoragePath; }

    public void setExternalInputPayloadStoragePath(String externalInputPayloadStoragePath) {
        this.externalInputPayloadStoragePath = externalInputPayloadStoragePath;
    }

    public String getCorrelationId() { return correlationId; }

    public void setCorrelationId(String correlationId) { this.correlationId = correlationId; }

    public Integer getPriority() { return priority; }

    public void setPriority(Integer priority) { this.priority = priority; }

    public String getParentWorkflowId() { return parentWorkflowId; }

    public void setParentWorkflowId(String parentWorkflowId) {
        this.parentWorkflowId = parentWorkflowId;
    }

    public String getParentWorkflowTaskId() { return parentWorkflowTaskId; }

    public void setParentWorkflowTaskId(String parentWorkflowTaskId) {
        this.parentWorkflowTaskId = parentWorkflowTaskId;
    }

    public String getEvent() { return event; }

    public void setEvent(String event) { this.event = event; }

    public Map<String, String> getTaskToDomain() { return taskToDomain; }

    public void setTaskToDomain(Map<String, String> taskToDomain) {
        this.taskToDomain = taskToDomain;
    }

    public String getWorkflowId() { return workflowId; }

    public void setWorkflowId(String workflowId) { this.workflowId = workflowId; }

    public String getTriggeringWorkflowId() { return triggeringWorkflowId; }

    public void setTriggeringWorkflowId(String triggeringWorkflowId) {
        this.triggeringWorkflowId = triggeringWorkflowId;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        StartWorkflowInput other = (StartWorkflowInput) o;
        return Objects.equals(name, other.name)
                && Objects.equals(version, other.version)
                && Objects.equals(workflowDefinition, other.workflowDefinition)
                && Objects.equals(workflowInput, other.workflowInput)
                && Objects.equals(
                        externalInputPayloadStoragePath, other.externalInputPayloadStoragePath)
                && Objects.equals(correlationId, other.correlationId)
                && Objects.equals(priority, other.priority)
                && Objects.equals(parentWorkflowId, other.parentWorkflowId)
                && Objects.equals(parentWorkflowTaskId, other.parentWorkflowTaskId)
                && Objects.equals(event, other.event)
                && Objects.equals(taskToDomain, other.taskToDomain)
                && Objects.equals(triggeringWorkflowId, other.triggeringWorkflowId)
                && Objects.equals(workflowId, other.workflowId);
    }

    @Override
    public int hashCode() {
        // NOTE: field order must stay stable — it determines the hash value.
        return Objects.hash(
                name,
                version,
                workflowDefinition,
                workflowInput,
                externalInputPayloadStoragePath,
                correlationId,
                priority,
                parentWorkflowId,
                parentWorkflowTaskId,
                event,
                taskToDomain,
                triggeringWorkflowId,
                workflowId);
    }
}
| 6,677 |
0 | Create_ds/conductor/core/src/main/java/com/netflix/conductor/core | Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution/DeciderService.java | /*
* Copyright 2022 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution;
import java.time.Duration;
import java.util.*;
import java.util.stream.Collectors;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import com.netflix.conductor.annotations.Trace;
import com.netflix.conductor.annotations.VisibleForTesting;
import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.tasks.TaskType;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.common.utils.ExternalPayloadStorage.Operation;
import com.netflix.conductor.common.utils.ExternalPayloadStorage.PayloadType;
import com.netflix.conductor.common.utils.TaskUtils;
import com.netflix.conductor.core.exception.TerminateWorkflowException;
import com.netflix.conductor.core.execution.mapper.TaskMapper;
import com.netflix.conductor.core.execution.mapper.TaskMapperContext;
import com.netflix.conductor.core.execution.tasks.SystemTaskRegistry;
import com.netflix.conductor.core.utils.ExternalPayloadStorageUtils;
import com.netflix.conductor.core.utils.IDGenerator;
import com.netflix.conductor.core.utils.ParametersUtils;
import com.netflix.conductor.dao.MetadataDAO;
import com.netflix.conductor.metrics.Monitors;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import static com.netflix.conductor.common.metadata.tasks.TaskType.TERMINATE;
import static com.netflix.conductor.common.metadata.tasks.TaskType.USER_DEFINED;
import static com.netflix.conductor.model.TaskModel.Status.*;
/**
* Decider evaluates the state of the workflow by inspecting the current state along with the
* blueprint. The result of the evaluation is either to schedule further tasks, complete/fail the
* workflow or do nothing.
*/
@Service
@Trace
public class DeciderService {
    private static final Logger LOGGER = LoggerFactory.getLogger(DeciderService.class);
    // Generates unique ids for newly scheduled (and retried) tasks.
    private final IDGenerator idGenerator;
    // Resolves input/output parameter expressions against workflow/task state.
    private final ParametersUtils parametersUtils;
    // Downloads payloads stored externally instead of inline on the task/workflow.
    private final ExternalPayloadStorageUtils externalPayloadStorageUtils;
    // Fallback source of task definitions when the task carries none of its own.
    private final MetadataDAO metadataDAO;
    // Used to recognize system tasks (e.g. DECISION/SWITCH handling in getNextTask).
    private final SystemTaskRegistry systemTaskRegistry;
    // Threshold in minutes after which a still-pending task triggers a warning log.
    private final long taskPendingTimeThresholdMins;
    // TaskMapper per task type; maps a WorkflowTask blueprint to concrete TaskModels.
    private final Map<String, TaskMapper> taskMappers;
public DeciderService(
IDGenerator idGenerator,
ParametersUtils parametersUtils,
MetadataDAO metadataDAO,
ExternalPayloadStorageUtils externalPayloadStorageUtils,
SystemTaskRegistry systemTaskRegistry,
@Qualifier("taskMappersByTaskType") Map<String, TaskMapper> taskMappers,
@Value("${conductor.app.taskPendingTimeThreshold:60m}")
Duration taskPendingTimeThreshold) {
this.idGenerator = idGenerator;
this.metadataDAO = metadataDAO;
this.parametersUtils = parametersUtils;
this.taskMappers = taskMappers;
this.externalPayloadStorageUtils = externalPayloadStorageUtils;
this.taskPendingTimeThresholdMins = taskPendingTimeThreshold.toMinutes();
this.systemTaskRegistry = systemTaskRegistry;
}
public DeciderOutcome decide(WorkflowModel workflow) throws TerminateWorkflowException {
// In case of a new workflow the list of tasks will be empty.
final List<TaskModel> tasks = workflow.getTasks();
// Filter the list of tasks and include only tasks that are not executed,
// not marked to be skipped and not ready for rerun.
// For a new workflow, the list of unprocessedTasks will be empty
List<TaskModel> unprocessedTasks =
tasks.stream()
.filter(t -> !t.getStatus().equals(SKIPPED) && !t.isExecuted())
.collect(Collectors.toList());
List<TaskModel> tasksToBeScheduled = new LinkedList<>();
if (unprocessedTasks.isEmpty()) {
// this is the flow that the new workflow will go through
tasksToBeScheduled = startWorkflow(workflow);
if (tasksToBeScheduled == null) {
tasksToBeScheduled = new LinkedList<>();
}
}
return decide(workflow, tasksToBeScheduled);
}
    /**
     * Core decision loop: walks every task in the workflow, applies timeout/retry handling,
     * determines the next tasks to schedule from the blueprint, and decides whether the
     * workflow is complete.
     *
     * @param workflow the workflow being evaluated
     * @param preScheduledTasks tasks already selected for scheduling (e.g. the first task of a
     *     brand-new workflow)
     * @return outcome holding tasks to schedule, tasks to persist, and the completion flag
     * @throws TerminateWorkflowException if a timeout policy or non-retriable failure
     *     requires terminating the workflow
     */
    private DeciderOutcome decide(final WorkflowModel workflow, List<TaskModel> preScheduledTasks)
            throws TerminateWorkflowException {
        DeciderOutcome outcome = new DeciderOutcome();
        if (workflow.getStatus().isTerminal()) {
            // you cannot evaluate a terminal workflow
            LOGGER.debug(
                    "Workflow {} is already finished. Reason: {}",
                    workflow,
                    workflow.getReasonForIncompletion());
            return outcome;
        }
        // May throw TerminateWorkflowException per the workflow's timeout policy.
        checkWorkflowTimeout(workflow);
        if (workflow.getStatus().equals(WorkflowModel.Status.PAUSED)) {
            LOGGER.debug("Workflow " + workflow.getWorkflowId() + " is paused");
            return outcome;
        }
        List<TaskModel> pendingTasks = new ArrayList<>();
        Set<String> executedTaskRefNames = new HashSet<>();
        boolean hasSuccessfulTerminateTask = false;
        for (TaskModel task : workflow.getTasks()) {
            // Filter the list of tasks and include only tasks that are not retried, not executed
            // marked to be skipped and not part of System tasks that is DECISION, FORK, JOIN
            // This list will be empty for a new workflow being started
            if (!task.isRetried() && !task.getStatus().equals(SKIPPED) && !task.isExecuted()) {
                pendingTasks.add(task);
            }
            // Get all the tasks that have not completed their lifecycle yet
            // This list will be empty for a new workflow
            if (task.isExecuted()) {
                executedTaskRefNames.add(task.getReferenceTaskName());
            }
            // A successfully finished TERMINATE task forces workflow completion below.
            if (TERMINATE.name().equals(task.getTaskType())
                    && task.getStatus().isTerminal()
                    && task.getStatus().isSuccessful()) {
                hasSuccessfulTerminateTask = true;
                outcome.terminateTask = task;
            }
        }
        // LinkedHashMap keyed by reference name: preserves scheduling order and de-duplicates.
        Map<String, TaskModel> tasksToBeScheduled = new LinkedHashMap<>();
        preScheduledTasks.forEach(
                preScheduledTask -> {
                    tasksToBeScheduled.put(
                            preScheduledTask.getReferenceTaskName(), preScheduledTask);
                });
        // A new workflow does not enter this code branch
        for (TaskModel pendingTask : pendingTasks) {
            // Non-terminal system tasks stay in the scheduling set so they keep executing.
            if (systemTaskRegistry.isSystemTask(pendingTask.getTaskType())
                    && !pendingTask.getStatus().isTerminal()) {
                tasksToBeScheduled.putIfAbsent(pendingTask.getReferenceTaskName(), pendingTask);
                executedTaskRefNames.remove(pendingTask.getReferenceTaskName());
            }
            // Fall back to the definition embedded in the workflow blueprint if the task
            // itself carries no task definition.
            Optional<TaskDef> taskDefinition = pendingTask.getTaskDefinition();
            if (taskDefinition.isEmpty()) {
                taskDefinition =
                        Optional.ofNullable(
                                        workflow.getWorkflowDefinition()
                                                .getTaskByRefName(
                                                        pendingTask.getReferenceTaskName()))
                                .map(WorkflowTask::getTaskDefinition);
            }
            if (taskDefinition.isPresent()) {
                checkTaskTimeout(taskDefinition.get(), pendingTask);
                checkTaskPollTimeout(taskDefinition.get(), pendingTask);
                // If the task has not been updated for "responseTimeoutSeconds" then mark task as
                // TIMED_OUT
                if (isResponseTimedOut(taskDefinition.get(), pendingTask)) {
                    timeoutTask(taskDefinition.get(), pendingTask);
                }
            }
            if (!pendingTask.getStatus().isSuccessful()) {
                WorkflowTask workflowTask = pendingTask.getWorkflowTask();
                if (workflowTask == null) {
                    workflowTask =
                            workflow.getWorkflowDefinition()
                                    .getTaskByRefName(pendingTask.getReferenceTaskName());
                }
                // retry() either returns a rescheduled copy, throws to terminate the
                // workflow, or returns empty for optional tasks.
                Optional<TaskModel> retryTask =
                        retry(taskDefinition.orElse(null), workflowTask, pendingTask, workflow);
                if (retryTask.isPresent()) {
                    tasksToBeScheduled.put(retryTask.get().getReferenceTaskName(), retryTask.get());
                    executedTaskRefNames.remove(retryTask.get().getReferenceTaskName());
                    outcome.tasksToBeUpdated.add(pendingTask);
                } else {
                    // Optional task that exhausted retries: mark and move on.
                    pendingTask.setStatus(COMPLETED_WITH_ERRORS);
                }
            }
            // Terminal, non-retried task finishing its lifecycle: mark executed and pull in
            // its successor(s) from the blueprint.
            if (!pendingTask.isExecuted()
                    && !pendingTask.isRetried()
                    && pendingTask.getStatus().isTerminal()) {
                pendingTask.setExecuted(true);
                List<TaskModel> nextTasks = getNextTask(workflow, pendingTask);
                if (pendingTask.isLoopOverTask()
                        && !TaskType.DO_WHILE.name().equals(pendingTask.getTaskType())
                        && !nextTasks.isEmpty()) {
                    nextTasks = filterNextLoopOverTasks(nextTasks, pendingTask, workflow);
                }
                nextTasks.forEach(
                        nextTask ->
                                tasksToBeScheduled.putIfAbsent(
                                        nextTask.getReferenceTaskName(), nextTask));
                outcome.tasksToBeUpdated.add(pendingTask);
                LOGGER.debug(
                        "Scheduling Tasks from {}, next = {} for workflowId: {}",
                        pendingTask.getTaskDefName(),
                        nextTasks.stream()
                                .map(TaskModel::getTaskDefName)
                                .collect(Collectors.toList()),
                        workflow.getWorkflowId());
            }
        }
        // Add to the outcome only candidates whose reference name has not already executed.
        List<TaskModel> unScheduledTasks =
                tasksToBeScheduled.values().stream()
                        .filter(task -> !executedTaskRefNames.contains(task.getReferenceTaskName()))
                        .collect(Collectors.toList());
        if (!unScheduledTasks.isEmpty()) {
            LOGGER.debug(
                    "Scheduling Tasks: {} for workflow: {}",
                    unScheduledTasks.stream()
                            .map(TaskModel::getTaskDefName)
                            .collect(Collectors.toList()),
                    workflow.getWorkflowId());
            outcome.tasksToBeScheduled.addAll(unScheduledTasks);
        }
        // Complete if a TERMINATE task succeeded, or nothing is left to schedule and all
        // blueprint tasks finished successfully.
        if (hasSuccessfulTerminateTask
                || (outcome.tasksToBeScheduled.isEmpty() && checkForWorkflowCompletion(workflow))) {
            LOGGER.debug("Marking workflow: {} as complete.", workflow);
            outcome.isComplete = true;
        }
        return outcome;
    }
@VisibleForTesting
List<TaskModel> filterNextLoopOverTasks(
List<TaskModel> tasks, TaskModel pendingTask, WorkflowModel workflow) {
// Update the task reference name and iteration
tasks.forEach(
nextTask -> {
nextTask.setReferenceTaskName(
TaskUtils.appendIteration(
nextTask.getReferenceTaskName(), pendingTask.getIteration()));
nextTask.setIteration(pendingTask.getIteration());
});
List<String> tasksInWorkflow =
workflow.getTasks().stream()
.filter(
runningTask ->
runningTask.getStatus().equals(TaskModel.Status.IN_PROGRESS)
|| runningTask.getStatus().isTerminal())
.map(TaskModel::getReferenceTaskName)
.collect(Collectors.toList());
return tasks.stream()
.filter(
runningTask ->
!tasksInWorkflow.contains(runningTask.getReferenceTaskName()))
.collect(Collectors.toList());
}
    /**
     * Produces the initial task(s) for a workflow run: for a fresh execution, the first
     * non-skipped task from the definition; for a re-run, the stored first task reset to
     * SCHEDULED.
     *
     * @param workflow the workflow being started
     * @return the task(s) to schedule first
     * @throws TerminateWorkflowException if the definition has no tasks, or the re-run start
     *     task cannot be found
     */
    private List<TaskModel> startWorkflow(WorkflowModel workflow)
            throws TerminateWorkflowException {
        final WorkflowDef workflowDef = workflow.getWorkflowDefinition();
        LOGGER.debug("Starting workflow: {}", workflow);
        // The tasks will be empty in case of new workflow
        List<TaskModel> tasks = workflow.getTasks();
        // Check if the workflow is a re-run case or if it is a new workflow execution
        if (workflow.getReRunFromWorkflowId() == null || tasks.isEmpty()) {
            if (workflowDef.getTasks().isEmpty()) {
                // Empty definition: nothing to run, mark the workflow COMPLETED.
                throw new TerminateWorkflowException(
                        "No tasks found to be executed", WorkflowModel.Status.COMPLETED);
            }
            WorkflowTask taskToSchedule =
                    workflowDef
                            .getTasks()
                            .get(0); // Nothing is running yet - so schedule the first task
            // Loop until a non-skipped task is found
            while (isTaskSkipped(taskToSchedule, workflow)) {
                taskToSchedule = workflowDef.getNextTask(taskToSchedule.getTaskReferenceName());
            }
            // In case of a new workflow, the first non-skippable task will be scheduled
            return getTasksToBeScheduled(workflow, taskToSchedule, 0);
        }
        // Re-run path: reset the first stored task so it can be executed again.
        TaskModel rerunFromTask =
                tasks.stream()
                        .findFirst()
                        .map(
                                task -> {
                                    task.setStatus(SCHEDULED);
                                    task.setRetried(true);
                                    task.setRetryCount(0);
                                    return task;
                                })
                        .orElseThrow(
                                () -> {
                                    String reason =
                                            String.format(
                                                    "The workflow %s is marked for re-run from %s but could not find the starting task",
                                                    workflow.getWorkflowId(),
                                                    workflow.getReRunFromWorkflowId());
                                    return new TerminateWorkflowException(reason);
                                });
        return Collections.singletonList(rerunFromTask);
    }
    /**
     * Updates the workflow output.
     *
     * <p>Precedence: the output of a successfully completed TERMINATE task wins; otherwise the
     * workflow definition's output parameters are evaluated; otherwise the output of the given
     * task (or the last task in the workflow) is copied over. Outputs kept in external payload
     * storage are downloaded first.
     *
     * @param workflow the workflow instance
     * @param task if not null, the output of this task will be copied to the workflow output
     *     when no output parameters are specified in the workflow definition; if null, the
     *     output of the last task in the workflow is used instead
     */
    void updateWorkflowOutput(final WorkflowModel workflow, TaskModel task) {
        List<TaskModel> allTasks = workflow.getTasks();
        if (allTasks.isEmpty()) {
            return;
        }
        Map<String, Object> output = new HashMap<>();
        // A successfully completed TERMINATE task dictates the workflow output.
        Optional<TaskModel> optionalTask =
                allTasks.stream()
                        .filter(
                                t ->
                                        TaskType.TERMINATE.name().equals(t.getTaskType())
                                                && t.getStatus().isTerminal()
                                                && t.getStatus().isSuccessful())
                        .findFirst();
        if (optionalTask.isPresent()) {
            TaskModel terminateTask = optionalTask.get();
            if (StringUtils.isNotBlank(terminateTask.getExternalOutputPayloadStoragePath())) {
                // Output was offloaded to external storage; fetch it back.
                output =
                        externalPayloadStorageUtils.downloadPayload(
                                terminateTask.getExternalOutputPayloadStoragePath());
                Monitors.recordExternalPayloadStorageUsage(
                        terminateTask.getTaskDefName(),
                        Operation.READ.toString(),
                        PayloadType.TASK_OUTPUT.toString());
            } else if (!terminateTask.getOutputData().isEmpty()) {
                output = terminateTask.getOutputData();
            }
        } else {
            // No TERMINATE task: use the provided task, or fall back to the last task.
            TaskModel last = Optional.ofNullable(task).orElse(allTasks.get(allTasks.size() - 1));
            WorkflowDef workflowDef = workflow.getWorkflowDefinition();
            if (workflowDef.getOutputParameters() != null
                    && !workflowDef.getOutputParameters().isEmpty()) {
                // Definition-level output parameters take precedence over task output.
                output =
                        parametersUtils.getTaskInput(
                                workflowDef.getOutputParameters(), workflow, null, null);
            } else if (StringUtils.isNotBlank(last.getExternalOutputPayloadStoragePath())) {
                output =
                        externalPayloadStorageUtils.downloadPayload(
                                last.getExternalOutputPayloadStoragePath());
                Monitors.recordExternalPayloadStorageUsage(
                        last.getTaskDefName(),
                        Operation.READ.toString(),
                        PayloadType.TASK_OUTPUT.toString());
            } else {
                output = last.getOutputData();
            }
        }
        workflow.setOutput(output);
    }
    /**
     * Determines whether the workflow can be marked complete: every executed task is terminal
     * and successful, every blueprint task has run, and no remaining task would schedule a
     * successor that has not yet executed. A successfully finished TERMINATE task
     * short-circuits to {@code true}.
     *
     * @param workflow the workflow to inspect
     * @return true if the workflow has nothing left to do
     * @throws TerminateWorkflowException propagated from downstream evaluation
     */
    public boolean checkForWorkflowCompletion(final WorkflowModel workflow)
            throws TerminateWorkflowException {
        Map<String, TaskModel.Status> taskStatusMap = new HashMap<>();
        List<TaskModel> nonExecutedTasks = new ArrayList<>();
        for (TaskModel task : workflow.getTasks()) {
            taskStatusMap.put(task.getReferenceTaskName(), task.getStatus());
            if (!task.getStatus().isTerminal()) {
                return false;
            }
            // If there is a TERMINATE task that has been executed successfully then the workflow
            // should be marked as completed.
            if (TERMINATE.name().equals(task.getTaskType())
                    && task.getStatus().isTerminal()
                    && task.getStatus().isSuccessful()) {
                return true;
            }
            if (!task.isRetried() || !task.isExecuted()) {
                nonExecutedTasks.add(task);
            }
        }
        // If there are no tasks executed, then we are not done yet
        if (taskStatusMap.isEmpty()) {
            return false;
        }
        List<WorkflowTask> workflowTasks = workflow.getWorkflowDefinition().getTasks();
        for (WorkflowTask wftask : workflowTasks) {
            TaskModel.Status status = taskStatusMap.get(wftask.getTaskReferenceName());
            if (status == null || !status.isTerminal()) {
                return false;
            }
            // if we reach here, the task has been completed.
            // Was the task successful in completion?
            if (!status.isSuccessful()) {
                return false;
            }
        }
        // No non-executed task may still have an unscheduled successor in the blueprint.
        boolean noPendingSchedule =
                nonExecutedTasks.stream()
                        .parallel()
                        .noneMatch(
                                wftask -> {
                                    String next = getNextTasksToBeScheduled(workflow, wftask);
                                    return next != null && !taskStatusMap.containsKey(next);
                                });
        return noPendingSchedule;
    }
    /**
     * Resolves the task(s) that follow {@code task} in the workflow definition, skipping
     * skipped tasks, and maps them to schedulable TaskModels. Returns an empty list when the
     * workflow has no further task, when a DECISION/SWITCH still owns child tasks, or when the
     * next DO_WHILE already exists in the workflow.
     */
    List<TaskModel> getNextTask(WorkflowModel workflow, TaskModel task) {
        final WorkflowDef workflowDef = workflow.getWorkflowDefinition();
        // Get the following task after the last completed task
        if (systemTaskRegistry.isSystemTask(task.getTaskType())
                && (TaskType.TASK_TYPE_DECISION.equals(task.getTaskType())
                        || TaskType.TASK_TYPE_SWITCH.equals(task.getTaskType()))) {
            // "hasChildren" marks a branch whose children drive the next scheduling step.
            if (task.getInputData().get("hasChildren") != null) {
                return Collections.emptyList();
            }
        }
        // Loop-over tasks carry an iteration suffix; strip it to look up the blueprint.
        String taskReferenceName =
                task.isLoopOverTask()
                        ? TaskUtils.removeIterationFromTaskRefName(task.getReferenceTaskName())
                        : task.getReferenceTaskName();
        WorkflowTask taskToSchedule = workflowDef.getNextTask(taskReferenceName);
        while (isTaskSkipped(taskToSchedule, workflow)) {
            taskToSchedule = workflowDef.getNextTask(taskToSchedule.getTaskReferenceName());
        }
        if (taskToSchedule != null && TaskType.DO_WHILE.name().equals(taskToSchedule.getType())) {
            // check if already has this DO_WHILE task, ignore it if it already exists
            String nextTaskReferenceName = taskToSchedule.getTaskReferenceName();
            if (workflow.getTasks().stream()
                    .anyMatch(
                            runningTask ->
                                    runningTask
                                            .getReferenceTaskName()
                                            .equals(nextTaskReferenceName))) {
                return Collections.emptyList();
            }
        }
        if (taskToSchedule != null) {
            return getTasksToBeScheduled(workflow, taskToSchedule, 0);
        }
        return Collections.emptyList();
    }
private String getNextTasksToBeScheduled(WorkflowModel workflow, TaskModel task) {
final WorkflowDef def = workflow.getWorkflowDefinition();
String taskReferenceName = task.getReferenceTaskName();
WorkflowTask taskToSchedule = def.getNextTask(taskReferenceName);
while (isTaskSkipped(taskToSchedule, workflow)) {
taskToSchedule = def.getNextTask(taskToSchedule.getTaskReferenceName());
}
return taskToSchedule == null ? null : taskToSchedule.getTaskReferenceName();
}
    /**
     * Decides whether a failed task should be retried. If the task is non-retriable, built-in,
     * or out of retries: optional tasks yield empty (caller marks COMPLETED_WITH_ERRORS),
     * otherwise the workflow is terminated with a status derived from the task status. If a
     * retry is due, returns a fresh SCHEDULED copy with the delay dictated by the task
     * definition's retry logic (FIXED / LINEAR_BACKOFF / EXPONENTIAL_BACKOFF).
     *
     * @param taskDefinition the task's definition; looked up from metadata when null
     * @param workflowTask the blueprint task; its retryCount/isOptional override the defaults
     * @param task the failed task instance
     * @param workflow the owning workflow
     * @return the rescheduled copy, or empty when the task is optional and out of retries
     * @throws TerminateWorkflowException when the task cannot be retried and is not optional
     */
    @VisibleForTesting
    Optional<TaskModel> retry(
            TaskDef taskDefinition,
            WorkflowTask workflowTask,
            TaskModel task,
            WorkflowModel workflow)
            throws TerminateWorkflowException {
        int retryCount = task.getRetryCount();
        if (taskDefinition == null) {
            taskDefinition = metadataDAO.getTaskDef(task.getTaskDefName());
        }
        // Blueprint-level retryCount (if set) overrides the task definition's.
        final int expectedRetryCount =
                taskDefinition == null
                        ? 0
                        : Optional.ofNullable(workflowTask)
                                .map(WorkflowTask::getRetryCount)
                                .orElse(taskDefinition.getRetryCount());
        if (!task.getStatus().isRetriable()
                || TaskType.isBuiltIn(task.getTaskType())
                || expectedRetryCount <= retryCount) {
            if (workflowTask != null && workflowTask.isOptional()) {
                return Optional.empty();
            }
            // Map the task's terminal status to the workflow status used for termination.
            WorkflowModel.Status status;
            switch (task.getStatus()) {
                case CANCELED:
                    status = WorkflowModel.Status.TERMINATED;
                    break;
                case TIMED_OUT:
                    status = WorkflowModel.Status.TIMED_OUT;
                    break;
                default:
                    status = WorkflowModel.Status.FAILED;
                    break;
            }
            updateWorkflowOutput(workflow, task);
            final String errMsg =
                    String.format(
                            "Task %s failed with status: %s and reason: '%s'",
                            task.getTaskId(), status, task.getReasonForIncompletion());
            throw new TerminateWorkflowException(errMsg, status, task);
        }
        // retry... - but not immediately - put a delay...
        int startDelay = taskDefinition.getRetryDelaySeconds();
        switch (taskDefinition.getRetryLogic()) {
            case FIXED:
                startDelay = taskDefinition.getRetryDelaySeconds();
                break;
            case LINEAR_BACKOFF:
                int linearRetryDelaySeconds =
                        taskDefinition.getRetryDelaySeconds()
                                * taskDefinition.getBackoffScaleFactor()
                                * (task.getRetryCount() + 1);
                // Reset integer overflow to max value
                startDelay =
                        linearRetryDelaySeconds < 0 ? Integer.MAX_VALUE : linearRetryDelaySeconds;
                break;
            case EXPONENTIAL_BACKOFF:
                int exponentialRetryDelaySeconds =
                        taskDefinition.getRetryDelaySeconds()
                                * (int) Math.pow(2, task.getRetryCount());
                // Reset integer overflow to max value
                startDelay =
                        exponentialRetryDelaySeconds < 0
                                ? Integer.MAX_VALUE
                                : exponentialRetryDelaySeconds;
                break;
        }
        task.setRetried(true);
        // Build the rescheduled copy: new id, SCHEDULED status, reset execution bookkeeping.
        TaskModel rescheduled = task.copy();
        rescheduled.setStartDelayInSeconds(startDelay);
        rescheduled.setCallbackAfterSeconds(startDelay);
        rescheduled.setRetryCount(task.getRetryCount() + 1);
        rescheduled.setRetried(false);
        rescheduled.setTaskId(idGenerator.generate());
        rescheduled.setRetriedTaskId(task.getTaskId());
        rescheduled.setStatus(SCHEDULED);
        rescheduled.setPollCount(0);
        rescheduled.setInputData(new HashMap<>(task.getInputData()));
        rescheduled.setReasonForIncompletion(null);
        rescheduled.setSubWorkflowId(null);
        rescheduled.setSeq(0);
        rescheduled.setScheduledTime(0);
        rescheduled.setStartTime(0);
        rescheduled.setEndTime(0);
        rescheduled.setWorkerId(null);
        if (StringUtils.isNotBlank(task.getExternalInputPayloadStoragePath())) {
            // Input lives in external storage; carry the reference instead of the payload.
            rescheduled.setExternalInputPayloadStoragePath(
                    task.getExternalInputPayloadStoragePath());
        } else {
            rescheduled.addInput(task.getInputData());
        }
        if (workflowTask != null && workflow.getWorkflowDefinition().getSchemaVersion() > 1) {
            // Schema v2+: re-evaluate input parameter expressions for the new task id.
            Map<String, Object> taskInput =
                    parametersUtils.getTaskInputV2(
                            workflowTask.getInputParameters(),
                            workflow,
                            rescheduled.getTaskId(),
                            taskDefinition);
            rescheduled.addInput(taskInput);
        }
        // for the schema version 1, we do not have to recompute the inputs
        return Optional.of(rescheduled);
    }
    /**
     * Checks whether the workflow has exceeded its configured {@code timeoutSeconds} (measured
     * from creation, or from the last retry if one happened) and applies the workflow's
     * timeout policy: ALERT_ONLY logs and records a metric; TIME_OUT_WF throws to terminate.
     *
     * @param workflow the workflow to check
     * @throws TerminateWorkflowException if the policy is TIME_OUT_WF and the timeout elapsed
     */
    @VisibleForTesting
    void checkWorkflowTimeout(WorkflowModel workflow) {
        WorkflowDef workflowDef = workflow.getWorkflowDefinition();
        if (workflowDef == null) {
            LOGGER.warn("Missing workflow definition : {}", workflow.getWorkflowId());
            return;
        }
        // No timeout configured, or already finished: nothing to check.
        if (workflow.getStatus().isTerminal() || workflowDef.getTimeoutSeconds() <= 0) {
            return;
        }
        long timeout = 1000L * workflowDef.getTimeoutSeconds();
        long now = System.currentTimeMillis();
        // Measure from the last retry when present, otherwise from workflow creation.
        long elapsedTime =
                workflow.getLastRetriedTime() > 0
                        ? now - workflow.getLastRetriedTime()
                        : now - workflow.getCreateTime();
        if (elapsedTime < timeout) {
            return;
        }
        String reason =
                String.format(
                        "Workflow timed out after %d seconds. Timeout configured as %d seconds. "
                                + "Timeout policy configured to %s",
                        elapsedTime / 1000L,
                        workflowDef.getTimeoutSeconds(),
                        workflowDef.getTimeoutPolicy().name());
        switch (workflowDef.getTimeoutPolicy()) {
            case ALERT_ONLY:
                LOGGER.info("{} {}", workflow.getWorkflowId(), reason);
                Monitors.recordWorkflowTermination(
                        workflow.getWorkflowName(),
                        WorkflowModel.Status.TIMED_OUT,
                        workflow.getOwnerApp());
                return;
            case TIME_OUT_WF:
                throw new TerminateWorkflowException(reason, WorkflowModel.Status.TIMED_OUT);
        }
    }
    /**
     * Checks whether a running task has exceeded its definition's {@code timeoutSeconds}
     * (measured from its start time plus any start delay) and, if so, applies the task's
     * timeout policy via {@link #timeoutTaskWithTimeoutPolicy(String, TaskDef, TaskModel)}.
     *
     * @param taskDef the task's definition; a null definition is logged and skipped
     * @param task the task to check
     */
    @VisibleForTesting
    void checkTaskTimeout(TaskDef taskDef, TaskModel task) {
        if (taskDef == null) {
            LOGGER.warn(
                    "Missing task definition for task:{}/{} in workflow:{}",
                    task.getTaskId(),
                    task.getTaskDefName(),
                    task.getWorkflowInstanceId());
            return;
        }
        // Terminal, unconfigured, or not-yet-started tasks cannot time out here.
        if (task.getStatus().isTerminal()
                || taskDef.getTimeoutSeconds() <= 0
                || task.getStartTime() <= 0) {
            return;
        }
        long timeout = 1000L * taskDef.getTimeoutSeconds();
        long now = System.currentTimeMillis();
        // The start delay does not count against the timeout budget.
        long elapsedTime =
                now - (task.getStartTime() + ((long) task.getStartDelayInSeconds() * 1000L));
        if (elapsedTime < timeout) {
            return;
        }
        String reason =
                String.format(
                        "Task timed out after %d seconds. Timeout configured as %d seconds. "
                                + "Timeout policy configured to %s",
                        elapsedTime / 1000L,
                        taskDef.getTimeoutSeconds(),
                        taskDef.getTimeoutPolicy().name());
        timeoutTaskWithTimeoutPolicy(reason, taskDef, task);
    }
    /**
     * Checks whether a SCHEDULED task has waited to be polled longer than its definition's
     * {@code pollTimeoutSeconds} (adjusted by callback and start delays) and, if so, applies
     * the task's timeout policy.
     *
     * @param taskDef the task's definition; a null definition is logged and skipped
     * @param task the task to check; only SCHEDULED tasks are evaluated
     */
    @VisibleForTesting
    void checkTaskPollTimeout(TaskDef taskDef, TaskModel task) {
        if (taskDef == null) {
            LOGGER.warn(
                    "Missing task definition for task:{}/{} in workflow:{}",
                    task.getTaskId(),
                    task.getTaskDefName(),
                    task.getWorkflowInstanceId());
            return;
        }
        // Poll timeout only applies to SCHEDULED tasks with a positive timeout configured.
        if (taskDef.getPollTimeoutSeconds() == null
                || taskDef.getPollTimeoutSeconds() <= 0
                || !task.getStatus().equals(SCHEDULED)) {
            return;
        }
        final long pollTimeout = 1000L * taskDef.getPollTimeoutSeconds();
        // A task parked behind callbackAfterSeconds is not expected to be polled yet.
        final long adjustedPollTimeout = pollTimeout + task.getCallbackAfterSeconds() * 1000L;
        final long now = System.currentTimeMillis();
        final long pollElapsedTime =
                now - (task.getScheduledTime() + ((long) task.getStartDelayInSeconds() * 1000L));
        if (pollElapsedTime < adjustedPollTimeout) {
            return;
        }
        String reason =
                String.format(
                        "Task poll timed out after %d seconds. Poll timeout configured as %d seconds. Timeout policy configured to %s",
                        pollElapsedTime / 1000L,
                        pollTimeout / 1000L,
                        taskDef.getTimeoutPolicy().name());
        timeoutTaskWithTimeoutPolicy(reason, taskDef, task);
    }
void timeoutTaskWithTimeoutPolicy(String reason, TaskDef taskDef, TaskModel task) {
Monitors.recordTaskTimeout(task.getTaskDefName());
switch (taskDef.getTimeoutPolicy()) {
case ALERT_ONLY:
LOGGER.info(reason);
return;
case RETRY:
task.setStatus(TIMED_OUT);
task.setReasonForIncompletion(reason);
return;
case TIME_OUT_WF:
task.setStatus(TIMED_OUT);
task.setReasonForIncompletion(reason);
throw new TerminateWorkflowException(reason, WorkflowModel.Status.TIMED_OUT, task);
}
}
@VisibleForTesting
boolean isResponseTimedOut(TaskDef taskDefinition, TaskModel task) {
if (taskDefinition == null) {
LOGGER.warn(
"missing task type : {}, workflowId= {}",
task.getTaskDefName(),
task.getWorkflowInstanceId());
return false;
}
if (task.getStatus().isTerminal() || isAyncCompleteSystemTask(task)) {
return false;
}
// calculate pendingTime
long now = System.currentTimeMillis();
long callbackTime = 1000L * task.getCallbackAfterSeconds();
long referenceTime =
task.getUpdateTime() > 0 ? task.getUpdateTime() : task.getScheduledTime();
long pendingTime = now - (referenceTime + callbackTime);
Monitors.recordTaskPendingTime(task.getTaskType(), task.getWorkflowType(), pendingTime);
long thresholdMS = taskPendingTimeThresholdMins * 60 * 1000;
if (pendingTime > thresholdMS) {
LOGGER.warn(
"Task: {} of type: {} in workflow: {}/{} is in pending state for longer than {} ms",
task.getTaskId(),
task.getTaskType(),
task.getWorkflowInstanceId(),
task.getWorkflowType(),
thresholdMS);
}
if (!task.getStatus().equals(IN_PROGRESS)
|| taskDefinition.getResponseTimeoutSeconds() == 0) {
return false;
}
LOGGER.debug(
"Evaluating responseTimeOut for Task: {}, with Task Definition: {}",
task,
taskDefinition);
long responseTimeout = 1000L * taskDefinition.getResponseTimeoutSeconds();
long adjustedResponseTimeout = responseTimeout + callbackTime;
long noResponseTime = now - task.getUpdateTime();
if (noResponseTime < adjustedResponseTimeout) {
LOGGER.debug(
"Current responseTime: {} has not exceeded the configured responseTimeout of {} for the Task: {} with Task Definition: {}",
pendingTime,
responseTimeout,
task,
taskDefinition);
return false;
}
Monitors.recordTaskResponseTimeout(task.getTaskDefName());
return true;
}
private void timeoutTask(TaskDef taskDef, TaskModel task) {
String reason =
"responseTimeout: "
+ taskDef.getResponseTimeoutSeconds()
+ " exceeded for the taskId: "
+ task.getTaskId()
+ " with Task Definition: "
+ task.getTaskDefName();
LOGGER.debug(reason);
task.setStatus(TIMED_OUT);
task.setReasonForIncompletion(reason);
}
    /**
     * Convenience overload that maps the workflow task to schedulable task instances when the
     * schedule is not a retry of a specific prior task (retriedTaskId is null).
     */
    public List<TaskModel> getTasksToBeScheduled(
            WorkflowModel workflow, WorkflowTask taskToSchedule, int retryCount) {
        return getTasksToBeScheduled(workflow, taskToSchedule, retryCount, null);
    }
public List<TaskModel> getTasksToBeScheduled(
WorkflowModel workflow,
WorkflowTask taskToSchedule,
int retryCount,
String retriedTaskId) {
Map<String, Object> input =
parametersUtils.getTaskInput(
taskToSchedule.getInputParameters(), workflow, null, null);
String type = taskToSchedule.getType();
// get tasks already scheduled (in progress/terminal) for this workflow instance
List<String> tasksInWorkflow =
workflow.getTasks().stream()
.filter(
runningTask ->
runningTask.getStatus().equals(TaskModel.Status.IN_PROGRESS)
|| runningTask.getStatus().isTerminal())
.map(TaskModel::getReferenceTaskName)
.collect(Collectors.toList());
String taskId = idGenerator.generate();
TaskMapperContext taskMapperContext =
TaskMapperContext.newBuilder()
.withWorkflowModel(workflow)
.withTaskDefinition(taskToSchedule.getTaskDefinition())
.withWorkflowTask(taskToSchedule)
.withTaskInput(input)
.withRetryCount(retryCount)
.withRetryTaskId(retriedTaskId)
.withTaskId(taskId)
.withDeciderService(this)
.build();
// For static forks, each branch of the fork creates a join task upon completion for
// dynamic forks, a join task is created with the fork and also with each branch of the
// fork.
// A new task must only be scheduled if a task, with the same reference name is not already
// in this workflow instance
return taskMappers
.getOrDefault(type, taskMappers.get(USER_DEFINED.name()))
.getMappedTasks(taskMapperContext)
.stream()
.filter(task -> !tasksInWorkflow.contains(task.getReferenceTaskName()))
.collect(Collectors.toList());
}
private boolean isTaskSkipped(WorkflowTask taskToSchedule, WorkflowModel workflow) {
try {
boolean isTaskSkipped = false;
if (taskToSchedule != null) {
TaskModel t = workflow.getTaskByRefName(taskToSchedule.getTaskReferenceName());
if (t == null) {
isTaskSkipped = false;
} else if (t.getStatus().equals(SKIPPED)) {
isTaskSkipped = true;
}
}
return isTaskSkipped;
} catch (Exception e) {
throw new TerminateWorkflowException(e.getMessage());
}
}
private boolean isAyncCompleteSystemTask(TaskModel task) {
return systemTaskRegistry.isSystemTask(task.getTaskType())
&& systemTaskRegistry.get(task.getTaskType()).isAsyncComplete(task);
}
    /** Result of a decider evaluation: tasks to schedule/update plus terminal-state signals. */
    public static class DeciderOutcome {
        // tasks that should be newly scheduled as a result of this evaluation
        List<TaskModel> tasksToBeScheduled = new LinkedList<>();
        // existing tasks whose state changed and must be persisted
        List<TaskModel> tasksToBeUpdated = new LinkedList<>();
        // true when the workflow has reached a completion state
        boolean isComplete;
        // set when a TERMINATE task fired and the workflow must be terminated
        TaskModel terminateTask;
        // instances are only produced by the enclosing decider
        private DeciderOutcome() {}
    }
}
/*
* Copyright 2022 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution;
import java.util.*;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.StopWatch;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.ApplicationEventPublisher;
import org.springframework.context.event.EventListener;
import org.springframework.stereotype.Component;
import com.netflix.conductor.annotations.Trace;
import com.netflix.conductor.annotations.VisibleForTesting;
import com.netflix.conductor.common.metadata.tasks.*;
import com.netflix.conductor.common.metadata.workflow.RerunWorkflowRequest;
import com.netflix.conductor.common.metadata.workflow.SkipTaskRequest;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.common.run.Workflow;
import com.netflix.conductor.common.utils.TaskUtils;
import com.netflix.conductor.core.WorkflowContext;
import com.netflix.conductor.core.config.ConductorProperties;
import com.netflix.conductor.core.dal.ExecutionDAOFacade;
import com.netflix.conductor.core.event.WorkflowCreationEvent;
import com.netflix.conductor.core.event.WorkflowEvaluationEvent;
import com.netflix.conductor.core.exception.*;
import com.netflix.conductor.core.execution.tasks.SystemTaskRegistry;
import com.netflix.conductor.core.execution.tasks.Terminate;
import com.netflix.conductor.core.execution.tasks.WorkflowSystemTask;
import com.netflix.conductor.core.listener.TaskStatusListener;
import com.netflix.conductor.core.listener.WorkflowStatusListener;
import com.netflix.conductor.core.metadata.MetadataMapperService;
import com.netflix.conductor.core.utils.IDGenerator;
import com.netflix.conductor.core.utils.ParametersUtils;
import com.netflix.conductor.core.utils.QueueUtils;
import com.netflix.conductor.core.utils.Utils;
import com.netflix.conductor.dao.MetadataDAO;
import com.netflix.conductor.dao.QueueDAO;
import com.netflix.conductor.metrics.Monitors;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import com.netflix.conductor.service.ExecutionLockService;
import static com.netflix.conductor.core.utils.Utils.DECIDER_QUEUE;
import static com.netflix.conductor.model.TaskModel.Status.*;
/** Workflow services provider interface */
@Trace
@Component
public class WorkflowExecutor {
    private static final Logger LOGGER = LoggerFactory.getLogger(WorkflowExecutor.class);
    private static final int EXPEDITED_PRIORITY = 10;
    private static final String CLASS_NAME = WorkflowExecutor.class.getSimpleName();
    // matches a task that reached a terminal state without succeeding
    private static final Predicate<TaskModel> UNSUCCESSFUL_TERMINAL_TASK =
            task -> !task.getStatus().isSuccessful() && task.getStatus().isTerminal();
    // an unsuccessful terminal task that is specifically a JOIN
    private static final Predicate<TaskModel> UNSUCCESSFUL_JOIN_TASK =
            UNSUCCESSFUL_TERMINAL_TASK.and(t -> TaskType.TASK_TYPE_JOIN.equals(t.getTaskType()));
    private static final Predicate<TaskModel> NON_TERMINAL_TASK =
            task -> !task.getStatus().isTerminal();
    private final MetadataDAO metadataDAO;
    private final QueueDAO queueDAO;
    private final DeciderService deciderService;
    private final ConductorProperties properties;
    private final MetadataMapperService metadataMapperService;
    private final ExecutionDAOFacade executionDAOFacade;
    private final ParametersUtils parametersUtils;
    private final IDGenerator idGenerator;
    private final WorkflowStatusListener workflowStatusListener;
    private final TaskStatusListener taskStatusListener;
    private final SystemTaskRegistry systemTaskRegistry;
    private final ApplicationEventPublisher eventPublisher;
    // window (ms) within which a worker poll is considered "active"; from configuration
    private long activeWorkerLastPollMs;
    private final ExecutionLockService executionLockService;
    // true when the poll happened within the active-worker window
    private final Predicate<PollData> validateLastPolledTime =
            pollData ->
                    pollData.getLastPollTime()
                            > System.currentTimeMillis() - activeWorkerLastPollMs;
    /** Constructs the executor with all collaborating services injected by Spring. */
    public WorkflowExecutor(
            DeciderService deciderService,
            MetadataDAO metadataDAO,
            QueueDAO queueDAO,
            MetadataMapperService metadataMapperService,
            WorkflowStatusListener workflowStatusListener,
            TaskStatusListener taskStatusListener,
            ExecutionDAOFacade executionDAOFacade,
            ConductorProperties properties,
            ExecutionLockService executionLockService,
            SystemTaskRegistry systemTaskRegistry,
            ParametersUtils parametersUtils,
            IDGenerator idGenerator,
            ApplicationEventPublisher eventPublisher) {
        this.deciderService = deciderService;
        this.metadataDAO = metadataDAO;
        this.queueDAO = queueDAO;
        this.properties = properties;
        this.metadataMapperService = metadataMapperService;
        this.executionDAOFacade = executionDAOFacade;
        // cache the active-worker timeout in millis to avoid re-reading config on every check
        this.activeWorkerLastPollMs = properties.getActiveWorkerLastPollTimeout().toMillis();
        this.workflowStatusListener = workflowStatusListener;
        this.taskStatusListener = taskStatusListener;
        this.executionLockService = executionLockService;
        this.parametersUtils = parametersUtils;
        this.idGenerator = idGenerator;
        this.systemTaskRegistry = systemTaskRegistry;
        this.eventPublisher = eventPublisher;
    }
/**
* @param workflowId the id of the workflow for which task callbacks are to be reset
* @throws ConflictException if the workflow is in terminal state
*/
public void resetCallbacksForWorkflow(String workflowId) {
WorkflowModel workflow = executionDAOFacade.getWorkflowModel(workflowId, true);
if (workflow.getStatus().isTerminal()) {
throw new ConflictException(
"Workflow is in terminal state. Status = %s", workflow.getStatus());
}
// Get SIMPLE tasks in SCHEDULED state that have callbackAfterSeconds > 0 and set the
// callbackAfterSeconds to 0
workflow.getTasks().stream()
.filter(
task ->
!systemTaskRegistry.isSystemTask(task.getTaskType())
&& SCHEDULED == task.getStatus()
&& task.getCallbackAfterSeconds() > 0)
.forEach(
task -> {
if (queueDAO.resetOffsetTime(
QueueUtils.getQueueName(task), task.getTaskId())) {
task.setCallbackAfterSeconds(0);
executionDAOFacade.updateTask(task);
}
});
}
public String rerun(RerunWorkflowRequest request) {
Utils.checkNotNull(request.getReRunFromWorkflowId(), "reRunFromWorkflowId is missing");
if (!rerunWF(
request.getReRunFromWorkflowId(),
request.getReRunFromTaskId(),
request.getTaskInput(),
request.getWorkflowInput(),
request.getCorrelationId())) {
throw new IllegalArgumentException(
"Task " + request.getReRunFromTaskId() + " not found");
}
return request.getReRunFromWorkflowId();
}
    /**
     * Restarts a terminal workflow from the beginning: resets it in the primary datastore,
     * clears execution state, re-creates it and triggers a decide.
     *
     * @param workflowId the id of the workflow to be restarted
     * @param useLatestDefinitions if true, use the latest workflow and task definitions upon
     *     restart
     * @throws ConflictException Workflow is not in a terminal state.
     * @throws NotFoundException Workflow definition is not found or Workflow is deemed
     *     non-restartable as per workflow definition.
     */
    public void restart(String workflowId, boolean useLatestDefinitions) {
        final WorkflowModel workflow = executionDAOFacade.getWorkflowModel(workflowId, true);
        if (!workflow.getStatus().isTerminal()) {
            String errorMsg =
                    String.format(
                            "Workflow: %s is not in terminal state, unable to restart.", workflow);
            LOGGER.error(errorMsg);
            throw new ConflictException(errorMsg);
        }
        WorkflowDef workflowDef;
        if (useLatestDefinitions) {
            workflowDef =
                    metadataDAO
                            .getLatestWorkflowDef(workflow.getWorkflowName())
                            .orElseThrow(
                                    () ->
                                            new NotFoundException(
                                                    "Unable to find latest definition for %s",
                                                    workflowId));
            workflow.setWorkflowDefinition(workflowDef);
            workflowDef = metadataMapperService.populateTaskDefinitions(workflowDef);
        } else {
            // fall back to the definition embedded in the execution, else look it up by
            // name/version
            workflowDef =
                    Optional.ofNullable(workflow.getWorkflowDefinition())
                            .orElseGet(
                                    () ->
                                            metadataDAO
                                                    .getWorkflowDef(
                                                            workflow.getWorkflowName(),
                                                            workflow.getWorkflowVersion())
                                                    .orElseThrow(
                                                            () ->
                                                                    new NotFoundException(
                                                                            "Unable to find definition for %s",
                                                                            workflowId)));
        }
        if (!workflowDef.isRestartable()
                && workflow.getStatus()
                        .equals(
                                WorkflowModel.Status
                                        .COMPLETED)) { // Can only restart non-completed workflows
            // when the configuration is set to false
            throw new NotFoundException("Workflow: %s is non-restartable", workflow);
        }
        // Reset the workflow in the primary datastore and remove from indexer; then re-create it
        executionDAOFacade.resetWorkflow(workflowId);
        workflow.getTasks().clear();
        workflow.setReasonForIncompletion(null);
        workflow.setFailedTaskId(null);
        workflow.setCreateTime(System.currentTimeMillis());
        workflow.setEndTime(0);
        workflow.setLastRetriedTime(0);
        // Change the status to running
        workflow.setStatus(WorkflowModel.Status.RUNNING);
        workflow.setOutput(null);
        workflow.setExternalOutputPayloadStoragePath(null);
        try {
            executionDAOFacade.createWorkflow(workflow);
        } catch (Exception e) {
            // failed to re-create: record the error, terminate, and propagate
            Monitors.recordWorkflowStartError(
                    workflowDef.getName(), WorkflowContext.get().getClientApp());
            LOGGER.error("Unable to restart workflow: {}", workflowDef.getName(), e);
            terminateWorkflow(workflowId, "Error when restarting the workflow");
            throw e;
        }
        metadataMapperService.populateWorkflowWithDefinitions(workflow);
        decide(workflowId);
        // propagate the restart to any parent workflows
        updateAndPushParents(workflow, "restarted");
    }
/**
* Gets the last instance of each failed task and reschedule each Gets all cancelled tasks and
* schedule all of them except JOIN (join should change status to INPROGRESS) Switch workflow
* back to RUNNING status and call decider.
*
* @param workflowId the id of the workflow to be retried
*/
public void retry(String workflowId, boolean resumeSubworkflowTasks) {
WorkflowModel workflow = executionDAOFacade.getWorkflowModel(workflowId, true);
if (!workflow.getStatus().isTerminal()) {
throw new NotFoundException(
"Workflow is still running. status=%s", workflow.getStatus());
}
if (workflow.getTasks().isEmpty()) {
throw new ConflictException("Workflow has not started yet");
}
if (resumeSubworkflowTasks) {
Optional<TaskModel> taskToRetry =
workflow.getTasks().stream().filter(UNSUCCESSFUL_TERMINAL_TASK).findFirst();
if (taskToRetry.isPresent()) {
workflow = findLastFailedSubWorkflowIfAny(taskToRetry.get(), workflow);
retry(workflow);
updateAndPushParents(workflow, "retried");
}
} else {
retry(workflow);
updateAndPushParents(workflow, "retried");
}
}
    /**
     * Walks up the parent-workflow chain: marks each parent's SUB_WORKFLOW task IN_PROGRESS,
     * records an execution log, moves the parent back to RUNNING, and pushes it for an expedited
     * 'decide'. Stops at the first optional sub-workflow task.
     *
     * @param workflow the child workflow whose parents must be re-evaluated
     * @param operation verb for the execution log, e.g. "retried" or "restarted"
     */
    private void updateAndPushParents(WorkflowModel workflow, String operation) {
        String workflowIdentifier = "";
        while (workflow.hasParent()) {
            // update parent's sub workflow task
            TaskModel subWorkflowTask =
                    executionDAOFacade.getTaskModel(workflow.getParentWorkflowTaskId());
            if (subWorkflowTask.getWorkflowTask().isOptional()) {
                // break out
                LOGGER.info(
                        "Sub workflow task {} is optional, skip updating parents", subWorkflowTask);
                break;
            }
            subWorkflowTask.setSubworkflowChanged(true);
            subWorkflowTask.setStatus(IN_PROGRESS);
            executionDAOFacade.updateTask(subWorkflowTask);
            // add an execution log; builds a "child -> parent -> ..." chain identifier
            String currentWorkflowIdentifier = workflow.toShortString();
            workflowIdentifier =
                    !workflowIdentifier.equals("")
                            ? String.format(
                                    "%s -> %s", currentWorkflowIdentifier, workflowIdentifier)
                            : currentWorkflowIdentifier;
            TaskExecLog log =
                    new TaskExecLog(
                            String.format("Sub workflow %s %s.", workflowIdentifier, operation));
            log.setTaskId(subWorkflowTask.getTaskId());
            executionDAOFacade.addTaskExecLog(Collections.singletonList(log));
            LOGGER.info("Task {} updated. {}", log.getTaskId(), log.getLog());
            // push the parent workflow to decider queue for asynchronous 'decide'
            String parentWorkflowId = workflow.getParentWorkflowId();
            WorkflowModel parentWorkflow =
                    executionDAOFacade.getWorkflowModel(parentWorkflowId, true);
            parentWorkflow.setStatus(WorkflowModel.Status.RUNNING);
            parentWorkflow.setLastRetriedTime(System.currentTimeMillis());
            executionDAOFacade.updateWorkflow(parentWorkflow);
            expediteLazyWorkflowEvaluation(parentWorkflowId);
            // continue walking up the chain
            workflow = parentWorkflow;
        }
    }
private void retry(WorkflowModel workflow) {
// Get all FAILED or CANCELED tasks that are not COMPLETED (or reach other terminal states)
// on further executions.
// // Eg: for Seq of tasks task1.CANCELED, task1.COMPLETED, task1 shouldn't be retried.
// Throw an exception if there are no FAILED tasks.
// Handle JOIN task CANCELED status as special case.
Map<String, TaskModel> retriableMap = new HashMap<>();
for (TaskModel task : workflow.getTasks()) {
switch (task.getStatus()) {
case FAILED:
case FAILED_WITH_TERMINAL_ERROR:
case TIMED_OUT:
retriableMap.put(task.getReferenceTaskName(), task);
break;
case CANCELED:
if (task.getTaskType().equalsIgnoreCase(TaskType.JOIN.toString())
|| task.getTaskType().equalsIgnoreCase(TaskType.DO_WHILE.toString())) {
task.setStatus(IN_PROGRESS);
addTaskToQueue(task);
// Task doesn't have to be updated yet. Will be updated along with other
// Workflow tasks downstream.
} else {
retriableMap.put(task.getReferenceTaskName(), task);
}
break;
default:
retriableMap.remove(task.getReferenceTaskName());
break;
}
}
// if workflow TIMED_OUT due to timeoutSeconds configured in the workflow definition,
// it may not have any unsuccessful tasks that can be retried
if (retriableMap.values().size() == 0
&& workflow.getStatus() != WorkflowModel.Status.TIMED_OUT) {
throw new ConflictException(
"There are no retryable tasks! Use restart if you want to attempt entire workflow execution again.");
}
// Update Workflow with new status.
// This should load Workflow from archive, if archived.
workflow.setStatus(WorkflowModel.Status.RUNNING);
workflow.setLastRetriedTime(System.currentTimeMillis());
String lastReasonForIncompletion = workflow.getReasonForIncompletion();
workflow.setReasonForIncompletion(null);
// Add to decider queue
queueDAO.push(
DECIDER_QUEUE,
workflow.getWorkflowId(),
workflow.getPriority(),
properties.getWorkflowOffsetTimeout().getSeconds());
executionDAOFacade.updateWorkflow(workflow);
LOGGER.info(
"Workflow {} that failed due to '{}' was retried",
workflow.toShortString(),
lastReasonForIncompletion);
// taskToBeRescheduled would set task `retried` to true, and hence it's important to
// updateTasks after obtaining task copy from taskToBeRescheduled.
final WorkflowModel finalWorkflow = workflow;
List<TaskModel> retriableTasks =
retriableMap.values().stream()
.sorted(Comparator.comparingInt(TaskModel::getSeq))
.map(task -> taskToBeRescheduled(finalWorkflow, task))
.collect(Collectors.toList());
dedupAndAddTasks(workflow, retriableTasks);
// Note: updateTasks before updateWorkflow might fail when Workflow is archived and doesn't
// exist in primary store.
executionDAOFacade.updateTasks(workflow.getTasks());
scheduleTask(workflow, retriableTasks);
}
private WorkflowModel findLastFailedSubWorkflowIfAny(
TaskModel task, WorkflowModel parentWorkflow) {
if (TaskType.TASK_TYPE_SUB_WORKFLOW.equals(task.getTaskType())
&& UNSUCCESSFUL_TERMINAL_TASK.test(task)) {
WorkflowModel subWorkflow =
executionDAOFacade.getWorkflowModel(task.getSubWorkflowId(), true);
Optional<TaskModel> taskToRetry =
subWorkflow.getTasks().stream().filter(UNSUCCESSFUL_TERMINAL_TASK).findFirst();
if (taskToRetry.isPresent()) {
return findLastFailedSubWorkflowIfAny(taskToRetry.get(), subWorkflow);
}
}
return parentWorkflow;
}
/**
* Reschedule a task
*
* @param task failed or cancelled task
* @return new instance of a task with "SCHEDULED" status
*/
private TaskModel taskToBeRescheduled(WorkflowModel workflow, TaskModel task) {
TaskModel taskToBeRetried = task.copy();
taskToBeRetried.setTaskId(idGenerator.generate());
taskToBeRetried.setRetriedTaskId(task.getTaskId());
taskToBeRetried.setStatus(SCHEDULED);
taskToBeRetried.setRetryCount(task.getRetryCount() + 1);
taskToBeRetried.setRetried(false);
taskToBeRetried.setPollCount(0);
taskToBeRetried.setCallbackAfterSeconds(0);
taskToBeRetried.setSubWorkflowId(null);
taskToBeRetried.setScheduledTime(0);
taskToBeRetried.setStartTime(0);
taskToBeRetried.setEndTime(0);
taskToBeRetried.setWorkerId(null);
taskToBeRetried.setReasonForIncompletion(null);
taskToBeRetried.setSeq(0);
// perform parameter replacement for retried task
Map<String, Object> taskInput =
parametersUtils.getTaskInput(
taskToBeRetried.getWorkflowTask().getInputParameters(),
workflow,
taskToBeRetried.getWorkflowTask().getTaskDefinition(),
taskToBeRetried.getTaskId());
taskToBeRetried.getInputData().putAll(taskInput);
task.setRetried(true);
// since this task is being retried and a retry has been computed, task lifecycle is
// complete
task.setExecuted(true);
return taskToBeRetried;
}
    /**
     * Ends workflow execution: if a TERMINATE task fired, either fails or completes the workflow
     * per its terminationStatus input; otherwise completes it. Finally cancels any remaining
     * non-terminal tasks.
     */
    private void endExecution(WorkflowModel workflow, TaskModel terminateTask) {
        if (terminateTask != null) {
            // termination status/reason come from the TERMINATE task's input parameters
            String terminationStatus =
                    (String)
                            terminateTask
                                    .getInputData()
                                    .get(Terminate.getTerminationStatusParameter());
            String reason =
                    (String)
                            terminateTask
                                    .getInputData()
                                    .get(Terminate.getTerminationReasonParameter());
            if (StringUtils.isBlank(reason)) {
                // fall back to a generated reason naming the TERMINATE task
                reason =
                        String.format(
                                "Workflow is %s by TERMINATE task: %s",
                                terminationStatus, terminateTask.getTaskId());
            }
            if (WorkflowModel.Status.FAILED.name().equals(terminationStatus)) {
                workflow.setStatus(WorkflowModel.Status.FAILED);
                workflow =
                        terminate(
                                workflow,
                                new TerminateWorkflowException(
                                        reason, workflow.getStatus(), terminateTask));
            } else {
                // any non-FAILED termination status is treated as a completion
                workflow.setReasonForIncompletion(reason);
                workflow = completeWorkflow(workflow);
            }
        } else {
            workflow = completeWorkflow(workflow);
        }
        cancelNonTerminalTasks(workflow);
    }
    /**
     * Transitions the workflow to COMPLETED: updates output, records failed task names, persists,
     * notifies the status listener, propagates to the parent workflow, and releases locks.
     * Idempotent when the workflow is already COMPLETED.
     *
     * @param workflow the workflow to be completed
     * @throws ConflictException if workflow is already in a (non-COMPLETED) terminal state.
     */
    @VisibleForTesting
    WorkflowModel completeWorkflow(WorkflowModel workflow) {
        LOGGER.debug("Completing workflow execution for {}", workflow.getWorkflowId());
        if (workflow.getStatus().equals(WorkflowModel.Status.COMPLETED)) {
            queueDAO.remove(DECIDER_QUEUE, workflow.getWorkflowId()); // remove from the sweep queue
            executionDAOFacade.removeFromPendingWorkflow(
                    workflow.getWorkflowName(), workflow.getWorkflowId());
            LOGGER.debug("Workflow: {} has already been completed.", workflow.getWorkflowId());
            return workflow;
        }
        if (workflow.getStatus().isTerminal()) {
            String msg =
                    "Workflow is already in terminal state. Current status: "
                            + workflow.getStatus();
            throw new ConflictException(msg);
        }
        deciderService.updateWorkflowOutput(workflow, null);
        workflow.setStatus(WorkflowModel.Status.COMPLETED);
        // update the failed reference task names
        List<TaskModel> failedTasks =
                workflow.getTasks().stream()
                        .filter(
                                t ->
                                        FAILED.equals(t.getStatus())
                                                || FAILED_WITH_TERMINAL_ERROR.equals(t.getStatus()))
                        .collect(Collectors.toList());
        workflow.getFailedReferenceTaskNames()
                .addAll(
                        failedTasks.stream()
                                .map(TaskModel::getReferenceTaskName)
                                .collect(Collectors.toSet()));
        workflow.getFailedTaskNames()
                .addAll(
                        failedTasks.stream()
                                .map(TaskModel::getTaskDefName)
                                .collect(Collectors.toSet()));
        executionDAOFacade.updateWorkflow(workflow);
        LOGGER.debug("Completed workflow execution for {}", workflow.getWorkflowId());
        workflowStatusListener.onWorkflowCompletedIfEnabled(workflow);
        Monitors.recordWorkflowCompletion(
                workflow.getWorkflowName(),
                workflow.getEndTime() - workflow.getCreateTime(),
                workflow.getOwnerApp());
        if (workflow.hasParent()) {
            // notify the parent's SUB_WORKFLOW task and trigger an expedited parent evaluation
            updateParentWorkflowTask(workflow);
            LOGGER.info(
                    "{} updated parent {} task {}",
                    workflow.toShortString(),
                    workflow.getParentWorkflowId(),
                    workflow.getParentWorkflowTaskId());
            expediteLazyWorkflowEvaluation(workflow.getParentWorkflowId());
        }
        executionLockService.releaseLock(workflow.getWorkflowId());
        executionLockService.deleteLock(workflow.getWorkflowId());
        return workflow;
    }
public void terminateWorkflow(String workflowId, String reason) {
WorkflowModel workflow = executionDAOFacade.getWorkflowModel(workflowId, true);
if (WorkflowModel.Status.COMPLETED.equals(workflow.getStatus())) {
throw new ConflictException("Cannot terminate a COMPLETED workflow.");
}
workflow.setStatus(WorkflowModel.Status.TERMINATED);
terminateWorkflow(workflow, reason, null);
}
    /**
     * Terminates the workflow under a distributed lock: updates output and failed task names,
     * persists the TERMINATED state, removes queued tasks, propagates to the parent workflow,
     * optionally starts the failure workflow, and cancels remaining non-terminal tasks.
     *
     * @param workflow the workflow to be terminated
     * @param reason the reason for termination
     * @param failureWorkflow the failure workflow (if any) to be triggered as a result of this
     *     termination
     */
    public WorkflowModel terminateWorkflow(
            WorkflowModel workflow, String reason, String failureWorkflow) {
        try {
            executionLockService.acquireLock(workflow.getWorkflowId(), 60000);
            if (!workflow.getStatus().isTerminal()) {
                workflow.setStatus(WorkflowModel.Status.TERMINATED);
            }
            try {
                deciderService.updateWorkflowOutput(workflow, null);
            } catch (Exception e) {
                // catch any failure in this step and continue the execution of terminating workflow
                LOGGER.error(
                        "Failed to update output data for workflow: {}",
                        workflow.getWorkflowId(),
                        e);
                Monitors.error(CLASS_NAME, "terminateWorkflow");
            }
            // update the failed reference task names
            List<TaskModel> failedTasks =
                    workflow.getTasks().stream()
                            .filter(
                                    t ->
                                            FAILED.equals(t.getStatus())
                                                    || FAILED_WITH_TERMINAL_ERROR.equals(
                                                            t.getStatus()))
                            .collect(Collectors.toList());
            workflow.getFailedReferenceTaskNames()
                    .addAll(
                            failedTasks.stream()
                                    .map(TaskModel::getReferenceTaskName)
                                    .collect(Collectors.toSet()));
            workflow.getFailedTaskNames()
                    .addAll(
                            failedTasks.stream()
                                    .map(TaskModel::getTaskDefName)
                                    .collect(Collectors.toSet()));
            String workflowId = workflow.getWorkflowId();
            workflow.setReasonForIncompletion(reason);
            executionDAOFacade.updateWorkflow(workflow);
            workflowStatusListener.onWorkflowTerminatedIfEnabled(workflow);
            Monitors.recordWorkflowTermination(
                    workflow.getWorkflowName(), workflow.getStatus(), workflow.getOwnerApp());
            LOGGER.info("Workflow {} is terminated because of {}", workflowId, reason);
            List<TaskModel> tasks = workflow.getTasks();
            try {
                // Remove from the task queue if they were there
                tasks.forEach(
                        task -> queueDAO.remove(QueueUtils.getQueueName(task), task.getTaskId()));
            } catch (Exception e) {
                // best-effort: queue cleanup failure does not block termination
                LOGGER.warn(
                        "Error removing task(s) from queue during workflow termination : {}",
                        workflowId,
                        e);
            }
            if (workflow.hasParent()) {
                // notify the parent's SUB_WORKFLOW task and trigger an expedited parent evaluation
                updateParentWorkflowTask(workflow);
                LOGGER.info(
                        "{} updated parent {} task {}",
                        workflow.toShortString(),
                        workflow.getParentWorkflowId(),
                        workflow.getParentWorkflowTaskId());
                expediteLazyWorkflowEvaluation(workflow.getParentWorkflowId());
            }
            if (!StringUtils.isBlank(failureWorkflow)) {
                // kick off the configured failure workflow with context about this termination
                Map<String, Object> input = new HashMap<>(workflow.getInput());
                input.put("workflowId", workflowId);
                input.put("reason", reason);
                input.put("failureStatus", workflow.getStatus().toString());
                if (workflow.getFailedTaskId() != null) {
                    input.put("failureTaskId", workflow.getFailedTaskId());
                }
                input.put("failedWorkflow", workflow);
                try {
                    String failureWFId = idGenerator.generate();
                    StartWorkflowInput startWorkflowInput = new StartWorkflowInput();
                    startWorkflowInput.setName(failureWorkflow);
                    startWorkflowInput.setWorkflowInput(input);
                    startWorkflowInput.setCorrelationId(workflow.getCorrelationId());
                    startWorkflowInput.setTaskToDomain(workflow.getTaskToDomain());
                    startWorkflowInput.setWorkflowId(failureWFId);
                    startWorkflowInput.setTriggeringWorkflowId(workflowId);
                    eventPublisher.publishEvent(new WorkflowCreationEvent(startWorkflowInput));
                    workflow.addOutput("conductor.failure_workflow", failureWFId);
                } catch (Exception e) {
                    // failure-workflow start errors are recorded in the output, not rethrown
                    LOGGER.error("Failed to start error workflow", e);
                    workflow.getOutput()
                            .put(
                                    "conductor.failure_workflow",
                                    "Error workflow "
                                            + failureWorkflow
                                            + " failed to start. reason: "
                                            + e.getMessage());
                    Monitors.recordWorkflowStartError(
                            failureWorkflow, WorkflowContext.get().getClientApp());
                }
                executionDAOFacade.updateWorkflow(workflow);
            }
            executionDAOFacade.removeFromPendingWorkflow(
                    workflow.getWorkflowName(), workflow.getWorkflowId());
            List<String> erroredTasks = cancelNonTerminalTasks(workflow);
            if (!erroredTasks.isEmpty()) {
                throw new NonTransientException(
                        String.format(
                                "Error canceling system tasks: %s",
                                String.join(",", erroredTasks)));
            }
            return workflow;
        } finally {
            executionLockService.releaseLock(workflow.getWorkflowId());
            executionLockService.deleteLock(workflow.getWorkflowId());
        }
    }
    /**
     * Applies a worker's {@link TaskResult} to the stored task: updates status/output, adjusts
     * the task queue (remove on terminal, postpone on in-progress), persists, notifies the task
     * status listener, records execution logs/metrics, and triggers 'decide' unless the workflow
     * qualifies for lazy evaluation.
     *
     * @param taskResult the task result to be updated.
     * @throws IllegalArgumentException if the {@link TaskResult} is null.
     * @throws NotFoundException if the Task is not found.
     */
    public void updateTask(TaskResult taskResult) {
        if (taskResult == null) {
            throw new IllegalArgumentException("Task object is null");
        } else if (taskResult.isExtendLease()) {
            // lease extension is a lightweight path that skips the full update
            extendLease(taskResult);
            return;
        }
        String workflowId = taskResult.getWorkflowInstanceId();
        WorkflowModel workflowInstance = executionDAOFacade.getWorkflowModel(workflowId, false);
        TaskModel task =
                Optional.ofNullable(executionDAOFacade.getTaskModel(taskResult.getTaskId()))
                        .orElseThrow(
                                () ->
                                        new NotFoundException(
                                                "No such task found by id: %s",
                                                taskResult.getTaskId()));
        LOGGER.debug("Task: {} belonging to Workflow {} being updated", task, workflowInstance);
        String taskQueueName = QueueUtils.getQueueName(task);
        if (task.getStatus().isTerminal()) {
            // Task was already updated....
            queueDAO.remove(taskQueueName, taskResult.getTaskId());
            LOGGER.info(
                    "Task: {} has already finished execution with status: {} within workflow: {}. Removed task from queue: {}",
                    task.getTaskId(),
                    task.getStatus(),
                    task.getWorkflowInstanceId(),
                    taskQueueName);
            Monitors.recordUpdateConflict(
                    task.getTaskType(), workflowInstance.getWorkflowName(), task.getStatus());
            return;
        }
        if (workflowInstance.getStatus().isTerminal()) {
            // Workflow is in terminal state
            queueDAO.remove(taskQueueName, taskResult.getTaskId());
            LOGGER.info(
                    "Workflow: {} has already finished execution. Task update for: {} ignored and removed from Queue: {}.",
                    workflowInstance,
                    taskResult.getTaskId(),
                    taskQueueName);
            Monitors.recordUpdateConflict(
                    task.getTaskType(),
                    workflowInstance.getWorkflowName(),
                    workflowInstance.getStatus());
            return;
        }
        // for system tasks, setting to SCHEDULED would mean restarting the task which is
        // undesirable
        // for worker tasks, set status to SCHEDULED and push to the queue
        if (!systemTaskRegistry.isSystemTask(task.getTaskType())
                && taskResult.getStatus() == TaskResult.Status.IN_PROGRESS) {
            task.setStatus(SCHEDULED);
        } else {
            task.setStatus(TaskModel.Status.valueOf(taskResult.getStatus().name()));
        }
        task.setOutputMessage(taskResult.getOutputMessage());
        task.setReasonForIncompletion(taskResult.getReasonForIncompletion());
        task.setWorkerId(taskResult.getWorkerId());
        task.setCallbackAfterSeconds(taskResult.getCallbackAfterSeconds());
        task.setOutputData(taskResult.getOutputData());
        task.setSubWorkflowId(taskResult.getSubWorkflowId());
        if (StringUtils.isNotBlank(taskResult.getExternalOutputPayloadStoragePath())) {
            task.setExternalOutputPayloadStoragePath(
                    taskResult.getExternalOutputPayloadStoragePath());
        }
        if (task.getStatus().isTerminal()) {
            task.setEndTime(System.currentTimeMillis());
        }
        // Update message in Task queue based on Task status
        switch (task.getStatus()) {
            case COMPLETED:
            case CANCELED:
            case FAILED:
            case FAILED_WITH_TERMINAL_ERROR:
            case TIMED_OUT:
                try {
                    queueDAO.remove(taskQueueName, taskResult.getTaskId());
                    LOGGER.debug(
                            "Task: {} removed from taskQueue: {} since the task status is {}",
                            task,
                            taskQueueName,
                            task.getStatus().name());
                } catch (Exception e) {
                    // Ignore exceptions on queue remove as it wouldn't impact task and workflow
                    // execution, and will be cleaned up eventually
                    String errorMsg =
                            String.format(
                                    "Error removing the message in queue for task: %s for workflow: %s",
                                    task.getTaskId(), workflowId);
                    LOGGER.warn(errorMsg, e);
                    Monitors.recordTaskQueueOpError(
                            task.getTaskType(), workflowInstance.getWorkflowName());
                }
                break;
            case IN_PROGRESS:
            case SCHEDULED:
                try {
                    long callBack = taskResult.getCallbackAfterSeconds();
                    queueDAO.postpone(
                            taskQueueName, task.getTaskId(), task.getWorkflowPriority(), callBack);
                    LOGGER.debug(
                            "Task: {} postponed in taskQueue: {} since the task status is {} with callbackAfterSeconds: {}",
                            task,
                            taskQueueName,
                            task.getStatus().name(),
                            callBack);
                } catch (Exception e) {
                    // Throw exceptions on queue postpone, this would impact task execution
                    String errorMsg =
                            String.format(
                                    "Error postponing the message in queue for task: %s for workflow: %s",
                                    task.getTaskId(), workflowId);
                    LOGGER.error(errorMsg, e);
                    Monitors.recordTaskQueueOpError(
                            task.getTaskType(), workflowInstance.getWorkflowName());
                    throw new TransientException(errorMsg, e);
                }
                break;
            default:
                break;
        }
        // Throw a TransientException if below operations fail to avoid workflow inconsistencies.
        try {
            executionDAOFacade.updateTask(task);
        } catch (Exception e) {
            String errorMsg =
                    String.format(
                            "Error updating task: %s for workflow: %s",
                            task.getTaskId(), workflowId);
            LOGGER.error(errorMsg, e);
            Monitors.recordTaskUpdateError(task.getTaskType(), workflowInstance.getWorkflowName());
            throw new TransientException(errorMsg, e);
        }
        try {
            notifyTaskStatusListener(task);
        } catch (Exception e) {
            // listener failures are logged but never block the task update
            String errorMsg =
                    String.format(
                            "Error while notifying TaskStatusListener: %s for workflow: %s",
                            task.getTaskId(), workflowId);
            LOGGER.error(errorMsg, e);
        }
        taskResult.getLogs().forEach(taskExecLog -> taskExecLog.setTaskId(task.getTaskId()));
        executionDAOFacade.addTaskExecLog(taskResult.getLogs());
        if (task.getStatus().isTerminal()) {
            // record both total (all attempts) and last-attempt execution times
            long duration = getTaskDuration(0, task);
            long lastDuration = task.getEndTime() - task.getStartTime();
            Monitors.recordTaskExecutionTime(
                    task.getTaskDefName(), duration, true, task.getStatus());
            Monitors.recordTaskExecutionTime(
                    task.getTaskDefName(), lastDuration, false, task.getStatus());
        }
        if (!isLazyEvaluateWorkflow(workflowInstance.getWorkflowDefinition(), task)) {
            decide(workflowId);
        }
    }
    /**
     * Dispatches the {@code taskStatusListener} callback that corresponds to the task's current
     * status. SCHEDULED is deliberately a no-op here: the scheduled notification is already sent
     * when the task is pushed to its queue (see {@code addTaskToQueue(List)}).
     *
     * @param task the task whose status change should be broadcast to the listener
     */
    private void notifyTaskStatusListener(TaskModel task) {
        switch (task.getStatus()) {
            case COMPLETED:
                taskStatusListener.onTaskCompleted(task);
                break;
            case CANCELED:
                taskStatusListener.onTaskCanceled(task);
                break;
            case FAILED:
                taskStatusListener.onTaskFailed(task);
                break;
            case FAILED_WITH_TERMINAL_ERROR:
                taskStatusListener.onTaskFailedWithTerminalError(task);
                break;
            case TIMED_OUT:
                taskStatusListener.onTaskTimedOut(task);
                break;
            case IN_PROGRESS:
                taskStatusListener.onTaskInProgress(task);
                break;
            case SCHEDULED:
                // no-op, already done in addTaskToQueue
            default:
                // statuses with no listener hook (e.g. SKIPPED) are intentionally ignored
                break;
        }
    }
    /**
     * Extends the lease of the task referenced by the given {@link TaskResult} so that it is not
     * considered abandoned by timeout handling. Terminal tasks are silently skipped since they no
     * longer need a lease.
     *
     * @param taskResult the worker-supplied result identifying the task
     * @throws NotFoundException if no task exists with the given id
     * @throws TransientException if persisting the lease extension fails (retryable)
     */
    private void extendLease(TaskResult taskResult) {
        TaskModel task =
                Optional.ofNullable(executionDAOFacade.getTaskModel(taskResult.getTaskId()))
                        .orElseThrow(
                                () ->
                                        new NotFoundException(
                                                "No such task found by id: %s",
                                                taskResult.getTaskId()));
        LOGGER.debug(
                "Extend lease for Task: {} belonging to Workflow: {}",
                task,
                task.getWorkflowInstanceId());
        if (!task.getStatus().isTerminal()) {
            try {
                executionDAOFacade.extendLease(task);
            } catch (Exception e) {
                String errorMsg =
                        String.format(
                                "Error extend lease for Task: %s belonging to Workflow: %s",
                                task.getTaskId(), task.getWorkflowInstanceId());
                LOGGER.error(errorMsg, e);
                Monitors.recordTaskExtendLeaseError(task.getTaskType(), task.getWorkflowType());
                // TransientException signals the caller that the operation may be retried.
                throw new TransientException(errorMsg, e);
            }
        }
    }
    /**
     * Determines whether the workflow evaluation ({@code decide}) can be deferred after this
     * task's update, based on the visible logic:
     *
     * <ul>
     *   <li>Never lazy for a loop-over task (part of a DO_WHILE iteration).
     *   <li>If the task's reference name appears inside a FORK_JOIN branch: lazy only when some
     *       JOIN task joins on this reference AND the task finished successfully.
     *   <li>Otherwise: lazy only when the reference name does not appear in the workflow
     *       definition at all (i.e. the task was forked dynamically — presumably via
     *       FORK_JOIN_DYNAMIC) AND the task finished successfully.
     * </ul>
     *
     * @param workflowDef The workflow definition of the workflow for which evaluation decision is
     *     to be made
     * @param task The task which is attempting to trigger the evaluation
     * @return true if workflow can be lazily evaluated, false otherwise
     */
    @VisibleForTesting
    boolean isLazyEvaluateWorkflow(WorkflowDef workflowDef, TaskModel task) {
        if (task.isLoopOverTask()) {
            return false;
        }
        String taskRefName = task.getReferenceTaskName();
        List<WorkflowTask> workflowTasks = workflowDef.collectTasks();
        List<WorkflowTask> forkTasks =
                workflowTasks.stream()
                        .filter(t -> t.getType().equals(TaskType.FORK_JOIN.name()))
                        .collect(Collectors.toList());
        List<WorkflowTask> joinTasks =
                workflowTasks.stream()
                        .filter(t -> t.getType().equals(TaskType.JOIN.name()))
                        .collect(Collectors.toList());
        // Task belongs to a static FORK_JOIN branch: defer only if a JOIN waits on it and it
        // completed successfully (the JOIN evaluation will pick it up).
        if (forkTasks.stream().anyMatch(fork -> fork.has(taskRefName))) {
            return joinTasks.stream().anyMatch(join -> join.getJoinOn().contains(taskRefName))
                    && task.getStatus().isSuccessful();
        }
        // Task not present in the definition at all -> dynamically forked task.
        return workflowTasks.stream().noneMatch(t -> t.getTaskReferenceName().equals(taskRefName))
                && task.getStatus().isSuccessful();
    }
public TaskModel getTask(String taskId) {
return Optional.ofNullable(executionDAOFacade.getTaskModel(taskId))
.map(
task -> {
if (task.getWorkflowTask() != null) {
return metadataMapperService.populateTaskWithDefinition(task);
}
return task;
})
.orElse(null);
}
    /**
     * Returns all pending (non-terminal) workflows with the given name and version.
     *
     * @param workflowName name of the workflow definition
     * @param version version of the workflow definition
     * @return list of pending workflows
     */
    public List<Workflow> getRunningWorkflows(String workflowName, int version) {
        return executionDAOFacade.getPendingWorkflowsByName(workflowName, version);
    }
    /**
     * Returns ids of workflows with the given name, created within [startTime, endTime], whose
     * version matches the given version.
     *
     * <p>NOTE(review): the filter {@code workflow.getWorkflowVersion() == version} relies on
     * unboxing the {@code version} parameter — a null {@code version} would throw an NPE here;
     * confirm callers never pass null.
     *
     * @param name workflow definition name
     * @param version workflow definition version to match
     * @param startTime lower bound (inclusive) of the creation-time window
     * @param endTime upper bound (inclusive) of the creation-time window
     * @return ids of the matching workflows
     */
    public List<String> getWorkflows(String name, Integer version, Long startTime, Long endTime) {
        return executionDAOFacade.getWorkflowsByName(name, startTime, endTime).stream()
                .filter(workflow -> workflow.getWorkflowVersion() == version)
                .map(Workflow::getWorkflowId)
                .collect(Collectors.toList());
    }
    /**
     * Returns the ids of currently running workflows for the given name and version.
     *
     * @param workflowName name of the workflow definition
     * @param version version of the workflow definition
     * @return ids of the running workflow instances
     */
    public List<String> getRunningWorkflowIds(String workflowName, int version) {
        return executionDAOFacade.getRunningWorkflowIds(workflowName, version);
    }
    /**
     * Spring event hook: evaluates the workflow carried by a {@link WorkflowEvaluationEvent}.
     * Delegates to {@link #decide(WorkflowModel)}, which does not acquire the workflow lock.
     *
     * @param wee the event carrying the workflow to evaluate
     */
    @EventListener(WorkflowEvaluationEvent.class)
    public void handleWorkflowEvaluationEvent(WorkflowEvaluationEvent wee) {
        decide(wee.getWorkflowModel());
    }
    /**
     * Loads the workflow by id and evaluates its state under the workflow-level lock, recording a
     * metric for the duration of the "decide" process.
     *
     * @param workflowId id of the workflow to evaluate
     * @return the evaluated workflow, or null when the lock could not be acquired or no workflow
     *     exists for the given id
     */
    public WorkflowModel decide(String workflowId) {
        StopWatch watch = new StopWatch();
        watch.start();
        // If another node holds the lock, skip this evaluation entirely (no metric recorded).
        if (!executionLockService.acquireLock(workflowId)) {
            return null;
        }
        try {
            WorkflowModel workflow = executionDAOFacade.getWorkflowModel(workflowId, true);
            if (workflow == null) {
                // This can happen if the workflowId is incorrect
                return null;
            }
            return decide(workflow);
        } finally {
            executionLockService.releaseLock(workflowId);
            watch.stop();
            Monitors.recordWorkflowDecisionTime(watch.getTime());
        }
    }
    /**
     * This method overloads the {@link #decide(String)}. It will acquire a lock and evaluate the
     * state of the workflow.
     *
     * @param workflow the workflow to evaluate the state for (may be null)
     * @return the evaluated workflow, or null when the input is null or the lock could not be
     *     acquired
     */
    public WorkflowModel decideWithLock(WorkflowModel workflow) {
        if (workflow == null) {
            return null;
        }
        StopWatch watch = new StopWatch();
        watch.start();
        // Skip evaluation when the lock is held elsewhere; mirrors decide(String).
        if (!executionLockService.acquireLock(workflow.getWorkflowId())) {
            return null;
        }
        try {
            return decide(workflow);
        } finally {
            executionLockService.releaseLock(workflow.getWorkflowId());
            watch.stop();
            Monitors.recordWorkflowDecisionTime(watch.getTime());
        }
    }
    /**
     * Evaluates the state of the workflow once: cancels leftover tasks when the workflow already
     * terminated unsuccessfully; otherwise runs the decider, schedules new tasks, executes
     * synchronous system tasks inline, persists updates, and recurses while the evaluation keeps
     * changing state.
     *
     * @param workflow the workflow to evaluate the state for
     * @return the (possibly updated) workflow. Note: This method does not acquire the lock on the
     *     workflow and should only be called / overridden if no locking is required or the lock
     *     is acquired externally
     */
    public WorkflowModel decide(WorkflowModel workflow) {
        if (workflow.getStatus().isTerminal()) {
            // Terminal-but-unsuccessful workflows may still have live tasks; cancel them.
            if (!workflow.getStatus().isSuccessful()) {
                cancelNonTerminalTasks(workflow);
            }
            return workflow;
        }
        // we find any sub workflow tasks that have changed
        // and change the workflow/task state accordingly
        adjustStateIfSubWorkflowChanged(workflow);
        try {
            DeciderService.DeciderOutcome outcome = deciderService.decide(workflow);
            if (outcome.isComplete) {
                endExecution(workflow, outcome.terminateTask);
                return workflow;
            }
            List<TaskModel> tasksToBeScheduled = outcome.tasksToBeScheduled;
            setTaskDomains(tasksToBeScheduled, workflow);
            List<TaskModel> tasksToBeUpdated = outcome.tasksToBeUpdated;
            tasksToBeScheduled = dedupAndAddTasks(workflow, tasksToBeScheduled);
            boolean stateChanged = scheduleTask(workflow, tasksToBeScheduled); // start
            // Execute synchronous system tasks inline; async ones were queued by scheduleTask.
            for (TaskModel task : outcome.tasksToBeScheduled) {
                executionDAOFacade.populateTaskData(task);
                if (systemTaskRegistry.isSystemTask(task.getTaskType())
                        && NON_TERMINAL_TASK.test(task)) {
                    WorkflowSystemTask workflowSystemTask =
                            systemTaskRegistry.get(task.getTaskType());
                    if (!workflowSystemTask.isAsync()
                            && workflowSystemTask.execute(workflow, task, this)) {
                        tasksToBeUpdated.add(task);
                        stateChanged = true;
                    }
                }
            }
            if (!outcome.tasksToBeUpdated.isEmpty() || !tasksToBeScheduled.isEmpty()) {
                executionDAOFacade.updateTasks(tasksToBeUpdated);
            }
            if (stateChanged) {
                // Something moved forward — re-evaluate immediately.
                return decide(workflow);
            }
            if (!outcome.tasksToBeUpdated.isEmpty() || !tasksToBeScheduled.isEmpty()) {
                executionDAOFacade.updateWorkflow(workflow);
            }
            return workflow;
        } catch (TerminateWorkflowException twe) {
            LOGGER.info("Execution terminated of workflow: {}", workflow, twe);
            terminate(workflow, twe);
            return workflow;
        } catch (RuntimeException e) {
            LOGGER.error("Error deciding workflow: {}", workflow.getWorkflowId(), e);
            throw e;
        }
    }
private void adjustStateIfSubWorkflowChanged(WorkflowModel workflow) {
Optional<TaskModel> changedSubWorkflowTask = findChangedSubWorkflowTask(workflow);
if (changedSubWorkflowTask.isPresent()) {
// reset the flag
TaskModel subWorkflowTask = changedSubWorkflowTask.get();
subWorkflowTask.setSubworkflowChanged(false);
executionDAOFacade.updateTask(subWorkflowTask);
LOGGER.info(
"{} reset subworkflowChanged flag for {}",
workflow.toShortString(),
subWorkflowTask.getTaskId());
// find all terminal and unsuccessful JOIN tasks and set them to IN_PROGRESS
if (workflow.getWorkflowDefinition().containsType(TaskType.TASK_TYPE_JOIN)
|| workflow.getWorkflowDefinition()
.containsType(TaskType.TASK_TYPE_FORK_JOIN_DYNAMIC)) {
// if we are here, then the SUB_WORKFLOW task could be part of a FORK_JOIN or
// FORK_JOIN_DYNAMIC
// and the JOIN task(s) needs to be evaluated again, set them to IN_PROGRESS
workflow.getTasks().stream()
.filter(UNSUCCESSFUL_JOIN_TASK)
.peek(
task -> {
task.setStatus(TaskModel.Status.IN_PROGRESS);
addTaskToQueue(task);
})
.forEach(executionDAOFacade::updateTask);
}
}
}
private Optional<TaskModel> findChangedSubWorkflowTask(WorkflowModel workflow) {
WorkflowDef workflowDef =
Optional.ofNullable(workflow.getWorkflowDefinition())
.orElseGet(
() ->
metadataDAO
.getWorkflowDef(
workflow.getWorkflowName(),
workflow.getWorkflowVersion())
.orElseThrow(
() ->
new TransientException(
"Workflow Definition is not found")));
if (workflowDef.containsType(TaskType.TASK_TYPE_SUB_WORKFLOW)
|| workflow.getWorkflowDefinition()
.containsType(TaskType.TASK_TYPE_FORK_JOIN_DYNAMIC)) {
return workflow.getTasks().stream()
.filter(
t ->
t.getTaskType().equals(TaskType.TASK_TYPE_SUB_WORKFLOW)
&& t.isSubworkflowChanged()
&& !t.isRetried())
.findFirst();
}
return Optional.empty();
}
    /**
     * Cancels every non-terminal task of the workflow, invoking the system task's cancel hook
     * where applicable, and finalizes the workflow (listener + decider-queue removal) only when
     * all cancellations succeeded.
     *
     * @param workflow the workflow whose tasks are to be canceled
     * @return reference names of tasks whose system-task cancel hook threw an exception
     */
    @VisibleForTesting
    List<String> cancelNonTerminalTasks(WorkflowModel workflow) {
        List<String> erroredTasks = new ArrayList<>();
        // Update non-terminal tasks' status to CANCELED
        for (TaskModel task : workflow.getTasks()) {
            if (!task.getStatus().isTerminal()) {
                // Cancel the ones which are not completed yet....
                task.setStatus(CANCELED);
                if (systemTaskRegistry.isSystemTask(task.getTaskType())) {
                    WorkflowSystemTask workflowSystemTask =
                            systemTaskRegistry.get(task.getTaskType());
                    try {
                        workflowSystemTask.cancel(workflow, task, this);
                    } catch (Exception e) {
                        // Record but continue: remaining tasks must still be canceled.
                        erroredTasks.add(task.getReferenceTaskName());
                        LOGGER.error(
                                "Error canceling system task:{}/{} in workflow: {}",
                                workflowSystemTask.getTaskType(),
                                task.getTaskId(),
                                workflow.getWorkflowId(),
                                e);
                    }
                }
                // Persist the CANCELED status even if the cancel hook failed.
                executionDAOFacade.updateTask(task);
            }
        }
        if (erroredTasks.isEmpty()) {
            try {
                workflowStatusListener.onWorkflowFinalizedIfEnabled(workflow);
                queueDAO.remove(DECIDER_QUEUE, workflow.getWorkflowId());
            } catch (Exception e) {
                // Best effort: failure here is logged, not propagated.
                LOGGER.error(
                        "Error removing workflow: {} from decider queue",
                        workflow.getWorkflowId(),
                        e);
            }
        }
        return erroredTasks;
    }
@VisibleForTesting
List<TaskModel> dedupAndAddTasks(WorkflowModel workflow, List<TaskModel> tasks) {
Set<String> tasksInWorkflow =
workflow.getTasks().stream()
.map(task -> task.getReferenceTaskName() + "_" + task.getRetryCount())
.collect(Collectors.toSet());
List<TaskModel> dedupedTasks =
tasks.stream()
.filter(
task ->
!tasksInWorkflow.contains(
task.getReferenceTaskName()
+ "_"
+ task.getRetryCount()))
.collect(Collectors.toList());
workflow.getTasks().addAll(dedupedTasks);
return dedupedTasks;
}
    /**
     * Pauses a running workflow and removes it from the decider (sweep) queue.
     *
     * <p>NOTE(review): the return value of {@code acquireLock(workflowId, 60000)} is ignored —
     * confirm whether a failed acquisition should abort the pause.
     *
     * @param workflowId id of the workflow to pause
     * @throws ConflictException if the workflow is in terminal state.
     */
    public void pauseWorkflow(String workflowId) {
        try {
            executionLockService.acquireLock(workflowId, 60000);
            WorkflowModel.Status status = WorkflowModel.Status.PAUSED;
            WorkflowModel workflow = executionDAOFacade.getWorkflowModel(workflowId, false);
            if (workflow.getStatus().isTerminal()) {
                throw new ConflictException(
                        "Workflow %s has ended, status cannot be updated.",
                        workflow.toShortString());
            }
            if (workflow.getStatus().equals(status)) {
                return; // Already paused!
            }
            workflow.setStatus(status);
            executionDAOFacade.updateWorkflow(workflow);
        } finally {
            executionLockService.releaseLock(workflowId);
        }
        // remove from the sweep queue
        // any exceptions can be ignored, as this is not critical to the pause operation
        try {
            queueDAO.remove(DECIDER_QUEUE, workflowId);
        } catch (Exception e) {
            LOGGER.info(
                    "[pauseWorkflow] Error removing workflow: {} from decider queue",
                    workflowId,
                    e);
        }
    }
    /**
     * Resumes a PAUSED workflow: sets it back to RUNNING, re-queues it on the decider queue and
     * triggers an immediate evaluation.
     *
     * @param workflowId the workflow to be resumed
     * @throws IllegalStateException if the workflow is not in PAUSED state
     */
    public void resumeWorkflow(String workflowId) {
        WorkflowModel workflow = executionDAOFacade.getWorkflowModel(workflowId, false);
        if (!workflow.getStatus().equals(WorkflowModel.Status.PAUSED)) {
            throw new IllegalStateException(
                    "The workflow "
                            + workflowId
                            + " is not PAUSED so cannot resume. "
                            + "Current status is "
                            + workflow.getStatus().name());
        }
        workflow.setStatus(WorkflowModel.Status.RUNNING);
        workflow.setLastRetriedTime(System.currentTimeMillis());
        // Add to decider queue
        queueDAO.push(
                DECIDER_QUEUE,
                workflow.getWorkflowId(),
                workflow.getPriority(),
                properties.getWorkflowOffsetTimeout().getSeconds());
        executionDAOFacade.updateWorkflow(workflow);
        // Evaluate right away instead of waiting for the sweeper.
        decide(workflowId);
    }
    /**
     * Marks a not-yet-started task of a RUNNING workflow as SKIPPED (creating a synthetic SKIPPED
     * task record) and triggers a decide so downstream tasks can proceed.
     *
     * @param workflowId the id of the workflow
     * @param taskReferenceName the referenceName of the task to be skipped
     * @param skipTaskRequest the {@link SkipTaskRequest} object (optional input/output overrides)
     * @throws IllegalStateException if the workflow is not running, the reference name is not in
     *     the definition, or the task has already been processed
     */
    public void skipTaskFromWorkflow(
            String workflowId, String taskReferenceName, SkipTaskRequest skipTaskRequest) {
        WorkflowModel workflow = executionDAOFacade.getWorkflowModel(workflowId, true);
        // If the workflow is not running then cannot skip any task
        if (!workflow.getStatus().equals(WorkflowModel.Status.RUNNING)) {
            String errorMsg =
                    String.format(
                            "The workflow %s is not running so the task referenced by %s cannot be skipped",
                            workflowId, taskReferenceName);
            throw new IllegalStateException(errorMsg);
        }
        // Check if the reference name is as per the workflowdef
        WorkflowTask workflowTask =
                workflow.getWorkflowDefinition().getTaskByRefName(taskReferenceName);
        if (workflowTask == null) {
            String errorMsg =
                    String.format(
                            "The task referenced by %s does not exist in the WorkflowDefinition %s",
                            taskReferenceName, workflow.getWorkflowName());
            throw new IllegalStateException(errorMsg);
        }
        // If the task is already started the again it cannot be skipped
        // (any existing task with this reference name counts as "processed")
        workflow.getTasks()
                .forEach(
                        task -> {
                            if (task.getReferenceTaskName().equals(taskReferenceName)) {
                                String errorMsg =
                                        String.format(
                                                "The task referenced %s has already been processed, cannot be skipped",
                                                taskReferenceName);
                                throw new IllegalStateException(errorMsg);
                            }
                        });
        // Now create a "SKIPPED" task for this workflow
        TaskModel taskToBeSkipped = new TaskModel();
        taskToBeSkipped.setTaskId(idGenerator.generate());
        taskToBeSkipped.setReferenceTaskName(taskReferenceName);
        taskToBeSkipped.setWorkflowInstanceId(workflowId);
        taskToBeSkipped.setWorkflowPriority(workflow.getPriority());
        taskToBeSkipped.setStatus(SKIPPED);
        taskToBeSkipped.setEndTime(System.currentTimeMillis());
        taskToBeSkipped.setTaskType(workflowTask.getName());
        taskToBeSkipped.setCorrelationId(workflow.getCorrelationId());
        if (skipTaskRequest != null) {
            taskToBeSkipped.setInputData(skipTaskRequest.getTaskInput());
            taskToBeSkipped.setOutputData(skipTaskRequest.getTaskOutput());
            taskToBeSkipped.setInputMessage(skipTaskRequest.getTaskInputMessage());
            taskToBeSkipped.setOutputMessage(skipTaskRequest.getTaskOutputMessage());
        }
        executionDAOFacade.createTasks(Collections.singletonList(taskToBeSkipped));
        // Re-evaluate so the workflow can move past the skipped task.
        decide(workflow.getWorkflowId());
    }
    /**
     * Fetches a workflow by id.
     *
     * @param workflowId id of the workflow
     * @param includeTasks whether the workflow's tasks should be loaded as well
     * @return the workflow model
     */
    public WorkflowModel getWorkflow(String workflowId, boolean includeTasks) {
        return executionDAOFacade.getWorkflowModel(workflowId, includeTasks);
    }
public void addTaskToQueue(TaskModel task) {
// put in queue
String taskQueueName = QueueUtils.getQueueName(task);
if (task.getCallbackAfterSeconds() > 0) {
queueDAO.push(
taskQueueName,
task.getTaskId(),
task.getWorkflowPriority(),
task.getCallbackAfterSeconds());
} else {
queueDAO.push(taskQueueName, task.getTaskId(), task.getWorkflowPriority(), 0);
}
LOGGER.debug(
"Added task {} with priority {} to queue {} with call back seconds {}",
task,
task.getWorkflowPriority(),
taskQueueName,
task.getCallbackAfterSeconds());
}
    /**
     * Assigns a domain to each non-system task based on the workflow's taskToDomain mapping:
     * first the wildcard "*" entry is applied to all tasks, then per-task-type entries override
     * it. Each entry may be a comma-separated list of candidate domains; the first active one is
     * chosen (see {@link #getActiveDomain}).
     *
     * @param tasks the tasks to assign domains to (mutated in place)
     * @param workflow the workflow carrying the taskToDomain mapping
     */
    @VisibleForTesting
    void setTaskDomains(List<TaskModel> tasks, WorkflowModel workflow) {
        Map<String, String> taskToDomain = workflow.getTaskToDomain();
        if (taskToDomain != null) {
            // Step 1: Apply * mapping to all tasks, if present.
            String domainstr = taskToDomain.get("*");
            if (StringUtils.isNotBlank(domainstr)) {
                String[] domains = domainstr.split(",");
                tasks.forEach(
                        task -> {
                            // Filter out SystemTask
                            if (!systemTaskRegistry.isSystemTask(task.getTaskType())) {
                                // Check which domain worker is polling
                                // Set the task domain
                                task.setDomain(getActiveDomain(task.getTaskType(), domains));
                            }
                        });
            }
            // Step 2: Override additional mappings.
            tasks.forEach(
                    task -> {
                        if (!systemTaskRegistry.isSystemTask(task.getTaskType())) {
                            String taskDomainstr = taskToDomain.get(task.getTaskType());
                            if (taskDomainstr != null) {
                                task.setDomain(
                                        getActiveDomain(
                                                task.getTaskType(), taskDomainstr.split(",")));
                            }
                        }
                    });
        }
    }
    /**
     * Gets the active domain from the list of domains where the task is to be queued. The domain
     * list must be ordered. In sequence, check if any worker has polled for last
     * `activeWorkerLastPollMs`, if so that is the Active domain. When no active domains are found:
     * <li>If NO_DOMAIN token is provided, return null.
     * <li>Else, return last domain from list.
     *
     * @param taskType the taskType of the task for which active domain is to be found
     * @param domains the array of domains for the task. (Must contain atleast one element).
     * @return the active domain where the task will be queued
     */
    @VisibleForTesting
    String getActiveDomain(String taskType, String[] domains) {
        if (domains == null || domains.length == 0) {
            return null;
        }
        return Arrays.stream(domains)
                // NO_DOMAIN entries are never probed for poll data
                .filter(domain -> !domain.equalsIgnoreCase("NO_DOMAIN"))
                .map(domain -> executionDAOFacade.getTaskPollDataByDomain(taskType, domain.trim()))
                .filter(Objects::nonNull)
                .filter(validateLastPolledTime)
                .findFirst()
                .map(PollData::getDomain)
                // No active domain: fall back to the last entry, or null if it is NO_DOMAIN.
                .orElse(
                        domains[domains.length - 1].trim().equalsIgnoreCase("NO_DOMAIN")
                                ? null
                                : domains[domains.length - 1].trim());
    }
private long getTaskDuration(long s, TaskModel task) {
long duration = task.getEndTime() - task.getStartTime();
s += duration;
if (task.getRetriedTaskId() == null) {
return s;
}
return s + getTaskDuration(s, executionDAOFacade.getTaskModel(task.getRetriedTaskId()));
}
    /**
     * Persists and schedules the given tasks: assigns sequence numbers, saves all tasks, executes
     * synchronous system tasks inline, and queues everything else (async system tasks and worker
     * tasks).
     *
     * @param workflow the workflow the tasks belong to
     * @param tasks the tasks to schedule (may be null/empty)
     * @return true if at least one synchronous system task was started inline
     * @throws TerminateWorkflowException if creating/starting the tasks fails
     */
    @VisibleForTesting
    boolean scheduleTask(WorkflowModel workflow, List<TaskModel> tasks) {
        List<TaskModel> tasksToBeQueued;
        boolean startedSystemTasks = false;
        try {
            if (tasks == null || tasks.isEmpty()) {
                return false;
            }
            // Get the highest seq number
            int count = workflow.getTasks().stream().mapToInt(TaskModel::getSeq).max().orElse(0);
            for (TaskModel task : tasks) {
                if (task.getSeq() == 0) { // Set only if the seq was not set
                    task.setSeq(++count);
                }
            }
            // metric to track the distribution of number of tasks within a workflow
            Monitors.recordNumTasksInWorkflow(
                    workflow.getTasks().size() + tasks.size(),
                    workflow.getWorkflowName(),
                    String.valueOf(workflow.getWorkflowVersion()));
            // Save the tasks in the DAO
            executionDAOFacade.createTasks(tasks);
            List<TaskModel> systemTasks =
                    tasks.stream()
                            .filter(task -> systemTaskRegistry.isSystemTask(task.getTaskType()))
                            .collect(Collectors.toList());
            tasksToBeQueued =
                    tasks.stream()
                            .filter(task -> !systemTaskRegistry.isSystemTask(task.getTaskType()))
                            .collect(Collectors.toList());
            // Traverse through all the system tasks, start the sync tasks, in case of async queue
            // the tasks
            for (TaskModel task : systemTasks) {
                WorkflowSystemTask workflowSystemTask = systemTaskRegistry.get(task.getTaskType());
                if (workflowSystemTask == null) {
                    throw new NotFoundException(
                            "No system task found by name %s", task.getTaskType());
                }
                // Stamp the start time once, only for live tasks that haven't started yet.
                if (task.getStatus() != null
                        && !task.getStatus().isTerminal()
                        && task.getStartTime() == 0) {
                    task.setStartTime(System.currentTimeMillis());
                }
                if (!workflowSystemTask.isAsync()) {
                    try {
                        // start execution of synchronous system tasks
                        workflowSystemTask.start(workflow, task, this);
                    } catch (Exception e) {
                        String errorMsg =
                                String.format(
                                        "Unable to start system task: %s, {id: %s, name: %s}",
                                        task.getTaskType(),
                                        task.getTaskId(),
                                        task.getTaskDefName());
                        throw new NonTransientException(errorMsg, e);
                    }
                    startedSystemTasks = true;
                    executionDAOFacade.updateTask(task);
                } else {
                    // async system tasks are queued alongside worker tasks
                    tasksToBeQueued.add(task);
                }
            }
        } catch (Exception e) {
            List<String> taskIds =
                    tasks.stream().map(TaskModel::getTaskId).collect(Collectors.toList());
            String errorMsg =
                    String.format(
                            "Error scheduling tasks: %s, for workflow: %s",
                            taskIds, workflow.getWorkflowId());
            LOGGER.error(errorMsg, e);
            Monitors.error(CLASS_NAME, "scheduleTask");
            throw new TerminateWorkflowException(errorMsg);
        }
        // On addTaskToQueue failures, ignore the exceptions and let WorkflowRepairService take care
        // of republishing the messages to the queue.
        try {
            addTaskToQueue(tasksToBeQueued);
        } catch (Exception e) {
            List<String> taskIds =
                    tasksToBeQueued.stream().map(TaskModel::getTaskId).collect(Collectors.toList());
            String errorMsg =
                    String.format(
                            "Error pushing tasks to the queue: %s, for workflow: %s",
                            taskIds, workflow.getWorkflowId());
            LOGGER.warn(errorMsg, e);
            Monitors.error(CLASS_NAME, "scheduleTask");
        }
        return startedSystemTasks;
    }
private void addTaskToQueue(final List<TaskModel> tasks) {
for (TaskModel task : tasks) {
addTaskToQueue(task);
// notify TaskStatusListener
try {
taskStatusListener.onTaskScheduled(task);
} catch (Exception e) {
String errorMsg =
String.format(
"Error while notifying TaskStatusListener: %s for workflow: %s",
task.getTaskId(), task.getWorkflowInstanceId());
LOGGER.error(errorMsg, e);
}
}
}
    /**
     * Terminates the workflow with the status carried by the exception, recording the failed task
     * and resolving the failure workflow name (possibly from a "$..." input-parameter expression)
     * before delegating to terminateWorkflow.
     *
     * @param workflow the workflow to terminate
     * @param terminateWorkflowException carries target status, reason and optionally the failed
     *     task
     * @return the terminated workflow
     */
    private WorkflowModel terminate(
            final WorkflowModel workflow, TerminateWorkflowException terminateWorkflowException) {
        if (!workflow.getStatus().isTerminal()) {
            workflow.setStatus(terminateWorkflowException.getWorkflowStatus());
        }
        if (terminateWorkflowException.getTask() != null && workflow.getFailedTaskId() == null) {
            workflow.setFailedTaskId(terminateWorkflowException.getTask().getTaskId());
        }
        String failureWorkflow = workflow.getWorkflowDefinition().getFailureWorkflow();
        if (failureWorkflow != null) {
            if (failureWorkflow.startsWith("$")) {
                // NOTE(review): assumes a dotted expression like "$workflow.input.<name>" where
                // component [2] is the input-parameter name — confirm the supported format.
                String[] paramPathComponents = failureWorkflow.split("\\.");
                String name = paramPathComponents[2]; // name of the input parameter
                failureWorkflow = (String) workflow.getInput().get(name);
            }
        }
        if (terminateWorkflowException.getTask() != null) {
            executionDAOFacade.updateTask(terminateWorkflowException.getTask());
        }
        return terminateWorkflow(
                workflow, terminateWorkflowException.getMessage(), failureWorkflow);
    }
    /**
     * Reruns a terminal workflow. With a null taskId the whole workflow is restarted from
     * scratch (all tasks removed). Otherwise execution is resumed from the identified task: tasks
     * after it (by seq) are removed, the task's state is reset, and it is re-executed. If the
     * task is not found directly, sub-workflows are searched recursively.
     *
     * @param workflowId id of the workflow to rerun
     * @param taskId id of the task to rerun from, or null for a full rerun
     * @param taskInput replacement input for the rerun task (optional)
     * @param workflowInput replacement workflow input (optional)
     * @param correlationId replacement correlation id (optional)
     * @return true if the rerun was performed, false if the task was not found anywhere
     * @throws ConflictException if the workflow is not in a terminal state
     */
    private boolean rerunWF(
            String workflowId,
            String taskId,
            Map<String, Object> taskInput,
            Map<String, Object> workflowInput,
            String correlationId) {
        // Get the workflow
        WorkflowModel workflow = executionDAOFacade.getWorkflowModel(workflowId, true);
        if (!workflow.getStatus().isTerminal()) {
            String errorMsg =
                    String.format(
                            "Workflow: %s is not in terminal state, unable to rerun.", workflow);
            LOGGER.error(errorMsg);
            throw new ConflictException(errorMsg);
        }
        updateAndPushParents(workflow, "reran");
        // If the task Id is null it implies that the entire workflow has to be rerun
        if (taskId == null) {
            // remove all tasks
            workflow.getTasks().forEach(task -> executionDAOFacade.removeTask(task.getTaskId()));
            workflow.setTasks(new ArrayList<>());
            // Set workflow as RUNNING
            workflow.setStatus(WorkflowModel.Status.RUNNING);
            // Reset failure reason from previous run to default
            workflow.setReasonForIncompletion(null);
            workflow.setFailedTaskId(null);
            workflow.setFailedReferenceTaskNames(new HashSet<>());
            workflow.setFailedTaskNames(new HashSet<>());
            if (correlationId != null) {
                workflow.setCorrelationId(correlationId);
            }
            if (workflowInput != null) {
                workflow.setInput(workflowInput);
            }
            queueDAO.push(
                    DECIDER_QUEUE,
                    workflow.getWorkflowId(),
                    workflow.getPriority(),
                    properties.getWorkflowOffsetTimeout().getSeconds());
            executionDAOFacade.updateWorkflow(workflow);
            decide(workflowId);
            return true;
        }
        // Now iterate through the tasks and find the "specific" task
        TaskModel rerunFromTask = null;
        for (TaskModel task : workflow.getTasks()) {
            if (task.getTaskId().equals(taskId)) {
                rerunFromTask = task;
                break;
            }
        }
        // If not found look into sub workflows
        if (rerunFromTask == null) {
            for (TaskModel task : workflow.getTasks()) {
                if (task.getTaskType().equalsIgnoreCase(TaskType.TASK_TYPE_SUB_WORKFLOW)) {
                    String subWorkflowId = task.getSubWorkflowId();
                    // Recursive rerun: if the task lives in the sub-workflow, the SUB_WORKFLOW
                    // task itself becomes the rerun point in this workflow.
                    if (rerunWF(subWorkflowId, taskId, taskInput, null, null)) {
                        rerunFromTask = task;
                        break;
                    }
                }
            }
        }
        if (rerunFromTask != null) {
            // set workflow as RUNNING
            workflow.setStatus(WorkflowModel.Status.RUNNING);
            // Reset failure reason from previous run to default
            workflow.setReasonForIncompletion(null);
            workflow.setFailedTaskId(null);
            workflow.setFailedReferenceTaskNames(new HashSet<>());
            workflow.setFailedTaskNames(new HashSet<>());
            if (correlationId != null) {
                workflow.setCorrelationId(correlationId);
            }
            if (workflowInput != null) {
                workflow.setInput(workflowInput);
            }
            // Add to decider queue
            queueDAO.push(
                    DECIDER_QUEUE,
                    workflow.getWorkflowId(),
                    workflow.getPriority(),
                    properties.getWorkflowOffsetTimeout().getSeconds());
            executionDAOFacade.updateWorkflow(workflow);
            // update tasks in datastore to update workflow-tasks relationship for archived
            // workflows
            executionDAOFacade.updateTasks(workflow.getTasks());
            // Remove all tasks after the "rerunFromTask"
            List<TaskModel> filteredTasks = new ArrayList<>();
            for (TaskModel task : workflow.getTasks()) {
                if (task.getSeq() > rerunFromTask.getSeq()) {
                    executionDAOFacade.removeTask(task.getTaskId());
                } else {
                    filteredTasks.add(task);
                }
            }
            workflow.setTasks(filteredTasks);
            // reset fields before restarting the task
            rerunFromTask.setScheduledTime(System.currentTimeMillis());
            rerunFromTask.setStartTime(0);
            rerunFromTask.setUpdateTime(0);
            rerunFromTask.setEndTime(0);
            rerunFromTask.clearOutput();
            rerunFromTask.setRetried(false);
            rerunFromTask.setExecuted(false);
            if (rerunFromTask.getTaskType().equalsIgnoreCase(TaskType.TASK_TYPE_SUB_WORKFLOW)) {
                // if task is sub workflow set task as IN_PROGRESS and reset start time
                rerunFromTask.setStatus(IN_PROGRESS);
                rerunFromTask.setStartTime(System.currentTimeMillis());
            } else {
                if (taskInput != null) {
                    rerunFromTask.setInputData(taskInput);
                }
                if (systemTaskRegistry.isSystemTask(rerunFromTask.getTaskType())
                        && !systemTaskRegistry.get(rerunFromTask.getTaskType()).isAsync()) {
                    // Start the synchronous system task directly
                    systemTaskRegistry
                            .get(rerunFromTask.getTaskType())
                            .start(workflow, rerunFromTask, this);
                } else {
                    // Set the task to rerun as SCHEDULED
                    rerunFromTask.setStatus(SCHEDULED);
                    addTaskToQueue(rerunFromTask);
                }
            }
            executionDAOFacade.updateTask(rerunFromTask);
            decide(workflow.getWorkflowId());
            return true;
        }
        return false;
    }
    /**
     * Schedules the first task of the next DO_WHILE iteration, tagging the scheduled tasks with
     * the loop iteration (via reference-name suffixing).
     *
     * @param loopTask the DO_WHILE task driving the iteration
     * @param workflow the workflow being executed
     */
    public void scheduleNextIteration(TaskModel loopTask, WorkflowModel workflow) {
        // Schedule only first loop over task. Rest will be taken care in Decider Service when this
        // task will get completed.
        List<TaskModel> scheduledLoopOverTasks =
                deciderService.getTasksToBeScheduled(
                        workflow,
                        loopTask.getWorkflowTask().getLoopOver().get(0),
                        loopTask.getRetryCount(),
                        null);
        setTaskDomains(scheduledLoopOverTasks, workflow);
        scheduledLoopOverTasks.forEach(
                t -> {
                    // reference name gets the "__<iteration>" suffix so each iteration is unique
                    t.setReferenceTaskName(
                            TaskUtils.appendIteration(
                                    t.getReferenceTaskName(), loopTask.getIteration()));
                    t.setIteration(loopTask.getIteration());
                });
        scheduleTask(workflow, scheduledLoopOverTasks);
        workflow.getTasks().addAll(scheduledLoopOverTasks);
    }
public TaskDef getTaskDefinition(TaskModel task) {
return task.getTaskDefinition()
.orElseGet(
() ->
Optional.ofNullable(
metadataDAO.getTaskDef(
task.getWorkflowTask().getName()))
.orElseThrow(
() -> {
String reason =
String.format(
"Invalid task specified. Cannot find task by name %s in the task definitions",
task.getWorkflowTask()
.getName());
return new TerminateWorkflowException(reason);
}));
}
    /**
     * Re-executes the parent workflow's SUB_WORKFLOW task so it reflects the sub-workflow's
     * current state, then persists the task.
     *
     * @param subWorkflow the sub-workflow whose parent task should be synchronized
     */
    @VisibleForTesting
    void updateParentWorkflowTask(WorkflowModel subWorkflow) {
        TaskModel subWorkflowTask =
                executionDAOFacade.getTaskModel(subWorkflow.getParentWorkflowTaskId());
        executeSubworkflowTaskAndSyncData(subWorkflow, subWorkflowTask);
        executionDAOFacade.updateTask(subWorkflowTask);
    }
    /**
     * Runs the SUB_WORKFLOW system task's execute hook to sync the sub-workflow's state into the
     * given parent task (in memory; persistence is the caller's responsibility).
     *
     * @param subWorkflow the sub-workflow providing the state
     * @param subWorkflowTask the parent workflow's SUB_WORKFLOW task to update
     */
    private void executeSubworkflowTaskAndSyncData(
            WorkflowModel subWorkflow, TaskModel subWorkflowTask) {
        WorkflowSystemTask subWorkflowSystemTask =
                systemTaskRegistry.get(TaskType.TASK_TYPE_SUB_WORKFLOW);
        subWorkflowSystemTask.execute(subWorkflow, subWorkflowTask, this);
    }
    /**
     * Pushes workflow id into the decider queue with a higher priority to expedite evaluation.
     * If the workflow is already queued, its delivery is postponed to now at the expedited
     * priority instead of enqueuing a duplicate message.
     *
     * @param workflowId The workflow to be evaluated at higher priority
     */
    private void expediteLazyWorkflowEvaluation(String workflowId) {
        if (queueDAO.containsMessage(DECIDER_QUEUE, workflowId)) {
            queueDAO.postpone(DECIDER_QUEUE, workflowId, EXPEDITED_PRIORITY, 0);
        } else {
            queueDAO.push(DECIDER_QUEUE, workflowId, EXPEDITED_PRIORITY, 0);
        }
        LOGGER.info("Pushed workflow {} to {} for expedited evaluation", workflowId, DECIDER_QUEUE);
    }
}
/*
* Copyright 2022 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution.tasks;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;
import org.springframework.stereotype.Component;
import com.netflix.conductor.common.utils.TaskUtils;
import com.netflix.conductor.core.execution.WorkflowExecutor;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_JOIN;
@Component(TASK_TYPE_JOIN)
public class Join extends WorkflowSystemTask {
public Join() {
super(TASK_TYPE_JOIN);
}
@Override
@SuppressWarnings("unchecked")
public boolean execute(
WorkflowModel workflow, TaskModel task, WorkflowExecutor workflowExecutor) {
boolean allDone = true;
boolean hasFailures = false;
StringBuilder failureReason = new StringBuilder();
StringBuilder optionalTaskFailures = new StringBuilder();
List<String> joinOn = (List<String>) task.getInputData().get("joinOn");
if (task.isLoopOverTask()) {
// If join is part of loop over task, wait for specific iteration to get complete
joinOn =
joinOn.stream()
.map(name -> TaskUtils.appendIteration(name, task.getIteration()))
.collect(Collectors.toList());
}
for (String joinOnRef : joinOn) {
TaskModel forkedTask = workflow.getTaskByRefName(joinOnRef);
if (forkedTask == null) {
// Task is not even scheduled yet
allDone = false;
break;
}
TaskModel.Status taskStatus = forkedTask.getStatus();
hasFailures = !taskStatus.isSuccessful() && !forkedTask.getWorkflowTask().isOptional();
if (hasFailures) {
failureReason.append(forkedTask.getReasonForIncompletion()).append(" ");
}
// Only add to task output if it's not empty
if (!forkedTask.getOutputData().isEmpty()) {
task.addOutput(joinOnRef, forkedTask.getOutputData());
}
if (!taskStatus.isTerminal()) {
allDone = false;
}
if (hasFailures) {
break;
}
// check for optional task failures
if (forkedTask.getWorkflowTask().isOptional()
&& taskStatus == TaskModel.Status.COMPLETED_WITH_ERRORS) {
optionalTaskFailures
.append(
String.format(
"%s/%s",
forkedTask.getTaskDefName(), forkedTask.getTaskId()))
.append(" ");
}
}
if (allDone || hasFailures || optionalTaskFailures.length() > 0) {
if (hasFailures) {
task.setReasonForIncompletion(failureReason.toString());
task.setStatus(TaskModel.Status.FAILED);
} else if (optionalTaskFailures.length() > 0) {
task.setStatus(TaskModel.Status.COMPLETED_WITH_ERRORS);
optionalTaskFailures.append("completed with errors");
task.setReasonForIncompletion(optionalTaskFailures.toString());
} else {
task.setStatus(TaskModel.Status.COMPLETED);
}
return true;
}
return false;
}
@Override
public Optional<Long> getEvaluationOffset(TaskModel taskModel, long defaultOffset) {
int index = taskModel.getPollCount() > 0 ? taskModel.getPollCount() - 1 : 0;
if (index == 0) {
return Optional.of(0L);
}
return Optional.of(Math.min((long) Math.pow(2, index), defaultOffset));
}
// This system task is executed asynchronously by the system-task workers rather than inline
// during workflow evaluation.
public boolean isAsync() {
    return true;
}
}
| 6,680 |
0 | Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution | Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution/tasks/Terminate.java | /*
* Copyright 2022 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution.tasks;
import java.util.HashMap;
import java.util.Map;
import org.springframework.stereotype.Component;
import com.netflix.conductor.core.execution.WorkflowExecutor;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_TERMINATE;
import static com.netflix.conductor.common.run.Workflow.WorkflowStatus.*;
/**
* Task that can terminate a workflow with a given status and modify the workflow's output with a
* given parameter, it can act as a "return" statement for conditions where you simply want to
* terminate your workflow. For example, if you have a decision where the first condition is met,
* you want to execute some tasks, otherwise you want to finish your workflow.
*
* <pre>
* ...
* {
* "tasks": [
* {
* "name": "terminate",
* "taskReferenceName": "terminate0",
* "inputParameters": {
* "terminationStatus": "COMPLETED",
* "workflowOutput": "${task0.output}"
* },
* "type": "TERMINATE",
* "startDelay": 0,
* "optional": false
* }
* ]
* }
* ...
* </pre>
*
* This task has some validations on creation and execution, they are: - the "terminationStatus"
* parameter is mandatory and it can only receive the values "COMPLETED" or "FAILED" - the terminate
* task cannot be optional
*/
@Component(TASK_TYPE_TERMINATE)
public class Terminate extends WorkflowSystemTask {

    private static final String TERMINATION_STATUS_PARAMETER = "terminationStatus";
    private static final String TERMINATION_REASON_PARAMETER = "terminationReason";
    private static final String TERMINATION_WORKFLOW_OUTPUT = "workflowOutput";

    public Terminate() {
        super(TASK_TYPE_TERMINATE);
    }

    /**
     * Validates the "terminationStatus" input and, when valid (COMPLETED, FAILED or TERMINATED),
     * copies the optional "workflowOutput" input into the task output and marks the task
     * COMPLETED. When the status is invalid, the task is marked FAILED.
     *
     * @return true if the status input was valid and the task completed, false otherwise
     */
    @Override
    public boolean execute(
            WorkflowModel workflow, TaskModel task, WorkflowExecutor workflowExecutor) {
        String returnStatus = (String) task.getInputData().get(TERMINATION_STATUS_PARAMETER);

        if (validateInputStatus(returnStatus)) {
            task.setOutputData(getInputFromParam(task.getInputData()));
            task.setStatus(TaskModel.Status.COMPLETED);
            return true;
        }
        task.setReasonForIncompletion("given termination status is not valid");
        task.setStatus(TaskModel.Status.FAILED);
        return false;
    }

    public static String getTerminationStatusParameter() {
        return TERMINATION_STATUS_PARAMETER;
    }

    public static String getTerminationReasonParameter() {
        return TERMINATION_REASON_PARAMETER;
    }

    public static String getTerminationWorkflowOutputParameter() {
        return TERMINATION_WORKFLOW_OUTPUT;
    }

    /** @return true if the given status is one of COMPLETED, FAILED or TERMINATED. */
    public static Boolean validateInputStatus(String status) {
        return COMPLETED.name().equals(status)
                || FAILED.name().equals(status)
                || TERMINATED.name().equals(status);
    }

    /**
     * Extracts the workflow output from the task input. A Map value becomes the output as-is;
     * any other non-null value is wrapped under the key "output"; absent input yields an empty
     * map.
     */
    @SuppressWarnings("unchecked")
    private Map<String, Object> getInputFromParam(Map<String, Object> taskInput) {
        HashMap<String, Object> output = new HashMap<>();
        Object workflowOutput = taskInput.get(TERMINATION_WORKFLOW_OUTPUT);
        if (workflowOutput == null) {
            return output;
        }
        // Fixed: accept any Map implementation (e.g. Map.of(), TreeMap), not just HashMap.
        // The previous instanceof HashMap check silently wrapped other Map types under "output".
        if (workflowOutput instanceof Map) {
            output.putAll((Map<String, Object>) workflowOutput);
            return output;
        }
        output.put("output", workflowOutput);
        return output;
    }
}
| 6,681 |
0 | Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution | Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution/tasks/ExclusiveJoin.java | /*
* Copyright 2022 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution.tasks;
import java.util.List;
import java.util.stream.Collectors;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
import com.netflix.conductor.common.utils.TaskUtils;
import com.netflix.conductor.core.execution.WorkflowExecutor;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_EXCLUSIVE_JOIN;
@Component(TASK_TYPE_EXCLUSIVE_JOIN)
public class ExclusiveJoin extends WorkflowSystemTask {

    private static final Logger LOGGER = LoggerFactory.getLogger(ExclusiveJoin.class);

    // Input key listing fallback task references to join on when none of the primary
    // "joinOn" references was scheduled (or all were skipped).
    private static final String DEFAULT_EXCLUSIVE_JOIN_TASKS = "defaultExclusiveJoinTask";

    public ExclusiveJoin() {
        super(TASK_TYPE_EXCLUSIVE_JOIN);
    }

    /**
     * Joins on exactly one task: the first reference in the "joinOn" input that was scheduled
     * and not SKIPPED. If no such reference exists, the same selection is attempted over the
     * "defaultExclusiveJoinTask" input. On success the joined task's output becomes this
     * task's output.
     *
     * @return true when the join reached a terminal decision (COMPLETED or FAILED), false when
     *     it must keep waiting.
     */
    @Override
    @SuppressWarnings("unchecked")
    public boolean execute(
            WorkflowModel workflow, TaskModel task, WorkflowExecutor workflowExecutor) {
        boolean foundExlusiveJoinOnTask = false;
        boolean hasFailures = false;
        StringBuilder failureReason = new StringBuilder();
        TaskModel.Status taskStatus;
        List<String> joinOn = (List<String>) task.getInputData().get("joinOn");
        if (task.isLoopOverTask()) {
            // If exclusive join is part of loop over task, wait for specific iteration to get
            // complete
            joinOn =
                    joinOn.stream()
                            .map(name -> TaskUtils.appendIteration(name, task.getIteration()))
                            .collect(Collectors.toList());
        }
        TaskModel exclusiveTask = null;
        for (String joinOnRef : joinOn) {
            LOGGER.debug("Exclusive Join On Task {} ", joinOnRef);
            exclusiveTask = workflow.getTaskByRefName(joinOnRef);
            if (exclusiveTask == null || exclusiveTask.getStatus() == TaskModel.Status.SKIPPED) {
                LOGGER.debug("The task {} is either not scheduled or skipped.", joinOnRef);
                continue;
            }
            // First scheduled, non-skipped candidate wins — the join is "exclusive".
            taskStatus = exclusiveTask.getStatus();
            foundExlusiveJoinOnTask = taskStatus.isTerminal();
            // NOTE(review): a non-terminal status also has isSuccessful() == false, which sets
            // hasFailures and fails the join below — confirm the joined-on task is always
            // terminal by the time this executes.
            hasFailures = !taskStatus.isSuccessful();
            if (hasFailures) {
                failureReason.append(exclusiveTask.getReasonForIncompletion()).append(" ");
            }
            break;
        }
        if (!foundExlusiveJoinOnTask) {
            // None of the primary joinOn tasks qualified: fall back to the default references.
            List<String> defaultExclusiveJoinTasks =
                    (List<String>) task.getInputData().get(DEFAULT_EXCLUSIVE_JOIN_TASKS);
            LOGGER.info(
                    "Could not perform exclusive on Join Task(s). Performing now on default exclusive join task(s) {}, workflow: {}",
                    defaultExclusiveJoinTasks,
                    workflow.getWorkflowId());
            if (defaultExclusiveJoinTasks != null && !defaultExclusiveJoinTasks.isEmpty()) {
                for (String defaultExclusiveJoinTask : defaultExclusiveJoinTasks) {
                    // Pick the first task that we should join on and break.
                    exclusiveTask = workflow.getTaskByRefName(defaultExclusiveJoinTask);
                    if (exclusiveTask == null
                            || exclusiveTask.getStatus() == TaskModel.Status.SKIPPED) {
                        LOGGER.debug(
                                "The task {} is either not scheduled or skipped.",
                                defaultExclusiveJoinTask);
                        continue;
                    }
                    taskStatus = exclusiveTask.getStatus();
                    foundExlusiveJoinOnTask = taskStatus.isTerminal();
                    hasFailures = !taskStatus.isSuccessful();
                    if (hasFailures) {
                        failureReason.append(exclusiveTask.getReasonForIncompletion()).append(" ");
                    }
                    break;
                }
            } else {
                LOGGER.debug(
                        "Could not evaluate last tasks output. Verify the task configuration in the workflow definition.");
            }
        }
        LOGGER.debug(
                "Status of flags: foundExlusiveJoinOnTask: {}, hasFailures {}",
                foundExlusiveJoinOnTask,
                hasFailures);
        if (foundExlusiveJoinOnTask || hasFailures) {
            if (hasFailures) {
                task.setReasonForIncompletion(failureReason.toString());
                task.setStatus(TaskModel.Status.FAILED);
            } else {
                // exclusiveTask is non-null here: foundExlusiveJoinOnTask can only be true after
                // a candidate was assigned.
                task.setOutputData(exclusiveTask.getOutputData());
                task.setStatus(TaskModel.Status.COMPLETED);
            }
            LOGGER.debug("Task: {} status is: {}", task.getTaskId(), task.getStatus());
            return true;
        }
        return false;
    }
}
| 6,682 |
0 | Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution | Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution/tasks/SystemTaskRegistry.java | /*
* Copyright 2022 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution.tasks;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;
import org.springframework.stereotype.Component;
/**
* A container class that holds a mapping of system task types {@link
* com.netflix.conductor.common.metadata.tasks.TaskType} to {@link WorkflowSystemTask} instances.
*/
@Component
public class SystemTaskRegistry {

    public static final String ASYNC_SYSTEM_TASKS_QUALIFIER = "asyncSystemTasks";

    // Immutable after construction: task type -> system task singleton.
    private final Map<String, WorkflowSystemTask> registry;

    public SystemTaskRegistry(Set<WorkflowSystemTask> tasks) {
        this.registry =
                tasks.stream()
                        .collect(
                                Collectors.toMap(
                                        WorkflowSystemTask::getTaskType, Function.identity()));
    }

    /**
     * Returns the {@link WorkflowSystemTask} registered for the given task type.
     *
     * @throws IllegalStateException if no system task is registered for the given type
     */
    public WorkflowSystemTask get(String taskType) {
        return Optional.ofNullable(registry.get(taskType))
                .orElseThrow(
                        () ->
                                new IllegalStateException(
                                        // fixed: message was missing the space after the task
                                        // type (e.g. "HTTPnot found in ...")
                                        taskType
                                                + " not found in "
                                                + getClass().getSimpleName()));
    }

    /** @return true if the given task type has a registered system task implementation. */
    public boolean isSystemTask(String taskType) {
        return registry.containsKey(taskType);
    }
}
| 6,683 |
0 | Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution | Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution/tasks/Lambda.java | /*
* Copyright 2022 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution.tasks;
import java.util.Map;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
import com.netflix.conductor.core.events.ScriptEvaluator;
import com.netflix.conductor.core.execution.WorkflowExecutor;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_LAMBDA;
/**
* @author X-Ultra
* <p>Task that enables execute Lambda script at workflow execution, For example,
* <pre>
* ...
* {
* "tasks": [
* {
* "name": "LAMBDA",
* "taskReferenceName": "lambda_test",
* "type": "LAMBDA",
* "inputParameters": {
* "input": "${workflow.input}",
* "scriptExpression": "if ($.input.a==1){return {testvalue: true}} else{return {testvalue: false} }"
* }
* }
* ]
* }
* ...
* </pre>
* then to use task output, e.g. <code>script_test.output.testvalue</code>
* @deprecated {@link Lambda} is deprecated. Use {@link Inline} task for inline expression
* evaluation. Also see ${@link com.netflix.conductor.common.metadata.workflow.WorkflowTask})
*/
@Deprecated
@Component(TASK_TYPE_LAMBDA)
public class Lambda extends WorkflowSystemTask {

    private static final Logger LOGGER = LoggerFactory.getLogger(Lambda.class);

    private static final String QUERY_EXPRESSION_PARAMETER = "scriptExpression";

    public static final String NAME = "LAMBDA";

    public Lambda() {
        super(TASK_TYPE_LAMBDA);
    }

    /**
     * Evaluates the "scriptExpression" input against the task's input map and stores the result
     * under the "result" output key. A blank expression or an evaluation error marks the task
     * FAILED with a reason; otherwise the task is COMPLETED.
     *
     * @return always true — the task reaches a terminal state either way
     */
    @Override
    public boolean execute(
            WorkflowModel workflow, TaskModel task, WorkflowExecutor workflowExecutor) {
        Map<String, Object> taskInput = task.getInputData();
        try {
            String expression = (String) taskInput.get(QUERY_EXPRESSION_PARAMETER);
            if (StringUtils.isBlank(expression)) {
                // Nothing to evaluate: fail fast with an actionable reason.
                LOGGER.error("Empty {} in Lambda task. ", QUERY_EXPRESSION_PARAMETER);
                task.setReasonForIncompletion(
                        "Empty '"
                                + QUERY_EXPRESSION_PARAMETER
                                + "' in Lambda task's input parameters. A non-empty String value must be provided.");
                task.setStatus(TaskModel.Status.FAILED);
            } else {
                // Wrap the user script in a function so "return" statements are legal.
                String wrappedScript =
                        "function scriptFun(){" + expression + "} scriptFun();";
                LOGGER.debug(
                        "scriptExpressionBuilder: {}, task: {}",
                        wrappedScript,
                        task.getTaskId());
                Object evaluationResult = ScriptEvaluator.eval(wrappedScript, taskInput);
                task.addOutput("result", evaluationResult);
                task.setStatus(TaskModel.Status.COMPLETED);
            }
        } catch (Exception e) {
            LOGGER.error(
                    "Failed to execute Lambda Task: {} in workflow: {}",
                    task.getTaskId(),
                    workflow.getWorkflowId(),
                    e);
            task.setStatus(TaskModel.Status.FAILED);
            task.setReasonForIncompletion(e.getMessage());
            // Prefer the root cause message when one is available.
            Throwable cause = e.getCause();
            task.addOutput("error", cause != null ? cause.getMessage() : e.getMessage());
        }
        return true;
    }
}
| 6,684 |
0 | Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution | Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution/tasks/Decision.java | /*
* Copyright 2022 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution.tasks;
import org.springframework.stereotype.Component;
import com.netflix.conductor.core.execution.WorkflowExecutor;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_DECISION;
/**
* @deprecated {@link Decision} is deprecated. Use {@link Switch} task for condition evaluation
* using the extensible evaluation framework. Also see ${@link
* com.netflix.conductor.common.metadata.workflow.WorkflowTask}).
*/
@Deprecated
@Component(TASK_TYPE_DECISION)
public class Decision extends WorkflowSystemTask {

    public Decision() {
        super(TASK_TYPE_DECISION);
    }

    /**
     * Marks the decision task COMPLETED immediately. The actual branch selection is not
     * performed here — presumably it is handled by the workflow decider when the tasks are
     * mapped; confirm against the decision task mapper.
     */
    @Override
    public boolean execute(
            WorkflowModel workflow, TaskModel task, WorkflowExecutor workflowExecutor) {
        task.setStatus(TaskModel.Status.COMPLETED);
        return true;
    }
}
| 6,685 |
0 | Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution | Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution/tasks/DoWhile.java | /*
* Copyright 2022 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution.tasks;
import java.util.*;
import java.util.stream.Collectors;
import javax.script.ScriptException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
import com.netflix.conductor.annotations.VisibleForTesting;
import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.common.utils.TaskUtils;
import com.netflix.conductor.core.events.ScriptEvaluator;
import com.netflix.conductor.core.execution.WorkflowExecutor;
import com.netflix.conductor.core.utils.ParametersUtils;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_DO_WHILE;
@Component(TASK_TYPE_DO_WHILE)
public class DoWhile extends WorkflowSystemTask {

    private static final Logger LOGGER = LoggerFactory.getLogger(DoWhile.class);

    private final ParametersUtils parametersUtils;

    public DoWhile(ParametersUtils parametersUtils) {
        super(TASK_TYPE_DO_WHILE);
        this.parametersUtils = parametersUtils;
    }

    @Override
    public void cancel(WorkflowModel workflow, TaskModel task, WorkflowExecutor executor) {
        task.setStatus(TaskModel.Status.CANCELED);
    }

    /**
     * Drives one evaluation of the DO_WHILE loop: collects the current iteration's tasks,
     * aggregates their outputs under the iteration number, and — once the iteration is fully
     * terminal — evaluates the loop condition to either schedule the next iteration or finish.
     *
     * @return true when the task's state changed (scheduled next iteration, COMPLETED, or
     *     FAILED); false when the current iteration is still in progress.
     */
    @Override
    public boolean execute(
            WorkflowModel workflow, TaskModel doWhileTaskModel, WorkflowExecutor workflowExecutor) {

        boolean hasFailures = false;
        StringBuilder failureReason = new StringBuilder();
        Map<String, Object> output = new HashMap<>();

        /*
         * Get the latest set of tasks (the ones that have the highest retry count). We don't want to evaluate any tasks
         * that have already failed if there is a more current one (a later retry count).
         */
        Map<String, TaskModel> relevantTasks = new LinkedHashMap<>();
        TaskModel relevantTask;
        for (TaskModel t : workflow.getTasks()) {
            // Only tasks that belong to this loop body, are not the DO_WHILE task itself, and
            // belong to the current iteration are relevant.
            if (doWhileTaskModel
                            .getWorkflowTask()
                            .has(TaskUtils.removeIterationFromTaskRefName(t.getReferenceTaskName()))
                    && !doWhileTaskModel.getReferenceTaskName().equals(t.getReferenceTaskName())
                    && doWhileTaskModel.getIteration() == t.getIteration()) {
                relevantTask = relevantTasks.get(t.getReferenceTaskName());
                if (relevantTask == null || t.getRetryCount() > relevantTask.getRetryCount()) {
                    relevantTasks.put(t.getReferenceTaskName(), t);
                }
            }
        }
        Collection<TaskModel> loopOverTasks = relevantTasks.values();

        if (LOGGER.isDebugEnabled()) {
            LOGGER.debug(
                    "Workflow {} waiting for tasks {} to complete iteration {}",
                    workflow.getWorkflowId(),
                    loopOverTasks.stream()
                            .map(TaskModel::getReferenceTaskName)
                            .collect(Collectors.toList()),
                    doWhileTaskModel.getIteration());
        }

        // if the loopOverTasks collection is empty, no tasks inside the loop have been scheduled.
        // so schedule it and exit the method.
        if (loopOverTasks.isEmpty()) {
            doWhileTaskModel.setIteration(1);
            doWhileTaskModel.addOutput("iteration", doWhileTaskModel.getIteration());
            return scheduleNextIteration(doWhileTaskModel, workflow, workflowExecutor);
        }

        // Aggregate each loop task's output (keyed by its iteration-free reference name) and
        // detect failures. A failed task short-circuits the aggregation.
        for (TaskModel loopOverTask : loopOverTasks) {
            TaskModel.Status taskStatus = loopOverTask.getStatus();
            hasFailures = !taskStatus.isSuccessful();
            if (hasFailures) {
                failureReason.append(loopOverTask.getReasonForIncompletion()).append(" ");
            }
            output.put(
                    TaskUtils.removeIterationFromTaskRefName(loopOverTask.getReferenceTaskName()),
                    loopOverTask.getOutputData());
            if (hasFailures) {
                break;
            }
        }
        // The per-iteration outputs live under the iteration number in the DO_WHILE output.
        doWhileTaskModel.addOutput(String.valueOf(doWhileTaskModel.getIteration()), output);

        if (hasFailures) {
            LOGGER.debug(
                    "Task {} failed in {} iteration",
                    doWhileTaskModel.getTaskId(),
                    doWhileTaskModel.getIteration() + 1);
            return markTaskFailure(
                    doWhileTaskModel, TaskModel.Status.FAILED, failureReason.toString());
        }

        if (!isIterationComplete(doWhileTaskModel, relevantTasks)) {
            // current iteration is not complete (all tasks inside the loop are not terminal)
            return false;
        }

        // if we are here, the iteration is complete, and we need to check if there is a next
        // iteration by evaluating the loopCondition
        boolean shouldContinue;
        try {
            shouldContinue = evaluateCondition(workflow, doWhileTaskModel);
            LOGGER.debug(
                    "Task {} condition evaluated to {}",
                    doWhileTaskModel.getTaskId(),
                    shouldContinue);
            if (shouldContinue) {
                doWhileTaskModel.setIteration(doWhileTaskModel.getIteration() + 1);
                doWhileTaskModel.addOutput("iteration", doWhileTaskModel.getIteration());
                return scheduleNextIteration(doWhileTaskModel, workflow, workflowExecutor);
            } else {
                LOGGER.debug(
                        "Task {} took {} iterations to complete",
                        doWhileTaskModel.getTaskId(),
                        doWhileTaskModel.getIteration() + 1);
                return markTaskSuccess(doWhileTaskModel);
            }
        } catch (ScriptException e) {
            // A broken condition is a configuration error, not a transient failure: mark the
            // task with a terminal error so it is not retried.
            String message =
                    String.format(
                            "Unable to evaluate condition %s, exception %s",
                            doWhileTaskModel.getWorkflowTask().getLoopCondition(), e.getMessage());
            LOGGER.error(message);
            return markTaskFailure(
                    doWhileTaskModel, TaskModel.Status.FAILED_WITH_TERMINAL_ERROR, message);
        }
    }

    /**
     * Check if all tasks in the current iteration have reached terminal state.
     *
     * @param doWhileTaskModel The {@link TaskModel} of DO_WHILE.
     * @param referenceNameToModel Map of taskReferenceName to {@link TaskModel}.
     * @return true if all tasks in DO_WHILE.loopOver are in <code>referenceNameToModel</code> and
     *     reached terminal state.
     */
    private boolean isIterationComplete(
            TaskModel doWhileTaskModel, Map<String, TaskModel> referenceNameToModel) {
        List<WorkflowTask> workflowTasksInsideDoWhile =
                doWhileTaskModel.getWorkflowTask().getLoopOver();
        int iteration = doWhileTaskModel.getIteration();
        boolean allTasksTerminal = true;
        for (WorkflowTask workflowTaskInsideDoWhile : workflowTasksInsideDoWhile) {
            String taskReferenceName =
                    TaskUtils.appendIteration(
                            workflowTaskInsideDoWhile.getTaskReferenceName(), iteration);
            if (referenceNameToModel.containsKey(taskReferenceName)) {
                TaskModel taskModel = referenceNameToModel.get(taskReferenceName);
                if (!taskModel.getStatus().isTerminal()) {
                    allTasksTerminal = false;
                    break;
                }
            } else {
                allTasksTerminal = false;
                break;
            }
        }

        if (!allTasksTerminal) {
            // Cases where tasks directly inside loop over are not completed.
            // loopOver -> [task1 -> COMPLETED, task2 -> IN_PROGRESS]
            return false;
        }

        // Check all the tasks in referenceNameToModel are completed or not. These are set of tasks
        // which are not directly inside loopOver tasks, but they are under hierarchy
        // loopOver -> [decisionTask -> COMPLETED [ task1 -> COMPLETED, task2 -> IN_PROGRESS]]
        return referenceNameToModel.values().stream()
                .noneMatch(taskModel -> !taskModel.getStatus().isTerminal());
    }

    /** Asks the executor to schedule the next iteration of the loop body. */
    boolean scheduleNextIteration(
            TaskModel doWhileTaskModel, WorkflowModel workflow, WorkflowExecutor workflowExecutor) {
        LOGGER.debug(
                "Scheduling loop tasks for task {} as condition {} evaluated to true",
                doWhileTaskModel.getTaskId(),
                doWhileTaskModel.getWorkflowTask().getLoopCondition());
        workflowExecutor.scheduleNextIteration(doWhileTaskModel, workflow);
        return true; // Return true even though status not changed. Iteration has to be updated in
        // execution DAO.
    }

    /** Marks the DO_WHILE task with the given failure status and reason. */
    boolean markTaskFailure(TaskModel taskModel, TaskModel.Status status, String failureReason) {
        LOGGER.error("Marking task {} failed with error.", taskModel.getTaskId());
        taskModel.setReasonForIncompletion(failureReason);
        taskModel.setStatus(status);
        return true;
    }

    /** Marks the DO_WHILE task COMPLETED after the loop condition evaluated to false. */
    boolean markTaskSuccess(TaskModel taskModel) {
        LOGGER.debug(
                "Task {} took {} iterations to complete",
                taskModel.getTaskId(),
                taskModel.getIteration() + 1);
        taskModel.setStatus(TaskModel.Status.COMPLETED);
        return true;
    }

    /**
     * Evaluates the loop condition script against the computed task input, the DO_WHILE task's
     * own output, and the outputs of every task inside the loop (keyed by iteration-free
     * reference name). Returns false when no condition is configured.
     *
     * @throws ScriptException if the condition expression cannot be evaluated
     */
    @VisibleForTesting
    boolean evaluateCondition(WorkflowModel workflow, TaskModel task) throws ScriptException {
        TaskDef taskDefinition = task.getTaskDefinition().orElse(null);
        // Use paramUtils to compute the task input
        Map<String, Object> conditionInput =
                parametersUtils.getTaskInputV2(
                        task.getWorkflowTask().getInputParameters(),
                        workflow,
                        task.getTaskId(),
                        taskDefinition);
        conditionInput.put(task.getReferenceTaskName(), task.getOutputData());
        List<TaskModel> loopOver =
                workflow.getTasks().stream()
                        .filter(
                                t ->
                                        (task.getWorkflowTask()
                                                        .has(
                                                                TaskUtils
                                                                        .removeIterationFromTaskRefName(
                                                                                t
                                                                                        .getReferenceTaskName()))
                                                && !task.getReferenceTaskName()
                                                        .equals(t.getReferenceTaskName())))
                        .collect(Collectors.toList());

        for (TaskModel loopOverTask : loopOver) {
            conditionInput.put(
                    TaskUtils.removeIterationFromTaskRefName(loopOverTask.getReferenceTaskName()),
                    loopOverTask.getOutputData());
        }

        String condition = task.getWorkflowTask().getLoopCondition();
        boolean result = false;
        if (condition != null) {
            LOGGER.debug("Condition: {} is being evaluated", condition);
            // Evaluate the expression by using the Nashorn based script evaluator
            result = ScriptEvaluator.evalBool(condition, conditionInput);
        }
        return result;
    }
}
| 6,686 |
0 | Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution | Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution/tasks/Fork.java | /*
* Copyright 2022 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution.tasks;
import org.springframework.stereotype.Component;
import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_FORK;
/**
 * Registers the FORK system task type. No execute() override is provided here — presumably the
 * fork semantics (scheduling the forked branches) are implemented by the workflow decider/task
 * mappers; confirm against the fork task mapper.
 */
@Component(TASK_TYPE_FORK)
public class Fork extends WorkflowSystemTask {

    public Fork() {
        super(TASK_TYPE_FORK);
    }
}
| 6,687 |
0 | Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution | Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution/tasks/IsolatedTaskQueueProducer.java | /*
* Copyright 2022 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution.tasks;
import java.time.Duration;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.stereotype.Component;
import com.netflix.conductor.annotations.VisibleForTesting;
import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.core.utils.QueueUtils;
import com.netflix.conductor.service.MetadataService;
import static com.netflix.conductor.core.execution.tasks.SystemTaskRegistry.ASYNC_SYSTEM_TASKS_QUALIFIER;
@Component
@ConditionalOnProperty(
        name = "conductor.system-task-workers.enabled",
        havingValue = "true",
        matchIfMissing = true)
public class IsolatedTaskQueueProducer {

    private static final Logger LOGGER = LoggerFactory.getLogger(IsolatedTaskQueueProducer.class);

    private final MetadataService metadataService;
    private final Set<WorkflowSystemTask> asyncSystemTasks;
    private final SystemTaskWorker systemTaskWorker;

    // Queues already handed to the worker, so polling is started at most once per queue.
    // NOTE(review): a plain HashSet — appears to be mutated only from the single-threaded
    // scheduled executor below; confirm no other thread calls addTaskQueues().
    private final Set<String> listeningQueues = new HashSet<>();

    /**
     * When isolated system tasks are enabled, periodically scans task definitions for isolation
     * groups / execution namespaces and starts dedicated queue polling for each async system
     * task in each discovered group.
     */
    public IsolatedTaskQueueProducer(
            MetadataService metadataService,
            @Qualifier(ASYNC_SYSTEM_TASKS_QUALIFIER) Set<WorkflowSystemTask> asyncSystemTasks,
            SystemTaskWorker systemTaskWorker,
            @Value("${conductor.app.isolatedSystemTaskEnabled:false}")
                    boolean isolatedSystemTaskEnabled,
            @Value("${conductor.app.isolatedSystemTaskQueuePollInterval:10s}")
                    Duration isolatedSystemTaskQueuePollInterval) {

        this.metadataService = metadataService;
        this.asyncSystemTasks = asyncSystemTasks;
        this.systemTaskWorker = systemTaskWorker;
        if (isolatedSystemTaskEnabled) {
            LOGGER.info("Listening for isolation groups");

            // Single-threaded scheduler: addTaskQueues never runs concurrently with itself.
            Executors.newSingleThreadScheduledExecutor()
                    .scheduleWithFixedDelay(
                            this::addTaskQueues,
                            1000,
                            isolatedSystemTaskQueuePollInterval.toMillis(),
                            TimeUnit.MILLISECONDS);
        } else {
            LOGGER.info("Isolated System Task Worker DISABLED");
        }
    }

    /**
     * Fetches all task definitions that declare an isolation group or an execution namespace.
     * Returns an empty set on error (the next scheduled run retries).
     */
    private Set<TaskDef> getIsolationExecutionNameSpaces() {
        Set<TaskDef> isolationExecutionNameSpaces = Collections.emptySet();
        try {
            List<TaskDef> taskDefs = metadataService.getTaskDefs();
            isolationExecutionNameSpaces =
                    taskDefs.stream()
                            .filter(
                                    taskDef ->
                                            StringUtils.isNotBlank(taskDef.getIsolationGroupId())
                                                    || StringUtils.isNotBlank(
                                                            taskDef.getExecutionNameSpace()))
                            .collect(Collectors.toSet());
        } catch (RuntimeException e) {
            // Deliberate best-effort: swallow and log so the scheduled task keeps running.
            LOGGER.error(
                    "Unknown exception received in getting isolation groups, sleeping and retrying",
                    e);
        }
        return isolationExecutionNameSpaces;
    }

    /**
     * For every isolated task definition and every async system task, derives the isolated
     * queue name and starts polling it exactly once.
     */
    @VisibleForTesting
    void addTaskQueues() {
        Set<TaskDef> isolationTaskDefs = getIsolationExecutionNameSpaces();
        LOGGER.debug("Retrieved queues {}", isolationTaskDefs);

        for (TaskDef isolatedTaskDef : isolationTaskDefs) {
            for (WorkflowSystemTask systemTask : this.asyncSystemTasks) {
                String taskQueue =
                        QueueUtils.getQueueName(
                                systemTask.getTaskType(),
                                null,
                                isolatedTaskDef.getIsolationGroupId(),
                                isolatedTaskDef.getExecutionNameSpace());
                LOGGER.debug("Adding taskQueue:'{}' to system task worker coordinator", taskQueue);
                if (!listeningQueues.contains(taskQueue)) {
                    systemTaskWorker.startPolling(systemTask, taskQueue);
                    listeningQueues.add(taskQueue);
                }
            }
        }
    }
}
| 6,688 |
0 | Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution | Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution/tasks/ExecutionConfig.java | /*
* Copyright 2022 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution.tasks;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import org.apache.commons.lang3.concurrent.BasicThreadFactory;
import com.netflix.conductor.core.utils.SemaphoreUtil;
/**
 * Bundles the execution resources for one category of asynchronous system tasks: a fixed-size
 * thread pool whose threads follow the given naming pattern, plus a semaphore sized to the same
 * thread count for bounding in-flight work.
 */
class ExecutionConfig {

    private final ExecutorService executorService;
    private final SemaphoreUtil semaphoreUtil;

    ExecutionConfig(int threadCount, String threadNameFormat) {
        BasicThreadFactory threadFactory =
                new BasicThreadFactory.Builder().namingPattern(threadNameFormat).build();
        this.executorService = Executors.newFixedThreadPool(threadCount, threadFactory);
        this.semaphoreUtil = new SemaphoreUtil(threadCount);
    }

    public ExecutorService getExecutorService() {
        return executorService;
    }

    public SemaphoreUtil getSemaphoreUtil() {
        return semaphoreUtil;
    }
}
| 6,689 |
0 | Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution | Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution/tasks/Event.java | /*
* Copyright 2022 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution.tasks;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
import com.netflix.conductor.annotations.VisibleForTesting;
import com.netflix.conductor.core.events.EventQueues;
import com.netflix.conductor.core.events.queue.Message;
import com.netflix.conductor.core.events.queue.ObservableQueue;
import com.netflix.conductor.core.exception.NonTransientException;
import com.netflix.conductor.core.execution.WorkflowExecutor;
import com.netflix.conductor.core.utils.ParametersUtils;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_EVENT;
@Component(TASK_TYPE_EVENT)
public class Event extends WorkflowSystemTask {

    private static final Logger LOGGER = LoggerFactory.getLogger(Event.class);

    public static final String NAME = "EVENT";

    // Output key under which start() stores the resolved sink queue name; execute() reads it back.
    private static final String EVENT_PRODUCED = "event_produced";

    private final ObjectMapper objectMapper;
    private final ParametersUtils parametersUtils;
    private final EventQueues eventQueues;

    public Event(
            EventQueues eventQueues, ParametersUtils parametersUtils, ObjectMapper objectMapper) {
        super(TASK_TYPE_EVENT);
        this.parametersUtils = parametersUtils;
        this.eventQueues = eventQueues;
        this.objectMapper = objectMapper;
    }

    /**
     * Prepares the event payload (task input enriched with workflow metadata), moves the task to
     * IN_PROGRESS, and resolves the sink expression into a concrete queue name stored under
     * {@link #EVENT_PRODUCED}. On resolution failure the task is marked FAILED with the
     * exception message as the reason.
     */
    @Override
    public void start(WorkflowModel workflow, TaskModel task, WorkflowExecutor workflowExecutor) {
        Map<String, Object> payload = new HashMap<>(task.getInputData());
        payload.put("workflowInstanceId", workflow.getWorkflowId());
        payload.put("workflowType", workflow.getWorkflowName());
        payload.put("workflowVersion", workflow.getWorkflowVersion());
        payload.put("correlationId", workflow.getCorrelationId());
        task.setStatus(TaskModel.Status.IN_PROGRESS);
        task.addOutput(payload);
        try {
            task.addOutput(EVENT_PRODUCED, computeQueueName(workflow, task));
        } catch (Exception e) {
            task.setStatus(TaskModel.Status.FAILED);
            task.setReasonForIncompletion(e.getMessage());
            LOGGER.error(
                    "Error executing task: {}, workflow: {}",
                    task.getTaskId(),
                    workflow.getWorkflowId(),
                    e);
        }
    }

    /**
     * Publishes the task's output data as a JSON message to the queue resolved in
     * {@link #start}. Completes the task unless it is configured for async completion, in which
     * case it stays IN_PROGRESS until completed externally.
     *
     * @return true when the task status was changed (completed), false otherwise
     */
    @Override
    public boolean execute(
            WorkflowModel workflow, TaskModel task, WorkflowExecutor workflowExecutor) {
        try {
            String queueName = (String) task.getOutputData().get(EVENT_PRODUCED);
            ObservableQueue queue = getQueue(queueName, task.getTaskId());
            Message message = getPopulatedMessage(task);
            queue.publish(List.of(message));
            LOGGER.debug("Published message:{} to queue:{}", message.getId(), queue.getName());
            if (!isAsyncComplete(task)) {
                task.setStatus(TaskModel.Status.COMPLETED);
                return true;
            }
        } catch (JsonProcessingException jpe) {
            // NOTE(review): the exception object itself is not passed to the logger here, so the
            // stack trace is not logged — confirm whether that is intentional.
            task.setStatus(TaskModel.Status.FAILED);
            task.setReasonForIncompletion("Error serializing JSON payload: " + jpe.getMessage());
            LOGGER.error(
                    "Error serializing JSON payload for task: {}, workflow: {}",
                    task.getTaskId(),
                    workflow.getWorkflowId());
        } catch (Exception e) {
            task.setStatus(TaskModel.Status.FAILED);
            task.setReasonForIncompletion(e.getMessage());
            LOGGER.error(
                    "Error executing task: {}, workflow: {}",
                    task.getTaskId(),
                    workflow.getWorkflowId(),
                    e);
        }
        return false;
    }

    /**
     * Cancellation acks (rather than publishes) a message keyed by the task id on the resolved
     * queue.
     */
    @Override
    public void cancel(WorkflowModel workflow, TaskModel task, WorkflowExecutor workflowExecutor) {
        Message message = new Message(task.getTaskId(), null, task.getTaskId());
        String queueName = computeQueueName(workflow, task);
        ObservableQueue queue = getQueue(queueName, task.getTaskId());
        queue.ack(List.of(message));
    }

    /**
     * Resolves the "sink" input parameter (after parameter substitution) into a queue name.
     *
     * <p>Sinks starting with "conductor" are expanded:
     * <ul>
     *   <li>exactly "conductor" -&gt; "conductor:&lt;workflowName&gt;:&lt;taskRefName&gt;"</li>
     *   <li>"conductor:&lt;event&gt;" -&gt; "conductor:&lt;workflowName&gt;:&lt;event&gt;"</li>
     *   <li>any other "conductor..." prefix is rejected</li>
     * </ul>
     * All other sink values are used as the queue name verbatim.
     *
     * @throws IllegalStateException for an unsupported "conductor..." sink value
     */
    @VisibleForTesting
    String computeQueueName(WorkflowModel workflow, TaskModel task) {
        String sinkValueRaw = (String) task.getInputData().get("sink");
        Map<String, Object> input = new HashMap<>();
        input.put("sink", sinkValueRaw);
        Map<String, Object> replaced =
                parametersUtils.getTaskInputV2(input, workflow, task.getTaskId(), null);
        String sinkValue = (String) replaced.get("sink");
        String queueName = sinkValue;
        if (sinkValue.startsWith("conductor")) {
            if ("conductor".equals(sinkValue)) {
                queueName =
                        sinkValue
                                + ":"
                                + workflow.getWorkflowName()
                                + ":"
                                + task.getReferenceTaskName();
            } else if (sinkValue.startsWith("conductor:")) {
                // NOTE(review): replaceAll treats "conductor:" as a regex and removes EVERY
                // occurrence in the sink, not just the leading prefix — confirm this is intended
                // for sinks that happen to contain "conductor:" more than once.
                queueName =
                        "conductor:"
                                + workflow.getWorkflowName()
                                + ":"
                                + sinkValue.replaceAll("conductor:", "");
            } else {
                throw new IllegalStateException(
                        "Invalid / Unsupported sink specified: " + sinkValue);
            }
        }
        return queueName;
    }

    /**
     * Looks up the {@link ObservableQueue} for the given name.
     *
     * @throws IllegalStateException if the queue name is not registered / cannot be loaded
     * @throws NonTransientException for any other lookup failure
     */
    @VisibleForTesting
    ObservableQueue getQueue(String queueName, String taskId) {
        try {
            return eventQueues.getQueue(queueName);
        } catch (IllegalArgumentException e) {
            throw new IllegalStateException(
                    "Error loading queue:"
                            + queueName
                            + ", for task:"
                            + taskId
                            + ", error: "
                            + e.getMessage());
        } catch (Exception e) {
            throw new NonTransientException("Unable to find queue name for task " + taskId);
        }
    }

    /** Builds the queue message: task output serialized as JSON, keyed by the task id. */
    Message getPopulatedMessage(TaskModel task) throws JsonProcessingException {
        String payloadJson = objectMapper.writeValueAsString(task.getOutputData());
        return new Message(task.getTaskId(), payloadJson, task.getTaskId());
    }
}
| 6,690 |
0 | Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution | Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution/tasks/Human.java | /*
* Copyright 2022 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution.tasks;
import org.springframework.stereotype.Component;
import com.netflix.conductor.core.execution.WorkflowExecutor;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_HUMAN;
import static com.netflix.conductor.model.TaskModel.Status.IN_PROGRESS;
/**
 * System task representing a manual step: it parks the workflow by moving the task to
 * IN_PROGRESS and never completes it itself — completion must come from outside.
 */
@Component(TASK_TYPE_HUMAN)
public class Human extends WorkflowSystemTask {

    public Human() {
        super(TASK_TYPE_HUMAN);
    }

    /** Marks the task IN_PROGRESS; it stays there until completed externally. */
    @Override
    public void start(WorkflowModel workflow, TaskModel task, WorkflowExecutor workflowExecutor) {
        task.setStatus(TaskModel.Status.IN_PROGRESS);
    }

    /** On cancellation the task is simply marked CANCELED. */
    @Override
    public void cancel(WorkflowModel workflow, TaskModel task, WorkflowExecutor workflowExecutor) {
        task.setStatus(TaskModel.Status.CANCELED);
    }
}
| 6,691 |
0 | Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution | Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution/tasks/Noop.java | /*
* Copyright 2023 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution.tasks;
import org.springframework.stereotype.Component;
import com.netflix.conductor.core.execution.WorkflowExecutor;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_NOOP;
/** A no-op system task: completes immediately without doing any work. */
@Component(TASK_TYPE_NOOP)
public class Noop extends WorkflowSystemTask {

    public Noop() {
        super(TASK_TYPE_NOOP);
    }

    /**
     * Immediately marks the task COMPLETED.
     *
     * @return always true, since the task status is changed on every call
     */
    @Override
    public boolean execute(
            WorkflowModel workflow, TaskModel task, WorkflowExecutor workflowExecutor) {
        task.setStatus(TaskModel.Status.COMPLETED);
        return true;
    }
}
| 6,692 |
0 | Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution | Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution/tasks/StartWorkflow.java | /*
* Copyright 2022 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution.tasks;
import java.util.HashMap;
import java.util.Map;
import javax.validation.Validator;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
import com.netflix.conductor.common.metadata.workflow.StartWorkflowRequest;
import com.netflix.conductor.core.exception.TransientException;
import com.netflix.conductor.core.execution.StartWorkflowInput;
import com.netflix.conductor.core.execution.WorkflowExecutor;
import com.netflix.conductor.core.operation.StartWorkflowOperation;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import com.fasterxml.jackson.databind.ObjectMapper;
import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_START_WORKFLOW;
import static com.netflix.conductor.model.TaskModel.Status.COMPLETED;
import static com.netflix.conductor.model.TaskModel.Status.FAILED;
@Component(TASK_TYPE_START_WORKFLOW)
public class StartWorkflow extends WorkflowSystemTask {

    private static final Logger LOGGER = LoggerFactory.getLogger(StartWorkflow.class);

    // Output key holding the id of the workflow that was started.
    private static final String WORKFLOW_ID = "workflowId";
    // Input key under which the StartWorkflowRequest payload is expected.
    private static final String START_WORKFLOW_PARAMETER = "startWorkflow";

    private final ObjectMapper objectMapper;
    private final Validator validator;
    private final StartWorkflowOperation startWorkflowOperation;

    public StartWorkflow(
            ObjectMapper objectMapper,
            Validator validator,
            StartWorkflowOperation startWorkflowOperation) {
        super(TASK_TYPE_START_WORKFLOW);
        this.objectMapper = objectMapper;
        this.validator = validator;
        this.startWorkflowOperation = startWorkflowOperation;
    }

    /**
     * Starts the workflow described by the task's 'startWorkflow' input parameter and records
     * the new workflow's id under the 'workflowId' output key.
     *
     * <p>Failure handling: invalid input fails the task; a {@link TransientException} from the
     * backend leaves the task status untouched (presumably so it can be re-attempted — TODO
     * confirm retry semantics); any other exception fails the task with the error message.
     */
    @Override
    public void start(
            WorkflowModel workflow, TaskModel taskModel, WorkflowExecutor workflowExecutor) {
        StartWorkflowRequest request = getRequest(taskModel);
        if (request == null) {
            // getRequest has already marked the task FAILED with a reason.
            return;
        }
        // Inherit the parent workflow's task-to-domain mapping when the request omits one.
        if (request.getTaskToDomain() == null || request.getTaskToDomain().isEmpty()) {
            Map<String, String> workflowTaskToDomainMap = workflow.getTaskToDomain();
            if (workflowTaskToDomainMap != null) {
                request.setTaskToDomain(new HashMap<>(workflowTaskToDomainMap));
            }
        }
        // set the correlation id of starter workflow, if its empty in the StartWorkflowRequest
        request.setCorrelationId(
                StringUtils.defaultIfBlank(
                        request.getCorrelationId(), workflow.getCorrelationId()));
        try {
            String workflowId = startWorkflow(request, workflow.getWorkflowId());
            taskModel.addOutput(WORKFLOW_ID, workflowId);
            taskModel.setStatus(COMPLETED);
        } catch (TransientException te) {
            // Task status intentionally not modified on a transient backend error.
            LOGGER.info(
                    "A transient backend error happened when task {} in {} tried to start workflow {}.",
                    taskModel.getTaskId(),
                    workflow.toShortString(),
                    request.getName());
        } catch (Exception ae) {
            taskModel.setStatus(FAILED);
            taskModel.setReasonForIncompletion(ae.getMessage());
            LOGGER.error(
                    "Error starting workflow: {} from workflow: {}",
                    request.getName(),
                    workflow.toShortString(),
                    ae);
        }
    }

    /**
     * Extracts and validates the {@link StartWorkflowRequest} from the task input.
     *
     * @return the validated request, or null if the input is missing, unparseable, or fails bean
     *     validation — in which case the task has been marked FAILED with an explanatory reason
     */
    private StartWorkflowRequest getRequest(TaskModel taskModel) {
        Map<String, Object> taskInput = taskModel.getInputData();
        StartWorkflowRequest startWorkflowRequest = null;
        if (taskInput.get(START_WORKFLOW_PARAMETER) == null) {
            taskModel.setStatus(FAILED);
            taskModel.setReasonForIncompletion(
                    "Missing '" + START_WORKFLOW_PARAMETER + "' in input data.");
        } else {
            try {
                startWorkflowRequest =
                        objectMapper.convertValue(
                                taskInput.get(START_WORKFLOW_PARAMETER),
                                StartWorkflowRequest.class);
                var violations = validator.validate(startWorkflowRequest);
                if (!violations.isEmpty()) {
                    // Collect every constraint violation into a single human-readable reason.
                    StringBuilder reasonForIncompletion =
                            new StringBuilder(START_WORKFLOW_PARAMETER)
                                    .append(" validation failed. ");
                    for (var violation : violations) {
                        reasonForIncompletion
                                .append("'")
                                .append(violation.getPropertyPath().toString())
                                .append("' -> ")
                                .append(violation.getMessage())
                                .append(". ");
                    }
                    taskModel.setStatus(FAILED);
                    taskModel.setReasonForIncompletion(reasonForIncompletion.toString());
                    startWorkflowRequest = null;
                }
            } catch (IllegalArgumentException e) {
                LOGGER.error("Error reading StartWorkflowRequest for {}", taskModel, e);
                taskModel.setStatus(FAILED);
                taskModel.setReasonForIncompletion(
                        "Error reading StartWorkflowRequest. " + e.getMessage());
            }
        }
        return startWorkflowRequest;
    }

    /**
     * Delegates to {@link StartWorkflowOperation}, tagging the new workflow with the id of the
     * workflow that triggered it.
     *
     * @return id of the newly started workflow
     */
    private String startWorkflow(StartWorkflowRequest request, String workflowId) {
        StartWorkflowInput input = new StartWorkflowInput(request);
        input.setTriggeringWorkflowId(workflowId);
        return startWorkflowOperation.execute(input);
    }

    /** This task is started asynchronously via the system-task queues. */
    @Override
    public boolean isAsync() {
        return true;
    }
}
| 6,693 |
0 | Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution | Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution/tasks/Inline.java | /*
* Copyright 2022 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution.tasks;
import java.util.Map;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
import com.netflix.conductor.core.exception.TerminateWorkflowException;
import com.netflix.conductor.core.execution.WorkflowExecutor;
import com.netflix.conductor.core.execution.evaluators.Evaluator;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_INLINE;
/**
* @author X-Ultra
* <p>Task that enables execute inline script at workflow execution. For example,
* <pre>
* ...
* {
* "tasks": [
* {
* "name": "INLINE",
* "taskReferenceName": "inline_test",
* "type": "INLINE",
* "inputParameters": {
* "input": "${workflow.input}",
* "evaluatorType": "javascript"
* "expression": "if ($.input.a==1){return {testvalue: true}} else{return {testvalue: false} }"
* }
* }
* ]
* }
* ...
* </pre>
* then to use task output, e.g. <code>script_test.output.testvalue</code> {@link Inline} is a
* replacement for deprecated {@link Lambda}
*/
@Component(TASK_TYPE_INLINE)
public class Inline extends WorkflowSystemTask {

    private static final Logger LOGGER = LoggerFactory.getLogger(Inline.class);

    private static final String QUERY_EVALUATOR_TYPE = "evaluatorType";
    private static final String QUERY_EXPRESSION_PARAMETER = "expression";

    public static final String NAME = "INLINE";

    // Registered evaluators, keyed by evaluator type (e.g. "javascript").
    private final Map<String, Evaluator> evaluators;

    public Inline(Map<String, Evaluator> evaluators) {
        super(TASK_TYPE_INLINE);
        this.evaluators = evaluators;
    }

    /**
     * Evaluates the task's 'expression' input with the evaluator named by 'evaluatorType',
     * storing the evaluation result under the "result" output key.
     *
     * @return always true — the task reaches a terminal status in one call: COMPLETED on
     *     success, FAILED_WITH_TERMINAL_ERROR for validation/evaluation errors (retry would
     *     fail identically), or FAILED for any other error
     */
    @Override
    public boolean execute(
            WorkflowModel workflow, TaskModel task, WorkflowExecutor workflowExecutor) {
        Map<String, Object> taskInput = task.getInputData();
        String evaluatorType = (String) taskInput.get(QUERY_EVALUATOR_TYPE);
        String expression = (String) taskInput.get(QUERY_EXPRESSION_PARAMETER);
        try {
            Evaluator evaluator = resolveEvaluator(evaluatorType);
            requireExpression(expression);
            task.addOutput("result", evaluator.evaluate(expression, taskInput));
            task.setStatus(TaskModel.Status.COMPLETED);
        } catch (Exception e) {
            Throwable cause = e.getCause();
            String errorMessage = cause != null ? cause.getMessage() : e.getMessage();
            LOGGER.error(
                    "Failed to execute Inline Task: {} in workflow: {}",
                    task.getTaskId(),
                    workflow.getWorkflowId(),
                    e);
            // TerminateWorkflowException marks failures that would recur on retry, so use the
            // terminal failure status for those.
            if (e instanceof TerminateWorkflowException) {
                task.setStatus(TaskModel.Status.FAILED_WITH_TERMINAL_ERROR);
            } else {
                task.setStatus(TaskModel.Status.FAILED);
            }
            task.setReasonForIncompletion(errorMessage);
            task.addOutput("error", errorMessage);
        }
        return true;
    }

    /**
     * Validates the 'evaluatorType' input and returns the matching registered evaluator.
     *
     * @throws TerminateWorkflowException if the type is blank or not registered
     */
    private Evaluator resolveEvaluator(String evaluatorType) {
        if (StringUtils.isBlank(evaluatorType)) {
            LOGGER.error("Empty {} in INLINE task. ", QUERY_EVALUATOR_TYPE);
            throw new TerminateWorkflowException(
                    "Empty '"
                            + QUERY_EVALUATOR_TYPE
                            + "' in INLINE task's input parameters. A non-empty String value must be provided.");
        }
        Evaluator evaluator = evaluators.get(evaluatorType);
        if (evaluator == null) {
            LOGGER.error("Evaluator {} for INLINE task not registered", evaluatorType);
            throw new TerminateWorkflowException(
                    "Unknown evaluator '" + evaluatorType + "' in INLINE task.");
        }
        return evaluator;
    }

    /**
     * Rejects a blank 'expression' input parameter.
     *
     * @throws TerminateWorkflowException if the expression is blank
     */
    private void requireExpression(String expression) {
        if (StringUtils.isBlank(expression)) {
            LOGGER.error("Empty {} in INLINE task. ", QUERY_EXPRESSION_PARAMETER);
            throw new TerminateWorkflowException(
                    "Empty '"
                            + QUERY_EXPRESSION_PARAMETER
                            + "' in Inline task's input parameters. A non-empty String value must be provided.");
        }
    }
}
| 6,694 |
0 | Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution | Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution/tasks/WorkflowSystemTask.java | /*
* Copyright 2022 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution.tasks;
import java.util.Optional;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.core.execution.WorkflowExecutor;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
/**
 * Base class for tasks executed by the Conductor server itself rather than by an external
 * worker. Subclasses override the lifecycle hooks they need; every default here is a safe no-op.
 */
public abstract class WorkflowSystemTask {

    private final String taskType;

    public WorkflowSystemTask(String taskType) {
        this.taskType = taskType;
    }

    /**
     * Starts the task execution.
     *
     * <p>Invoked once, first, while the task status is SCHEDULED.
     *
     * @param workflow Workflow for which the task is being started
     * @param task Instance of the Task
     * @param workflowExecutor Workflow Executor
     */
    public void start(WorkflowModel workflow, TaskModel task, WorkflowExecutor workflowExecutor) {
        // No-op unless overridden by a concrete system task.
    }

    /**
     * "Executes" the task.
     *
     * <p>Invoked after {@link #start(WorkflowModel, TaskModel, WorkflowExecutor)} while the task
     * is not yet in a terminal status; may be invoked repeatedly.
     *
     * @param workflow Workflow for which the task is being started
     * @param task Instance of the Task
     * @param workflowExecutor Workflow Executor
     * @return true if this call changed the task status, false otherwise
     */
    public boolean execute(
            WorkflowModel workflow, TaskModel task, WorkflowExecutor workflowExecutor) {
        return false;
    }

    /**
     * Cancels the task execution.
     *
     * @param workflow Workflow for which the task is being started
     * @param task Instance of the Task
     * @param workflowExecutor Workflow Executor
     */
    public void cancel(WorkflowModel workflow, TaskModel task, WorkflowExecutor workflowExecutor) {
        // No-op unless overridden by a concrete system task.
    }

    /** @return an evaluation offset override for the task, empty to use the default */
    public Optional<Long> getEvaluationOffset(TaskModel taskModel, long defaultOffset) {
        return Optional.empty();
    }

    /** @return true if the task should be started asynchronously via internal queues */
    public boolean isAsync() {
        return false;
    }

    /**
     * @return true to keep the task IN_PROGRESS and complete it later via an external message.
     *     The task input's "asyncComplete" flag takes precedence over the workflow task
     *     definition; absent both, defaults to false.
     */
    public boolean isAsyncComplete(TaskModel task) {
        if (task.getInputData().containsKey("asyncComplete")) {
            Object flag = task.getInputData().get("asyncComplete");
            return flag != null && (Boolean) flag;
        }
        WorkflowTask workflowTask = task.getWorkflowTask();
        return workflowTask != null && workflowTask.isAsyncComplete();
    }

    /** @return name of the system task */
    public String getTaskType() {
        return taskType;
    }

    /**
     * Defaults to true so tasks are loaded when workflow data is retrieved. Implementations that
     * do not need the tasks (e.g. sub-workflows) can return false for a solid performance gain.
     *
     * @return true to retrieve tasks when getting the workflow
     */
    public boolean isTaskRetrievalRequired() {
        return true;
    }

    @Override
    public String toString() {
        return taskType;
    }
}
| 6,695 |
0 | Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution | Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution/tasks/SetVariable.java | /*
* Copyright 2022 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution.tasks;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
import com.netflix.conductor.core.config.ConductorProperties;
import com.netflix.conductor.core.dal.ExecutionDAOFacade;
import com.netflix.conductor.core.exception.NonTransientException;
import com.netflix.conductor.core.execution.WorkflowExecutor;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import com.fasterxml.jackson.databind.ObjectMapper;
import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_SET_VARIABLE;
@Component(TASK_TYPE_SET_VARIABLE)
public class SetVariable extends WorkflowSystemTask {

    private static final Logger LOGGER = LoggerFactory.getLogger(SetVariable.class);

    private final ConductorProperties properties;
    private final ObjectMapper objectMapper;
    private final ExecutionDAOFacade executionDAOFacade;

    public SetVariable(
            ConductorProperties properties,
            ObjectMapper objectMapper,
            ExecutionDAOFacade executionDAOFacade) {
        super(TASK_TYPE_SET_VARIABLE);
        this.properties = properties;
        this.objectMapper = objectMapper;
        this.executionDAOFacade = executionDAOFacade;
    }

    /**
     * Verifies that the JSON-serialized size of {@code variables} stays within the configured
     * payload threshold.
     *
     * @return true when within the limit; false after recording the reason on the task
     * @throws NonTransientException if the variables cannot be serialized for measurement
     */
    private boolean validateVariablesSize(
            WorkflowModel workflow, TaskModel task, Map<String, Object> variables) {
        String workflowId = workflow.getWorkflowId();
        // The threshold is configured in kilobytes; convert once so the comparison and the
        // error message are both expressed in bytes. (Bug fix: the message previously printed
        // the kilobyte value while claiming it was bytes.)
        long maxThresholdBytes =
                properties.getMaxWorkflowVariablesPayloadSizeThreshold().toKilobytes() * 1024;
        try (ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream()) {
            this.objectMapper.writeValue(byteArrayOutputStream, variables);
            // size() avoids the array copy that toByteArray() would make.
            long payloadSize = byteArrayOutputStream.size();
            if (payloadSize > maxThresholdBytes) {
                String errorMsg =
                        String.format(
                                "The variables payload size: %d of workflow: %s is greater than the permissible limit: %d bytes",
                                payloadSize, workflowId, maxThresholdBytes);
                LOGGER.error(errorMsg);
                task.setReasonForIncompletion(errorMsg);
                return false;
            }
            return true;
        } catch (IOException e) {
            LOGGER.error(
                    "Unable to validate variables payload size of workflow: {}", workflowId, e);
            throw new NonTransientException(
                    "Unable to validate variables payload size of workflow: " + workflowId, e);
        }
    }

    /**
     * Merges the task's input data into the workflow variables and persists the workflow.
     *
     * <p>If the merged variables would exceed the configured payload size limit, the previous
     * variable values are restored and the task fails terminally (a retry would hit the same
     * limit). Note that on that failure path the workflow is intentionally not persisted.
     *
     * @return always true — the task reaches a terminal status in a single execution
     */
    @Override
    public boolean execute(WorkflowModel workflow, TaskModel task, WorkflowExecutor provider) {
        Map<String, Object> variables = workflow.getVariables();
        Map<String, Object> input = task.getInputData();
        String taskId = task.getTaskId();
        if (input != null && !input.isEmpty()) {
            // Track what changed so the update can be rolled back if it breaches the size limit.
            ArrayList<String> newKeys = new ArrayList<>();
            Map<String, Object> previousValues = new HashMap<>();
            input.forEach(
                    (key, value) -> {
                        if (variables.containsKey(key)) {
                            previousValues.put(key, variables.get(key));
                        } else {
                            newKeys.add(key);
                        }
                        variables.put(key, value);
                        LOGGER.debug("Task: {} setting value for variable: {}", taskId, key);
                    });
            if (!validateVariablesSize(workflow, task, variables)) {
                // Roll back: restore overwritten values and drop keys this task introduced.
                variables.putAll(previousValues);
                newKeys.forEach(variables::remove);
                task.setStatus(TaskModel.Status.FAILED_WITH_TERMINAL_ERROR);
                return true;
            }
        }
        task.setStatus(TaskModel.Status.COMPLETED);
        executionDAOFacade.updateWorkflow(workflow);
        return true;
    }
}
| 6,696 |
0 | Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution | Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution/tasks/SystemTaskWorker.java | /*
* Copyright 2022 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution.tasks;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.stereotype.Component;
import com.netflix.conductor.annotations.VisibleForTesting;
import com.netflix.conductor.core.LifecycleAwareComponent;
import com.netflix.conductor.core.config.ConductorProperties;
import com.netflix.conductor.core.execution.AsyncSystemTaskExecutor;
import com.netflix.conductor.core.utils.QueueUtils;
import com.netflix.conductor.core.utils.SemaphoreUtil;
import com.netflix.conductor.dao.QueueDAO;
import com.netflix.conductor.metrics.Monitors;
import com.netflix.conductor.service.ExecutionService;
/** The worker that polls and executes an async system task. */
@Component
@ConditionalOnProperty(
name = "conductor.system-task-workers.enabled",
havingValue = "true",
matchIfMissing = true)
public class SystemTaskWorker extends LifecycleAwareComponent {
private static final Logger LOGGER = LoggerFactory.getLogger(SystemTaskWorker.class);
private final long pollInterval;
private final QueueDAO queueDAO;
ExecutionConfig defaultExecutionConfig;
private final AsyncSystemTaskExecutor asyncSystemTaskExecutor;
private final ConductorProperties properties;
private final ExecutionService executionService;
ConcurrentHashMap<String, ExecutionConfig> queueExecutionConfigMap = new ConcurrentHashMap<>();
public SystemTaskWorker(
QueueDAO queueDAO,
AsyncSystemTaskExecutor asyncSystemTaskExecutor,
ConductorProperties properties,
ExecutionService executionService) {
this.properties = properties;
int threadCount = properties.getSystemTaskWorkerThreadCount();
this.defaultExecutionConfig = new ExecutionConfig(threadCount, "system-task-worker-%d");
this.asyncSystemTaskExecutor = asyncSystemTaskExecutor;
this.queueDAO = queueDAO;
this.pollInterval = properties.getSystemTaskWorkerPollInterval().toMillis();
this.executionService = executionService;
LOGGER.info("SystemTaskWorker initialized with {} threads", threadCount);
}
public void startPolling(WorkflowSystemTask systemTask) {
startPolling(systemTask, systemTask.getTaskType());
}
public void startPolling(WorkflowSystemTask systemTask, String queueName) {
Executors.newSingleThreadScheduledExecutor()
.scheduleWithFixedDelay(
() -> this.pollAndExecute(systemTask, queueName),
1000,
pollInterval,
TimeUnit.MILLISECONDS);
LOGGER.info("Started listening for task: {} in queue: {}", systemTask, queueName);
}
/**
 * Executes one poll cycle for {@code queueName}: claims all currently-free execution slots,
 * pops up to that many task ids from the queue, and submits each to the queue's executor.
 * Slots are handed back as soon as it is known they won't be used (short poll, blank id,
 * empty poll, or error) and otherwise when each task's async execution completes.
 *
 * @param systemTask the system task to execute for each polled task id
 * @param queueName the queue being polled
 */
void pollAndExecute(WorkflowSystemTask systemTask, String queueName) {
    if (!isRunning()) {
        // Worker has been stopped; skip this cycle entirely.
        LOGGER.debug(
                "{} stopped. Not polling for task: {}", getClass().getSimpleName(), systemTask);
        return;
    }
    ExecutionConfig executionConfig = getExecutionConfig(queueName);
    SemaphoreUtil semaphoreUtil = executionConfig.getSemaphoreUtil();
    ExecutorService executorService = executionConfig.getExecutorService();
    String taskName = QueueUtils.getTaskType(queueName);
    // Batch size = number of free slots right now; acquired below before polling.
    int messagesToAcquire = semaphoreUtil.availableSlots();

    try {
        if (messagesToAcquire <= 0 || !semaphoreUtil.acquireSlots(messagesToAcquire)) {
            // no available slots, do not poll
            Monitors.recordSystemTaskWorkerPollingLimited(queueName);
            return;
        }
        LOGGER.debug("Polling queue: {} with {} slots acquired", queueName, messagesToAcquire);

        // Pop with a 200ms wait; may return fewer ids than requested.
        List<String> polledTaskIds = queueDAO.pop(queueName, messagesToAcquire, 200);

        Monitors.recordTaskPoll(queueName);
        LOGGER.debug("Polling queue:{}, got {} tasks", queueName, polledTaskIds.size());

        if (polledTaskIds.size() > 0) {
            // Immediately release unused slots when number of messages acquired is less than
            // acquired slots
            if (polledTaskIds.size() < messagesToAcquire) {
                semaphoreUtil.completeProcessing(messagesToAcquire - polledTaskIds.size());
            }

            for (String taskId : polledTaskIds) {
                if (StringUtils.isNotBlank(taskId)) {
                    LOGGER.debug(
                            "Task: {} from queue: {} being sent to the workflow executor",
                            taskId,
                            queueName);
                    Monitors.recordTaskPollCount(queueName, 1);

                    executionService.ackTaskReceived(taskId);

                    // Hand off to the queue's executor; the poller thread does not block
                    // on task execution.
                    CompletableFuture<Void> taskCompletableFuture =
                            CompletableFuture.runAsync(
                                    () -> asyncSystemTaskExecutor.execute(systemTask, taskId),
                                    executorService);

                    // release permit after processing is complete
                    taskCompletableFuture.whenComplete(
                            (r, e) -> semaphoreUtil.completeProcessing(1));
                } else {
                    // Blank id: nothing to execute, return the slot immediately.
                    semaphoreUtil.completeProcessing(1);
                }
            }
        } else {
            // no task polled, release permit
            semaphoreUtil.completeProcessing(messagesToAcquire);
        }
    } catch (Exception e) {
        // release the permit if exception is thrown during polling, because the thread would
        // not be busy
        // NOTE(review): if the exception occurs after some futures were already submitted,
        // this blanket release could over-release their slots — confirm upstream intent.
        semaphoreUtil.completeProcessing(messagesToAcquire);
        Monitors.recordTaskPollError(taskName, e.getClass().getSimpleName());
        LOGGER.error("Error polling system task in queue:{}", queueName, e);
    }
}
/**
 * Returns the execution configuration for the given queue: isolated queues each get (and
 * cache) their own configuration, while all other queues share the default one.
 *
 * @param taskQueue the queue name
 * @return the {@link ExecutionConfig} to use for {@code taskQueue}
 */
@VisibleForTesting
ExecutionConfig getExecutionConfig(String taskQueue) {
    if (QueueUtils.isIsolatedQueue(taskQueue)) {
        // Lazily create and memoize a per-queue config for isolated queues.
        return queueExecutionConfigMap.computeIfAbsent(
                taskQueue, unused -> createExecutionConfig());
    }
    return defaultExecutionConfig;
}
/**
 * Builds a fresh {@link ExecutionConfig} for an isolated queue, sized from the configured
 * isolated-worker thread count.
 */
private ExecutionConfig createExecutionConfig() {
    final String threadNameFormat = "isolated-system-task-worker-%d";
    return new ExecutionConfig(
            properties.getIsolatedSystemTaskWorkerThreadCount(), threadNameFormat);
}
}
| 6,697 |
0 | Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution | Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution/tasks/SubWorkflow.java | /*
* Copyright 2022 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution.tasks;
import java.util.Map;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.core.exception.NonTransientException;
import com.netflix.conductor.core.exception.TransientException;
import com.netflix.conductor.core.execution.StartWorkflowInput;
import com.netflix.conductor.core.execution.WorkflowExecutor;
import com.netflix.conductor.core.operation.StartWorkflowOperation;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import com.fasterxml.jackson.databind.ObjectMapper;
import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_SUB_WORKFLOW;
/**
 * System task that starts a child (sub-) workflow and mirrors the child's status back onto
 * this task. {@link #start} launches the child workflow; {@link #execute} (driven by the
 * asyncComplete flow) copies the child's terminal status; {@link #cancel} terminates the
 * child when the parent is terminated.
 */
@Component(TASK_TYPE_SUB_WORKFLOW)
public class SubWorkflow extends WorkflowSystemTask {

    private static final Logger LOGGER = LoggerFactory.getLogger(SubWorkflow.class);
    // Output key carrying the child workflow id, kept for backwards compatibility
    // alongside TaskModel#setSubWorkflowId.
    private static final String SUB_WORKFLOW_ID = "subWorkflowId";

    private final ObjectMapper objectMapper;
    private final StartWorkflowOperation startWorkflowOperation;

    public SubWorkflow(ObjectMapper objectMapper, StartWorkflowOperation startWorkflowOperation) {
        super(TASK_TYPE_SUB_WORKFLOW);
        this.objectMapper = objectMapper;
        this.startWorkflowOperation = startWorkflowOperation;
    }

    /**
     * Starts the child workflow described by this task's input and records its id and initial
     * status on the task. On a {@link TransientException} the task status is left untouched so
     * the start can be retried; any other failure marks the task FAILED.
     */
    @SuppressWarnings("unchecked")
    @Override
    public void start(WorkflowModel workflow, TaskModel task, WorkflowExecutor workflowExecutor) {
        Map<String, Object> input = task.getInputData();
        // NOTE(review): these throw NPE if "subWorkflowName"/"subWorkflowVersion" are absent;
        // the generic catch below turns that into a FAILED task with a null reason — confirm
        // that is the intended behavior for malformed input.
        String name = input.get("subWorkflowName").toString();
        int version = (int) input.get("subWorkflowVersion");

        WorkflowDef workflowDefinition = null;
        if (input.get("subWorkflowDefinition") != null) {
            // convert the value back to workflow definition object
            workflowDefinition =
                    objectMapper.convertValue(
                            input.get("subWorkflowDefinition"), WorkflowDef.class);
            // An inline definition overrides the name from the task input.
            name = workflowDefinition.getName();
        }

        // Inherit the parent's task-to-domain mapping unless the task input overrides it.
        Map<String, String> taskToDomain = workflow.getTaskToDomain();
        if (input.get("subWorkflowTaskToDomain") instanceof Map) {
            taskToDomain = (Map<String, String>) input.get("subWorkflowTaskToDomain");
        }

        // Fall back to the whole task input when no explicit "workflowInput" is supplied.
        var wfInput = (Map<String, Object>) input.get("workflowInput");
        if (wfInput == null || wfInput.isEmpty()) {
            wfInput = input;
        }
        String correlationId = workflow.getCorrelationId();

        try {
            StartWorkflowInput startWorkflowInput = new StartWorkflowInput();
            startWorkflowInput.setWorkflowDefinition(workflowDefinition);
            startWorkflowInput.setName(name);
            startWorkflowInput.setVersion(version);
            startWorkflowInput.setWorkflowInput(wfInput);
            startWorkflowInput.setCorrelationId(correlationId);
            startWorkflowInput.setParentWorkflowId(workflow.getWorkflowId());
            startWorkflowInput.setParentWorkflowTaskId(task.getTaskId());
            startWorkflowInput.setTaskToDomain(taskToDomain);

            String subWorkflowId = startWorkflowOperation.execute(startWorkflowInput);

            task.setSubWorkflowId(subWorkflowId);
            // For backwards compatibility
            task.addOutput(SUB_WORKFLOW_ID, subWorkflowId);

            // Set task status based on current sub-workflow status, as the status can change in
            // recursion by the time we update here.
            WorkflowModel subWorkflow = workflowExecutor.getWorkflow(subWorkflowId, false);
            updateTaskStatus(subWorkflow, task);
        } catch (TransientException te) {
            // Deliberately leave the task status unchanged so the start is retried later.
            LOGGER.info(
                    "A transient backend error happened when task {} in {} tried to start sub workflow {}.",
                    task.getTaskId(),
                    workflow.toShortString(),
                    name);
        } catch (Exception ae) {
            task.setStatus(TaskModel.Status.FAILED);
            task.setReasonForIncompletion(ae.getMessage());
            LOGGER.error(
                    "Error starting sub workflow: {} from workflow: {}",
                    name,
                    workflow.toShortString(),
                    ae);
        }
    }

    /**
     * Checks the child workflow's status; returns true (task updated) only once the child has
     * reached a terminal state.
     */
    @Override
    public boolean execute(
            WorkflowModel workflow, TaskModel task, WorkflowExecutor workflowExecutor) {
        String workflowId = task.getSubWorkflowId();
        if (StringUtils.isEmpty(workflowId)) {
            // Child was never started; nothing to reflect yet.
            return false;
        }
        WorkflowModel subWorkflow = workflowExecutor.getWorkflow(workflowId, false);
        WorkflowModel.Status subWorkflowStatus = subWorkflow.getStatus();
        if (!subWorkflowStatus.isTerminal()) {
            return false;
        }
        updateTaskStatus(subWorkflow, task);
        return true;
    }

    /**
     * Terminates the child workflow when the parent is being terminated, propagating the
     * parent's termination reason (or status) to the child.
     */
    @Override
    public void cancel(WorkflowModel workflow, TaskModel task, WorkflowExecutor workflowExecutor) {
        String workflowId = task.getSubWorkflowId();
        if (StringUtils.isEmpty(workflowId)) {
            return;
        }
        WorkflowModel subWorkflow = workflowExecutor.getWorkflow(workflowId, true);
        subWorkflow.setStatus(WorkflowModel.Status.TERMINATED);
        String reason =
                StringUtils.isEmpty(workflow.getReasonForIncompletion())
                        ? "Parent workflow has been terminated with status " + workflow.getStatus()
                        : "Parent workflow has been terminated with reason: "
                                + workflow.getReasonForIncompletion();
        workflowExecutor.terminateWorkflow(subWorkflow, reason, null);
    }

    /** Sub-workflow tasks are always executed asynchronously. */
    @Override
    public boolean isAsync() {
        return true;
    }

    /**
     * Keeps the sub-workflow task asyncComplete: it is executed once asynchronously to move to
     * IN_PROGRESS, and is then driven to a terminal state by the sub-workflow's
     * completeWorkflow logic, thereby avoiding periodic polling.
     *
     * @param task the sub-workflow task
     * @return always true
     */
    @Override
    public boolean isAsyncComplete(TaskModel task) {
        return true;
    }

    /**
     * Maps the child workflow's status onto this task's status, and on terminal states copies
     * the child's output (or its external payload path) and failure reason to the task.
     */
    private void updateTaskStatus(WorkflowModel subworkflow, TaskModel task) {
        WorkflowModel.Status status = subworkflow.getStatus();
        switch (status) {
            case RUNNING:
            case PAUSED:
                task.setStatus(TaskModel.Status.IN_PROGRESS);
                break;
            case COMPLETED:
                task.setStatus(TaskModel.Status.COMPLETED);
                break;
            case FAILED:
                task.setStatus(TaskModel.Status.FAILED);
                break;
            case TERMINATED:
                task.setStatus(TaskModel.Status.CANCELED);
                break;
            case TIMED_OUT:
                task.setStatus(TaskModel.Status.TIMED_OUT);
                break;
            default:
                throw new NonTransientException(
                        "Subworkflow status does not conform to relevant task status.");
        }

        if (status.isTerminal()) {
            // Prefer the externalized payload path over copying a (possibly large) output map.
            if (subworkflow.getExternalOutputPayloadStoragePath() != null) {
                task.setExternalOutputPayloadStoragePath(
                        subworkflow.getExternalOutputPayloadStoragePath());
            } else {
                task.addOutput(subworkflow.getOutput());
            }
            if (!status.isSuccessful()) {
                task.setReasonForIncompletion(
                        String.format(
                                "Sub workflow %s failure reason: %s",
                                subworkflow.toShortString(),
                                subworkflow.getReasonForIncompletion()));
            }
        }
    }

    /**
     * We don't need the tasks when retrieving the workflow data.
     *
     * @return false
     */
    @Override
    public boolean isTaskRetrievalRequired() {
        return false;
    }
}
| 6,698 |
0 | Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution | Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution/tasks/Switch.java | /*
* Copyright 2022 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution.tasks;
import org.springframework.stereotype.Component;
import com.netflix.conductor.core.execution.WorkflowExecutor;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_SWITCH;
/**
 * System task implementing the SWITCH construct — the successor to the now-deprecated
 * {@link Decision} task. Branch selection happens in the workflow engine, so executing the
 * task itself only marks it complete.
 */
@Component(TASK_TYPE_SWITCH)
public class Switch extends WorkflowSystemTask {

    public Switch() {
        super(TASK_TYPE_SWITCH);
    }

    @Override
    public boolean execute(
            WorkflowModel workflow, TaskModel task, WorkflowExecutor workflowExecutor) {
        // By the time this runs, the branch has already been chosen; the task
        // simply transitions to COMPLETED.
        task.setStatus(TaskModel.Status.COMPLETED);
        return true;
    }
}
| 6,699 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.