Dataset columns:
repo (string, ~1k distinct values) | file_url (string, length 96–373) | file_path (string, length 11–294) | content (string, length 0–32.8k) | language (string, 1 class) | license (string, 6 classes) | commit_sha (string, ~1k distinct values) | retrieved_at (date, 2026-01-04 14:45:56 – 2026-01-04 18:30:23) | truncated (bool, 2 classes)
|---|---|---|---|---|---|---|---|---|
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/test/java/com/netflix/conductor/core/execution/tasks/TestTerminate.java | core/src/test/java/com/netflix/conductor/core/execution/tasks/TestTerminate.java | /*
* Copyright 2022 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution.tasks;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import org.junit.Test;
import com.netflix.conductor.core.execution.WorkflowExecutor;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import static com.netflix.conductor.core.execution.tasks.Terminate.getTerminationStatusParameter;
import static com.netflix.conductor.core.execution.tasks.Terminate.getTerminationWorkflowOutputParameter;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.mock;
public class TestTerminate {

    private final WorkflowExecutor executor = mock(WorkflowExecutor.class);

    /**
     * Builds a {@link TaskModel} seeded with {@code input}, runs the {@link Terminate} system task
     * against {@code workflow}, and returns the executed task for assertions.
     */
    private TaskModel executeTerminate(WorkflowModel workflow, Map<String, Object> input) {
        Terminate terminateTask = new Terminate();
        TaskModel task = new TaskModel();
        task.getInputData().putAll(input);
        terminateTask.execute(workflow, task, executor);
        return task;
    }

    /** Input map carrying only the termination-status parameter (value may be null). */
    private Map<String, Object> statusInput(Object status) {
        Map<String, Object> input = new HashMap<>();
        input.put(getTerminationStatusParameter(), status);
        return input;
    }

    /** Input map carrying both the termination status and the workflow-output parameter. */
    private Map<String, Object> statusAndOutputInput(String status, Object workflowOutput) {
        Map<String, Object> input = statusInput(status);
        input.put(getTerminationWorkflowOutputParameter(), workflowOutput);
        return input;
    }

    @Test
    public void should_fail_if_input_status_is_not_valid() {
        // "PAUSED" is not an accepted terminal status for a Terminate task.
        TaskModel task = executeTerminate(new WorkflowModel(), statusInput("PAUSED"));
        assertEquals(TaskModel.Status.FAILED, task.getStatus());
    }

    @Test
    public void should_fail_if_input_status_is_empty() {
        TaskModel task = executeTerminate(new WorkflowModel(), statusInput(""));
        assertEquals(TaskModel.Status.FAILED, task.getStatus());
    }

    @Test
    public void should_fail_if_input_status_is_null() {
        TaskModel task = executeTerminate(new WorkflowModel(), statusInput(null));
        assertEquals(TaskModel.Status.FAILED, task.getStatus());
    }

    @Test
    public void should_complete_workflow_on_terminate_task_success() {
        WorkflowModel workflow = new WorkflowModel();
        workflow.setOutput(Collections.singletonMap("output", "${task1.output.value}"));

        // The task's output is expected to carry the (unresolved) expression passed as input.
        Map<String, Object> expectedOutput =
                Collections.singletonMap("output", "${task0.output.value}");

        TaskModel task =
                executeTerminate(
                        workflow, statusAndOutputInput("COMPLETED", "${task0.output.value}"));

        assertEquals(TaskModel.Status.COMPLETED, task.getStatus());
        assertEquals(expectedOutput, task.getOutputData());
    }

    @Test
    public void should_fail_workflow_on_terminate_task_success() {
        WorkflowModel workflow = new WorkflowModel();
        workflow.setOutput(Collections.singletonMap("output", "${task1.output.value}"));

        Map<String, Object> expectedOutput =
                Collections.singletonMap("output", "${task0.output.value}");

        // The Terminate task itself COMPLETES even when it terminates the workflow as FAILED.
        TaskModel task =
                executeTerminate(workflow, statusAndOutputInput("FAILED", "${task0.output.value}"));

        assertEquals(TaskModel.Status.COMPLETED, task.getStatus());
        assertEquals(expectedOutput, task.getOutputData());
    }

    @Test
    public void should_fail_workflow_on_terminate_task_success_with_empty_output() {
        TaskModel task = executeTerminate(new WorkflowModel(), statusInput("FAILED"));

        assertEquals(TaskModel.Status.COMPLETED, task.getStatus());
        assertTrue(task.getOutputData().isEmpty());
    }

    @Test
    public void should_fail_workflow_on_terminate_task_success_with_resolved_output() {
        Map<String, Object> resolvedOutput = Collections.singletonMap("result", 1);

        TaskModel task =
                executeTerminate(
                        new WorkflowModel(), statusAndOutputInput("FAILED", resolvedOutput));

        assertEquals(TaskModel.Status.COMPLETED, task.getStatus());
    }
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/test/java/com/netflix/conductor/core/execution/tasks/DoWhileIntegrationTest.java | core/src/test/java/com/netflix/conductor/core/execution/tasks/DoWhileIntegrationTest.java | /*
* Copyright 2024 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution.tasks;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;
import java.util.stream.Collectors;
import org.junit.Before;
import org.junit.Test;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.core.dal.ExecutionDAOFacade;
import com.netflix.conductor.core.utils.ParametersUtils;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import com.fasterxml.jackson.databind.ObjectMapper;
import static org.junit.Assert.*;
import static org.mockito.Mockito.*;
/**
* Integration-style tests for DoWhile task cleanup functionality. These tests verify the
* interaction between removeIterations() and ExecutionDAOFacade using a mock that simulates
* database behavior.
*/
public class DoWhileIntegrationTest {

    // System under test: a real DoWhile handler wired to a mocked ExecutionDAOFacade.
    private DoWhile doWhile;
    private ExecutionDAOFacade executionDAOFacade;

    // Simulated in-memory database
    private Map<String, TaskModel> taskDatabase;

    @Before
    public void setup() {
        // Create fresh in-memory "database" for each test
        taskDatabase = new ConcurrentHashMap<>();

        // Create mock ExecutionDAOFacade with real behavior
        executionDAOFacade = mock(ExecutionDAOFacade.class);

        // Configure mock to actually remove from our simulated database
        doAnswer(
                        invocation -> {
                            String taskId = invocation.getArgument(0);
                            taskDatabase.remove(taskId);
                            return null;
                        })
                .when(executionDAOFacade)
                .removeTask(anyString());

        // Create real DoWhile task handler
        ParametersUtils parametersUtils = new ParametersUtils(new ObjectMapper());
        doWhile = new DoWhile(parametersUtils, executionDAOFacade);
    }

    @Test
    public void testRemoveIterations_ActuallyRemovesFromDatabase() {
        // Create workflow with 10 iterations (3 tasks per iteration = 30 tasks total)
        WorkflowModel workflow = createAndPersistWorkflow(10, 3);
        TaskModel doWhileTask = getDoWhileTask(workflow);
        doWhileTask.setIteration(10);

        // Verify all 30 tasks exist in "database"
        assertEquals("Should have 30 tasks initially", 30, taskDatabase.size());

        // Execute cleanup - keep last 3 iterations
        doWhile.removeIterations(workflow, doWhileTask, 3);

        // Verify only last 3 iterations remain (9 tasks)
        assertEquals("Should have 9 tasks remaining", 9, taskDatabase.size());

        // Verify correct iterations remain (8, 9, 10)
        Set<Integer> remainingIterations =
                taskDatabase.values().stream()
                        .map(TaskModel::getIteration)
                        .collect(Collectors.toSet());
        assertEquals("Should keep iterations 8, 9, 10", Set.of(8, 9, 10), remainingIterations);
    }

    @Test
    public void testRemoveIterations_WithLargeIterationCount() {
        // Create workflow with 100 iterations
        WorkflowModel workflow = createAndPersistWorkflow(100, 2);
        TaskModel doWhileTask = getDoWhileTask(workflow);
        doWhileTask.setIteration(100);

        // Verify all 200 tasks exist
        assertEquals(200, taskDatabase.size());

        // Execute cleanup - keep last 10 iterations
        doWhile.removeIterations(workflow, doWhileTask, 10);

        // Verify only last 10 iterations remain (20 tasks)
        assertEquals("Should have 20 tasks remaining", 20, taskDatabase.size());

        // Verify correct iteration range
        Set<Integer> remainingIterations =
                taskDatabase.values().stream()
                        .map(TaskModel::getIteration)
                        .collect(Collectors.toSet());
        for (int i = 91; i <= 100; i++) {
            assertTrue("Should contain iteration " + i, remainingIterations.contains(i));
        }
        assertEquals("Should have exactly 10 iterations", 10, remainingIterations.size());
    }

    @Test
    public void testRemoveIterations_DoesNotAffectOtherWorkflows() {
        // Create two separate workflows
        WorkflowModel workflow1 = createAndPersistWorkflow(5, 2);
        WorkflowModel workflow2 = createAndPersistWorkflow(5, 2);

        TaskModel doWhileTask1 = getDoWhileTask(workflow1);
        doWhileTask1.setIteration(5);

        String wf1Id = workflow1.getWorkflowId();
        String wf2Id = workflow2.getWorkflowId();

        // Count tasks for each workflow
        long wf1CountBefore =
                taskDatabase.values().stream()
                        .filter(t -> wf1Id.equals(t.getWorkflowInstanceId()))
                        .count();
        long wf2CountBefore =
                taskDatabase.values().stream()
                        .filter(t -> wf2Id.equals(t.getWorkflowInstanceId()))
                        .count();

        assertEquals(10, wf1CountBefore);
        assertEquals(10, wf2CountBefore);

        // Execute cleanup on workflow1 only
        doWhile.removeIterations(workflow1, doWhileTask1, 2);

        // Count after cleanup
        long wf1CountAfter =
                taskDatabase.values().stream()
                        .filter(t -> wf1Id.equals(t.getWorkflowInstanceId()))
                        .count();
        long wf2CountAfter =
                taskDatabase.values().stream()
                        .filter(t -> wf2Id.equals(t.getWorkflowInstanceId()))
                        .count();

        // Verify workflow1 tasks were removed (keeping 2 iterations = 4 tasks)
        assertEquals(4, wf1CountAfter);

        // Verify workflow2 tasks are unchanged
        assertEquals("Workflow2 should be unaffected", 10, wf2CountAfter);
    }

    @Test
    public void testRemoveIterations_BelowThreshold_NoRemoval() {
        // Create workflow with 3 iterations
        WorkflowModel workflow = createAndPersistWorkflow(3, 2);
        TaskModel doWhileTask = getDoWhileTask(workflow);
        doWhileTask.setIteration(3);

        // Store initial count
        int initialCount = taskDatabase.size();
        assertEquals("Should start with 6 tasks", 6, initialCount);

        // Execute cleanup with keepLastN > current iteration
        doWhile.removeIterations(workflow, doWhileTask, 5);

        // Verify no tasks were removed
        int finalCount = taskDatabase.size();
        assertEquals("Should not remove any tasks when below threshold", initialCount, finalCount);
    }

    @Test
    public void testRemoveIterations_VerifyTasksActuallyGone() {
        // Create workflow with specific task IDs we can track
        WorkflowModel workflow = createAndPersistWorkflow(4, 2);
        TaskModel doWhileTask = getDoWhileTask(workflow);
        doWhileTask.setIteration(4);

        // Get task IDs from iterations 1 and 2 (should be removed).
        // The DO_WHILE task itself is excluded so only loop-body tasks are tracked.
        List<String> oldTaskIds =
                workflow.getTasks().stream()
                        .filter(t -> t.getIteration() <= 2)
                        .filter(t -> !t.getTaskId().equals(doWhileTask.getTaskId()))
                        .map(TaskModel::getTaskId)
                        .collect(Collectors.toList());

        assertEquals("Should have 4 old tasks", 4, oldTaskIds.size());

        // Verify all old tasks exist before cleanup
        for (String taskId : oldTaskIds) {
            assertTrue("Task should exist before cleanup", taskDatabase.containsKey(taskId));
        }

        // Execute cleanup (keep last 2 iterations)
        doWhile.removeIterations(workflow, doWhileTask, 2);

        // Verify old tasks are actually gone from database
        for (String taskId : oldTaskIds) {
            assertFalse(
                    "Task " + taskId + " should be removed from database",
                    taskDatabase.containsKey(taskId));
        }

        // Get task IDs from iterations 3 and 4 (should remain)
        List<String> recentTaskIds =
                workflow.getTasks().stream()
                        .filter(t -> t.getIteration() >= 3)
                        .filter(t -> !t.getTaskId().equals(doWhileTask.getTaskId()))
                        .map(TaskModel::getTaskId)
                        .collect(Collectors.toList());

        // Verify recent tasks still exist
        for (String taskId : recentTaskIds) {
            assertTrue("Recent task should still exist", taskDatabase.containsKey(taskId));
        }
    }

    @Test
    public void testRemoveIterations_IncrementalCleanup() {
        // Create workflow with 5 iterations initially
        WorkflowModel workflow = createAndPersistWorkflow(5, 2);
        TaskModel doWhileTask = getDoWhileTask(workflow);

        // First cleanup at iteration 5 - keep last 3
        doWhileTask.setIteration(5);
        doWhile.removeIterations(workflow, doWhileTask, 3);

        // Should have iterations 3, 4, 5 remaining (6 tasks)
        assertEquals("Should have 6 tasks after first cleanup", 6, taskDatabase.size());
        Set<Integer> iterations1 =
                taskDatabase.values().stream()
                        .map(TaskModel::getIteration)
                        .collect(Collectors.toSet());
        assertEquals("Should have iterations 3, 4, 5", Set.of(3, 4, 5), iterations1);

        // Simulate adding more iterations (6, 7, 8); these must be added both to the
        // workflow's task list and to the simulated database, mirroring createAndPersistWorkflow.
        for (int iteration = 6; iteration <= 8; iteration++) {
            for (int taskNum = 1; taskNum <= 2; taskNum++) {
                TaskModel task = createIterationTask(workflow.getWorkflowId(), iteration, taskNum);
                workflow.getTasks().add(task);
                taskDatabase.put(task.getTaskId(), task);
            }
        }

        // Second cleanup at iteration 8 - keep last 3
        doWhileTask.setIteration(8);
        doWhile.removeIterations(workflow, doWhileTask, 3);

        // Should have iterations 6, 7, 8 remaining (6 tasks)
        assertEquals("Should have 6 tasks after second cleanup", 6, taskDatabase.size());
        Set<Integer> iterations2 =
                taskDatabase.values().stream()
                        .map(TaskModel::getIteration)
                        .collect(Collectors.toSet());
        assertEquals("Should have iterations 6, 7, 8", Set.of(6, 7, 8), iterations2);
    }

    // Helper methods

    /**
     * Builds a workflow containing one DO_WHILE task plus {@code iterations * tasksPerIteration}
     * loop-body tasks. Only the loop-body tasks are persisted into {@link #taskDatabase}; the
     * DO_WHILE task itself lives only in the workflow's task list.
     */
    private WorkflowModel createAndPersistWorkflow(int iterations, int tasksPerIteration) {
        WorkflowModel workflow = new WorkflowModel();
        workflow.setWorkflowId("test-workflow-" + UUID.randomUUID());

        List<TaskModel> allTasks = new ArrayList<>();

        // Create DO_WHILE task (not stored in iteration tasks)
        TaskModel doWhileTask = createDoWhileTask(workflow.getWorkflowId(), tasksPerIteration);
        allTasks.add(doWhileTask);

        // Create and persist tasks for each iteration
        for (int iteration = 1; iteration <= iterations; iteration++) {
            for (int taskNum = 1; taskNum <= tasksPerIteration; taskNum++) {
                TaskModel task = createIterationTask(workflow.getWorkflowId(), iteration, taskNum);
                allTasks.add(task);
                // Persist task to simulated database
                taskDatabase.put(task.getTaskId(), task);
            }
        }

        workflow.setTasks(allTasks);
        return workflow;
    }

    /**
     * Creates an IN_PROGRESS DO_WHILE task whose WorkflowTask definition declares
     * {@code tasksPerIteration} loop-over tasks and a {@code keepLastN} input parameter.
     */
    private TaskModel createDoWhileTask(String workflowId, int tasksPerIteration) {
        TaskModel doWhileTask = new TaskModel();
        doWhileTask.setTaskId("do-while-" + UUID.randomUUID());
        doWhileTask.setWorkflowInstanceId(workflowId);
        doWhileTask.setReferenceTaskName("doWhileTask");
        doWhileTask.setTaskType("DO_WHILE");
        doWhileTask.setStatus(TaskModel.Status.IN_PROGRESS);

        // Create workflow task with loopOver definition
        WorkflowTask workflowTask = new WorkflowTask();
        workflowTask.setTaskReferenceName("doWhileTask");
        workflowTask.setType("DO_WHILE");

        // Add loop over tasks
        List<WorkflowTask> loopOverTasks = new ArrayList<>();
        for (int i = 1; i <= tasksPerIteration; i++) {
            WorkflowTask loopTask = new WorkflowTask();
            loopTask.setTaskReferenceName("loopTask" + i);
            loopOverTasks.add(loopTask);
        }
        workflowTask.setLoopOver(loopOverTasks);

        // Set input parameters
        Map<String, Object> inputParams = new HashMap<>();
        inputParams.put("keepLastN", 3);
        workflowTask.setInputParameters(inputParams);

        doWhileTask.setWorkflowTask(workflowTask);
        return doWhileTask;
    }

    /**
     * Creates one COMPLETED loop-body task for the given iteration; the reference name follows
     * the DoWhile convention {@code <taskRef>__<iteration>}.
     */
    private TaskModel createIterationTask(String workflowId, int iteration, int taskNum) {
        TaskModel task = new TaskModel();
        task.setTaskId("task-" + UUID.randomUUID());
        task.setWorkflowInstanceId(workflowId);
        task.setReferenceTaskName("loopTask" + taskNum + "__" + iteration);
        task.setIteration(iteration);
        task.setTaskType("SIMPLE");
        task.setTaskDefName("loopTask" + taskNum);
        task.setStatus(TaskModel.Status.COMPLETED);

        WorkflowTask workflowTask = new WorkflowTask();
        workflowTask.setTaskReferenceName("loopTask" + taskNum);
        task.setWorkflowTask(workflowTask);

        return task;
    }

    /** Returns the single DO_WHILE task of the workflow, failing loudly if absent. */
    private TaskModel getDoWhileTask(WorkflowModel workflow) {
        return workflow.getTasks().stream()
                .filter(t -> "DO_WHILE".equals(t.getTaskType()))
                .findFirst()
                .orElseThrow(() -> new IllegalStateException("No DO_WHILE task found"));
    }
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/test/java/com/netflix/conductor/core/execution/tasks/TestLambda.java | core/src/test/java/com/netflix/conductor/core/execution/tasks/TestLambda.java | /*
* Copyright 2022 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution.tasks;
import java.util.HashMap;
import java.util.Map;
import org.junit.Test;
import com.netflix.conductor.core.execution.WorkflowExecutor;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.mock;
/**
* @author x-ultra
*/
public class TestLambda {

    private final WorkflowModel workflow = new WorkflowModel();
    private final WorkflowExecutor executor = mock(WorkflowExecutor.class);

    /**
     * Exercises the Lambda system task end to end: missing script expression fails the task, a
     * valid expression completes it with the evaluated result, and a script error fails it.
     */
    @Test
    public void start() {
        Lambda lambda = new Lambda();
        // Use a properly parameterized map instead of raw types; this removes the need for
        // the previous @SuppressWarnings({"rawtypes", "unchecked"}).
        Map<String, Object> inputObj = new HashMap<>();
        inputObj.put("a", 1);

        // test for scriptExpression == null
        TaskModel task = new TaskModel();
        task.getInputData().put("input", inputObj);
        lambda.execute(workflow, task, executor);
        assertEquals(TaskModel.Status.FAILED, task.getStatus());

        // test for normal
        task = new TaskModel();
        task.getInputData().put("input", inputObj);
        task.getInputData().put("scriptExpression", "if ($.input.a==1){return 1}else{return 0 } ");
        lambda.execute(workflow, task, executor);
        assertEquals(TaskModel.Status.COMPLETED, task.getStatus());
        // assertEquals(expected, actual): expected value goes first
        assertEquals("{result=1}", task.getOutputData().toString());

        // test for scriptExpression ScriptException
        task = new TaskModel();
        task.getInputData().put("input", inputObj);
        task.getInputData().put("scriptExpression", "if ($.a.size==1){return 1}else{return 0 } ");
        lambda.execute(workflow, task, executor);
        assertEquals(TaskModel.Status.FAILED, task.getStatus());
    }
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/test/java/com/netflix/conductor/core/execution/tasks/TestSystemTaskWorker.java | core/src/test/java/com/netflix/conductor/core/execution/tasks/TestSystemTaskWorker.java | /*
* Copyright 2021 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution.tasks;
import java.time.Duration;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mockito;
import com.netflix.conductor.core.config.ConductorProperties;
import com.netflix.conductor.core.execution.AsyncSystemTaskExecutor;
import com.netflix.conductor.dao.QueueDAO;
import com.netflix.conductor.service.ExecutionService;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyInt;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
public class TestSystemTaskWorker {

    private static final String TEST_TASK = "system_task";
    private static final String ISOLATED_TASK = "system_task-isolated";

    // Upper bound for async executions triggered by pollAndExecute; prevents a broken
    // implementation from hanging the build forever on latch.await().
    private static final long LATCH_TIMEOUT_SECONDS = 5;

    private AsyncSystemTaskExecutor asyncSystemTaskExecutor;
    private ExecutionService executionService;
    private QueueDAO queueDAO;
    private ConductorProperties properties;
    private SystemTaskWorker systemTaskWorker;

    @Before
    public void setUp() {
        asyncSystemTaskExecutor = mock(AsyncSystemTaskExecutor.class);
        executionService = mock(ExecutionService.class);
        queueDAO = mock(QueueDAO.class);
        properties = mock(ConductorProperties.class);
        when(properties.getSystemTaskWorkerThreadCount()).thenReturn(10);
        when(properties.getIsolatedSystemTaskWorkerThreadCount()).thenReturn(10);
        when(properties.getSystemTaskWorkerCallbackDuration()).thenReturn(Duration.ofSeconds(30));
        when(properties.getSystemTaskWorkerPollInterval()).thenReturn(Duration.ofSeconds(30));
        systemTaskWorker =
                new SystemTaskWorker(
                        queueDAO, asyncSystemTaskExecutor, properties, executionService);
        systemTaskWorker.start();
    }

    @After
    public void tearDown() {
        systemTaskWorker.queueExecutionConfigMap.clear();
        systemTaskWorker.stop();
    }

    /** Replaces the worker built in setUp, stopping the old one first so it is not leaked. */
    private void recreateWorker() {
        systemTaskWorker.stop();
        systemTaskWorker =
                new SystemTaskWorker(
                        queueDAO, asyncSystemTaskExecutor, properties, executionService);
    }

    /** Awaits the latch with a timeout and fails the test if the count never reaches zero. */
    private void awaitOrFail(CountDownLatch latch) throws InterruptedException {
        assertTrue(
                "timed out waiting for async execution(s)",
                latch.await(LATCH_TIMEOUT_SECONDS, TimeUnit.SECONDS));
    }

    @Test
    public void testGetExecutionConfigForSystemTask() {
        when(properties.getSystemTaskWorkerThreadCount()).thenReturn(5);
        recreateWorker();
        // assertEquals(expected, actual): expected value goes first
        assertEquals(
                5, systemTaskWorker.getExecutionConfig("").getSemaphoreUtil().availableSlots());
    }

    @Test
    public void testGetExecutionConfigForIsolatedSystemTask() {
        when(properties.getIsolatedSystemTaskWorkerThreadCount()).thenReturn(7);
        recreateWorker();
        assertEquals(
                7,
                systemTaskWorker.getExecutionConfig("test-iso").getSemaphoreUtil().availableSlots());
    }

    @Test
    public void testPollAndExecuteSystemTask() throws Exception {
        when(queueDAO.pop(anyString(), anyInt(), anyInt()))
                .thenReturn(Collections.singletonList("taskId"));
        CountDownLatch latch = new CountDownLatch(1);
        doAnswer(
                        invocation -> {
                            latch.countDown();
                            return null;
                        })
                .when(asyncSystemTaskExecutor)
                .execute(any(), anyString());
        systemTaskWorker.pollAndExecute(new TestTask(), TEST_TASK);
        awaitOrFail(latch);
        verify(asyncSystemTaskExecutor).execute(any(), anyString());
    }

    @Test
    public void testBatchPollAndExecuteSystemTask() throws Exception {
        when(queueDAO.pop(anyString(), anyInt(), anyInt())).thenReturn(List.of("t1", "t1"));
        CountDownLatch latch = new CountDownLatch(2);
        doAnswer(
                        invocation -> {
                            latch.countDown();
                            return null;
                        })
                .when(asyncSystemTaskExecutor)
                .execute(any(), eq("t1"));
        systemTaskWorker.pollAndExecute(new TestTask(), TEST_TASK);
        awaitOrFail(latch);
        verify(asyncSystemTaskExecutor, Mockito.times(2)).execute(any(), eq("t1"));
    }

    @Test
    public void testPollAndExecuteIsolatedSystemTask() throws Exception {
        when(queueDAO.pop(anyString(), anyInt(), anyInt())).thenReturn(List.of("isolated_taskId"));
        CountDownLatch latch = new CountDownLatch(1);
        doAnswer(
                        invocation -> {
                            latch.countDown();
                            return null;
                        })
                .when(asyncSystemTaskExecutor)
                .execute(any(), eq("isolated_taskId"));
        systemTaskWorker.pollAndExecute(new IsolatedTask(), ISOLATED_TASK);
        awaitOrFail(latch);
        verify(asyncSystemTaskExecutor, Mockito.times(1)).execute(any(), eq("isolated_taskId"));
    }

    @Test
    public void testPollException() {
        when(properties.getSystemTaskWorkerThreadCount()).thenReturn(1);
        when(queueDAO.pop(anyString(), anyInt(), anyInt())).thenThrow(RuntimeException.class);
        systemTaskWorker.pollAndExecute(new TestTask(), TEST_TASK);
        verify(asyncSystemTaskExecutor, Mockito.never()).execute(any(), anyString());
    }

    @Test
    public void testBatchPollException() {
        when(properties.getSystemTaskWorkerThreadCount()).thenReturn(2);
        when(queueDAO.pop(anyString(), anyInt(), anyInt())).thenThrow(RuntimeException.class);
        systemTaskWorker.pollAndExecute(new TestTask(), TEST_TASK);
        verify(asyncSystemTaskExecutor, Mockito.never()).execute(any(), anyString());
    }

    /** Minimal system task bound to the default (non-isolated) queue name. */
    static class TestTask extends WorkflowSystemTask {
        public TestTask() {
            super(TEST_TASK);
        }
    }

    /** Minimal system task bound to an isolated queue name (suffix "-isolated"). */
    static class IsolatedTask extends WorkflowSystemTask {
        public IsolatedTask() {
            super(ISOLATED_TASK);
        }
    }
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/test/java/com/netflix/conductor/core/execution/tasks/InlineTest.java | core/src/test/java/com/netflix/conductor/core/execution/tasks/InlineTest.java | /*
* Copyright 2022 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution.tasks;
import java.util.HashMap;
import java.util.Map;
import org.junit.Test;
import com.netflix.conductor.core.execution.WorkflowExecutor;
import com.netflix.conductor.core.execution.evaluators.Evaluator;
import com.netflix.conductor.core.execution.evaluators.GraalJSEvaluator;
import com.netflix.conductor.core.execution.evaluators.JavascriptEvaluator;
import com.netflix.conductor.core.execution.evaluators.ValueParamEvaluator;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import static org.junit.jupiter.api.Assertions.*;
import static org.mockito.Mockito.mock;
public class InlineTest {

    private final WorkflowModel workflow = new WorkflowModel();
    private final WorkflowExecutor executor = mock(WorkflowExecutor.class);

    @Test
    public void testInlineTaskValidationFailures() {
        Inline inline = new Inline(getStringEvaluatorMap());

        // Empty 'expression' must fail with a terminal error.
        Map<String, Object> inputObj = new HashMap<>();
        inputObj.put("value", 1);
        inputObj.put("expression", "");
        inputObj.put("evaluatorType", "value-param");

        TaskModel task = new TaskModel();
        task.getInputData().putAll(inputObj);

        inline.execute(workflow, task, executor);
        assertEquals(TaskModel.Status.FAILED_WITH_TERMINAL_ERROR, task.getStatus());
        assertEquals(
                "Empty 'expression' in Inline task's input parameters. A non-empty String value must be provided.",
                task.getReasonForIncompletion());

        // Empty 'evaluatorType' must also fail with a terminal error.
        inputObj = new HashMap<>();
        inputObj.put("value", 1);
        inputObj.put("expression", "value");
        inputObj.put("evaluatorType", "");

        task = new TaskModel();
        task.getInputData().putAll(inputObj);

        inline.execute(workflow, task, executor);
        assertEquals(TaskModel.Status.FAILED_WITH_TERMINAL_ERROR, task.getStatus());
        assertEquals(
                "Empty 'evaluatorType' in INLINE task's input parameters. A non-empty String value must be provided.",
                task.getReasonForIncompletion());
    }

    @Test
    public void testInlineValueParamExpression() {
        Inline inline = new Inline(getStringEvaluatorMap());

        // value-param evaluator echoes the named input back as "result" (integer case).
        Map<String, Object> inputObj = new HashMap<>();
        inputObj.put("value", 101);
        inputObj.put("expression", "value");
        inputObj.put("evaluatorType", "value-param");

        TaskModel task = new TaskModel();
        task.getInputData().putAll(inputObj);

        inline.execute(workflow, task, executor);
        assertEquals(TaskModel.Status.COMPLETED, task.getStatus());
        assertNull(task.getReasonForIncompletion());
        assertEquals(101, task.getOutputData().get("result"));

        // Same, with a String value.
        inputObj = new HashMap<>();
        inputObj.put("value", "StringValue");
        inputObj.put("expression", "value");
        inputObj.put("evaluatorType", "value-param");

        task = new TaskModel();
        task.getInputData().putAll(inputObj);

        inline.execute(workflow, task, executor);
        assertEquals(TaskModel.Status.COMPLETED, task.getStatus());
        assertNull(task.getReasonForIncompletion());
        assertEquals("StringValue", task.getOutputData().get("result"));
    }

    @SuppressWarnings("unchecked")
    @Test
    public void testInlineJavascriptExpression() {
        Inline inline = new Inline(getStringEvaluatorMap());

        Map<String, Object> inputObj = new HashMap<>();
        inputObj.put("value", 101);
        inputObj.put(
                "expression",
                "function e() { if ($.value == 101){return {\"evalResult\": true}} else { return {\"evalResult\": false}}} e();");
        inputObj.put("evaluatorType", "javascript");

        TaskModel task = new TaskModel();
        task.getInputData().putAll(inputObj);

        inline.execute(workflow, task, executor);
        assertEquals(TaskModel.Status.COMPLETED, task.getStatus());
        assertNull(task.getReasonForIncompletion());
        assertEquals(
                true, ((Map<String, Object>) task.getOutputData().get("result")).get("evalResult"));

        inputObj = new HashMap<>();
        inputObj.put("value", "StringValue");
        inputObj.put(
                "expression",
                "function e() { if ($.value == 'StringValue'){return {\"evalResult\": true}} else { return {\"evalResult\": false}}} e();");
        inputObj.put("evaluatorType", "javascript");

        task = new TaskModel();
        task.getInputData().putAll(inputObj);

        inline.execute(workflow, task, executor);
        assertEquals(TaskModel.Status.COMPLETED, task.getStatus());
        assertNull(task.getReasonForIncompletion());
        assertEquals(
                true, ((Map<String, Object>) task.getOutputData().get("result")).get("evalResult"));
    }

    @SuppressWarnings("unchecked")
    @Test
    public void testInlineGraalJSEvaluatorType() {
        Inline inline = new Inline(getStringEvaluatorMap());

        Map<String, Object> inputObj = new HashMap<>();
        inputObj.put("value", 42);
        inputObj.put(
                "expression",
                "function e() { if ($.value == 42){return {\"evalResult\": true}} else { return {\"evalResult\": false}}} e();");
        inputObj.put("evaluatorType", "graaljs");

        TaskModel task = new TaskModel();
        task.getInputData().putAll(inputObj);

        inline.execute(workflow, task, executor);
        assertEquals(TaskModel.Status.COMPLETED, task.getStatus());
        assertNull(task.getReasonForIncompletion());
        assertEquals(
                true, ((Map<String, Object>) task.getOutputData().get("result")).get("evalResult"));
    }

    // @SuppressWarnings("unchecked") added: the cast of the "result" output to Map<String, Object>
    // is unchecked, matching the sibling tests above.
    @SuppressWarnings("unchecked")
    @Test
    public void testInlineDefaultEvaluatorType() {
        Inline inline = new Inline(getStringEvaluatorMap());

        Map<String, Object> inputObj = new HashMap<>();
        inputObj.put("value", 99);
        inputObj.put("expression", "function e() { return {\"result\": $.value * 2}} e();");
        // No evaluatorType specified - should default to "javascript"

        TaskModel task = new TaskModel();
        task.getInputData().putAll(inputObj);

        inline.execute(workflow, task, executor);
        assertEquals(TaskModel.Status.COMPLETED, task.getStatus());
        assertNull(task.getReasonForIncompletion());
        assertEquals(198, ((Map<String, Object>) task.getOutputData().get("result")).get("result"));
    }

    /** Registers the three evaluators under their declared names, as the production wiring does. */
    private Map<String, Evaluator> getStringEvaluatorMap() {
        Map<String, Evaluator> evaluators = new HashMap<>();
        evaluators.put(ValueParamEvaluator.NAME, new ValueParamEvaluator());
        evaluators.put(JavascriptEvaluator.NAME, new JavascriptEvaluator());
        evaluators.put(GraalJSEvaluator.NAME, new GraalJSEvaluator());
        return evaluators;
    }
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/test/java/com/netflix/conductor/core/execution/evaluators/JavascriptEvaluatorTest.java | core/src/test/java/com/netflix/conductor/core/execution/evaluators/JavascriptEvaluatorTest.java | /*
* Copyright 2025 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution.evaluators;
import java.util.HashMap;
import java.util.Map;
import org.junit.Test;
import static org.junit.Assert.*;
/**
 * Unit tests for {@link JavascriptEvaluator}.
 *
 * <p>Exercises arithmetic and nested-property expressions, ES6 syntax (const, template
 * literals, arrow functions), array and object return values, null handling, and the
 * deep-copy protection that prevents scripts from mutating the caller's input map.
 */
public class JavascriptEvaluatorTest {

    // Evaluator under test; no per-test state, so one shared instance suffices.
    private final JavascriptEvaluator evaluator = new JavascriptEvaluator();

    @Test
    public void testBasicEvaluation() {
        // A simple arithmetic expression over the bound "$" input object.
        Map<String, Object> input = new HashMap<>();
        input.put("value", 42);
        String expression = "$.value * 2";
        Object result = evaluator.evaluate(expression, input);
        // Compare through Number: the engine may return Integer or Double.
        assertEquals(84, ((Number) result).intValue());
    }

    @Test
    public void testDeepCopyProtection() {
        // This test verifies the deep copy protection feature from Enterprise
        Map<String, Object> input = new HashMap<>();
        Map<String, Object> nested = new HashMap<>();
        nested.put("original", "value");
        input.put("data", nested);
        // Script that modifies the input
        String expression =
                """
                (function() {
                    $.data.newKey = 'new value';
                    return $.data.newKey;
                })()
                """;
        Object result = evaluator.evaluate(expression, input);
        // The script sees (and can mutate) its own copy of the input.
        assertEquals("new value", result);
        // Verify original input was NOT modified (deep copy protection)
        assertFalse(
                "Original input should not be modified due to deep copy",
                nested.containsKey("newKey"));
        assertEquals("value", nested.get("original"));
    }

    @Test
    public void testNestedObjectAccess() {
        // Dotted property access should traverse arbitrarily nested maps.
        Map<String, Object> input = new HashMap<>();
        Map<String, Object> level1 = new HashMap<>();
        Map<String, Object> level2 = new HashMap<>();
        level2.put("value", "deep");
        level1.put("level2", level2);
        input.put("level1", level1);
        String expression = "$.level1.level2.value";
        Object result = evaluator.evaluate(expression, input);
        assertEquals("deep", result);
    }

    @Test
    public void testComplexExpression() {
        // Multiple input bindings combined in one arithmetic expression.
        Map<String, Object> input = new HashMap<>();
        input.put("a", 10);
        input.put("b", 20);
        input.put("c", 30);
        String expression = "($.a + $.b) * $.c";
        Object result = evaluator.evaluate(expression, input);
        assertEquals(900, ((Number) result).intValue());
    }

    @Test
    public void testES6Features() {
        // const and template literals require an ES6-capable engine.
        Map<String, Object> input = new HashMap<>();
        input.put("name", "Conductor");
        String expression =
                """
                (function() {
                    const greeting = 'Hello';
                    return `${greeting}, ${$.name}!`;
                })()
                """;
        Object result = evaluator.evaluate(expression, input);
        assertEquals("Hello, Conductor!", result);
    }

    @Test
    public void testArrayOperations() {
        // A Java int[] input is exposed to the script as an array supporting filter/map.
        Map<String, Object> input = new HashMap<>();
        input.put("numbers", new int[] {1, 2, 3, 4, 5});
        String expression = "$.numbers.filter(n => n > 2).map(n => n * 2)";
        Object result = evaluator.evaluate(expression, input);
        // JS array results come back as a java.util.List.
        assertTrue(result instanceof java.util.List);
        java.util.List<?> resultList = (java.util.List<?>) result;
        assertEquals(3, resultList.size());
        assertEquals(6, ((Number) resultList.get(0)).intValue());
        assertEquals(8, ((Number) resultList.get(1)).intValue());
        assertEquals(10, ((Number) resultList.get(2)).intValue());
    }

    @Test
    public void testObjectReturn() {
        // JS object literals come back as a java.util.Map.
        Map<String, Object> input = new HashMap<>();
        input.put("value", 42);
        String expression =
                """
                (function() {
                    return {
                        result: $.value,
                        doubled: $.value * 2,
                        message: 'success'
                    };
                })()
                """;
        Object result = evaluator.evaluate(expression, input);
        assertTrue(result instanceof Map);
        @SuppressWarnings("unchecked")
        Map<String, Object> resultMap = (Map<String, Object>) result;
        assertEquals(42, ((Number) resultMap.get("result")).intValue());
        assertEquals(84, ((Number) resultMap.get("doubled")).intValue());
        assertEquals("success", resultMap.get("message"));
    }

    @Test
    public void testNullSafety() {
        // A null input value should be distinguishable inside the script.
        Map<String, Object> input = new HashMap<>();
        input.put("value", null);
        String expression = "$.value === null ? 'null value' : $.value";
        Object result = evaluator.evaluate(expression, input);
        assertEquals("null value", result);
    }
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/test/java/com/netflix/conductor/core/execution/evaluators/GraalJSEvaluatorTest.java | core/src/test/java/com/netflix/conductor/core/execution/evaluators/GraalJSEvaluatorTest.java | /*
* Copyright 2025 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution.evaluators;
import java.util.HashMap;
import java.util.Map;
import org.junit.Test;
import static org.junit.Assert.*;
/**
* Test for GraalJSEvaluator - verifies it works identically to JavascriptEvaluator since they use
* the same underlying GraalJS engine.
*/
public class GraalJSEvaluatorTest {

    // Evaluator under test; stateless, so a single shared instance is safe.
    private final GraalJSEvaluator evaluator = new GraalJSEvaluator();

    @Test
    public void testBasicEvaluation() {
        // Simple arithmetic over the bound "$" input object.
        Map<String, Object> input = new HashMap<>();
        input.put("value", 42);
        String expression = "$.value * 2";
        Object result = evaluator.evaluate(expression, input);
        // Compare through Number: the engine may return Integer or Double.
        assertEquals(84, ((Number) result).intValue());
    }

    @Test
    public void testES6Support() {
        // const/let and template literals require an ES6-capable engine.
        Map<String, Object> input = new HashMap<>();
        input.put("name", "GraalJS");
        String expression =
                """
                (function() {
                    const greeting = 'Hello';
                    let engine = $.name;
                    return `${greeting}, ${engine}!`;
                })()
                """;
        Object result = evaluator.evaluate(expression, input);
        assertEquals("Hello, GraalJS!", result);
    }

    @Test
    public void testDeepCopyProtection() {
        // GraalJSEvaluator should have the same deep copy protection as JavascriptEvaluator
        Map<String, Object> input = new HashMap<>();
        Map<String, Object> nested = new HashMap<>();
        nested.put("original", "value");
        input.put("data", nested);
        String expression =
                """
                (function() {
                    $.data.modified = 'new value';
                    return $.data.modified;
                })()
                """;
        Object result = evaluator.evaluate(expression, input);
        // The script sees (and can mutate) its own copy of the input.
        assertEquals("new value", result);
        // Verify original input was NOT modified
        assertFalse(
                "Original input should not be modified due to deep copy",
                nested.containsKey("modified"));
    }

    @Test
    public void testComplexObject() {
        // Nested map properties combined in one arithmetic expression.
        Map<String, Object> input = new HashMap<>();
        Map<String, Object> config = new HashMap<>();
        config.put("timeout", 4);
        config.put("retries", 3);
        input.put("config", config);
        String expression = "$.config.timeout * $.config.retries";
        Object result = evaluator.evaluate(expression, input);
        assertEquals(12, ((Number) result).intValue());
    }

    @Test
    public void testArrayOperations() {
        // A Java int[] input is exposed to the script as an array supporting reduce.
        Map<String, Object> input = new HashMap<>();
        input.put("values", new int[] {10, 20, 30, 40, 50});
        String expression = "$.values.reduce((sum, val) => sum + val, 0)";
        Object result = evaluator.evaluate(expression, input);
        assertEquals(150, ((Number) result).intValue());
    }

    @Test
    public void testConditionalLogic() {
        // A JS object literal result comes back as a java.util.Map.
        Map<String, Object> input = new HashMap<>();
        input.put("status", "COMPLETED");
        String expression =
                """
                (function() {
                    if ($.status === 'COMPLETED') {
                        return { success: true, message: 'Task completed' };
                    } else {
                        return { success: false, message: 'Task pending' };
                    }
                })()
                """;
        Object result = evaluator.evaluate(expression, input);
        assertTrue(result instanceof Map);
        @SuppressWarnings("unchecked")
        Map<String, Object> resultMap = (Map<String, Object>) result;
        assertTrue((Boolean) resultMap.get("success"));
        assertEquals("Task completed", resultMap.get("message"));
    }

    @Test
    public void testIdenticalToJavascriptEvaluator() {
        // Verify GraalJSEvaluator produces identical results to JavascriptEvaluator
        JavascriptEvaluator jsEval = new JavascriptEvaluator();
        GraalJSEvaluator graalEval = new GraalJSEvaluator();
        Map<String, Object> input = new HashMap<>();
        input.put("a", 5);
        input.put("b", 10);
        String expression = "$.a + $.b";
        Object jsResult = jsEval.evaluate(expression, input);
        Object graalResult = graalEval.evaluate(expression, input);
        assertEquals(
                "Results should be identical",
                ((Number) jsResult).intValue(),
                ((Number) graalResult).intValue());
    }
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/test/java/com/netflix/conductor/core/execution/mapper/SubWorkflowTaskMapperTest.java | core/src/test/java/com/netflix/conductor/core/execution/mapper/SubWorkflowTaskMapperTest.java | /*
* Copyright 2022 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution.mapper;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import com.netflix.conductor.common.metadata.workflow.SubWorkflowParams;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.core.exception.TerminateWorkflowException;
import com.netflix.conductor.core.execution.DeciderService;
import com.netflix.conductor.core.utils.IDGenerator;
import com.netflix.conductor.core.utils.ParametersUtils;
import com.netflix.conductor.dao.MetadataDAO;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_SUB_WORKFLOW;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyMap;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
/** Unit tests for {@code SubWorkflowTaskMapper}: task mapping, taskToDomain, and validation. */
public class SubWorkflowTaskMapperTest {

    private SubWorkflowTaskMapper subWorkflowTaskMapper;
    private ParametersUtils parametersUtils;
    private DeciderService deciderService;
    // Shared generator used by all tests for task ids (see setUp).
    private IDGenerator idGenerator;

    @Rule public ExpectedException expectedException = ExpectedException.none();

    @Before
    public void setUp() {
        parametersUtils = mock(ParametersUtils.class);
        MetadataDAO metadataDAO = mock(MetadataDAO.class);
        subWorkflowTaskMapper = new SubWorkflowTaskMapper(parametersUtils, metadataDAO);
        deciderService = mock(DeciderService.class);
        idGenerator = new IDGenerator();
    }

    @Test
    public void getMappedTasks() {
        // Given
        WorkflowDef workflowDef = new WorkflowDef();
        WorkflowModel workflowModel = new WorkflowModel();
        workflowModel.setWorkflowDefinition(workflowDef);
        WorkflowTask workflowTask = new WorkflowTask();
        SubWorkflowParams subWorkflowParams = new SubWorkflowParams();
        subWorkflowParams.setName("Foo");
        subWorkflowParams.setVersion(2);
        workflowTask.setSubWorkflowParam(subWorkflowParams);
        workflowTask.setStartDelay(30);
        Map<String, Object> taskInput = new HashMap<>();
        Map<String, String> taskToDomain =
                new HashMap<>() {
                    {
                        put("*", "unittest");
                    }
                };
        Map<String, Object> subWorkflowParamMap = new HashMap<>();
        subWorkflowParamMap.put("name", "FooWorkFlow");
        subWorkflowParamMap.put("version", 2);
        subWorkflowParamMap.put("taskToDomain", taskToDomain);
        when(parametersUtils.getTaskInputV2(anyMap(), any(WorkflowModel.class), any(), any()))
                .thenReturn(subWorkflowParamMap);

        // When
        TaskMapperContext taskMapperContext =
                TaskMapperContext.newBuilder()
                        .withWorkflowModel(workflowModel)
                        .withWorkflowTask(workflowTask)
                        .withTaskInput(taskInput)
                        .withRetryCount(0)
                        .withTaskId(idGenerator.generate())
                        .withDeciderService(deciderService)
                        .build();
        List<TaskModel> mappedTasks = subWorkflowTaskMapper.getMappedTasks(taskMapperContext);

        // Then
        assertFalse(mappedTasks.isEmpty());
        assertEquals(1, mappedTasks.size());
        TaskModel subWorkFlowTask = mappedTasks.get(0);
        assertEquals(TaskModel.Status.SCHEDULED, subWorkFlowTask.getStatus());
        assertEquals(TASK_TYPE_SUB_WORKFLOW, subWorkFlowTask.getTaskType());
        // startDelay on the workflow task becomes the scheduled task's callback delay.
        assertEquals(30, subWorkFlowTask.getCallbackAfterSeconds());
        assertEquals(taskToDomain, subWorkFlowTask.getInputData().get("subWorkflowTaskToDomain"));
    }

    @Test
    public void testTaskToDomain() {
        // Given
        WorkflowDef workflowDef = new WorkflowDef();
        WorkflowModel workflowModel = new WorkflowModel();
        workflowModel.setWorkflowDefinition(workflowDef);
        WorkflowTask workflowTask = new WorkflowTask();
        Map<String, String> taskToDomain =
                new HashMap<>() {
                    {
                        put("*", "unittest");
                    }
                };
        SubWorkflowParams subWorkflowParams = new SubWorkflowParams();
        subWorkflowParams.setName("Foo");
        subWorkflowParams.setVersion(2);
        subWorkflowParams.setTaskToDomain(taskToDomain);
        workflowTask.setSubWorkflowParam(subWorkflowParams);
        Map<String, Object> taskInput = new HashMap<>();
        Map<String, Object> subWorkflowParamMap = new HashMap<>();
        subWorkflowParamMap.put("name", "FooWorkFlow");
        subWorkflowParamMap.put("version", 2);
        when(parametersUtils.getTaskInputV2(anyMap(), any(WorkflowModel.class), any(), any()))
                .thenReturn(subWorkflowParamMap);

        // When
        TaskMapperContext taskMapperContext =
                TaskMapperContext.newBuilder()
                        .withWorkflowModel(workflowModel)
                        .withWorkflowTask(workflowTask)
                        .withTaskInput(taskInput)
                        .withRetryCount(0)
                        // use the shared field instead of constructing a throwaway
                        // IDGenerator; consistent with getMappedTasks() above
                        .withTaskId(idGenerator.generate())
                        .withDeciderService(deciderService)
                        .build();
        List<TaskModel> mappedTasks = subWorkflowTaskMapper.getMappedTasks(taskMapperContext);

        // Then
        assertFalse(mappedTasks.isEmpty());
        assertEquals(1, mappedTasks.size());
        TaskModel subWorkFlowTask = mappedTasks.get(0);
        assertEquals(TaskModel.Status.SCHEDULED, subWorkFlowTask.getStatus());
        assertEquals(TASK_TYPE_SUB_WORKFLOW, subWorkFlowTask.getTaskType());
    }

    @Test
    public void getSubWorkflowParams() {
        WorkflowTask workflowTask = new WorkflowTask();
        SubWorkflowParams subWorkflowParams = new SubWorkflowParams();
        subWorkflowParams.setName("Foo");
        subWorkflowParams.setVersion(2);
        workflowTask.setSubWorkflowParam(subWorkflowParams);
        assertEquals(subWorkflowParams, subWorkflowTaskMapper.getSubWorkflowParams(workflowTask));
    }

    @Test
    public void getExceptionWhenNoSubWorkflowParamsPassed() {
        // A SUB_WORKFLOW task without subWorkflowParams must terminate the workflow.
        WorkflowTask workflowTask = new WorkflowTask();
        workflowTask.setName("FooWorkFLow");
        expectedException.expect(TerminateWorkflowException.class);
        expectedException.expectMessage(
                String.format(
                        "Task %s is defined as sub-workflow and is missing subWorkflowParams. "
                                + "Please check the workflow definition",
                        workflowTask.getName()));
        subWorkflowTaskMapper.getSubWorkflowParams(workflowTask);
    }
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/test/java/com/netflix/conductor/core/execution/mapper/JsonJQTransformTaskMapperTest.java | core/src/test/java/com/netflix/conductor/core/execution/mapper/JsonJQTransformTaskMapperTest.java | /*
* Copyright 2022 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution.mapper;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.junit.Before;
import org.junit.Test;
import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.tasks.TaskType;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.core.utils.IDGenerator;
import com.netflix.conductor.core.utils.ParametersUtils;
import com.netflix.conductor.dao.MetadataDAO;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.mockito.Mockito.mock;
/** Unit tests for {@code JsonJQTransformTaskMapper}, with and without a task definition. */
public class JsonJQTransformTaskMapperTest {

    private IDGenerator idGenerator;
    private ParametersUtils parametersUtils;
    private MetadataDAO metadataDAO;

    @Before
    public void setUp() {
        parametersUtils = mock(ParametersUtils.class);
        metadataDAO = mock(MetadataDAO.class);
        idGenerator = new IDGenerator();
    }

    @Test
    public void getMappedTasks() {
        WorkflowTask workflowTask = new WorkflowTask();
        workflowTask.setName("json_jq_transform_task");
        workflowTask.setType(TaskType.JSON_JQ_TRANSFORM.name());
        workflowTask.setTaskDefinition(new TaskDef("json_jq_transform_task"));
        Map<String, Object> taskInput = new HashMap<>();
        taskInput.put("in1", new String[] {"a", "b"});
        taskInput.put("in2", new String[] {"c", "d"});
        taskInput.put("queryExpression", "{ out: (.in1 + .in2) }");
        workflowTask.setInputParameters(taskInput);
        String taskId = idGenerator.generate();
        WorkflowDef workflowDef = new WorkflowDef();
        WorkflowModel workflow = new WorkflowModel();
        workflow.setWorkflowDefinition(workflowDef);
        TaskMapperContext taskMapperContext =
                TaskMapperContext.newBuilder()
                        .withWorkflowModel(workflow)
                        .withTaskDefinition(new TaskDef())
                        .withWorkflowTask(workflowTask)
                        .withTaskInput(taskInput)
                        .withRetryCount(0)
                        .withTaskId(taskId)
                        .build();
        List<TaskModel> mappedTasks =
                new JsonJQTransformTaskMapper(parametersUtils, metadataDAO)
                        .getMappedTasks(taskMapperContext);
        // Null-check first: asserting size() before assertNotNull would surface a null
        // result as an NPE instead of an assertion failure.
        assertNotNull(mappedTasks);
        assertEquals(1, mappedTasks.size());
        assertEquals(TaskType.JSON_JQ_TRANSFORM.name(), mappedTasks.get(0).getTaskType());
    }

    @Test
    public void getMappedTasks_WithoutTaskDef() {
        // Same scenario but the workflow task carries no TaskDef; mapping must still work.
        WorkflowTask workflowTask = new WorkflowTask();
        workflowTask.setName("json_jq_transform_task");
        workflowTask.setType(TaskType.JSON_JQ_TRANSFORM.name());
        Map<String, Object> taskInput = new HashMap<>();
        taskInput.put("in1", new String[] {"a", "b"});
        taskInput.put("in2", new String[] {"c", "d"});
        taskInput.put("queryExpression", "{ out: (.in1 + .in2) }");
        workflowTask.setInputParameters(taskInput);
        String taskId = idGenerator.generate();
        WorkflowDef workflowDef = new WorkflowDef();
        WorkflowModel workflow = new WorkflowModel();
        workflow.setWorkflowDefinition(workflowDef);
        TaskMapperContext taskMapperContext =
                TaskMapperContext.newBuilder()
                        .withWorkflowModel(workflow)
                        .withTaskDefinition(null)
                        .withWorkflowTask(workflowTask)
                        .withTaskInput(taskInput)
                        .withRetryCount(0)
                        .withTaskId(taskId)
                        .build();
        List<TaskModel> mappedTasks =
                new JsonJQTransformTaskMapper(parametersUtils, metadataDAO)
                        .getMappedTasks(taskMapperContext);
        // Null-check first, then size, then content (see getMappedTasks above).
        assertNotNull(mappedTasks);
        assertEquals(1, mappedTasks.size());
        assertEquals(TaskType.JSON_JQ_TRANSFORM.name(), mappedTasks.get(0).getTaskType());
    }
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/test/java/com/netflix/conductor/core/execution/mapper/LambdaTaskMapperTest.java | core/src/test/java/com/netflix/conductor/core/execution/mapper/LambdaTaskMapperTest.java | /*
* Copyright 2022 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution.mapper;
import java.util.List;
import org.junit.Before;
import org.junit.Test;
import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.tasks.TaskType;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.core.utils.IDGenerator;
import com.netflix.conductor.core.utils.ParametersUtils;
import com.netflix.conductor.dao.MetadataDAO;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.mockito.Mockito.mock;
/** Unit tests for {@code LambdaTaskMapper}, with and without a task definition. */
public class LambdaTaskMapperTest {

    private IDGenerator idGenerator;
    private ParametersUtils parametersUtils;
    private MetadataDAO metadataDAO;

    @Before
    public void setUp() {
        parametersUtils = mock(ParametersUtils.class);
        metadataDAO = mock(MetadataDAO.class);
        idGenerator = new IDGenerator();
    }

    @Test
    public void getMappedTasks() {
        WorkflowTask workflowTask = new WorkflowTask();
        workflowTask.setName("lambda_task");
        workflowTask.setType(TaskType.LAMBDA.name());
        workflowTask.setTaskDefinition(new TaskDef("lambda_task"));
        workflowTask.setScriptExpression(
                "if ($.input.a==1){return {testValue: true}} else{return {testValue: false} }");
        String taskId = idGenerator.generate();
        WorkflowDef workflowDef = new WorkflowDef();
        WorkflowModel workflow = new WorkflowModel();
        workflow.setWorkflowDefinition(workflowDef);
        TaskMapperContext taskMapperContext =
                TaskMapperContext.newBuilder()
                        .withWorkflowModel(workflow)
                        .withTaskDefinition(new TaskDef())
                        .withWorkflowTask(workflowTask)
                        .withRetryCount(0)
                        .withTaskId(taskId)
                        .build();
        List<TaskModel> mappedTasks =
                new LambdaTaskMapper(parametersUtils, metadataDAO)
                        .getMappedTasks(taskMapperContext);
        // Null-check first: asserting size() before assertNotNull would surface a null
        // result as an NPE instead of an assertion failure.
        assertNotNull(mappedTasks);
        assertEquals(1, mappedTasks.size());
        assertEquals(TaskType.LAMBDA.name(), mappedTasks.get(0).getTaskType());
    }

    @Test
    public void getMappedTasks_WithoutTaskDef() {
        // Same scenario but without a TaskDef attached; mapping must still work.
        WorkflowTask workflowTask = new WorkflowTask();
        workflowTask.setType(TaskType.LAMBDA.name());
        workflowTask.setScriptExpression(
                "if ($.input.a==1){return {testValue: true}} else{return {testValue: false} }");
        String taskId = idGenerator.generate();
        WorkflowDef workflowDef = new WorkflowDef();
        WorkflowModel workflow = new WorkflowModel();
        workflow.setWorkflowDefinition(workflowDef);
        TaskMapperContext taskMapperContext =
                TaskMapperContext.newBuilder()
                        .withWorkflowModel(workflow)
                        .withTaskDefinition(null)
                        .withWorkflowTask(workflowTask)
                        .withRetryCount(0)
                        .withTaskId(taskId)
                        .build();
        List<TaskModel> mappedTasks =
                new LambdaTaskMapper(parametersUtils, metadataDAO)
                        .getMappedTasks(taskMapperContext);
        // Null-check first, then size, then content (see getMappedTasks above).
        assertNotNull(mappedTasks);
        assertEquals(1, mappedTasks.size());
        assertEquals(TaskType.LAMBDA.name(), mappedTasks.get(0).getTaskType());
    }
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/test/java/com/netflix/conductor/core/execution/mapper/HumanTaskMapperTest.java | core/src/test/java/com/netflix/conductor/core/execution/mapper/HumanTaskMapperTest.java | /*
* Copyright 2022 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution.mapper;
import java.util.HashMap;
import java.util.List;
import org.junit.Test;
import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.tasks.TaskType;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.core.utils.IDGenerator;
import com.netflix.conductor.core.utils.ParametersUtils;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_HUMAN;
import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.mock;
/** Unit test for {@code HumanTaskMapper}: a HUMAN workflow task maps to one HUMAN task. */
public class HumanTaskMapperTest {

    @Test
    public void getMappedTasks() {
        // Given: a workflow containing a single HUMAN task.
        WorkflowTask humanWorkflowTask = new WorkflowTask();
        humanWorkflowTask.setName("human_task");
        humanWorkflowTask.setType(TaskType.HUMAN.name());

        WorkflowDef workflowDef = new WorkflowDef();
        WorkflowModel workflowModel = new WorkflowModel();
        workflowModel.setWorkflowDefinition(workflowDef);

        ParametersUtils parametersUtils = mock(ParametersUtils.class);
        TaskMapperContext context =
                TaskMapperContext.newBuilder()
                        .withWorkflowModel(workflowModel)
                        .withTaskDefinition(new TaskDef())
                        .withWorkflowTask(humanWorkflowTask)
                        .withTaskInput(new HashMap<>())
                        .withRetryCount(0)
                        .withTaskId(new IDGenerator().generate())
                        .build();

        // When
        List<TaskModel> mappedTasks = new HumanTaskMapper(parametersUtils).getMappedTasks(context);

        // Then: exactly one HUMAN task is scheduled.
        assertEquals(1, mappedTasks.size());
        assertEquals(TASK_TYPE_HUMAN, mappedTasks.get(0).getTaskType());
    }
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/test/java/com/netflix/conductor/core/execution/mapper/TerminateTaskMapperTest.java | core/src/test/java/com/netflix/conductor/core/execution/mapper/TerminateTaskMapperTest.java | /*
* Copyright 2022 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution.mapper;
import java.util.List;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.tasks.TaskType;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.core.utils.IDGenerator;
import com.netflix.conductor.core.utils.ParametersUtils;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import static org.mockito.Mockito.mock;
/** Unit test for {@code TerminateTaskMapper}: a TERMINATE task maps to one TERMINATE task. */
public class TerminateTaskMapperTest {

    private ParametersUtils parametersUtils;

    @Before
    public void setUp() {
        parametersUtils = mock(ParametersUtils.class);
    }

    @Test
    public void getMappedTasks() {
        // Given: a TERMINATE task inside a minimal workflow definition.
        WorkflowTask terminateTask = new WorkflowTask();
        terminateTask.setType(TaskType.TASK_TYPE_TERMINATE);

        WorkflowModel workflowModel = new WorkflowModel();
        workflowModel.setWorkflowDefinition(new WorkflowDef());

        TaskMapperContext context =
                TaskMapperContext.newBuilder()
                        .withWorkflowModel(workflowModel)
                        .withTaskDefinition(new TaskDef())
                        .withWorkflowTask(terminateTask)
                        .withRetryCount(0)
                        .withTaskId(new IDGenerator().generate())
                        .build();

        // When
        List<TaskModel> mapped =
                new TerminateTaskMapper(parametersUtils).getMappedTasks(context);

        // Then: exactly one TERMINATE task is produced.
        Assert.assertNotNull(mapped);
        Assert.assertEquals(1, mapped.size());
        Assert.assertEquals(TaskType.TASK_TYPE_TERMINATE, mapped.get(0).getTaskType());
    }
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/test/java/com/netflix/conductor/core/execution/mapper/UserDefinedTaskMapperTest.java | core/src/test/java/com/netflix/conductor/core/execution/mapper/UserDefinedTaskMapperTest.java | /*
* Copyright 2022 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution.mapper;
import java.util.HashMap;
import java.util.List;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.tasks.TaskType;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.core.exception.TerminateWorkflowException;
import com.netflix.conductor.core.utils.IDGenerator;
import com.netflix.conductor.core.utils.ParametersUtils;
import com.netflix.conductor.dao.MetadataDAO;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.mock;
public class UserDefinedTaskMapperTest {

    private IDGenerator idGenerator;
    private UserDefinedTaskMapper userDefinedTaskMapper;

    @Rule public ExpectedException expectedException = ExpectedException.none();

    @Before
    public void setUp() {
        // The mapper only needs mocked collaborators; no stubbing is required.
        userDefinedTaskMapper =
                new UserDefinedTaskMapper(mock(ParametersUtils.class), mock(MetadataDAO.class));
        idGenerator = new IDGenerator();
    }

    /** A USER_DEFINED task with an inline task definition maps to a single task model. */
    @Test
    public void getMappedTasks() {
        // given
        WorkflowTask userTask = new WorkflowTask();
        userTask.setName("user_task");
        userTask.setType(TaskType.USER_DEFINED.name());
        userTask.setTaskDefinition(new TaskDef("user_task"));

        WorkflowModel workflowModel = new WorkflowModel();
        workflowModel.setWorkflowDefinition(new WorkflowDef());

        String mappedTaskId = idGenerator.generate();
        String previousAttemptId = idGenerator.generate();
        TaskMapperContext context =
                TaskMapperContext.newBuilder()
                        .withWorkflowModel(workflowModel)
                        .withTaskDefinition(new TaskDef())
                        .withWorkflowTask(userTask)
                        .withTaskInput(new HashMap<>())
                        .withRetryCount(0)
                        .withRetryTaskId(previousAttemptId)
                        .withTaskId(mappedTaskId)
                        .build();

        // when
        List<TaskModel> result = userDefinedTaskMapper.getMappedTasks(context);

        // then
        assertEquals(1, result.size());
        assertEquals(TaskType.USER_DEFINED.name(), result.get(0).getTaskType());
    }

    /** Mapping must fail when no task definition can be resolved for the task name. */
    @Test
    public void getMappedTasksException() {
        // given: a USER_DEFINED task with neither an inline nor a registered definition
        WorkflowTask userTask = new WorkflowTask();
        userTask.setName("user_task");
        userTask.setType(TaskType.USER_DEFINED.name());

        WorkflowModel workflowModel = new WorkflowModel();
        workflowModel.setWorkflowDefinition(new WorkflowDef());

        String mappedTaskId = idGenerator.generate();
        String previousAttemptId = idGenerator.generate();
        TaskMapperContext context =
                TaskMapperContext.newBuilder()
                        .withWorkflowModel(workflowModel)
                        .withWorkflowTask(userTask)
                        .withTaskInput(new HashMap<>())
                        .withRetryCount(0)
                        .withRetryTaskId(previousAttemptId)
                        .withTaskId(mappedTaskId)
                        .build();

        // then
        expectedException.expect(TerminateWorkflowException.class);
        expectedException.expectMessage(
                String.format(
                        "Invalid task specified. Cannot find task by name %s in the task definitions",
                        userTask.getName()));

        // when
        userDefinedTaskMapper.getMappedTasks(context);
    }
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/test/java/com/netflix/conductor/core/execution/mapper/NoopTaskMapperTest.java | core/src/test/java/com/netflix/conductor/core/execution/mapper/NoopTaskMapperTest.java | /*
* Copyright 2023 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution.mapper;
import java.util.List;
import org.junit.Assert;
import org.junit.Test;
import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.tasks.TaskType;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.core.utils.IDGenerator;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
public class NoopTaskMapperTest {

    /** A NOOP workflow task should map to exactly one NOOP task model. */
    @Test
    public void getMappedTasks() {
        WorkflowTask noopTask = new WorkflowTask();
        noopTask.setType(TaskType.TASK_TYPE_NOOP);

        WorkflowModel workflowModel = new WorkflowModel();
        workflowModel.setWorkflowDefinition(new WorkflowDef());

        TaskMapperContext context =
                TaskMapperContext.newBuilder()
                        .withWorkflowModel(workflowModel)
                        .withTaskDefinition(new TaskDef())
                        .withWorkflowTask(noopTask)
                        .withRetryCount(0)
                        .withTaskId(new IDGenerator().generate())
                        .build();

        List<TaskModel> result = new NoopTaskMapper().getMappedTasks(context);

        Assert.assertNotNull(result);
        Assert.assertEquals(1, result.size());
        Assert.assertEquals(TaskType.TASK_TYPE_NOOP, result.get(0).getTaskType());
    }
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/test/java/com/netflix/conductor/core/execution/mapper/DoWhileTaskMapperTest.java | core/src/test/java/com/netflix/conductor/core/execution/mapper/DoWhileTaskMapperTest.java | /*
* Copyright 2022 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution.mapper;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mockito;
import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.tasks.TaskType;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.common.utils.TaskUtils;
import com.netflix.conductor.core.execution.DeciderService;
import com.netflix.conductor.core.utils.IDGenerator;
import com.netflix.conductor.core.utils.ParametersUtils;
import com.netflix.conductor.dao.MetadataDAO;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import com.fasterxml.jackson.databind.ObjectMapper;
import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_DO_WHILE;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
public class DoWhileTaskMapperTest {

    private TaskModel task1;
    private DeciderService deciderService;
    private WorkflowModel workflow;
    private WorkflowTask workflowTask1;
    private TaskMapperContext taskMapperContext;
    private MetadataDAO metadataDAO;
    private ParametersUtils parametersUtils;

    @Before
    public void setup() {
        // DO_WHILE container task looping over two inner tasks, guarded by a
        // javascript loop condition that reads the inner tasks' outputs.
        WorkflowTask workflowTask = new WorkflowTask();
        workflowTask.setType(TaskType.DO_WHILE.name());
        workflowTask.setTaskReferenceName("Test");
        workflowTask.setInputParameters(Map.of("value", "${workflow.input.foo}"));

        task1 = new TaskModel();
        task1.setReferenceTaskName("task1");
        TaskModel task2 = new TaskModel();
        task2.setReferenceTaskName("task2");
        workflowTask1 = new WorkflowTask();
        workflowTask1.setTaskReferenceName("task1");
        WorkflowTask workflowTask2 = new WorkflowTask();
        workflowTask2.setTaskReferenceName("task2");
        task1.setWorkflowTask(workflowTask1);
        task2.setWorkflowTask(workflowTask2);
        workflowTask.setLoopOver(Arrays.asList(task1.getWorkflowTask(), task2.getWorkflowTask()));
        workflowTask.setLoopCondition(
                "if ($.second_task + $.first_task > 10) { false; } else { true; }");

        String taskId = new IDGenerator().generate();

        // Workflow input supplies the value referenced by the DO_WHILE input parameters.
        WorkflowDef workflowDef = new WorkflowDef();
        workflow = new WorkflowModel();
        workflow.setWorkflowDefinition(workflowDef);
        workflow.setInput(Map.of("foo", "bar"));

        deciderService = Mockito.mock(DeciderService.class);
        metadataDAO = Mockito.mock(MetadataDAO.class);

        taskMapperContext =
                TaskMapperContext.newBuilder()
                        .withDeciderService(deciderService)
                        .withWorkflowModel(workflow)
                        .withTaskDefinition(new TaskDef())
                        .withWorkflowTask(workflowTask)
                        .withRetryCount(0)
                        .withTaskId(taskId)
                        .build();
        parametersUtils = new ParametersUtils(new ObjectMapper());
    }

    /** The DO_WHILE container task is scheduled with its input parameters resolved. */
    @Test
    public void getMappedTasks() {
        Mockito.doReturn(Collections.singletonList(task1))
                .when(deciderService)
                .getTasksToBeScheduled(workflow, workflowTask1, 0);

        List<TaskModel> mappedTasks =
                new DoWhileTaskMapper(metadataDAO, parametersUtils)
                        .getMappedTasks(taskMapperContext);

        assertNotNull(mappedTasks);
        // Fixed: JUnit assertEquals takes (expected, actual); arguments were reversed,
        // which made failure messages report the values backwards.
        assertEquals(1, mappedTasks.size());
        assertEquals(TASK_TYPE_DO_WHILE, mappedTasks.get(0).getTaskType());
        assertNotNull(mappedTasks.get(0).getInputData());
        assertEquals(Map.of("value", "bar"), mappedTasks.get(0).getInputData());
    }

    /** A loop task that already COMPLETED must not cause re-scheduling of inner tasks. */
    @Test
    public void shouldNotScheduleCompletedTask() {
        task1.setStatus(TaskModel.Status.COMPLETED);

        List<TaskModel> mappedTasks =
                new DoWhileTaskMapper(metadataDAO, parametersUtils)
                        .getMappedTasks(taskMapperContext);

        assertNotNull(mappedTasks);
        // Fixed: (expected, actual) argument order, as above.
        assertEquals(1, mappedTasks.size());
    }

    /** Iteration suffix format is "<name>__<iteration>". */
    @Test
    public void testAppendIteration() {
        assertEquals("task__1", TaskUtils.appendIteration("task", 1));
    }
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/test/java/com/netflix/conductor/core/execution/mapper/DecisionTaskMapperTest.java | core/src/test/java/com/netflix/conductor/core/execution/mapper/DecisionTaskMapperTest.java | /*
* Copyright 2022 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution.mapper;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringRunner;
import com.netflix.conductor.common.config.TestObjectMapperConfiguration;
import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.tasks.TaskType;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.core.exception.TerminateWorkflowException;
import com.netflix.conductor.core.execution.DeciderService;
import com.netflix.conductor.core.utils.IDGenerator;
import com.netflix.conductor.core.utils.ParametersUtils;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import com.fasterxml.jackson.databind.ObjectMapper;
import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
@ContextConfiguration(classes = {TestObjectMapperConfiguration.class})
@RunWith(SpringRunner.class)
public class DecisionTaskMapperTest {

    private IDGenerator idGenerator;
    private ParametersUtils parametersUtils;
    private DeciderService deciderService;

    // Subject
    private DecisionTaskMapper decisionTaskMapper;

    @Autowired private ObjectMapper objectMapper;

    @Rule public ExpectedException expectedException = ExpectedException.none();

    // Shared fixtures: a single input-parameter map reused by three candidate
    // branch tasks (t1 = default case, t2/t3 = decision cases).
    Map<String, Object> ip1;
    WorkflowTask task1;
    WorkflowTask task2;
    WorkflowTask task3;

    @Before
    public void setUp() {
        parametersUtils = new ParametersUtils(objectMapper);
        idGenerator = new IDGenerator();
        ip1 = new HashMap<>();
        ip1.put("p1", "${workflow.input.param1}");
        ip1.put("p2", "${workflow.input.param2}");
        ip1.put("case", "${workflow.input.case}");

        task1 = new WorkflowTask();
        task1.setName("Test1");
        task1.setInputParameters(ip1);
        task1.setTaskReferenceName("t1");

        task2 = new WorkflowTask();
        task2.setName("Test2");
        task2.setInputParameters(ip1);
        task2.setTaskReferenceName("t2");

        task3 = new WorkflowTask();
        task3.setName("Test3");
        task3.setInputParameters(ip1);
        task3.setTaskReferenceName("t3");

        deciderService = mock(DeciderService.class);
        decisionTaskMapper = new DecisionTaskMapper();
    }

    /**
     * A DECISION task whose case expression evaluates to "even" should schedule
     * the decision task itself plus the tasks of the matching case branch.
     */
    @Test
    public void getMappedTasks() {
        // Given
        // Task Definition
        TaskDef taskDef = new TaskDef();
        Map<String, Object> inputMap = new HashMap<>();
        inputMap.put("Id", "${workflow.input.Id}");
        List<Map<String, Object>> taskDefinitionInput = new LinkedList<>();
        taskDefinitionInput.add(inputMap);

        // Decision task instance: the script maps Id=22 to the "even" branch.
        WorkflowTask decisionTask = new WorkflowTask();
        decisionTask.setType(TaskType.DECISION.name());
        decisionTask.setName("Decision");
        decisionTask.setTaskReferenceName("decisionTask");
        decisionTask.setDefaultCase(Collections.singletonList(task1));
        decisionTask.setCaseValueParam("case");
        decisionTask.getInputParameters().put("Id", "${workflow.input.Id}");
        decisionTask.setCaseExpression(
                "if ($.Id == null) 'bad input'; else if ( ($.Id != null && $.Id % 2 == 0)) 'even'; else 'odd'; ");
        Map<String, List<WorkflowTask>> decisionCases = new HashMap<>();
        decisionCases.put("even", Collections.singletonList(task2));
        decisionCases.put("odd", Collections.singletonList(task3));
        decisionTask.setDecisionCases(decisionCases);

        // Workflow instance
        WorkflowDef workflowDef = new WorkflowDef();
        workflowDef.setSchemaVersion(2);
        WorkflowModel workflowModel = new WorkflowModel();
        workflowModel.setWorkflowDefinition(workflowDef);
        Map<String, Object> workflowInput = new HashMap<>();
        workflowInput.put("Id", "22");
        workflowModel.setInput(workflowInput);

        Map<String, Object> body = new HashMap<>();
        body.put("input", taskDefinitionInput);
        taskDef.getInputTemplate().putAll(body);

        Map<String, Object> input =
                parametersUtils.getTaskInput(
                        decisionTask.getInputParameters(), workflowModel, null, null);

        // The decider is stubbed to return one task for the "even" branch (task2).
        TaskModel theTask = new TaskModel();
        theTask.setReferenceTaskName("Foo");
        theTask.setTaskId(idGenerator.generate());

        when(deciderService.getTasksToBeScheduled(workflowModel, task2, 0, null))
                .thenReturn(Collections.singletonList(theTask));

        TaskMapperContext taskMapperContext =
                TaskMapperContext.newBuilder()
                        .withWorkflowModel(workflowModel)
                        .withWorkflowTask(decisionTask)
                        .withTaskInput(input)
                        .withRetryCount(0)
                        .withTaskId(idGenerator.generate())
                        .withDeciderService(deciderService)
                        .build();

        // When
        List<TaskModel> mappedTasks = decisionTaskMapper.getMappedTasks(taskMapperContext);

        // Then: decision task first, followed by the scheduled branch task.
        assertEquals(2, mappedTasks.size());
        assertEquals("decisionTask", mappedTasks.get(0).getReferenceTaskName());
        assertEquals("Foo", mappedTasks.get(1).getReferenceTaskName());
    }

    /** Without a case expression, the case value comes from the caseValueParam input. */
    @Test
    public void getEvaluatedCaseValue() {
        WorkflowTask decisionTask = new WorkflowTask();
        decisionTask.setType(TaskType.DECISION.name());
        decisionTask.setName("Decision");
        decisionTask.setTaskReferenceName("decisionTask");
        decisionTask.setInputParameters(ip1);
        decisionTask.setDefaultCase(Collections.singletonList(task1));
        decisionTask.setCaseValueParam("case");
        Map<String, List<WorkflowTask>> decisionCases = new HashMap<>();
        decisionCases.put("0", Collections.singletonList(task2));
        decisionCases.put("1", Collections.singletonList(task3));
        decisionTask.setDecisionCases(decisionCases);

        WorkflowModel workflowModel = new WorkflowModel();
        workflowModel.setWorkflowDefinition(new WorkflowDef());
        Map<String, Object> workflowInput = new HashMap<>();
        workflowInput.put("param1", "test1");
        workflowInput.put("param2", "test2");
        workflowInput.put("case", "0");
        workflowModel.setInput(workflowInput);

        Map<String, Object> input =
                parametersUtils.getTaskInput(
                        decisionTask.getInputParameters(), workflowModel, null, null);

        // The "case" workflow input ("0") is surfaced verbatim as the case value.
        assertEquals("0", decisionTaskMapper.getEvaluatedCaseValue(decisionTask, input));
    }

    /** With a case expression set, the case value is the evaluated script result. */
    @Test
    public void getEvaluatedCaseValueUsingExpression() {
        // Given
        // Task Definition
        TaskDef taskDef = new TaskDef();
        Map<String, Object> inputMap = new HashMap<>();
        inputMap.put("Id", "${workflow.input.Id}");
        List<Map<String, Object>> taskDefinitionInput = new LinkedList<>();
        taskDefinitionInput.add(inputMap);

        // Decision task instance
        WorkflowTask decisionTask = new WorkflowTask();
        decisionTask.setType(TaskType.DECISION.name());
        decisionTask.setName("Decision");
        decisionTask.setTaskReferenceName("decisionTask");
        decisionTask.setDefaultCase(Collections.singletonList(task1));
        decisionTask.setCaseValueParam("case");
        decisionTask.getInputParameters().put("Id", "${workflow.input.Id}");
        decisionTask.setCaseExpression(
                "if ($.Id == null) 'bad input'; else if ( ($.Id != null && $.Id % 2 == 0)) 'even'; else 'odd'; ");
        Map<String, List<WorkflowTask>> decisionCases = new HashMap<>();
        decisionCases.put("even", Collections.singletonList(task2));
        decisionCases.put("odd", Collections.singletonList(task3));
        decisionTask.setDecisionCases(decisionCases);

        // Workflow instance
        WorkflowDef def = new WorkflowDef();
        def.setSchemaVersion(2);
        WorkflowModel workflowModel = new WorkflowModel();
        workflowModel.setWorkflowDefinition(def);
        Map<String, Object> workflowInput = new HashMap<>();
        workflowInput.put("Id", "22");
        workflowModel.setInput(workflowInput);

        Map<String, Object> body = new HashMap<>();
        body.put("input", taskDefinitionInput);
        taskDef.getInputTemplate().putAll(body);

        Map<String, Object> evaluatorInput =
                parametersUtils.getTaskInput(
                        decisionTask.getInputParameters(), workflowModel, taskDef, null);

        // Id=22 is even, so the script yields "even".
        assertEquals(
                "even", decisionTaskMapper.getEvaluatedCaseValue(decisionTask, evaluatorInput));
    }

    /** A case expression that fails to evaluate must terminate the workflow. */
    @Test
    public void getEvaluatedCaseValueException() {
        // Given
        // Task Definition
        TaskDef taskDef = new TaskDef();
        Map<String, Object> inputMap = new HashMap<>();
        inputMap.put("Id", "${workflow.input.Id}");
        List<Map<String, Object>> taskDefinitionInput = new LinkedList<>();
        taskDefinitionInput.add(inputMap);

        // Decision task instance: note the expression uses "$Id" (invalid
        // reference syntax) instead of "$.Id", so evaluation will fail.
        WorkflowTask decisionTask = new WorkflowTask();
        decisionTask.setType(TaskType.DECISION.name());
        decisionTask.setName("Decision");
        decisionTask.setTaskReferenceName("decisionTask");
        decisionTask.setDefaultCase(Collections.singletonList(task1));
        decisionTask.setCaseValueParam("case");
        decisionTask.getInputParameters().put("Id", "${workflow.input.Id}");
        decisionTask.setCaseExpression(
                "if ($Id == null) 'bad input'; else if ( ($Id != null && $Id % 2 == 0)) 'even'; else 'odd'; ");
        Map<String, List<WorkflowTask>> decisionCases = new HashMap<>();
        decisionCases.put("even", Collections.singletonList(task2));
        decisionCases.put("odd", Collections.singletonList(task3));
        decisionTask.setDecisionCases(decisionCases);

        // Workflow instance: the input key ".Id" deliberately does not match
        // "${workflow.input.Id}", leaving Id unresolved for the script.
        WorkflowDef def = new WorkflowDef();
        def.setSchemaVersion(2);
        WorkflowModel workflowModel = new WorkflowModel();
        workflowModel.setWorkflowDefinition(def);
        Map<String, Object> workflowInput = new HashMap<>();
        workflowInput.put(".Id", "22");
        workflowModel.setInput(workflowInput);

        Map<String, Object> body = new HashMap<>();
        body.put("input", taskDefinitionInput);
        taskDef.getInputTemplate().putAll(body);

        Map<String, Object> evaluatorInput =
                parametersUtils.getTaskInput(
                        decisionTask.getInputParameters(), workflowModel, taskDef, null);

        expectedException.expect(TerminateWorkflowException.class);
        expectedException.expectMessage(
                "Error while evaluating script: " + decisionTask.getCaseExpression());

        decisionTaskMapper.getEvaluatedCaseValue(decisionTask, evaluatorInput);
    }
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/test/java/com/netflix/conductor/core/execution/mapper/JoinTaskMapperTest.java | core/src/test/java/com/netflix/conductor/core/execution/mapper/JoinTaskMapperTest.java | /*
* Copyright 2022 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution.mapper;
import java.util.Arrays;
import java.util.List;
import org.junit.Test;
import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.tasks.TaskType;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.core.utils.IDGenerator;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_JOIN;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
public class JoinTaskMapperTest {

    /** A JOIN workflow task should map to a single task model of type JOIN. */
    @Test
    public void getMappedTasks() {
        WorkflowTask joinTask = new WorkflowTask();
        joinTask.setType(TaskType.JOIN.name());
        joinTask.setJoinOn(Arrays.asList("task1", "task2"));

        WorkflowModel workflowModel = new WorkflowModel();
        workflowModel.setWorkflowDefinition(new WorkflowDef());

        TaskMapperContext context =
                TaskMapperContext.newBuilder()
                        .withWorkflowModel(workflowModel)
                        .withTaskDefinition(new TaskDef())
                        .withWorkflowTask(joinTask)
                        .withRetryCount(0)
                        .withTaskId(new IDGenerator().generate())
                        .build();

        List<TaskModel> result = new JoinTaskMapper().getMappedTasks(context);

        assertNotNull(result);
        assertEquals(TASK_TYPE_JOIN, result.get(0).getTaskType());
    }
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/test/java/com/netflix/conductor/core/execution/mapper/SwitchTaskMapperTest.java | core/src/test/java/com/netflix/conductor/core/execution/mapper/SwitchTaskMapperTest.java | /*
* Copyright 2022 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution.mapper;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringRunner;
import com.netflix.conductor.common.config.TestObjectMapperConfiguration;
import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.tasks.TaskType;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.core.execution.DeciderService;
import com.netflix.conductor.core.execution.evaluators.Evaluator;
import com.netflix.conductor.core.execution.evaluators.JavascriptEvaluator;
import com.netflix.conductor.core.execution.evaluators.ValueParamEvaluator;
import com.netflix.conductor.core.utils.IDGenerator;
import com.netflix.conductor.core.utils.ParametersUtils;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import com.fasterxml.jackson.databind.ObjectMapper;
import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
@ContextConfiguration(
classes = {
TestObjectMapperConfiguration.class,
SwitchTaskMapperTest.TestConfiguration.class
})
@RunWith(SpringRunner.class)
public class SwitchTaskMapperTest {
private IDGenerator idGenerator;
private ParametersUtils parametersUtils;
private DeciderService deciderService;
// Subject
private SwitchTaskMapper switchTaskMapper;
@Configuration
@ComponentScan(basePackageClasses = {Evaluator.class}) // load all Evaluator beans.
public static class TestConfiguration {}
@Autowired private ObjectMapper objectMapper;
@Autowired private Map<String, Evaluator> evaluators;
@Rule public ExpectedException expectedException = ExpectedException.none();
Map<String, Object> ip1;
WorkflowTask task1;
WorkflowTask task2;
WorkflowTask task3;
@Before
public void setUp() {
parametersUtils = new ParametersUtils(objectMapper);
idGenerator = new IDGenerator();
ip1 = new HashMap<>();
ip1.put("p1", "${workflow.input.param1}");
ip1.put("p2", "${workflow.input.param2}");
ip1.put("case", "${workflow.input.case}");
task1 = new WorkflowTask();
task1.setName("Test1");
task1.setInputParameters(ip1);
task1.setTaskReferenceName("t1");
task2 = new WorkflowTask();
task2.setName("Test2");
task2.setInputParameters(ip1);
task2.setTaskReferenceName("t2");
task3 = new WorkflowTask();
task3.setName("Test3");
task3.setInputParameters(ip1);
task3.setTaskReferenceName("t3");
deciderService = mock(DeciderService.class);
switchTaskMapper = new SwitchTaskMapper(evaluators);
}
    /**
     * A SWITCH task using the javascript evaluator: with Id=22 the expression
     * yields "even", so the switch task plus the "even" branch task are mapped.
     */
    @Test
    public void getMappedTasks() {
        // Given
        // Task Definition
        TaskDef taskDef = new TaskDef();
        Map<String, Object> inputMap = new HashMap<>();
        inputMap.put("Id", "${workflow.input.Id}");
        List<Map<String, Object>> taskDefinitionInput = new LinkedList<>();
        taskDefinitionInput.add(inputMap);

        // Switch task instance
        WorkflowTask switchTask = new WorkflowTask();
        switchTask.setType(TaskType.SWITCH.name());
        switchTask.setName("Switch");
        switchTask.setTaskReferenceName("switchTask");
        switchTask.setDefaultCase(Collections.singletonList(task1));
        switchTask.getInputParameters().put("Id", "${workflow.input.Id}");
        switchTask.setEvaluatorType(JavascriptEvaluator.NAME);
        switchTask.setExpression(
                "if ($.Id == null) 'bad input'; else if ( ($.Id != null && $.Id % 2 == 0)) 'even'; else 'odd'; ");
        Map<String, List<WorkflowTask>> decisionCases = new HashMap<>();
        decisionCases.put("even", Collections.singletonList(task2));
        decisionCases.put("odd", Collections.singletonList(task3));
        switchTask.setDecisionCases(decisionCases);

        // Workflow instance
        WorkflowDef workflowDef = new WorkflowDef();
        workflowDef.setSchemaVersion(2);
        WorkflowModel workflowModel = new WorkflowModel();
        workflowModel.setWorkflowDefinition(workflowDef);
        Map<String, Object> workflowInput = new HashMap<>();
        workflowInput.put("Id", "22");
        workflowModel.setInput(workflowInput);

        Map<String, Object> body = new HashMap<>();
        body.put("input", taskDefinitionInput);
        taskDef.getInputTemplate().putAll(body);

        Map<String, Object> input =
                parametersUtils.getTaskInput(
                        switchTask.getInputParameters(), workflowModel, null, null);

        // Decider is stubbed to schedule one task ("Foo") for the "even" branch.
        TaskModel theTask = new TaskModel();
        theTask.setReferenceTaskName("Foo");
        theTask.setTaskId(idGenerator.generate());

        when(deciderService.getTasksToBeScheduled(workflowModel, task2, 0, null))
                .thenReturn(Collections.singletonList(theTask));

        TaskMapperContext taskMapperContext =
                TaskMapperContext.newBuilder()
                        .withWorkflowModel(workflowModel)
                        .withWorkflowTask(switchTask)
                        .withTaskInput(input)
                        .withRetryCount(0)
                        .withTaskId(idGenerator.generate())
                        .withDeciderService(deciderService)
                        .build();

        // When
        List<TaskModel> mappedTasks = switchTaskMapper.getMappedTasks(taskMapperContext);

        // Then: the switch task first, followed by the scheduled branch task.
        assertEquals(2, mappedTasks.size());
        assertEquals("switchTask", mappedTasks.get(0).getReferenceTaskName());
        assertEquals("Foo", mappedTasks.get(1).getReferenceTaskName());
    }
@Test
public void getMappedTasksWithValueParamEvaluator() {
// Given
// Task Definition
TaskDef taskDef = new TaskDef();
Map<String, Object> inputMap = new HashMap<>();
inputMap.put("Id", "${workflow.input.Id}");
List<Map<String, Object>> taskDefinitionInput = new LinkedList<>();
taskDefinitionInput.add(inputMap);
// Switch task instance
WorkflowTask switchTask = new WorkflowTask();
switchTask.setType(TaskType.SWITCH.name());
switchTask.setName("Switch");
switchTask.setTaskReferenceName("switchTask");
switchTask.setDefaultCase(Collections.singletonList(task1));
switchTask.getInputParameters().put("Id", "${workflow.input.Id}");
switchTask.setEvaluatorType(ValueParamEvaluator.NAME);
switchTask.setExpression("Id");
Map<String, List<WorkflowTask>> decisionCases = new HashMap<>();
decisionCases.put("even", Collections.singletonList(task2));
decisionCases.put("odd", Collections.singletonList(task3));
switchTask.setDecisionCases(decisionCases);
// Workflow instance
WorkflowDef workflowDef = new WorkflowDef();
workflowDef.setSchemaVersion(2);
WorkflowModel workflowModel = new WorkflowModel();
workflowModel.setWorkflowDefinition(workflowDef);
Map<String, Object> workflowInput = new HashMap<>();
workflowInput.put("Id", "even");
workflowModel.setInput(workflowInput);
Map<String, Object> body = new HashMap<>();
body.put("input", taskDefinitionInput);
taskDef.getInputTemplate().putAll(body);
Map<String, Object> input =
parametersUtils.getTaskInput(
switchTask.getInputParameters(), workflowModel, null, null);
TaskModel theTask = new TaskModel();
theTask.setReferenceTaskName("Foo");
theTask.setTaskId(idGenerator.generate());
when(deciderService.getTasksToBeScheduled(workflowModel, task2, 0, null))
.thenReturn(Collections.singletonList(theTask));
TaskMapperContext taskMapperContext =
TaskMapperContext.newBuilder()
.withWorkflowModel(workflowModel)
.withWorkflowTask(switchTask)
.withTaskInput(input)
.withRetryCount(0)
.withTaskId(idGenerator.generate())
.withDeciderService(deciderService)
.build();
// When
List<TaskModel> mappedTasks = switchTaskMapper.getMappedTasks(taskMapperContext);
// Then
assertEquals(2, mappedTasks.size());
assertEquals("switchTask", mappedTasks.get(0).getReferenceTaskName());
assertEquals("Foo", mappedTasks.get(1).getReferenceTaskName());
}
    /**
     * Verifies that when the SWITCH task's evaluator fails at runtime (here a Javascript
     * expression referencing an undefined variable), the mapper returns a single switch
     * task marked FAILED instead of scheduling any case branch.
     */
    @Test
    public void getMappedTasksWhenEvaluatorThrowsException() {
        // Given
        // Task Definition
        TaskDef taskDef = new TaskDef();
        Map<String, Object> inputMap = new HashMap<>();
        List<Map<String, Object>> taskDefinitionInput = new LinkedList<>();
        taskDefinitionInput.add(inputMap);
        // Switch task instance
        WorkflowTask switchTask = new WorkflowTask();
        switchTask.setType(TaskType.SWITCH.name());
        switchTask.setName("Switch");
        switchTask.setTaskReferenceName("switchTask");
        switchTask.setDefaultCase(Collections.singletonList(task1));
        // Javascript evaluator + an expression that cannot resolve: forces the evaluation error.
        switchTask.setEvaluatorType(JavascriptEvaluator.NAME);
        switchTask.setExpression("undefinedVariable");
        Map<String, List<WorkflowTask>> decisionCases = new HashMap<>();
        decisionCases.put("even", Collections.singletonList(task2));
        switchTask.setDecisionCases(decisionCases);
        // Workflow instance
        WorkflowDef workflowDef = new WorkflowDef();
        workflowDef.setSchemaVersion(2);
        WorkflowModel workflowModel = new WorkflowModel();
        workflowModel.setWorkflowDefinition(workflowDef);
        Map<String, Object> body = new HashMap<>();
        body.put("input", taskDefinitionInput);
        taskDef.getInputTemplate().putAll(body);
        Map<String, Object> input =
                parametersUtils.getTaskInput(
                        switchTask.getInputParameters(), workflowModel, null, null);
        // Stub the "even" branch; it must never be scheduled because evaluation fails first.
        TaskModel theTask = new TaskModel();
        theTask.setReferenceTaskName("Foo");
        theTask.setTaskId(idGenerator.generate());
        when(deciderService.getTasksToBeScheduled(workflowModel, task2, 0, null))
                .thenReturn(Collections.singletonList(theTask));
        TaskMapperContext taskMapperContext =
                TaskMapperContext.newBuilder()
                        .withWorkflowModel(workflowModel)
                        .withWorkflowTask(switchTask)
                        .withTaskInput(input)
                        .withRetryCount(0)
                        .withTaskId(idGenerator.generate())
                        .withDeciderService(deciderService)
                        .build();
        // When
        List<TaskModel> mappedTasks = switchTaskMapper.getMappedTasks(taskMapperContext);
        // Then
        // Only the switch task itself comes back, in FAILED state; no branch task follows.
        assertEquals(1, mappedTasks.size());
        assertEquals("switchTask", mappedTasks.get(0).getReferenceTaskName());
        assertEquals(TaskModel.Status.FAILED, mappedTasks.get(0).getStatus());
    }
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/test/java/com/netflix/conductor/core/execution/mapper/ForkJoinDynamicTaskMapperTest.java | core/src/test/java/com/netflix/conductor/core/execution/mapper/ForkJoinDynamicTaskMapperTest.java | /*
* Copyright 2022 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution.mapper;
import java.util.*;
import org.apache.commons.lang3.tuple.Pair;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.mockito.Mockito;
import com.netflix.conductor.common.config.ObjectMapperProvider;
import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.tasks.TaskType;
import com.netflix.conductor.common.metadata.workflow.DynamicForkJoinTaskList;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.core.exception.TerminateWorkflowException;
import com.netflix.conductor.core.execution.DeciderService;
import com.netflix.conductor.core.execution.tasks.SystemTaskRegistry;
import com.netflix.conductor.core.utils.IDGenerator;
import com.netflix.conductor.core.utils.ParametersUtils;
import com.netflix.conductor.dao.MetadataDAO;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_FORK;
import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_JOIN;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyMap;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Mockito.when;
/**
 * Unit tests for {@link ForkJoinDynamicTaskMapper}: mapping of FORK_JOIN_DYNAMIC workflow
 * tasks into scheduled fork/branch/join {@link TaskModel}s, plus the helpers that extract
 * the dynamic task list and per-task inputs from the mapper input.
 *
 * <p>NOTE(review): the {@code ExpectedException} rule is deprecated in JUnit 4.13 —
 * {@code Assert.assertThrows} would be the modern equivalent; left as-is here.
 */
@SuppressWarnings("unchecked")
public class ForkJoinDynamicTaskMapperTest {
    private IDGenerator idGenerator;
    private ParametersUtils parametersUtils;
    private ObjectMapper objectMapper;
    private DeciderService deciderService;
    private ForkJoinDynamicTaskMapper forkJoinDynamicTaskMapper;
    private SystemTaskRegistry systemTaskRegistry;
    private MetadataDAO metadataDAO;
    @Rule public ExpectedException expectedException = ExpectedException.none();
    /** Creates the mapper under test with all collaborators mocked (IDGenerator is real). */
    @Before
    public void setUp() {
        metadataDAO = Mockito.mock(MetadataDAO.class);
        idGenerator = new IDGenerator();
        parametersUtils = Mockito.mock(ParametersUtils.class);
        objectMapper = Mockito.mock(ObjectMapper.class);
        deciderService = Mockito.mock(DeciderService.class);
        systemTaskRegistry = Mockito.mock(SystemTaskRegistry.class);
        forkJoinDynamicTaskMapper =
                new ForkJoinDynamicTaskMapper(
                        idGenerator,
                        parametersUtils,
                        objectMapper,
                        metadataDAO,
                        systemTaskRegistry);
    }
    /**
     * Expects {@link TerminateWorkflowException} when mapping a dynamic fork. Unlike
     * {@link #getMappedTasks()}, the JOIN task created below is never added to the workflow
     * definition — presumably the missing JOIN after the fork is what triggers the
     * termination; confirm against ForkJoinDynamicTaskMapper.
     */
    @Test
    public void getMappedTasksException() {
        WorkflowDef def = new WorkflowDef();
        def.setName("DYNAMIC_FORK_JOIN_WF");
        def.setDescription(def.getName());
        def.setVersion(1);
        def.setInputParameters(Arrays.asList("param1", "param2"));
        WorkflowModel workflowModel = new WorkflowModel();
        workflowModel.setWorkflowDefinition(def);
        WorkflowTask dynamicForkJoinToSchedule = new WorkflowTask();
        dynamicForkJoinToSchedule.setType(TaskType.FORK_JOIN_DYNAMIC.name());
        dynamicForkJoinToSchedule.setTaskReferenceName("dynamicfanouttask");
        dynamicForkJoinToSchedule.setDynamicForkTasksParam("dynamicTasks");
        dynamicForkJoinToSchedule.setDynamicForkTasksInputParamName("dynamicTasksInput");
        dynamicForkJoinToSchedule
                .getInputParameters()
                .put("dynamicTasks", "dt1.output.dynamicTasks");
        dynamicForkJoinToSchedule
                .getInputParameters()
                .put("dynamicTasksInput", "dt1.output.dynamicTasksInput");
        // JOIN is created but intentionally NOT added to def.getTasks() (compare getMappedTasks()).
        WorkflowTask join = new WorkflowTask();
        join.setType(TaskType.JOIN.name());
        join.setTaskReferenceName("dynamictask_join");
        def.getTasks().add(dynamicForkJoinToSchedule);
        Map<String, Object> input1 = new HashMap<>();
        input1.put("k1", "v1");
        WorkflowTask wt2 = new WorkflowTask();
        wt2.setName("junit_task_2");
        wt2.setTaskReferenceName("xdt1");
        Map<String, Object> input2 = new HashMap<>();
        input2.put("k2", "v2");
        WorkflowTask wt3 = new WorkflowTask();
        wt3.setName("junit_task_3");
        wt3.setTaskReferenceName("xdt2");
        HashMap<String, Object> dynamicTasksInput = new HashMap<>();
        dynamicTasksInput.put("xdt1", input1);
        dynamicTasksInput.put("xdt2", input2);
        dynamicTasksInput.put("dynamicTasks", Arrays.asList(wt2, wt3));
        dynamicTasksInput.put("dynamicTasksInput", dynamicTasksInput);
        // when
        when(parametersUtils.getTaskInput(anyMap(), any(WorkflowModel.class), any(), any()))
                .thenReturn(dynamicTasksInput);
        when(objectMapper.convertValue(any(), any(TypeReference.class)))
                .thenReturn(Arrays.asList(wt2, wt3));
        TaskModel simpleTask1 = new TaskModel();
        simpleTask1.setReferenceTaskName("xdt1");
        TaskModel simpleTask2 = new TaskModel();
        simpleTask2.setReferenceTaskName("xdt2");
        when(deciderService.getTasksToBeScheduled(workflowModel, wt2, 0))
                .thenReturn(Collections.singletonList(simpleTask1));
        when(deciderService.getTasksToBeScheduled(workflowModel, wt3, 0))
                .thenReturn(Collections.singletonList(simpleTask2));
        String taskId = idGenerator.generate();
        TaskMapperContext taskMapperContext =
                TaskMapperContext.newBuilder()
                        .withTaskInput(Map.of())
                        .withWorkflowModel(workflowModel)
                        .withWorkflowTask(dynamicForkJoinToSchedule)
                        .withRetryCount(0)
                        .withTaskId(taskId)
                        .withDeciderService(deciderService)
                        .build();
        // then
        expectedException.expect(TerminateWorkflowException.class);
        forkJoinDynamicTaskMapper.getMappedTasks(taskMapperContext);
    }
    /**
     * Happy path: a dynamic fork with two branch tasks maps to four tasks —
     * FORK, the two branch tasks, and JOIN — with the JOIN's "joinOn" input
     * listing both branch reference names.
     */
    @Test
    public void getMappedTasks() {
        WorkflowDef def = new WorkflowDef();
        def.setName("DYNAMIC_FORK_JOIN_WF");
        def.setDescription(def.getName());
        def.setVersion(1);
        def.setInputParameters(Arrays.asList("param1", "param2"));
        WorkflowModel workflowModel = new WorkflowModel();
        workflowModel.setWorkflowDefinition(def);
        WorkflowTask dynamicForkJoinToSchedule = new WorkflowTask();
        dynamicForkJoinToSchedule.setType(TaskType.FORK_JOIN_DYNAMIC.name());
        dynamicForkJoinToSchedule.setTaskReferenceName("dynamicfanouttask");
        dynamicForkJoinToSchedule.setDynamicForkTasksParam("dynamicTasks");
        dynamicForkJoinToSchedule.setDynamicForkTasksInputParamName("dynamicTasksInput");
        dynamicForkJoinToSchedule
                .getInputParameters()
                .put("dynamicTasks", "dt1.output.dynamicTasks");
        dynamicForkJoinToSchedule
                .getInputParameters()
                .put("dynamicTasksInput", "dt1.output.dynamicTasksInput");
        // Here (unlike getMappedTasksException) the JOIN IS registered in the definition.
        WorkflowTask join = new WorkflowTask();
        join.setType(TaskType.JOIN.name());
        join.setTaskReferenceName("dynamictask_join");
        def.getTasks().add(dynamicForkJoinToSchedule);
        def.getTasks().add(join);
        Map<String, Object> input1 = new HashMap<>();
        input1.put("k1", "v1");
        WorkflowTask wt2 = new WorkflowTask();
        wt2.setName("junit_task_2");
        wt2.setTaskReferenceName("xdt1");
        Map<String, Object> input2 = new HashMap<>();
        input2.put("k2", "v2");
        WorkflowTask wt3 = new WorkflowTask();
        wt3.setName("junit_task_3");
        wt3.setTaskReferenceName("xdt2");
        HashMap<String, Object> dynamicTasksInput = new HashMap<>();
        dynamicTasksInput.put("xdt1", input1);
        dynamicTasksInput.put("xdt2", input2);
        dynamicTasksInput.put("dynamicTasks", Arrays.asList(wt2, wt3));
        dynamicTasksInput.put("dynamicTasksInput", dynamicTasksInput);
        // when
        when(parametersUtils.getTaskInput(anyMap(), any(WorkflowModel.class), any(), any()))
                .thenReturn(dynamicTasksInput);
        when(objectMapper.convertValue(any(), any(TypeReference.class)))
                .thenReturn(Arrays.asList(wt2, wt3));
        TaskModel simpleTask1 = new TaskModel();
        simpleTask1.setReferenceTaskName("xdt1");
        TaskModel simpleTask2 = new TaskModel();
        simpleTask2.setReferenceTaskName("xdt2");
        when(deciderService.getTasksToBeScheduled(workflowModel, wt2, 0))
                .thenReturn(Collections.singletonList(simpleTask1));
        when(deciderService.getTasksToBeScheduled(workflowModel, wt3, 0))
                .thenReturn(Collections.singletonList(simpleTask2));
        String taskId = idGenerator.generate();
        TaskMapperContext taskMapperContext =
                TaskMapperContext.newBuilder()
                        .withWorkflowModel(workflowModel)
                        .withWorkflowTask(dynamicForkJoinToSchedule)
                        .withRetryCount(0)
                        .withTaskInput(Map.of())
                        .withTaskId(taskId)
                        .withDeciderService(deciderService)
                        .build();
        // then
        List<TaskModel> mappedTasks = forkJoinDynamicTaskMapper.getMappedTasks(taskMapperContext);
        // 4 tasks: FORK at index 0, the two branch tasks, JOIN at index 3.
        assertEquals(4, mappedTasks.size());
        assertEquals(TASK_TYPE_FORK, mappedTasks.get(0).getTaskType());
        assertEquals(TASK_TYPE_JOIN, mappedTasks.get(3).getTaskType());
        List<String> joinTaskNames = (List<String>) mappedTasks.get(3).getInputData().get("joinOn");
        assertEquals("xdt1, xdt2", String.join(", ", joinTaskNames));
    }
    /**
     * Legacy "dynamicForkJoinTasksParam" path: the mapper input carries a
     * {@link DynamicForkJoinTaskList}, which is split into two workflow tasks and a
     * per-reference-name input map.
     */
    @Test
    public void getDynamicForkJoinTasksAndInput() {
        // Given
        WorkflowTask dynamicForkJoinToSchedule = new WorkflowTask();
        dynamicForkJoinToSchedule.setType(TaskType.FORK_JOIN_DYNAMIC.name());
        dynamicForkJoinToSchedule.setTaskReferenceName("dynamicfanouttask");
        dynamicForkJoinToSchedule.setDynamicForkJoinTasksParam("dynamicTasks");
        dynamicForkJoinToSchedule
                .getInputParameters()
                .put("dynamicTasks", "dt1.output.dynamicTasks");
        dynamicForkJoinToSchedule
                .getInputParameters()
                .put("dynamicTasksInput", "dt1.output.dynamicTasksInput");
        DynamicForkJoinTaskList dtasks = new DynamicForkJoinTaskList();
        Map<String, Object> input = new HashMap<>();
        input.put("k1", "v1");
        dtasks.add("junit_task_2", null, "xdt1", input);
        HashMap<String, Object> input2 = new HashMap<>();
        input2.put("k2", "v2");
        dtasks.add("junit_task_3", null, "xdt2", input2);
        Map<String, Object> dynamicTasksInput = new HashMap<>();
        dynamicTasksInput.put("dynamicTasks", dtasks);
        // when
        when(parametersUtils.getTaskInput(
                        anyMap(), any(WorkflowModel.class), any(TaskDef.class), anyString()))
                .thenReturn(dynamicTasksInput);
        when(objectMapper.convertValue(any(), any(Class.class))).thenReturn(dtasks);
        Pair<List<WorkflowTask>, Map<String, Map<String, Object>>> dynamicForkJoinTasksAndInput =
                forkJoinDynamicTaskMapper.getDynamicForkJoinTasksAndInput(
                        dynamicForkJoinToSchedule, new WorkflowModel(), Map.of());
        // then
        // Left: the two fork tasks; right: one input map per task reference name.
        assertNotNull(dynamicForkJoinTasksAndInput.getLeft());
        assertEquals(2, dynamicForkJoinTasksAndInput.getLeft().size());
        assertEquals(2, dynamicForkJoinTasksAndInput.getRight().size());
    }
    /**
     * When the configured fork-join param cannot be converted to a
     * {@link DynamicForkJoinTaskList} (convertValue returns null), the workflow
     * must be terminated.
     */
    @Test
    public void getDynamicForkJoinTasksAndInputException() {
        // Given
        WorkflowTask dynamicForkJoinToSchedule = new WorkflowTask();
        dynamicForkJoinToSchedule.setType(TaskType.FORK_JOIN_DYNAMIC.name());
        dynamicForkJoinToSchedule.setTaskReferenceName("dynamicfanouttask");
        dynamicForkJoinToSchedule.setDynamicForkJoinTasksParam("dynamicTasks");
        dynamicForkJoinToSchedule
                .getInputParameters()
                .put("dynamicTasks", "dt1.output.dynamicTasks");
        dynamicForkJoinToSchedule
                .getInputParameters()
                .put("dynamicTasksInput", "dt1.output.dynamicTasksInput");
        DynamicForkJoinTaskList dtasks = new DynamicForkJoinTaskList();
        Map<String, Object> input = new HashMap<>();
        input.put("k1", "v1");
        dtasks.add("junit_task_2", null, "xdt1", input);
        HashMap<String, Object> input2 = new HashMap<>();
        input2.put("k2", "v2");
        dtasks.add("junit_task_3", null, "xdt2", input2);
        Map<String, Object> dynamicTasksInput = new HashMap<>();
        dynamicTasksInput.put("dynamicTasks", dtasks);
        // when
        when(parametersUtils.getTaskInput(
                        anyMap(), any(WorkflowModel.class), any(TaskDef.class), anyString()))
                .thenReturn(dynamicTasksInput);
        // Conversion failure is simulated by a null result.
        when(objectMapper.convertValue(any(), any(Class.class))).thenReturn(null);
        // then
        expectedException.expect(TerminateWorkflowException.class);
        forkJoinDynamicTaskMapper.getDynamicForkJoinTasksAndInput(
                dynamicForkJoinToSchedule, new WorkflowModel(), Map.of());
    }
    /**
     * "dynamicForkTasksParam" path: the fork tasks are supplied as a list of
     * {@link WorkflowTask}s and the per-task inputs are looked up from the
     * separate input-param map.
     */
    @Test
    public void getDynamicForkTasksAndInput() {
        // Given
        WorkflowTask dynamicForkJoinToSchedule = new WorkflowTask();
        dynamicForkJoinToSchedule.setType(TaskType.FORK_JOIN_DYNAMIC.name());
        dynamicForkJoinToSchedule.setTaskReferenceName("dynamicfanouttask");
        dynamicForkJoinToSchedule.setDynamicForkTasksParam("dynamicTasks");
        dynamicForkJoinToSchedule.setDynamicForkTasksInputParamName("dynamicTasksInput");
        dynamicForkJoinToSchedule
                .getInputParameters()
                .put("dynamicTasks", "dt1.output.dynamicTasks");
        dynamicForkJoinToSchedule
                .getInputParameters()
                .put("dynamicTasksInput", "dt1.output.dynamicTasksInput");
        Map<String, Object> input1 = new HashMap<>();
        input1.put("k1", "v1");
        WorkflowTask wt2 = new WorkflowTask();
        wt2.setName("junit_task_2");
        wt2.setTaskReferenceName("xdt1");
        Map<String, Object> input2 = new HashMap<>();
        input2.put("k2", "v2");
        WorkflowTask wt3 = new WorkflowTask();
        wt3.setName("junit_task_3");
        wt3.setTaskReferenceName("xdt2");
        Map<String, Object> dynamicTasksInput = new HashMap<>();
        dynamicTasksInput.put("xdt1", input1);
        dynamicTasksInput.put("xdt2", input2);
        dynamicTasksInput.put("dynamicTasks", Arrays.asList(wt2, wt3));
        dynamicTasksInput.put("dynamicTasksInput", dynamicTasksInput);
        // when
        when(parametersUtils.getTaskInput(anyMap(), any(WorkflowModel.class), any(), any()))
                .thenReturn(dynamicTasksInput);
        when(objectMapper.convertValue(any(), any(TypeReference.class)))
                .thenReturn(Arrays.asList(wt2, wt3));
        Pair<List<WorkflowTask>, Map<String, Map<String, Object>>> dynamicTasks =
                forkJoinDynamicTaskMapper.getDynamicForkTasksAndInput(
                        dynamicForkJoinToSchedule,
                        new WorkflowModel(),
                        "dynamicTasks",
                        dynamicTasksInput);
        // then
        assertNotNull(dynamicTasks.getLeft());
    }
    /**
     * When the dynamic-fork input param resolves to null (here "dynamicTasksInput"
     * maps to null), the workflow must be terminated.
     */
    @Test
    public void getDynamicForkTasksAndInputException() {
        // Given
        WorkflowTask dynamicForkJoinToSchedule = new WorkflowTask();
        dynamicForkJoinToSchedule.setType(TaskType.FORK_JOIN_DYNAMIC.name());
        dynamicForkJoinToSchedule.setTaskReferenceName("dynamicfanouttask");
        dynamicForkJoinToSchedule.setDynamicForkTasksParam("dynamicTasks");
        dynamicForkJoinToSchedule.setDynamicForkTasksInputParamName("dynamicTasksInput");
        dynamicForkJoinToSchedule
                .getInputParameters()
                .put("dynamicTasks", "dt1.output.dynamicTasks");
        dynamicForkJoinToSchedule
                .getInputParameters()
                .put("dynamicTasksInput", "dt1.output.dynamicTasksInput");
        Map<String, Object> input1 = new HashMap<>();
        input1.put("k1", "v1");
        WorkflowTask wt2 = new WorkflowTask();
        wt2.setName("junit_task_2");
        wt2.setTaskReferenceName("xdt1");
        Map<String, Object> input2 = new HashMap<>();
        input2.put("k2", "v2");
        WorkflowTask wt3 = new WorkflowTask();
        wt3.setName("junit_task_3");
        wt3.setTaskReferenceName("xdt2");
        HashMap<String, Object> dynamicTasksInput = new HashMap<>();
        dynamicTasksInput.put("xdt1", input1);
        dynamicTasksInput.put("xdt2", input2);
        dynamicTasksInput.put("dynamicTasks", Arrays.asList(wt2, wt3));
        // The null input param is the error condition under test.
        dynamicTasksInput.put("dynamicTasksInput", null);
        when(parametersUtils.getTaskInput(anyMap(), any(WorkflowModel.class), any(), any()))
                .thenReturn(dynamicTasksInput);
        when(objectMapper.convertValue(any(), any(TypeReference.class)))
                .thenReturn(Arrays.asList(wt2, wt3));
        // then
        expectedException.expect(TerminateWorkflowException.class);
        // when
        forkJoinDynamicTaskMapper.getDynamicForkTasksAndInput(
                dynamicForkJoinToSchedule, new WorkflowModel(), "dynamicTasks", Map.of());
    }
    /**
     * When the decider returns an empty schedule for a branch task (a bad state —
     * see inline comment), mapping must terminate the workflow.
     */
    @Test
    public void testDynamicTaskDuplicateTaskRefName() {
        WorkflowDef def = new WorkflowDef();
        def.setName("DYNAMIC_FORK_JOIN_WF");
        def.setDescription(def.getName());
        def.setVersion(1);
        def.setInputParameters(Arrays.asList("param1", "param2"));
        WorkflowModel workflowModel = new WorkflowModel();
        workflowModel.setWorkflowDefinition(def);
        WorkflowTask dynamicForkJoinToSchedule = new WorkflowTask();
        dynamicForkJoinToSchedule.setType(TaskType.FORK_JOIN_DYNAMIC.name());
        dynamicForkJoinToSchedule.setTaskReferenceName("dynamicfanouttask");
        dynamicForkJoinToSchedule.setDynamicForkTasksParam("dynamicTasks");
        dynamicForkJoinToSchedule.setDynamicForkTasksInputParamName("dynamicTasksInput");
        dynamicForkJoinToSchedule
                .getInputParameters()
                .put("dynamicTasks", "dt1.output.dynamicTasks");
        dynamicForkJoinToSchedule
                .getInputParameters()
                .put("dynamicTasksInput", "dt1.output.dynamicTasksInput");
        WorkflowTask join = new WorkflowTask();
        join.setType(TaskType.JOIN.name());
        join.setTaskReferenceName("dynamictask_join");
        def.getTasks().add(dynamicForkJoinToSchedule);
        def.getTasks().add(join);
        Map<String, Object> input1 = new HashMap<>();
        input1.put("k1", "v1");
        WorkflowTask wt2 = new WorkflowTask();
        wt2.setName("junit_task_2");
        wt2.setTaskReferenceName("xdt1");
        Map<String, Object> input2 = new HashMap<>();
        input2.put("k2", "v2");
        WorkflowTask wt3 = new WorkflowTask();
        wt3.setName("junit_task_3");
        wt3.setTaskReferenceName("xdt2");
        HashMap<String, Object> dynamicTasksInput = new HashMap<>();
        dynamicTasksInput.put("xdt1", input1);
        dynamicTasksInput.put("xdt2", input2);
        dynamicTasksInput.put("dynamicTasks", Arrays.asList(wt2, wt3));
        dynamicTasksInput.put("dynamicTasksInput", dynamicTasksInput);
        // dynamic
        when(parametersUtils.getTaskInput(anyMap(), any(WorkflowModel.class), any(), any()))
                .thenReturn(dynamicTasksInput);
        when(objectMapper.convertValue(any(), any(TypeReference.class)))
                .thenReturn(Arrays.asList(wt2, wt3));
        TaskModel simpleTask1 = new TaskModel();
        simpleTask1.setReferenceTaskName("xdt1");
        // Empty list, this is a bad state, workflow should terminate
        when(deciderService.getTasksToBeScheduled(workflowModel, wt2, 0))
                .thenReturn(new ArrayList<>());
        String taskId = idGenerator.generate();
        TaskMapperContext taskMapperContext =
                TaskMapperContext.newBuilder()
                        .withTaskInput(Map.of())
                        .withWorkflowModel(workflowModel)
                        .withWorkflowTask(dynamicForkJoinToSchedule)
                        .withRetryCount(0)
                        .withTaskId(taskId)
                        .withDeciderService(deciderService)
                        .build();
        expectedException.expect(TerminateWorkflowException.class);
        forkJoinDynamicTaskMapper.getMappedTasks(taskMapperContext);
    }
    /**
     * Uses a real ObjectMapper (not the mock) to verify that getDynamicTasksSimple
     * keeps each fork input map as-is — neither the generated tasks' input
     * parameters nor the returned per-task inputs gain a wrapping "input" key.
     */
    @Test
    public void dynamicForkInputsRemainUnwrappedWhenMapsProvided() {
        ObjectMapper realObjectMapper = new ObjectMapperProvider().getObjectMapper();
        ForkJoinDynamicTaskMapper mapper =
                new ForkJoinDynamicTaskMapper(
                        idGenerator,
                        parametersUtils,
                        realObjectMapper,
                        metadataDAO,
                        systemTaskRegistry);
        WorkflowTask workflowTask = new WorkflowTask();
        workflowTask.setTaskReferenceName("fork_join_dynamic");
        workflowTask.setType(TaskType.FORK_JOIN_DYNAMIC.name());
        Map<String, Object> forkInput1 = new HashMap<>();
        forkInput1.put("param1", "value1");
        Map<String, Object> forkInput2 = new HashMap<>();
        forkInput2.put("param1", "value2");
        Map<String, Object> mapperInput = new HashMap<>();
        mapperInput.put("forkTaskWorkflow", "sub_workflow_definition_name");
        mapperInput.put("forkTaskWorkflowVersion", "1");
        mapperInput.put("forkTaskInputs", Arrays.asList(forkInput1, forkInput2));
        Pair<List<WorkflowTask>, Map<String, Map<String, Object>>> result =
                mapper.getDynamicTasksSimple(workflowTask, mapperInput);
        assertNotNull(result);
        result.getLeft()
                .forEach(task -> assertFalse(task.getInputParameters().containsKey("input")));
        result.getRight().values().forEach(input -> assertFalse(input.containsKey("input")));
        WorkflowTask firstTask = result.getLeft().get(0);
        WorkflowTask secondTask = result.getLeft().get(1);
        assertEquals("value1", firstTask.getInputParameters().get("param1"));
        assertEquals("value2", secondTask.getInputParameters().get("param1"));
        assertEquals(
                "value1", result.getRight().get(firstTask.getTaskReferenceName()).get("param1"));
        assertEquals(
                "value2", result.getRight().get(secondTask.getTaskReferenceName()).get("param1"));
    }
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/test/java/com/netflix/conductor/core/execution/mapper/HTTPTaskMapperTest.java | core/src/test/java/com/netflix/conductor/core/execution/mapper/HTTPTaskMapperTest.java | /*
* Copyright 2022 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution.mapper;
import java.util.HashMap;
import java.util.List;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.tasks.TaskType;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.core.utils.IDGenerator;
import com.netflix.conductor.core.utils.ParametersUtils;
import com.netflix.conductor.dao.MetadataDAO;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.mock;
/** Unit tests for {@link HTTPTaskMapper}. */
public class HTTPTaskMapperTest {
    private HTTPTaskMapper httpTaskMapper;
    private IDGenerator idGenerator;
    @Rule public ExpectedException expectedException = ExpectedException.none();

    /** Wires the mapper with mocked collaborators; a real IDGenerator supplies task ids. */
    @Before
    public void setUp() {
        MetadataDAO metadataDAOMock = mock(MetadataDAO.class);
        ParametersUtils parametersUtilsMock = mock(ParametersUtils.class);
        idGenerator = new IDGenerator();
        httpTaskMapper = new HTTPTaskMapper(parametersUtilsMock, metadataDAOMock);
    }

    /** Builds a mapper context for the given workflow task and (possibly null) task definition. */
    private TaskMapperContext contextFor(WorkflowTask workflowTask, TaskDef taskDef) {
        WorkflowModel workflow = new WorkflowModel();
        workflow.setWorkflowDefinition(new WorkflowDef());
        return TaskMapperContext.newBuilder()
                .withWorkflowModel(workflow)
                .withTaskDefinition(taskDef)
                .withWorkflowTask(workflowTask)
                .withTaskInput(new HashMap<>())
                .withRetryCount(0)
                .withRetryTaskId(idGenerator.generate())
                .withTaskId(idGenerator.generate())
                .build();
    }

    /** An HTTP task with an attached task definition maps to exactly one HTTP TaskModel. */
    @Test
    public void getMappedTasks() {
        WorkflowTask httpTask = new WorkflowTask();
        httpTask.setName("http_task");
        httpTask.setType(TaskType.HTTP.name());
        httpTask.setTaskDefinition(new TaskDef("http_task"));

        List<TaskModel> mapped =
                httpTaskMapper.getMappedTasks(contextFor(httpTask, new TaskDef()));

        assertEquals(1, mapped.size());
        assertEquals(TaskType.HTTP.name(), mapped.get(0).getTaskType());
    }

    /** Mapping also succeeds when no task definition is present anywhere in the context. */
    @Test
    public void getMappedTasks_WithoutTaskDef() {
        WorkflowTask httpTask = new WorkflowTask();
        httpTask.setName("http_task");
        httpTask.setType(TaskType.HTTP.name());

        List<TaskModel> mapped = httpTaskMapper.getMappedTasks(contextFor(httpTask, null));

        assertEquals(1, mapped.size());
        assertEquals(TaskType.HTTP.name(), mapped.get(0).getTaskType());
    }
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/test/java/com/netflix/conductor/core/execution/mapper/SetVariableTaskMapperTest.java | core/src/test/java/com/netflix/conductor/core/execution/mapper/SetVariableTaskMapperTest.java | /*
* Copyright 2022 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution.mapper;
import java.util.List;
import org.junit.Assert;
import org.junit.Test;
import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.tasks.TaskType;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.core.utils.IDGenerator;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
/** Unit tests for {@link SetVariableTaskMapper}. */
public class SetVariableTaskMapperTest {

    /** A SET_VARIABLE workflow task maps to exactly one SET_VARIABLE TaskModel. */
    @Test
    public void getMappedTasks() {
        // given a SET_VARIABLE task inside a minimal workflow
        WorkflowTask setVariableTask = new WorkflowTask();
        setVariableTask.setType(TaskType.TASK_TYPE_SET_VARIABLE);
        WorkflowModel workflow = new WorkflowModel();
        workflow.setWorkflowDefinition(new WorkflowDef());
        TaskMapperContext context =
                TaskMapperContext.newBuilder()
                        .withWorkflowModel(workflow)
                        .withTaskDefinition(new TaskDef())
                        .withWorkflowTask(setVariableTask)
                        .withRetryCount(0)
                        .withTaskId(new IDGenerator().generate())
                        .build();

        // when
        List<TaskModel> mapped = new SetVariableTaskMapper().getMappedTasks(context);

        // then
        Assert.assertNotNull(mapped);
        Assert.assertEquals(1, mapped.size());
        Assert.assertEquals(TaskType.TASK_TYPE_SET_VARIABLE, mapped.get(0).getTaskType());
    }
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/test/java/com/netflix/conductor/core/execution/mapper/SimpleTaskMapperTest.java | core/src/test/java/com/netflix/conductor/core/execution/mapper/SimpleTaskMapperTest.java | /*
* Copyright 2022 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution.mapper;
import java.util.HashMap;
import java.util.List;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.core.exception.TerminateWorkflowException;
import com.netflix.conductor.core.utils.IDGenerator;
import com.netflix.conductor.core.utils.ParametersUtils;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.mockito.Mockito.mock;
/** Unit tests for {@link SimpleTaskMapper}. */
public class SimpleTaskMapperTest {
    private SimpleTaskMapper simpleTaskMapper;
    private IDGenerator idGenerator = new IDGenerator();
    @Rule public ExpectedException expectedException = ExpectedException.none();

    @Before
    public void setUp() {
        // ParametersUtils is not exercised by these tests, so a bare mock suffices.
        simpleTaskMapper = new SimpleTaskMapper(mock(ParametersUtils.class));
    }

    /** Builds a mapper context around the given workflow task with a minimal workflow. */
    private TaskMapperContext contextFor(WorkflowTask workflowTask) {
        WorkflowModel workflow = new WorkflowModel();
        workflow.setWorkflowDefinition(new WorkflowDef());
        return TaskMapperContext.newBuilder()
                .withWorkflowModel(workflow)
                .withTaskDefinition(new TaskDef())
                .withWorkflowTask(workflowTask)
                .withTaskInput(new HashMap<>())
                .withRetryCount(0)
                .withRetryTaskId(idGenerator.generate())
                .withTaskId(idGenerator.generate())
                .build();
    }

    /** A simple task with a task definition maps to exactly one TaskModel. */
    @Test
    public void getMappedTasks() {
        WorkflowTask simpleTask = new WorkflowTask();
        simpleTask.setName("simple_task");
        simpleTask.setTaskDefinition(new TaskDef("simple_task"));

        List<TaskModel> mapped = simpleTaskMapper.getMappedTasks(contextFor(simpleTask));

        assertNotNull(mapped);
        assertEquals(1, mapped.size());
    }

    /** A simple task without a definition must terminate the workflow with a clear message. */
    @Test
    public void getMappedTasksException() {
        // given a task that has no definition attached
        WorkflowTask simpleTask = new WorkflowTask();
        simpleTask.setName("simple_task");

        // then
        expectedException.expect(TerminateWorkflowException.class);
        expectedException.expectMessage(
                String.format(
                        "Invalid task. Task %s does not have a definition",
                        simpleTask.getName()));

        // when
        simpleTaskMapper.getMappedTasks(contextFor(simpleTask));
    }
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/test/java/com/netflix/conductor/core/execution/mapper/EventTaskMapperTest.java | core/src/test/java/com/netflix/conductor/core/execution/mapper/EventTaskMapperTest.java | /*
* Copyright 2022 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution.mapper;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.junit.Test;
import org.mockito.Mockito;
import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.core.utils.IDGenerator;
import com.netflix.conductor.core.utils.ParametersUtils;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import static org.junit.Assert.assertEquals;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyMap;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Mockito.when;
/** Unit tests for {@link EventTaskMapper}, which maps EVENT workflow tasks. */
public class EventTaskMapperTest {

    /** An EVENT task maps to a single task with the taskId from the context. */
    @Test
    public void getMappedTasks() {
        ParametersUtils parametersUtils = Mockito.mock(ParametersUtils.class);
        EventTaskMapper eventTaskMapper = new EventTaskMapper(parametersUtils);

        WorkflowTask eventWorkflowTask = new WorkflowTask();
        eventWorkflowTask.setSink("SQSSINK");

        String taskId = new IDGenerator().generate();

        // Stub input resolution so the mapper sees the resolved sink parameter.
        Map<String, Object> resolvedInput = new HashMap<>();
        resolvedInput.put("sink", "SQSSINK");
        when(parametersUtils.getTaskInput(
                        anyMap(), any(WorkflowModel.class), any(TaskDef.class), anyString()))
                .thenReturn(resolvedInput);

        WorkflowModel workflow = new WorkflowModel();
        workflow.setWorkflowDefinition(new WorkflowDef());

        TaskMapperContext context =
                TaskMapperContext.newBuilder()
                        .withWorkflowModel(workflow)
                        .withTaskDefinition(new TaskDef())
                        .withWorkflowTask(eventWorkflowTask)
                        .withRetryCount(0)
                        .withTaskId(taskId)
                        .build();

        List<TaskModel> mappedTasks = eventTaskMapper.getMappedTasks(context);

        assertEquals(1, mappedTasks.size());
        assertEquals(taskId, mappedTasks.get(0).getTaskId());
    }
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/test/java/com/netflix/conductor/core/execution/mapper/InlineTaskMapperTest.java | core/src/test/java/com/netflix/conductor/core/execution/mapper/InlineTaskMapperTest.java | /*
* Copyright 2022 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution.mapper;
import java.util.List;
import org.junit.Before;
import org.junit.Test;
import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.tasks.TaskType;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.core.execution.evaluators.JavascriptEvaluator;
import com.netflix.conductor.core.utils.IDGenerator;
import com.netflix.conductor.core.utils.ParametersUtils;
import com.netflix.conductor.dao.MetadataDAO;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.mockito.Mockito.mock;
/** Unit tests for {@link InlineTaskMapper}, which maps INLINE (scripted) tasks. */
public class InlineTaskMapperTest {

    private ParametersUtils parametersUtils;
    private MetadataDAO metadataDAO;

    @Before
    public void setUp() {
        parametersUtils = mock(ParametersUtils.class);
        metadataDAO = mock(MetadataDAO.class);
    }

    /** Builds a TaskMapperContext around the given INLINE workflow task. */
    private TaskMapperContext contextFor(WorkflowTask workflowTask, TaskDef taskDef) {
        WorkflowModel workflow = new WorkflowModel();
        workflow.setWorkflowDefinition(new WorkflowDef());
        return TaskMapperContext.newBuilder()
                .withWorkflowModel(workflow)
                .withTaskDefinition(taskDef)
                .withWorkflowTask(workflowTask)
                .withRetryCount(0)
                .withTaskId(new IDGenerator().generate())
                .build();
    }

    /** An INLINE task with an attached TaskDef maps to one INLINE task. */
    @Test
    public void getMappedTasks() {
        WorkflowTask inlineTask = new WorkflowTask();
        inlineTask.setName("inline_task");
        inlineTask.setType(TaskType.INLINE.name());
        inlineTask.setTaskDefinition(new TaskDef("inline_task"));
        inlineTask.setEvaluatorType(JavascriptEvaluator.NAME);
        inlineTask.setExpression(
                "function scriptFun() {if ($.input.a==1){return {testValue: true}} else{return "
                        + "{testValue: false} }}; scriptFun();");

        List<TaskModel> mappedTasks =
                new InlineTaskMapper(parametersUtils, metadataDAO)
                        .getMappedTasks(contextFor(inlineTask, new TaskDef()));

        assertEquals(1, mappedTasks.size());
        assertNotNull(mappedTasks);
        assertEquals(TaskType.INLINE.name(), mappedTasks.get(0).getTaskType());
    }

    /** An INLINE task must still map when no task definition is supplied at all. */
    @Test
    public void getMappedTasks_WithoutTaskDef() {
        WorkflowTask inlineTask = new WorkflowTask();
        inlineTask.setType(TaskType.INLINE.name());
        inlineTask.setEvaluatorType(JavascriptEvaluator.NAME);
        inlineTask.setExpression(
                "function scriptFun() {if ($.input.a==1){return {testValue: true}} else{return "
                        + "{testValue: false} }}; scriptFun();");

        List<TaskModel> mappedTasks =
                new InlineTaskMapper(parametersUtils, metadataDAO)
                        .getMappedTasks(contextFor(inlineTask, null));

        assertEquals(1, mappedTasks.size());
        assertNotNull(mappedTasks);
        assertEquals(TaskType.INLINE.name(), mappedTasks.get(0).getTaskType());
    }
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/test/java/com/netflix/conductor/core/execution/mapper/DynamicTaskMapperTest.java | core/src/test/java/com/netflix/conductor/core/execution/mapper/DynamicTaskMapperTest.java | /*
* Copyright 2022 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution.mapper;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.core.exception.TerminateWorkflowException;
import com.netflix.conductor.core.utils.IDGenerator;
import com.netflix.conductor.core.utils.ParametersUtils;
import com.netflix.conductor.dao.MetadataDAO;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import static org.junit.Assert.assertEquals;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyMap;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
/** Unit tests for {@link DynamicTaskMapper}, which resolves DYNAMIC tasks at runtime. */
public class DynamicTaskMapperTest {

    @Rule public ExpectedException expectedException = ExpectedException.none();

    // Mocked collaborators; a fresh mapper is built for every test in setUp().
    private ParametersUtils parametersUtils;
    private MetadataDAO metadataDAO;
    private DynamicTaskMapper dynamicTaskMapper;

    @Before
    public void setUp() {
        parametersUtils = mock(ParametersUtils.class);
        metadataDAO = mock(MetadataDAO.class);
        dynamicTaskMapper = new DynamicTaskMapper(parametersUtils, metadataDAO);
    }

    /** Happy path: the dynamic task name is read from the input and one task is mapped. */
    @Test
    public void getMappedTasks() {
        WorkflowTask workflowTask = new WorkflowTask();
        workflowTask.setName("DynoTask");
        // The input key under which the actual task name to schedule is stored.
        workflowTask.setDynamicTaskNameParam("dynamicTaskName");
        TaskDef taskDef = new TaskDef();
        taskDef.setName("DynoTask");
        workflowTask.setTaskDefinition(taskDef);
        Map<String, Object> taskInput = new HashMap<>();
        taskInput.put("dynamicTaskName", "DynoTask");
        // Input resolution is stubbed to hand back the map containing the dynamic name.
        when(parametersUtils.getTaskInput(
                        anyMap(), any(WorkflowModel.class), any(TaskDef.class), anyString()))
                .thenReturn(taskInput);
        String taskId = new IDGenerator().generate();
        WorkflowModel workflow = new WorkflowModel();
        WorkflowDef workflowDef = new WorkflowDef();
        workflow.setWorkflowDefinition(workflowDef);
        TaskMapperContext taskMapperContext =
                TaskMapperContext.newBuilder()
                        .withWorkflowModel(workflow)
                        .withTaskDefinition(workflowTask.getTaskDefinition())
                        .withWorkflowTask(workflowTask)
                        .withTaskInput(taskInput)
                        .withRetryCount(0)
                        .withTaskId(taskId)
                        .build();
        when(metadataDAO.getTaskDef("DynoTask")).thenReturn(new TaskDef());
        List<TaskModel> mappedTasks = dynamicTaskMapper.getMappedTasks(taskMapperContext);
        assertEquals(1, mappedTasks.size());
        TaskModel dynamicTask = mappedTasks.get(0);
        assertEquals(taskId, dynamicTask.getTaskId());
    }

    /** Looking up the dynamic name returns the value stored under the parameter key. */
    @Test
    public void getDynamicTaskName() {
        Map<String, Object> taskInput = new HashMap<>();
        taskInput.put("dynamicTaskName", "DynoTask");
        String dynamicTaskName = dynamicTaskMapper.getDynamicTaskName(taskInput, "dynamicTaskName");
        assertEquals("DynoTask", dynamicTaskName);
    }

    /** A missing dynamic-name entry must terminate the workflow with a descriptive message. */
    @Test
    public void getDynamicTaskNameNotAvailable() {
        Map<String, Object> taskInput = new HashMap<>();
        expectedException.expect(TerminateWorkflowException.class);
        expectedException.expectMessage(
                String.format(
                        "Cannot map a dynamic task based on the parameter and input. "
                                + "Parameter= %s, input= %s",
                        "dynamicTaskName", taskInput));
        dynamicTaskMapper.getDynamicTaskName(taskInput, "dynamicTaskName");
    }

    /**
     * When the WorkflowTask carries its own TaskDef, that one is returned — the assertion
     * compares against the inline definition, not the (different) one stubbed on the DAO.
     */
    @Test
    public void getDynamicTaskDefinition() {
        // Given
        WorkflowTask workflowTask = new WorkflowTask();
        workflowTask.setName("Foo");
        TaskDef taskDef = new TaskDef();
        taskDef.setName("Foo");
        workflowTask.setTaskDefinition(taskDef);
        when(metadataDAO.getTaskDef(any())).thenReturn(new TaskDef());
        // when
        TaskDef dynamicTaskDefinition = dynamicTaskMapper.getDynamicTaskDefinition(workflowTask);
        assertEquals(dynamicTaskDefinition, taskDef);
    }

    /** No inline TaskDef and no stubbed DAO definition must terminate the workflow. */
    @Test
    public void getDynamicTaskDefinitionNull() {
        // Given
        WorkflowTask workflowTask = new WorkflowTask();
        workflowTask.setName("Foo");
        expectedException.expect(TerminateWorkflowException.class);
        expectedException.expectMessage(
                String.format(
                        "Invalid task specified. Cannot find task by name %s in the task definitions",
                        workflowTask.getName()));
        dynamicTaskMapper.getDynamicTaskDefinition(workflowTask);
    }
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/test/java/com/netflix/conductor/core/execution/mapper/WaitTaskMapperTest.java | core/src/test/java/com/netflix/conductor/core/execution/mapper/WaitTaskMapperTest.java | /*
* Copyright 2022 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution.mapper;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.junit.Test;
import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.tasks.TaskType;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.core.execution.tasks.Wait;
import com.netflix.conductor.core.utils.IDGenerator;
import com.netflix.conductor.core.utils.ParametersUtils;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_WAIT;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;
/**
 * Unit tests for {@link WaitTaskMapper}, covering the WAIT task's scheduling modes:
 * wait-forever, wait-until a timestamp, wait-for a duration, and invalid (both set).
 *
 * <p>Fixes over the previous version: removed a stray debug {@code System.out.println},
 * corrected JUnit {@code assertEquals(expected, actual)} argument order, and extracted
 * the repeated context construction into a helper.
 */
public class WaitTaskMapperTest {

    /** Builds a WAIT workflow task with the standard name/type used by every test. */
    private WorkflowTask newWaitTask() {
        WorkflowTask workflowTask = new WorkflowTask();
        workflowTask.setName("Wait_task");
        workflowTask.setType(TaskType.WAIT.name());
        return workflowTask;
    }

    /** Builds a TaskMapperContext for the given WAIT task and resolved task input. */
    private TaskMapperContext contextFor(WorkflowTask workflowTask, Map<String, Object> taskInput) {
        WorkflowModel workflow = new WorkflowModel();
        workflow.setWorkflowDefinition(new WorkflowDef());
        return TaskMapperContext.newBuilder()
                .withWorkflowModel(workflow)
                .withTaskDefinition(new TaskDef())
                .withWorkflowTask(workflowTask)
                .withTaskInput(taskInput)
                .withRetryCount(0)
                .withTaskId(new IDGenerator().generate())
                .build();
    }

    /** A WAIT task with no configuration maps to exactly one WAIT task. */
    @Test
    public void getMappedTasks() {
        ParametersUtils parametersUtils = mock(ParametersUtils.class);
        WaitTaskMapper waitTaskMapper = new WaitTaskMapper(parametersUtils);

        // When
        List<TaskModel> mappedTasks =
                waitTaskMapper.getMappedTasks(contextFor(newWaitTask(), new HashMap<>()));

        // Then
        assertEquals(1, mappedTasks.size());
        assertEquals(TASK_TYPE_WAIT, mappedTasks.get(0).getTaskType());
    }

    /** Without "until"/"duration" input the task waits indefinitely: IN_PROGRESS, no output. */
    @Test
    public void testWaitForever() {
        ParametersUtils parametersUtils = mock(ParametersUtils.class);
        WaitTaskMapper waitTaskMapper = new WaitTaskMapper(parametersUtils);

        // When
        List<TaskModel> mappedTasks =
                waitTaskMapper.getMappedTasks(contextFor(newWaitTask(), new HashMap<>()));

        assertEquals(1, mappedTasks.size());
        assertEquals(TaskModel.Status.IN_PROGRESS, mappedTasks.get(0).getStatus());
        assertTrue(mappedTasks.get(0).getOutputData().isEmpty());
    }

    /** An "until" timestamp at (minute-truncated) now yields no callback delay. */
    @Test
    public void testWaitUntil() {
        // Format "now" at minute precision; truncation puts the target at or before now.
        String dateFormat = "yyyy-MM-dd HH:mm";
        DateTimeFormatter formatter = DateTimeFormatter.ofPattern(dateFormat);
        String formatted = formatter.format(LocalDateTime.now());

        WorkflowTask workflowTask = newWaitTask();
        Map<String, Object> input = Map.of(Wait.UNTIL_INPUT, formatted);
        workflowTask.setInputParameters(input);

        ParametersUtils parametersUtils = mock(ParametersUtils.class);
        doReturn(input).when(parametersUtils).getTaskInputV2(any(), any(), any(), any());

        WaitTaskMapper waitTaskMapper = new WaitTaskMapper(parametersUtils);

        // When
        List<TaskModel> mappedTasks =
                waitTaskMapper.getMappedTasks(contextFor(workflowTask, input));

        assertEquals(1, mappedTasks.size());
        assertEquals(TaskModel.Status.IN_PROGRESS, mappedTasks.get(0).getStatus());
        assertEquals(0L, mappedTasks.get(0).getCallbackAfterSeconds());
    }

    /** A "duration" of 1s schedules a callback of at most one second. */
    @Test
    public void testWaitDuration() {
        WorkflowTask workflowTask = newWaitTask();
        Map<String, Object> input = Map.of(Wait.DURATION_INPUT, "1s");
        workflowTask.setInputParameters(input);

        ParametersUtils parametersUtils = mock(ParametersUtils.class);
        doReturn(input).when(parametersUtils).getTaskInputV2(any(), any(), any(), any());

        WaitTaskMapper waitTaskMapper = new WaitTaskMapper(parametersUtils);

        // When
        List<TaskModel> mappedTasks =
                waitTaskMapper.getMappedTasks(contextFor(workflowTask, input));

        assertEquals(1, mappedTasks.size());
        assertEquals(TaskModel.Status.IN_PROGRESS, mappedTasks.get(0).getStatus());
        assertTrue(mappedTasks.get(0).getCallbackAfterSeconds() <= 1L);
    }

    /** Supplying BOTH "duration" and "until" is invalid and fails with a terminal error. */
    @Test
    public void testInvalidWaitConfig() {
        WorkflowTask workflowTask = newWaitTask();
        Map<String, Object> input =
                Map.of(Wait.DURATION_INPUT, "1s", Wait.UNTIL_INPUT, "2022-12-12");
        workflowTask.setInputParameters(input);

        ParametersUtils parametersUtils = mock(ParametersUtils.class);
        doReturn(input).when(parametersUtils).getTaskInputV2(any(), any(), any(), any());

        WaitTaskMapper waitTaskMapper = new WaitTaskMapper(parametersUtils);

        // When
        List<TaskModel> mappedTasks =
                waitTaskMapper.getMappedTasks(contextFor(workflowTask, input));

        assertEquals(1, mappedTasks.size());
        assertEquals(TaskModel.Status.FAILED_WITH_TERMINAL_ERROR, mappedTasks.get(0).getStatus());
    }
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/test/java/com/netflix/conductor/core/execution/mapper/ForkJoinTaskMapperTest.java | core/src/test/java/com/netflix/conductor/core/execution/mapper/ForkJoinTaskMapperTest.java | /*
* Copyright 2022 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution.mapper;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.mockito.Mockito;
import com.netflix.conductor.common.metadata.tasks.TaskType;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.core.exception.TerminateWorkflowException;
import com.netflix.conductor.core.execution.DeciderService;
import com.netflix.conductor.core.utils.IDGenerator;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_FORK;
import static org.junit.Assert.assertEquals;
/** Unit tests for {@link ForkJoinTaskMapper}, which expands FORK_JOIN into parallel branches. */
public class ForkJoinTaskMapperTest {

    private DeciderService deciderService;
    private ForkJoinTaskMapper forkJoinTaskMapper;
    private IDGenerator idGenerator;

    @Rule public ExpectedException expectedException = ExpectedException.none();

    @Before
    public void setUp() {
        deciderService = Mockito.mock(DeciderService.class);
        forkJoinTaskMapper = new ForkJoinTaskMapper();
        idGenerator = new IDGenerator();
    }

    /**
     * A fork with two branches followed by a JOIN maps to three tasks
     * (one FORK marker plus one scheduled task per branch).
     */
    @Test
    public void getMappedTasks() {
        WorkflowDef def = new WorkflowDef();
        def.setName("FORK_JOIN_WF");
        def.setDescription(def.getName());
        def.setVersion(1);
        def.setInputParameters(Arrays.asList("param1", "param2"));

        // The fork itself.
        WorkflowTask forkTask = new WorkflowTask();
        forkTask.setType(TaskType.FORK_JOIN.name());
        forkTask.setTaskReferenceName("forktask");

        // Branch 1 starts with t1 (followed by t3); branch 2 is just t2.
        WorkflowTask wft1 = new WorkflowTask();
        wft1.setName("junit_task_1");
        Map<String, Object> ip1 = new HashMap<>();
        ip1.put("p1", "workflow.input.param1");
        ip1.put("p2", "workflow.input.param2");
        wft1.setInputParameters(ip1);
        wft1.setTaskReferenceName("t1");
        WorkflowTask wft3 = new WorkflowTask();
        wft3.setName("junit_task_3");
        wft3.setInputParameters(ip1);
        wft3.setTaskReferenceName("t3");
        WorkflowTask wft2 = new WorkflowTask();
        wft2.setName("junit_task_2");
        Map<String, Object> ip2 = new HashMap<>();
        ip2.put("tp1", "workflow.input.param1");
        wft2.setInputParameters(ip2);
        wft2.setTaskReferenceName("t2");
        // wft4 sits after the join and is not part of either fork branch.
        WorkflowTask wft4 = new WorkflowTask();
        wft4.setName("junit_task_4");
        wft4.setInputParameters(ip2);
        wft4.setTaskReferenceName("t4");
        forkTask.getForkTasks().add(Arrays.asList(wft1, wft3));
        forkTask.getForkTasks().add(Collections.singletonList(wft2));
        def.getTasks().add(forkTask);

        // The JOIN that follows the fork; it waits on the last task of each branch.
        WorkflowTask join = new WorkflowTask();
        join.setType(TaskType.JOIN.name());
        join.setTaskReferenceName("forktask_join");
        join.setJoinOn(Arrays.asList("t3", "t2"));
        def.getTasks().add(join);
        def.getTasks().add(wft4);

        WorkflowModel workflow = new WorkflowModel();
        workflow.setWorkflowDefinition(def);
        TaskModel task1 = new TaskModel();
        task1.setReferenceTaskName(wft1.getTaskReferenceName());
        TaskModel task3 = new TaskModel();
        task3.setReferenceTaskName(wft3.getTaskReferenceName());
        // Each branch's first task yields one scheduled task from the decider.
        // NOTE(review): wft2 is stubbed to return task3 (built from wft3). The test only
        // asserts count and type, so the label mismatch is harmless — confirm intentional.
        Mockito.when(deciderService.getTasksToBeScheduled(workflow, wft1, 0))
                .thenReturn(Collections.singletonList(task1));
        Mockito.when(deciderService.getTasksToBeScheduled(workflow, wft2, 0))
                .thenReturn(Collections.singletonList(task3));

        String taskId = idGenerator.generate();
        TaskMapperContext taskMapperContext =
                TaskMapperContext.newBuilder()
                        .withWorkflowModel(workflow)
                        .withWorkflowTask(forkTask)
                        .withRetryCount(0)
                        .withTaskId(taskId)
                        .withDeciderService(deciderService)
                        .build();

        List<TaskModel> mappedTasks = forkJoinTaskMapper.getMappedTasks(taskMapperContext);
        assertEquals(3, mappedTasks.size());
        assertEquals(TASK_TYPE_FORK, mappedTasks.get(0).getTaskType());
    }

    /**
     * Same setup as above, but the JOIN is deliberately NOT added to the workflow
     * definition — mapping must fail with a TerminateWorkflowException.
     */
    @Test
    public void getMappedTasksException() {
        WorkflowDef def = new WorkflowDef();
        def.setName("FORK_JOIN_WF");
        def.setDescription(def.getName());
        def.setVersion(1);
        def.setInputParameters(Arrays.asList("param1", "param2"));

        WorkflowTask forkTask = new WorkflowTask();
        forkTask.setType(TaskType.FORK_JOIN.name());
        forkTask.setTaskReferenceName("forktask");

        WorkflowTask wft1 = new WorkflowTask();
        wft1.setName("junit_task_1");
        Map<String, Object> ip1 = new HashMap<>();
        ip1.put("p1", "workflow.input.param1");
        ip1.put("p2", "workflow.input.param2");
        wft1.setInputParameters(ip1);
        wft1.setTaskReferenceName("t1");
        WorkflowTask wft3 = new WorkflowTask();
        wft3.setName("junit_task_3");
        wft3.setInputParameters(ip1);
        wft3.setTaskReferenceName("t3");
        WorkflowTask wft2 = new WorkflowTask();
        wft2.setName("junit_task_2");
        Map<String, Object> ip2 = new HashMap<>();
        ip2.put("tp1", "workflow.input.param1");
        wft2.setInputParameters(ip2);
        wft2.setTaskReferenceName("t2");
        WorkflowTask wft4 = new WorkflowTask();
        wft4.setName("junit_task_4");
        wft4.setInputParameters(ip2);
        wft4.setTaskReferenceName("t4");
        forkTask.getForkTasks().add(Arrays.asList(wft1, wft3));
        forkTask.getForkTasks().add(Collections.singletonList(wft2));
        def.getTasks().add(forkTask);

        // The join is constructed but never added to def — this is the failure trigger.
        WorkflowTask join = new WorkflowTask();
        join.setType(TaskType.JOIN.name());
        join.setTaskReferenceName("forktask_join");
        join.setJoinOn(Arrays.asList("t3", "t2"));
        def.getTasks().add(wft4);

        WorkflowModel workflow = new WorkflowModel();
        workflow.setWorkflowDefinition(def);
        TaskModel task1 = new TaskModel();
        task1.setReferenceTaskName(wft1.getTaskReferenceName());
        TaskModel task3 = new TaskModel();
        task3.setReferenceTaskName(wft3.getTaskReferenceName());
        Mockito.when(deciderService.getTasksToBeScheduled(workflow, wft1, 0))
                .thenReturn(Collections.singletonList(task1));
        Mockito.when(deciderService.getTasksToBeScheduled(workflow, wft2, 0))
                .thenReturn(Collections.singletonList(task3));

        String taskId = idGenerator.generate();
        TaskMapperContext taskMapperContext =
                TaskMapperContext.newBuilder()
                        .withWorkflowModel(workflow)
                        .withWorkflowTask(forkTask)
                        .withRetryCount(0)
                        .withTaskId(taskId)
                        .withDeciderService(deciderService)
                        .build();

        expectedException.expect(TerminateWorkflowException.class);
        expectedException.expectMessage(
                "Fork task definition is not followed by a join task. Check the blueprint");
        forkJoinTaskMapper.getMappedTasks(taskMapperContext);
    }
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/test/java/com/netflix/conductor/core/execution/mapper/KafkaPublishTaskMapperTest.java | core/src/test/java/com/netflix/conductor/core/execution/mapper/KafkaPublishTaskMapperTest.java | /*
* Copyright 2022 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution.mapper;
import java.util.HashMap;
import java.util.List;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.tasks.TaskType;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.core.utils.IDGenerator;
import com.netflix.conductor.core.utils.ParametersUtils;
import com.netflix.conductor.dao.MetadataDAO;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.mock;
/** Unit tests for {@link KafkaPublishTaskMapper}, which maps KAFKA_PUBLISH tasks. */
public class KafkaPublishTaskMapperTest {

    private IDGenerator idGenerator;
    private KafkaPublishTaskMapper kafkaTaskMapper;

    @Rule public ExpectedException expectedException = ExpectedException.none();

    @Before
    public void setUp() {
        ParametersUtils parametersUtils = mock(ParametersUtils.class);
        MetadataDAO metadataDAO = mock(MetadataDAO.class);
        kafkaTaskMapper = new KafkaPublishTaskMapper(parametersUtils, metadataDAO);
        idGenerator = new IDGenerator();
    }

    /** A KAFKA_PUBLISH task with an inline TaskDef maps to one KAFKA_PUBLISH task. */
    @Test
    public void getMappedTasks() {
        // Given
        WorkflowTask kafkaTask = new WorkflowTask();
        kafkaTask.setName("kafka_task");
        kafkaTask.setType(TaskType.KAFKA_PUBLISH.name());
        kafkaTask.setTaskDefinition(new TaskDef("kafka_task"));

        String scheduledTaskId = idGenerator.generate();
        String previousAttemptId = idGenerator.generate();

        WorkflowModel workflow = new WorkflowModel();
        workflow.setWorkflowDefinition(new WorkflowDef());

        TaskMapperContext context =
                TaskMapperContext.newBuilder()
                        .withWorkflowModel(workflow)
                        .withTaskDefinition(new TaskDef())
                        .withWorkflowTask(kafkaTask)
                        .withTaskInput(new HashMap<>())
                        .withRetryCount(0)
                        .withRetryTaskId(previousAttemptId)
                        .withTaskId(scheduledTaskId)
                        .build();

        // when
        List<TaskModel> mappedTasks = kafkaTaskMapper.getMappedTasks(context);

        // Then
        assertEquals(1, mappedTasks.size());
        assertEquals(TaskType.KAFKA_PUBLISH.name(), mappedTasks.get(0).getTaskType());
    }

    /**
     * Without an inline TaskDef, the executionNameSpace and isolationGroupId from
     * the context's TaskDef must propagate onto the mapped task.
     */
    @Test
    public void getMappedTasks_WithoutTaskDef() {
        // Given
        WorkflowTask kafkaTask = new WorkflowTask();
        kafkaTask.setName("kafka_task");
        kafkaTask.setType(TaskType.KAFKA_PUBLISH.name());

        String scheduledTaskId = idGenerator.generate();
        String previousAttemptId = idGenerator.generate();

        WorkflowModel workflow = new WorkflowModel();
        workflow.setWorkflowDefinition(new WorkflowDef());

        String testExecutionNameSpace = "testExecutionNameSpace";
        String testIsolationGroupId = "testIsolationGroupId";
        TaskDef contextTaskDef = new TaskDef();
        contextTaskDef.setExecutionNameSpace(testExecutionNameSpace);
        contextTaskDef.setIsolationGroupId(testIsolationGroupId);

        TaskMapperContext context =
                TaskMapperContext.newBuilder()
                        .withWorkflowModel(workflow)
                        .withTaskDefinition(contextTaskDef)
                        .withWorkflowTask(kafkaTask)
                        .withTaskInput(new HashMap<>())
                        .withRetryCount(0)
                        .withRetryTaskId(previousAttemptId)
                        .withTaskId(scheduledTaskId)
                        .build();

        // when
        List<TaskModel> mappedTasks = kafkaTaskMapper.getMappedTasks(context);

        // Then
        assertEquals(1, mappedTasks.size());
        TaskModel mapped = mappedTasks.get(0);
        assertEquals(TaskType.KAFKA_PUBLISH.name(), mapped.getTaskType());
        assertEquals(testExecutionNameSpace, mapped.getExecutionNameSpace());
        assertEquals(testIsolationGroupId, mapped.getIsolationGroupId());
    }
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/test/java/com/netflix/conductor/core/utils/JsonUtilsTest.java | core/src/test/java/com/netflix/conductor/core/utils/JsonUtilsTest.java | /*
* Copyright 2021 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.utils;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringRunner;
import com.netflix.conductor.common.config.TestObjectMapperConfiguration;
import com.fasterxml.jackson.databind.ObjectMapper;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
@ContextConfiguration(classes = {TestObjectMapperConfiguration.class})
@RunWith(SpringRunner.class)
public class JsonUtilsTest {

    private JsonUtils jsonUtils;

    @Autowired private ObjectMapper objectMapper;

    @Before
    public void setup() {
        jsonUtils = new JsonUtils(objectMapper);
    }

    /** expand() should parse a JSON-array string stored as a map value into a real List. */
    @Test
    public void testArray() {
        List<Object> list = new LinkedList<>();
        Map<String, Object> map = new HashMap<>();
        map.put("externalId", "[{\"taskRefName\":\"t001\",\"workflowId\":\"w002\"}]");
        map.put("name", "conductor");
        map.put("version", 2);
        list.add(map);

        // Before expansion the value is still the raw JSON string
        //noinspection unchecked
        map = (Map<String, Object>) list.get(0);
        assertTrue(map.get("externalId") instanceof String);

        // Expansion must not add or remove top-level elements
        int before = list.size();
        jsonUtils.expand(list);
        assertEquals(before, list.size());

        // After expansion the value has been parsed into a list
        //noinspection unchecked
        map = (Map<String, Object>) list.get(0);
        assertTrue(map.get("externalId") instanceof ArrayList);
    }

    /** expand() should parse a JSON-object string stored as a map value into a real Map. */
    @Test
    public void testMap() {
        Map<String, Object> map = new HashMap<>();
        map.put("externalId", "{\"taskRefName\":\"t001\",\"workflowId\":\"w002\"}");
        map.put("name", "conductor");
        map.put("version", 2);

        assertTrue(map.get("externalId") instanceof String);

        jsonUtils.expand(map);

        assertTrue(map.get("externalId") instanceof LinkedHashMap);
    }

    /** expand() should handle nested maps without throwing. */
    @Test
    public void testMultiLevelMap() {
        Map<String, Object> parentMap = new HashMap<>();
        parentMap.put("requestId", "abcde");
        parentMap.put("status", "PROCESSED");

        Map<String, Object> childMap = new HashMap<>();
        childMap.put("path", "test/path");
        childMap.put("type", "VIDEO");

        Map<String, Object> grandChildMap = new HashMap<>();
        grandChildMap.put("duration", "370");
        grandChildMap.put("passed", "true");

        childMap.put("metadata", grandChildMap);
        parentMap.put("asset", childMap);

        Object jsonObject = jsonUtils.expand(parentMap);
        assertNotNull(jsonObject);
    }

    // This test verifies that the types of the elements in the input are maintained upon
    // expanding the JSON object
    @Test
    public void testTypes() throws Exception {
        String map =
                "{\"requestId\":\"1375128656908832001\",\"workflowId\":\"fc147e1d-5408-4d41-b066-53cb2e551d0e\","
                        + "\"inner\":{\"num\":42,\"status\":\"READY\"}}";

        // A single expand() call is sufficient; the previous duplicate call whose
        // result was discarded has been removed.
        Object jsonObject = jsonUtils.expand(map);
        assertNotNull(jsonObject);
        assertTrue(jsonObject instanceof LinkedHashMap);
        assertTrue(((LinkedHashMap<?, ?>) jsonObject).get("requestId") instanceof String);
        assertTrue(((LinkedHashMap<?, ?>) jsonObject).get("workflowId") instanceof String);
        assertTrue(((LinkedHashMap<?, ?>) jsonObject).get("inner") instanceof LinkedHashMap);
        assertTrue(
                ((LinkedHashMap<?, ?>) ((LinkedHashMap<?, ?>) jsonObject).get("inner")).get("num")
                        instanceof Integer);
        assertTrue(
                ((LinkedHashMap<?, ?>) ((LinkedHashMap<?, ?>) jsonObject).get("inner"))
                                .get("status")
                        instanceof String);
    }
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/test/java/com/netflix/conductor/core/utils/ExternalPayloadStorageUtilsTest.java | core/src/test/java/com/netflix/conductor/core/utils/ExternalPayloadStorageUtilsTest.java | /*
* Copyright 2022 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.utils;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.commons.lang3.StringUtils;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.util.unit.DataSize;
import com.netflix.conductor.common.config.TestObjectMapperConfiguration;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.run.ExternalStorageLocation;
import com.netflix.conductor.common.utils.ExternalPayloadStorage;
import com.netflix.conductor.core.config.ConductorProperties;
import com.netflix.conductor.core.exception.TerminateWorkflowException;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import com.fasterxml.jackson.databind.ObjectMapper;
import static com.netflix.conductor.model.TaskModel.Status.FAILED_WITH_TERMINAL_ERROR;
import static org.junit.Assert.*;
import static org.mockito.ArgumentMatchers.*;
import static org.mockito.Mockito.*;
@ContextConfiguration(classes = {TestObjectMapperConfiguration.class})
@RunWith(SpringRunner.class)
public class ExternalPayloadStorageUtilsTest {

    private ExternalPayloadStorage externalPayloadStorage;
    private ExternalStorageLocation location;

    @Autowired private ObjectMapper objectMapper;

    // Subject
    private ExternalPayloadStorageUtils externalPayloadStorageUtils;

    @Rule public ExpectedException expectedException = ExpectedException.none();

    @Before
    public void setup() {
        externalPayloadStorage = mock(ExternalPayloadStorage.class);
        ConductorProperties properties = mock(ConductorProperties.class);
        location = new ExternalStorageLocation();
        location.setPath("some/test/path");

        // Soft thresholds at 10KB (upload externally beyond this), hard limits at 10MB.
        when(properties.getWorkflowInputPayloadSizeThreshold())
                .thenReturn(DataSize.ofKilobytes(10L));
        when(properties.getMaxWorkflowInputPayloadSizeThreshold())
                .thenReturn(DataSize.ofKilobytes(10240L));
        when(properties.getWorkflowOutputPayloadSizeThreshold())
                .thenReturn(DataSize.ofKilobytes(10L));
        when(properties.getMaxWorkflowOutputPayloadSizeThreshold())
                .thenReturn(DataSize.ofKilobytes(10240L));
        when(properties.getTaskInputPayloadSizeThreshold()).thenReturn(DataSize.ofKilobytes(10L));
        when(properties.getMaxTaskInputPayloadSizeThreshold())
                .thenReturn(DataSize.ofKilobytes(10240L));
        when(properties.getTaskOutputPayloadSizeThreshold()).thenReturn(DataSize.ofKilobytes(10L));
        when(properties.getMaxTaskOutputPayloadSizeThreshold())
                .thenReturn(DataSize.ofKilobytes(10240L));
        externalPayloadStorageUtils =
                new ExternalPayloadStorageUtils(externalPayloadStorage, properties, objectMapper);
    }

    /** downloadPayload should deserialize the stored JSON bytes back into a Map. */
    @Test
    public void testDownloadPayload() throws IOException {
        String path = "test/payload";

        Map<String, Object> payload = new HashMap<>();
        payload.put("key1", "value1");
        payload.put("key2", 200);
        byte[] payloadBytes = objectMapper.writeValueAsString(payload).getBytes();
        when(externalPayloadStorage.download(path))
                .thenReturn(new ByteArrayInputStream(payloadBytes));

        Map<String, Object> result = externalPayloadStorageUtils.downloadPayload(path);
        assertNotNull(result);
        assertEquals(payload, result);
    }

    /** A task input payload over the threshold is uploaded exactly once and the path recorded. */
    @SuppressWarnings("unchecked")
    @Test
    public void testUploadTaskPayload() throws IOException {
        AtomicInteger uploadCount = new AtomicInteger(0);

        InputStream stream =
                com.netflix.conductor.core.utils.ExternalPayloadStorageUtilsTest.class
                        .getResourceAsStream("/payload.json");
        Map<String, Object> payload = objectMapper.readValue(stream, Map.class);
        byte[] payloadBytes = objectMapper.writeValueAsString(payload).getBytes();
        when(externalPayloadStorage.getLocation(
                        ExternalPayloadStorage.Operation.WRITE,
                        ExternalPayloadStorage.PayloadType.TASK_INPUT,
                        "",
                        payloadBytes))
                .thenReturn(location);
        // Count upload invocations instead of just verifying, so over-uploading is caught too.
        doAnswer(
                        invocation -> {
                            uploadCount.incrementAndGet();
                            return null;
                        })
                .when(externalPayloadStorage)
                .upload(anyString(), any(), anyLong());

        TaskModel task = new TaskModel();
        task.setInputData(payload);

        externalPayloadStorageUtils.verifyAndUpload(
                task, ExternalPayloadStorage.PayloadType.TASK_INPUT);
        assertTrue(StringUtils.isNotEmpty(task.getExternalInputPayloadStoragePath()));
        assertFalse(task.getInputData().isEmpty());
        assertEquals(1, uploadCount.get());
        assertNotNull(task.getExternalInputPayloadStoragePath());
    }

    /** A workflow output payload over the threshold is uploaded once and the path recorded. */
    @SuppressWarnings("unchecked")
    @Test
    public void testUploadWorkflowPayload() throws IOException {
        AtomicInteger uploadCount = new AtomicInteger(0);

        InputStream stream =
                com.netflix.conductor.core.utils.ExternalPayloadStorageUtilsTest.class
                        .getResourceAsStream("/payload.json");
        Map<String, Object> payload = objectMapper.readValue(stream, Map.class);
        byte[] payloadBytes = objectMapper.writeValueAsString(payload).getBytes();
        when(externalPayloadStorage.getLocation(
                        ExternalPayloadStorage.Operation.WRITE,
                        ExternalPayloadStorage.PayloadType.WORKFLOW_OUTPUT,
                        "",
                        payloadBytes))
                .thenReturn(location);
        doAnswer(
                        invocation -> {
                            uploadCount.incrementAndGet();
                            return null;
                        })
                .when(externalPayloadStorage)
                .upload(anyString(), any(), anyLong());

        WorkflowModel workflow = new WorkflowModel();
        WorkflowDef def = new WorkflowDef();
        def.setName("name");
        def.setVersion(1);
        workflow.setOutput(payload);
        workflow.setWorkflowDefinition(def);

        externalPayloadStorageUtils.verifyAndUpload(
                workflow, ExternalPayloadStorage.PayloadType.WORKFLOW_OUTPUT);
        assertTrue(StringUtils.isNotEmpty(workflow.getExternalOutputPayloadStoragePath()));
        assertFalse(workflow.getOutput().isEmpty());
        assertEquals(1, uploadCount.get());
        assertNotNull(workflow.getExternalOutputPayloadStoragePath());
    }

    /** uploadHelper should return the path provided by the storage location. */
    @Test
    public void testUploadHelper() {
        AtomicInteger uploadCount = new AtomicInteger(0);
        String path = "some/test/path.json";
        ExternalStorageLocation location = new ExternalStorageLocation();
        location.setPath(path);

        when(externalPayloadStorage.getLocation(any(), any(), any(), any())).thenReturn(location);
        doAnswer(
                        invocation -> {
                            uploadCount.incrementAndGet();
                            return null;
                        })
                .when(externalPayloadStorage)
                .upload(anyString(), any(), anyLong());

        assertEquals(
                path,
                externalPayloadStorageUtils.uploadHelper(
                        new byte[] {}, 10L, ExternalPayloadStorage.PayloadType.TASK_OUTPUT));
        assertEquals(1, uploadCount.get());
    }

    /** failTask should clear the input payload and mark the task terminally failed. */
    @Test
    public void testFailTaskWithInputPayload() {
        TaskModel task = new TaskModel();
        task.setInputData(new HashMap<>());

        externalPayloadStorageUtils.failTask(
                task, ExternalPayloadStorage.PayloadType.TASK_INPUT, "error");
        assertNotNull(task);
        assertTrue(task.getInputData().isEmpty());
        assertEquals(FAILED_WITH_TERMINAL_ERROR, task.getStatus());
    }

    /** failTask should clear the output payload and mark the task terminally failed. */
    @Test
    public void testFailTaskWithOutputPayload() {
        TaskModel task = new TaskModel();
        task.setOutputData(new HashMap<>());

        externalPayloadStorageUtils.failTask(
                task, ExternalPayloadStorage.PayloadType.TASK_OUTPUT, "error");
        assertNotNull(task);
        assertTrue(task.getOutputData().isEmpty());
        assertEquals(FAILED_WITH_TERMINAL_ERROR, task.getStatus());
    }

    /** failWorkflow should raise TerminateWorkflowException for an oversized input payload. */
    @Test
    public void testFailWorkflowWithInputPayload() {
        WorkflowModel workflow = new WorkflowModel();
        workflow.setInput(new HashMap<>());

        // NOTE(review): TASK_INPUT is passed here although a workflow is being failed —
        // confirm whether WORKFLOW_INPUT was intended.
        // Assertions previously placed after failWorkflow() were unreachable (the call
        // throws, and ExpectedException ends the test on the throw), so they were removed.
        expectedException.expect(TerminateWorkflowException.class);
        externalPayloadStorageUtils.failWorkflow(
                workflow, ExternalPayloadStorage.PayloadType.TASK_INPUT, "error");
    }

    /** failWorkflow should raise TerminateWorkflowException for an oversized output payload. */
    @Test
    public void testFailWorkflowWithOutputPayload() {
        WorkflowModel workflow = new WorkflowModel();
        workflow.setOutput(new HashMap<>());

        // NOTE(review): TASK_OUTPUT is passed here although a workflow is being failed —
        // confirm whether WORKFLOW_OUTPUT was intended.
        // Unreachable post-throw assertions removed (see testFailWorkflowWithInputPayload).
        expectedException.expect(TerminateWorkflowException.class);
        externalPayloadStorageUtils.failWorkflow(
                workflow, ExternalPayloadStorage.PayloadType.TASK_OUTPUT, "error");
    }

    /** Payloads above the soft threshold should be flagged for upload for every payload type. */
    @Test
    public void testShouldUpload() {
        Map<String, Object> payload = new HashMap<>();
        payload.put("key1", "value1");
        payload.put("key2", "value2");

        TaskModel task = new TaskModel();
        task.setInputData(payload);
        task.setOutputData(payload);

        WorkflowModel workflow = new WorkflowModel();
        workflow.setInput(payload);
        workflow.setOutput(payload);

        assertTrue(
                externalPayloadStorageUtils.shouldUpload(
                        task, ExternalPayloadStorage.PayloadType.TASK_INPUT));
        assertTrue(
                externalPayloadStorageUtils.shouldUpload(
                        task, ExternalPayloadStorage.PayloadType.TASK_OUTPUT));
        assertTrue(
                externalPayloadStorageUtils.shouldUpload(
                        task, ExternalPayloadStorage.PayloadType.WORKFLOW_INPUT));
        assertTrue(
                externalPayloadStorageUtils.shouldUpload(
                        task, ExternalPayloadStorage.PayloadType.WORKFLOW_OUTPUT));
    }
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/test/java/com/netflix/conductor/core/utils/QueueUtilsTest.java | core/src/test/java/com/netflix/conductor/core/utils/QueueUtilsTest.java | /*
* Copyright 2020 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.utils;
import org.junit.Assert;
import org.junit.Test;
public class QueueUtilsTest {

    /** Queue name composition: [domain:]taskType[@executionNameSpace][-isolationGroup]. */
    @Test
    public void queueNameWithTypeAndIsolationGroup() {
        String queueNameGenerated = QueueUtils.getQueueName("tType", null, "isolationGroup", null);
        String queueNameGeneratedOnlyType = QueueUtils.getQueueName("tType", null, null, null);
        String queueNameGeneratedWithAllValues =
                QueueUtils.getQueueName("tType", "domain", "iso", "eN");

        Assert.assertEquals("tType-isolationGroup", queueNameGenerated);
        Assert.assertEquals("tType", queueNameGeneratedOnlyType);
        Assert.assertEquals("domain:tType@eN-iso", queueNameGeneratedWithAllValues);
    }

    @Test
    public void notIsolatedIfSeparatorNotPresent() {
        String notIsolatedQueue = "notIsolated";
        Assert.assertFalse(QueueUtils.isIsolatedQueue(notIsolatedQueue));
    }

    // Fixed: assertEquals takes (expected, actual); the arguments were previously
    // swapped, which produced misleading failure messages.
    @Test
    public void testGetExecutionNameSpace() {
        String executionNameSpace = QueueUtils.getExecutionNameSpace("domain:queueName@eN-iso");
        Assert.assertEquals("eN", executionNameSpace);
    }

    /** A plain queue name has an empty execution namespace. */
    @Test
    public void testGetQueueExecutionNameSpaceEmpty() {
        Assert.assertEquals("", QueueUtils.getExecutionNameSpace("queueName"));
    }

    @Test
    public void testGetQueueExecutionNameSpaceWithIsolationGroup() {
        Assert.assertEquals(
                "executionNameSpace",
                QueueUtils.getExecutionNameSpace("domain:test@executionNameSpace-isolated"));
    }

    @Test
    public void testGetQueueName() {
        Assert.assertEquals(
                "domain:taskType@eN-isolated",
                QueueUtils.getQueueName("taskType", "domain", "isolated", "eN"));
    }

    @Test
    public void testGetTaskType() {
        Assert.assertEquals("taskType", QueueUtils.getTaskType("domain:taskType-isolated"));
    }

    @Test
    public void testGetTaskTypeWithoutDomain() {
        Assert.assertEquals("taskType", QueueUtils.getTaskType("taskType-isolated"));
    }

    @Test
    public void testGetTaskTypeWithoutDomainAndWithoutIsolationGroup() {
        Assert.assertEquals("taskType", QueueUtils.getTaskType("taskType"));
    }

    @Test
    public void testGetTaskTypeWithoutDomainAndWithExecutionNameSpace() {
        Assert.assertEquals("taskType", QueueUtils.getTaskType("taskType@eN"));
    }
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/test/java/com/netflix/conductor/core/utils/SemaphoreUtilTest.java | core/src/test/java/com/netflix/conductor/core/utils/SemaphoreUtilTest.java | /*
* Copyright 2020 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.utils;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.stream.IntStream;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
@SuppressWarnings("ToArrayCallWithZeroLengthArrayArgument")
public class SemaphoreUtilTest {

    /** Once all permits are taken, further acquisitions must fail without blocking. */
    @Test
    public void testBlockAfterAvailablePermitsExhausted() throws Exception {
        int permits = 5;
        ExecutorService pool = Executors.newFixedThreadPool(permits);
        SemaphoreUtil semaphoreUtil = new SemaphoreUtil(permits);

        drainAllPermits(semaphoreUtil, permits, pool);

        assertEquals(0, semaphoreUtil.availableSlots());
        assertFalse(semaphoreUtil.acquireSlots(1));
        pool.shutdown();
    }

    /** Releasing a permit must make acquisition possible again. */
    @Test
    public void testAllowsPollingWhenPermitBecomesAvailable() throws Exception {
        int permits = 5;
        ExecutorService pool = Executors.newFixedThreadPool(permits);
        SemaphoreUtil semaphoreUtil = new SemaphoreUtil(permits);

        drainAllPermits(semaphoreUtil, permits, pool);
        assertEquals(0, semaphoreUtil.availableSlots());

        semaphoreUtil.completeProcessing(1);

        assertTrue(semaphoreUtil.availableSlots() > 0);
        assertTrue(semaphoreUtil.acquireSlots(1));
        pool.shutdown();
    }

    /** Acquires one slot per permit concurrently and waits for all acquisitions to finish. */
    private static void drainAllPermits(
            SemaphoreUtil semaphoreUtil, int permits, ExecutorService pool) throws Exception {
        List<CompletableFuture<Void>> futures = new ArrayList<>();
        for (int i = 0; i < permits; i++) {
            futures.add(CompletableFuture.runAsync(() -> semaphoreUtil.acquireSlots(1), pool));
        }
        CompletableFuture.allOf(futures.toArray(new CompletableFuture[futures.size()])).get();
    }
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/test/java/com/netflix/conductor/core/utils/ParametersUtilsTest.java | core/src/test/java/com/netflix/conductor/core/utils/ParametersUtilsTest.java | /*
* Copyright 2021 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.utils;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.atomic.AtomicReference;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringRunner;
import com.netflix.conductor.common.config.TestObjectMapperConfiguration;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
@ContextConfiguration(classes = {TestObjectMapperConfiguration.class})
@RunWith(SpringRunner.class)
@SuppressWarnings("rawtypes")
public class ParametersUtilsTest {

    private ParametersUtils parametersUtils;
    private JsonUtils jsonUtils;

    @Autowired private ObjectMapper objectMapper;

    @Before
    public void setup() {
        parametersUtils = new ParametersUtils(objectMapper);
        jsonUtils = new JsonUtils(objectMapper);
    }

    /** ${...} expressions are resolved against the given JSON document. */
    @Test
    public void testReplace() throws Exception {
        Map<String, Object> map = new HashMap<>();
        map.put("name", "conductor");
        map.put("version", 2);
        map.put("externalId", "{\"taskRefName\":\"t001\",\"workflowId\":\"w002\"}");

        Map<String, Object> input = new HashMap<>();
        input.put("k1", "${$.externalId}");
        input.put("k4", "${name}");
        input.put("k5", "${version}");

        Object jsonObj = objectMapper.readValue(objectMapper.writeValueAsString(map), Object.class);

        Map<String, Object> replaced = parametersUtils.replace(input, jsonObj);
        assertNotNull(replaced);

        assertEquals("{\"taskRefName\":\"t001\",\"workflowId\":\"w002\"}", replaced.get("k1"));
        assertEquals("conductor", replaced.get("k4"));
        assertEquals(2, replaced.get("k5"));
    }

    /** JSONPath expressions can traverse values expanded from JSON-array strings. */
    @Test
    public void testReplaceWithArrayExpand() {
        List<Object> list = new LinkedList<>();
        Map<String, Object> map = new HashMap<>();
        map.put("externalId", "[{\"taskRefName\":\"t001\",\"workflowId\":\"w002\"}]");
        map.put("name", "conductor");
        map.put("version", 2);
        list.add(map);
        jsonUtils.expand(list);

        Map<String, Object> input = new HashMap<>();
        input.put("k1", "${$..externalId}");
        input.put("k2", "${$[0].externalId[0].taskRefName}");
        input.put("k3", "${__json_externalId.taskRefName}");
        input.put("k4", "${$[0].name}");
        input.put("k5", "${$[0].version}");

        Map<String, Object> replaced = parametersUtils.replace(input, list);

        assertNotNull(replaced);
        assertEquals(replaced.get("k2"), "t001");
        assertNull(replaced.get("k3"));
        assertEquals(replaced.get("k4"), "conductor");
        assertEquals(replaced.get("k5"), 2);
    }

    /** JSONPath expressions can traverse values expanded from JSON-object strings. */
    @Test
    public void testReplaceWithMapExpand() {
        Map<String, Object> map = new HashMap<>();
        map.put("externalId", "{\"taskRefName\":\"t001\",\"workflowId\":\"w002\"}");
        map.put("name", "conductor");
        map.put("version", 2);
        jsonUtils.expand(map);

        Map<String, Object> input = new HashMap<>();
        input.put("k1", "${$.externalId}");
        input.put("k2", "${externalId.taskRefName}");
        input.put("k4", "${name}");
        input.put("k5", "${version}");

        Map<String, Object> replaced = parametersUtils.replace(input, map);

        assertNotNull(replaced);
        assertEquals("t001", replaced.get("k2"));
        assertNull(replaced.get("k3"));
        assertEquals("conductor", replaced.get("k4"));
        assertEquals(2, replaced.get("k5"));
    }

    /** replace() must be safe to call repeatedly while the source document mutates. */
    @Test
    public void testReplaceConcurrent() throws ExecutionException, InterruptedException {
        ExecutorService executorService = Executors.newFixedThreadPool(2);
        AtomicReference<String> generatedId = new AtomicReference<>("test-0");

        Map<String, Object> input = new HashMap<>();
        Map<String, Object> payload = new HashMap<>();
        payload.put("event", "conductor:TEST_EVENT");
        payload.put("someId", generatedId);
        input.put("payload", payload);
        input.put("name", "conductor");
        input.put("version", 2);

        Map<String, Object> inputParams = new HashMap<>();
        inputParams.put("k1", "${payload.someId}");
        inputParams.put("k2", "${name}");

        CompletableFuture.runAsync(
                        () -> {
                            for (int i = 0; i < 10000; i++) {
                                generatedId.set("test-" + i);
                                payload.put("someId", generatedId.get());
                                Object jsonObj;
                                try {
                                    jsonObj =
                                            objectMapper.readValue(
                                                    objectMapper.writeValueAsString(input),
                                                    Object.class);
                                } catch (JsonProcessingException e) {
                                    // Fail loudly: previously this exception was printed and
                                    // swallowed, which let the test pass without exercising
                                    // the remaining iterations. Rethrowing makes the outer
                                    // get() surface the failure.
                                    throw new RuntimeException(e);
                                }
                                Map<String, Object> replaced =
                                        parametersUtils.replace(inputParams, jsonObj);
                                assertNotNull(replaced);
                                assertEquals(generatedId.get(), replaced.get("k1"));
                                assertEquals("conductor", replaced.get("k2"));
                                assertNull(replaced.get("k3"));
                            }
                        },
                        executorService)
                .get();

        executorService.shutdown();
    }

    // Tests ParametersUtils with Map and List input values, and verifies input map is not mutated
    // by ParametersUtils.
    @Test
    public void testReplaceInputWithMapAndList() throws Exception {
        Map<String, Object> map = new HashMap<>();
        map.put("name", "conductor");
        map.put("version", 2);
        map.put("externalId", "{\"taskRefName\":\"t001\",\"workflowId\":\"w002\"}");

        Map<String, Object> input = new HashMap<>();
        input.put("k1", "${$.externalId}");
        input.put("k2", "${name}");
        input.put("k3", "${version}");
        input.put("k4", "${}");
        input.put("k5", "${ }");

        Map<String, String> mapValue = new HashMap<>();
        mapValue.put("name", "${name}");
        mapValue.put("version", "${version}");
        input.put("map", mapValue);

        List<String> listValue = new ArrayList<>();
        listValue.add("${name}");
        listValue.add("${version}");
        input.put("list", listValue);

        Object jsonObj = objectMapper.readValue(objectMapper.writeValueAsString(map), Object.class);

        Map<String, Object> replaced = parametersUtils.replace(input, jsonObj);
        assertNotNull(replaced);

        // Verify that values are replaced correctly.
        assertEquals("{\"taskRefName\":\"t001\",\"workflowId\":\"w002\"}", replaced.get("k1"));
        assertEquals("conductor", replaced.get("k2"));
        assertEquals(2, replaced.get("k3"));
        assertEquals("", replaced.get("k4"));
        assertEquals("", replaced.get("k5"));

        Map replacedMap = (Map) replaced.get("map");
        assertEquals("conductor", replacedMap.get("name"));
        assertEquals(2, replacedMap.get("version"));

        List replacedList = (List) replaced.get("list");
        assertEquals(2, replacedList.size());
        assertEquals("conductor", replacedList.get(0));
        assertEquals(2, replacedList.get(1));

        // Verify that input map is not mutated
        assertEquals("${$.externalId}", input.get("k1"));
        assertEquals("${name}", input.get("k2"));
        assertEquals("${version}", input.get("k3"));

        Map inputMap = (Map) input.get("map");
        assertEquals("${name}", inputMap.get("name"));
        assertEquals("${version}", inputMap.get("version"));

        List inputList = (List) input.get("list");
        assertEquals(2, inputList.size());
        assertEquals("${name}", inputList.get(0));
        assertEquals("${version}", inputList.get(1));
    }

    /** ${...} expressions may themselves contain nested ${...} expressions. */
    @Test
    public void testNestedPathExpressions() throws Exception {
        Map<String, Object> map = new HashMap<>();
        map.put("name", "conductor");
        map.put("index", 1);
        map.put("mapValue", "a");
        map.put("recordIds", List.of(1, 2, 3));
        map.put("map", Map.of("a", List.of(1, 2, 3), "b", List.of(2, 4, 5), "c", List.of(3, 7, 8)));

        Map<String, Object> input = new HashMap<>();
        input.put("k1", "${recordIds[${index}]}");
        input.put("k2", "${map.${mapValue}[${index}]}");
        input.put("k3", "${map.b[${map.${mapValue}[${index}]}]}");

        Object jsonObj = objectMapper.readValue(objectMapper.writeValueAsString(map), Object.class);

        Map<String, Object> replaced = parametersUtils.replace(input, jsonObj);
        assertNotNull(replaced);

        assertEquals(2, replaced.get("k1"));
        assertEquals(2, replaced.get("k2"));
        assertEquals(5, replaced.get("k3"));
    }

    /** Line terminators inside templates must survive substitution untouched. */
    @Test
    public void testReplaceWithLineTerminators() throws Exception {
        Map<String, Object> map = new HashMap<>();
        map.put("name", "conductor");
        map.put("version", 2);

        Map<String, Object> input = new HashMap<>();
        input.put("k1", "Name: ${name}; Version: ${version};");
        input.put("k2", "Name: ${name};\nVersion: ${version};");
        input.put("k3", "Name: ${name};\rVersion: ${version};");
        input.put("k4", "Name: ${name};\r\nVersion: ${version};");

        Object jsonObj = objectMapper.readValue(objectMapper.writeValueAsString(map), Object.class);

        Map<String, Object> replaced = parametersUtils.replace(input, jsonObj);
        assertNotNull(replaced);

        assertEquals("Name: conductor; Version: 2;", replaced.get("k1"));
        assertEquals("Name: conductor;\nVersion: 2;", replaced.get("k2"));
        assertEquals("Name: conductor;\rVersion: 2;", replaced.get("k3"));
        assertEquals("Name: conductor;\r\nVersion: 2;", replaced.get("k4"));
    }

    /** $${...} escapes substitution, producing a literal ${...} in the output. */
    @Test
    public void testReplaceWithEscapedTags() throws Exception {
        Map<String, Object> map = new HashMap<>();
        map.put("someString", "conductor");
        map.put("someNumber", 2);

        Map<String, Object> input = new HashMap<>();
        input.put(
                "k1",
                "${$.someString} $${$.someNumber}${$.someNumber} ${$.someNumber}$${$.someString}");
        input.put("k2", "$${$.someString}afterText");
        input.put("k3", "beforeText$${$.someString}");
        input.put("k4", "$${$.someString} afterText");
        input.put("k5", "beforeText $${$.someString}");

        Map<String, String> mapValue = new HashMap<>();
        mapValue.put("a", "${someString}");
        mapValue.put("b", "${someNumber}");
        mapValue.put("c", "$${someString} ${someNumber}");
        input.put("map", mapValue);

        List<String> listValue = new ArrayList<>();
        listValue.add("${someString}");
        listValue.add("${someNumber}");
        listValue.add("${someString} $${someNumber}");
        input.put("list", listValue);

        Object jsonObj = objectMapper.readValue(objectMapper.writeValueAsString(map), Object.class);

        Map<String, Object> replaced = parametersUtils.replace(input, jsonObj);
        assertNotNull(replaced);

        // Verify that values are replaced correctly.
        assertEquals("conductor ${$.someNumber}2 2${$.someString}", replaced.get("k1"));
        assertEquals("${$.someString}afterText", replaced.get("k2"));
        assertEquals("beforeText${$.someString}", replaced.get("k3"));
        assertEquals("${$.someString} afterText", replaced.get("k4"));
        assertEquals("beforeText ${$.someString}", replaced.get("k5"));

        Map replacedMap = (Map) replaced.get("map");
        assertEquals("conductor", replacedMap.get("a"));
        assertEquals(2, replacedMap.get("b"));
        assertEquals("${someString} 2", replacedMap.get("c"));

        List replacedList = (List) replaced.get("list");
        assertEquals(3, replacedList.size());
        assertEquals("conductor", replacedList.get(0));
        assertEquals(2, replacedList.get(1));
        assertEquals("conductor ${someNumber}", replacedList.get(2));

        // Verify that input map is not mutated
        Map inputMap = (Map) input.get("map");
        assertEquals("${someString}", inputMap.get("a"));
        assertEquals("${someNumber}", inputMap.get("b"));
        assertEquals("$${someString} ${someNumber}", inputMap.get("c"));

        // Verify that input list is not mutated
        List inputList = (List) input.get("list");
        assertEquals(3, inputList.size());
        assertEquals("${someString}", inputList.get(0));
        assertEquals("${someNumber}", inputList.get(1));
        assertEquals("${someString} $${someNumber}", inputList.get(2));
    }

    /** A workflow definition with no input template should pass params through unchanged. */
    @Test
    public void getWorkflowInputHandlesNullInputTemplate() {
        WorkflowDef workflowDef = new WorkflowDef();
        Map<String, Object> inputParams = Map.of("key", "value");

        Map<String, Object> workflowInput =
                parametersUtils.getWorkflowInput(workflowDef, inputParams);

        assertEquals("value", workflowInput.get("key"));
    }

    /** Template-only keys are filled in alongside supplied params. */
    @Test
    public void getWorkflowInputFillsInTemplatedFields() {
        WorkflowDef workflowDef = new WorkflowDef();
        workflowDef.setInputTemplate(Map.of("other_key", "other_value"));
        Map<String, Object> inputParams = new HashMap<>(Map.of("key", "value"));

        Map<String, Object> workflowInput =
                parametersUtils.getWorkflowInput(workflowDef, inputParams);

        assertEquals("value", workflowInput.get("key"));
        assertEquals("other_value", workflowInput.get("other_key"));
    }

    /** Supplied params take precedence over template defaults for the same key. */
    @Test
    public void getWorkflowInputPreservesExistingFieldsIfPopulated() {
        WorkflowDef workflowDef = new WorkflowDef();
        String keyName = "key";
        workflowDef.setInputTemplate(Map.of(keyName, "templated_value"));
        Map<String, Object> inputParams = new HashMap<>(Map.of(keyName, "supplied_value"));

        Map<String, Object> workflowInput =
                parametersUtils.getWorkflowInput(workflowDef, inputParams);

        assertEquals("supplied_value", workflowInput.get(keyName));
    }
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/test/java/com/netflix/conductor/core/utils/DateTimeUtilsTest.java | core/src/test/java/com/netflix/conductor/core/utils/DateTimeUtilsTest.java | /*
* Copyright 2024 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.utils;
import java.time.Duration;
import java.util.stream.Stream;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;
import org.junit.jupiter.params.provider.ValueSource;
import static com.netflix.conductor.core.utils.DateTimeUtils.parseDuration;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
public class DateTimeUtilsTest {

    /** Shorthand factory for a (text, expected duration) test case. */
    private static Arguments d(String text, Duration expected) {
        return Arguments.of(text, expected);
    }

    // Every supported unit spelling, in lower and upper case, plus compound forms.
    private static Stream<Arguments> validDurations() {
        Duration m5s5 = Duration.ofMinutes(5).plusSeconds(5);
        Duration h5m5s5 = Duration.ofHours(5).plusMinutes(5).plusSeconds(5);
        Duration d5h5m5s5 = Duration.ofDays(5).plusHours(5).plusMinutes(5).plusSeconds(5);
        return Stream.of(
                d("", Duration.ZERO),
                // lower-case unit spellings
                d("5s", Duration.ofSeconds(5)),
                d("5secs", Duration.ofSeconds(5)),
                d("5seconds", Duration.ofSeconds(5)),
                d("5m", Duration.ofMinutes(5)),
                d("5mins", Duration.ofMinutes(5)),
                d("5minutes", Duration.ofMinutes(5)),
                d("5h", Duration.ofHours(5)),
                d("5hrs", Duration.ofHours(5)),
                d("5hours", Duration.ofHours(5)),
                d("5d", Duration.ofDays(5)),
                d("5days", Duration.ofDays(5)),
                d("5m 5s", m5s5),
                d("5h 5m 5s", h5m5s5),
                d("5d 5h 5m 5s", d5h5m5s5),
                // upper-case unit spellings
                d("5S", Duration.ofSeconds(5)),
                d("5SECS", Duration.ofSeconds(5)),
                d("5SECONDS", Duration.ofSeconds(5)),
                d("5M", Duration.ofMinutes(5)),
                d("5MINS", Duration.ofMinutes(5)),
                d("5MINUTES", Duration.ofMinutes(5)),
                d("5H", Duration.ofHours(5)),
                d("5HRS", Duration.ofHours(5)),
                d("5HOURS", Duration.ofHours(5)),
                d("5D", Duration.ofDays(5)),
                d("5DAYS", Duration.ofDays(5)),
                d("5M 5S", m5s5),
                d("5H 5M 5S", h5m5s5),
                d("5D 5H 5M 5S", d5h5m5s5));
    }

    @ParameterizedTest(name = "[{0}] is valid duration")
    @MethodSource("validDurations")
    public void shouldParseDuration(String input, Duration expectedDuration) {
        // Each accepted spelling must parse to the exact expected Duration.
        assertThat(parseDuration(input)).isEqualTo(expectedDuration);
    }

    @ParameterizedTest(name = "[{0}] is invalid duration")
    @ValueSource(
            strings = {
                "5",
                "s",
                "secs",
                "seconds",
                "m",
                "mins",
                "minutes",
                "h",
                "hours",
                "d",
                "days",
                "5.0s",
                "5.0secs",
                "5.0seconds",
                "5.0m",
                "5.0mins",
                "5.0minutes",
                "5.0h",
                "5.0hrs",
                "5.0hours",
                "5.0d",
                "5.0days",
                "5.0m 5s",
                "5.0h 5m 5s",
                "5.0d 5h 5m 5s",
            })
    public void shouldValidateDuration(String input) {
        // Unit-less numbers, bare units, and fractional amounts are all rejected.
        assertThatThrownBy(() -> parseDuration(input))
                .isInstanceOf(IllegalArgumentException.class)
                .hasMessage("Not valid duration: " + input);
    }
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/test/java/com/netflix/conductor/core/events/TestDefaultEventProcessor.java | core/src/test/java/com/netflix/conductor/core/events/TestDefaultEventProcessor.java | /*
* Copyright 2022 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.events;
import java.util.*;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.stubbing.Answer;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import org.springframework.retry.support.RetryTemplate;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringRunner;
import com.netflix.conductor.common.config.TestObjectMapperConfiguration;
import com.netflix.conductor.common.metadata.events.EventExecution;
import com.netflix.conductor.common.metadata.events.EventHandler;
import com.netflix.conductor.common.metadata.events.EventHandler.Action;
import com.netflix.conductor.common.metadata.events.EventHandler.Action.Type;
import com.netflix.conductor.common.metadata.events.EventHandler.StartWorkflow;
import com.netflix.conductor.common.metadata.events.EventHandler.TaskDetails;
import com.netflix.conductor.core.config.ConductorCoreConfiguration;
import com.netflix.conductor.core.config.ConductorProperties;
import com.netflix.conductor.core.events.queue.Message;
import com.netflix.conductor.core.events.queue.ObservableQueue;
import com.netflix.conductor.core.exception.TransientException;
import com.netflix.conductor.core.execution.StartWorkflowInput;
import com.netflix.conductor.core.execution.WorkflowExecutor;
import com.netflix.conductor.core.execution.evaluators.Evaluator;
import com.netflix.conductor.core.execution.evaluators.JavascriptEvaluator;
import com.netflix.conductor.core.utils.ExternalPayloadStorageUtils;
import com.netflix.conductor.core.utils.JsonUtils;
import com.netflix.conductor.core.utils.ParametersUtils;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import com.netflix.conductor.service.ExecutionService;
import com.netflix.conductor.service.MetadataService;
import com.fasterxml.jackson.databind.ObjectMapper;
import static org.junit.Assert.*;
import static org.mockito.ArgumentMatchers.*;
import static org.mockito.Mockito.*;
@ContextConfiguration(
classes = {
TestObjectMapperConfiguration.class,
TestDefaultEventProcessor.TestConfiguration.class,
ConductorCoreConfiguration.class
})
@RunWith(SpringRunner.class)
public class TestDefaultEventProcessor {

    private String event;
    private ObservableQueue queue;
    private MetadataService metadataService;
    private ExecutionService executionService;
    private WorkflowExecutor workflowExecutor;
    private ExternalPayloadStorageUtils externalPayloadStorageUtils;
    private SimpleActionProcessor actionProcessor;
    private ParametersUtils parametersUtils;
    private JsonUtils jsonUtils;
    private ConductorProperties properties;
    private Message message;

    @Autowired private Map<String, Evaluator> evaluators;
    @Autowired private ObjectMapper objectMapper;

    @Autowired
    private @Qualifier("onTransientErrorRetryTemplate") RetryTemplate retryTemplate;

    @Configuration
    @ComponentScan(basePackageClasses = {Evaluator.class}) // load all Evaluator beans
    public static class TestConfiguration {}

    @Before
    public void setup() {
        event = "sqs:arn:account090:sqstest1";
        String queueURI = "arn:account090:sqstest1";

        metadataService = mock(MetadataService.class);
        executionService = mock(ExecutionService.class);
        workflowExecutor = mock(WorkflowExecutor.class);
        externalPayloadStorageUtils = mock(ExternalPayloadStorageUtils.class);
        actionProcessor = mock(SimpleActionProcessor.class);
        parametersUtils = new ParametersUtils(objectMapper);
        jsonUtils = new JsonUtils(objectMapper);

        queue = mock(ObservableQueue.class);
        // SNS-style notification envelope whose "Message" field is itself JSON;
        // the condition tests evaluate JSONPath expressions against it.
        message =
                new Message(
                        "t0",
                        "{\"Type\":\"Notification\",\"MessageId\":\"7e4e6415-01e9-5caf-abaa-37fd05d446ff\",\"Message\":\"{\\n \\\"testKey1\\\": \\\"level1\\\",\\n \\\"metadata\\\": {\\n \\\"testKey2\\\": 123456 }\\n }\",\"Timestamp\":\"2018-08-10T21:22:05.029Z\",\"SignatureVersion\":\"1\"}",
                        "t0");
        when(queue.getURI()).thenReturn(queueURI);
        when(queue.getName()).thenReturn(queueURI);
        when(queue.getType()).thenReturn("sqs");

        properties = mock(ConductorProperties.class);
        when(properties.isEventMessageIndexingEnabled()).thenReturn(true);
        when(properties.getEventProcessorThreadCount()).thenReturn(2);
    }

    /**
     * Builds a {@link DefaultEventProcessor} wired with this test's shared
     * collaborators; only the action processor varies between tests.
     */
    private DefaultEventProcessor newEventProcessor(SimpleActionProcessor processor) {
        return new DefaultEventProcessor(
                executionService,
                metadataService,
                processor,
                jsonUtils,
                properties,
                objectMapper,
                evaluators,
                retryTemplate);
    }

    /**
     * Stubs {@code workflowExecutor.startWorkflow} so that a call whose input
     * matches the given start_workflow action (name, version and this test's
     * event) flips {@code started} and returns {@code workflowId}.
     */
    private void stubStartWorkflow(
            StartWorkflow startWorkflow, String workflowId, AtomicBoolean started) {
        doAnswer(
                        (Answer<String>)
                                invocation -> {
                                    started.set(true);
                                    return workflowId;
                                })
                .when(workflowExecutor)
                .startWorkflow(
                        argThat(
                                argument ->
                                        startWorkflow.getName().equals(argument.getName())
                                                && startWorkflow
                                                        .getVersion()
                                                        .equals(argument.getVersion())
                                                && event.equals(argument.getEvent())));
    }

    @Test
    public void testEventProcessor() {
        // setup event handler with one start_workflow and one complete_task action
        EventHandler eventHandler = new EventHandler();
        eventHandler.setName(UUID.randomUUID().toString());
        eventHandler.setActive(true);

        Map<String, String> taskToDomain = new HashMap<>();
        taskToDomain.put("*", "dev");

        Action startWorkflowAction = new Action();
        startWorkflowAction.setAction(Type.start_workflow);
        startWorkflowAction.setStart_workflow(new StartWorkflow());
        startWorkflowAction.getStart_workflow().setName("workflow_x");
        startWorkflowAction.getStart_workflow().setVersion(1);
        startWorkflowAction.getStart_workflow().setTaskToDomain(taskToDomain);
        eventHandler.getActions().add(startWorkflowAction);

        Action completeTaskAction = new Action();
        completeTaskAction.setAction(Type.complete_task);
        completeTaskAction.setComplete_task(new TaskDetails());
        completeTaskAction.getComplete_task().setTaskRefName("task_x");
        completeTaskAction.getComplete_task().setWorkflowId(UUID.randomUUID().toString());
        completeTaskAction.getComplete_task().setOutput(new HashMap<>());
        eventHandler.getActions().add(completeTaskAction);

        eventHandler.setEvent(event);

        when(metadataService.getEventHandlersForEvent(event, true))
                .thenReturn(Collections.singletonList(eventHandler));
        when(executionService.addEventExecution(any())).thenReturn(true);
        when(queue.rePublishIfNoAck()).thenReturn(false);

        String id = UUID.randomUUID().toString();
        AtomicBoolean started = new AtomicBoolean(false);
        stubStartWorkflow(startWorkflowAction.getStart_workflow(), id, started);

        AtomicBoolean completed = new AtomicBoolean(false);
        doAnswer(
                        (Answer<String>)
                                invocation -> {
                                    completed.set(true);
                                    return null;
                                })
                .when(workflowExecutor)
                .updateTask(any());

        TaskModel task = new TaskModel();
        task.setReferenceTaskName(completeTaskAction.getComplete_task().getTaskRefName());
        WorkflowModel workflow = new WorkflowModel();
        workflow.setTasks(Collections.singletonList(task));
        when(workflowExecutor.getWorkflow(
                        completeTaskAction.getComplete_task().getWorkflowId(), true))
                .thenReturn(workflow);

        doNothing().when(externalPayloadStorageUtils).verifyAndUpload(any(), any());

        // use a real action processor so both actions are actually executed
        SimpleActionProcessor actionProcessor =
                new SimpleActionProcessor(workflowExecutor, parametersUtils, jsonUtils);
        DefaultEventProcessor eventProcessor = newEventProcessor(actionProcessor);
        eventProcessor.handle(queue, message);

        assertTrue(started.get());
        assertTrue(completed.get());
        // both actions succeeded: the message is acked, never nacked or re-published
        verify(queue, atMost(1)).ack(any());
        verify(queue, never()).nack(any());
        verify(queue, never()).publish(any());
    }

    @Test
    public void testEventHandlerWithCondition() {
        // handler guarded by a condition evaluated against the message payload
        EventHandler eventHandler = new EventHandler();
        eventHandler.setName("cms_intermediate_video_ingest_handler");
        eventHandler.setActive(true);
        eventHandler.setEvent("sqs:dev_cms_asset_ingest_queue");
        eventHandler.setCondition(
                "$.Message.testKey1 == 'level1' && $.Message.metadata.testKey2 == 123456");

        Map<String, Object> workflowInput = new LinkedHashMap<>();
        workflowInput.put("param1", "${Message.metadata.testKey2}");
        workflowInput.put("param2", "SQS-${MessageId}");

        Action startWorkflowAction = new Action();
        startWorkflowAction.setAction(Type.start_workflow);
        startWorkflowAction.setStart_workflow(new StartWorkflow());
        startWorkflowAction.getStart_workflow().setName("cms_artwork_automation");
        startWorkflowAction.getStart_workflow().setVersion(1);
        startWorkflowAction.getStart_workflow().setInput(workflowInput);
        startWorkflowAction.setExpandInlineJSON(true);
        eventHandler.getActions().add(startWorkflowAction);
        eventHandler.setEvent(event);

        when(metadataService.getEventHandlersForEvent(event, true))
                .thenReturn(Collections.singletonList(eventHandler));
        when(executionService.addEventExecution(any())).thenReturn(true);
        when(queue.rePublishIfNoAck()).thenReturn(false);

        String id = UUID.randomUUID().toString();
        AtomicBoolean started = new AtomicBoolean(false);
        stubStartWorkflow(startWorkflowAction.getStart_workflow(), id, started);

        SimpleActionProcessor actionProcessor =
                new SimpleActionProcessor(workflowExecutor, parametersUtils, jsonUtils);
        DefaultEventProcessor eventProcessor = newEventProcessor(actionProcessor);
        eventProcessor.handle(queue, message);

        // the condition matches the test message, so the workflow must start
        assertTrue(started.get());
    }

    @Test
    public void testEventHandlerWithConditionEvaluator() {
        // same as testEventHandlerWithCondition, but with an explicit evaluator type
        EventHandler eventHandler = new EventHandler();
        eventHandler.setName("cms_intermediate_video_ingest_handler");
        eventHandler.setActive(true);
        eventHandler.setEvent("sqs:dev_cms_asset_ingest_queue");
        eventHandler.setEvaluatorType(JavascriptEvaluator.NAME);
        eventHandler.setCondition(
                "$.Message.testKey1 == 'level1' && $.Message.metadata.testKey2 == 123456");

        Map<String, Object> workflowInput = new LinkedHashMap<>();
        workflowInput.put("param1", "${Message.metadata.testKey2}");
        workflowInput.put("param2", "SQS-${MessageId}");

        Action startWorkflowAction = new Action();
        startWorkflowAction.setAction(Type.start_workflow);
        startWorkflowAction.setStart_workflow(new StartWorkflow());
        startWorkflowAction.getStart_workflow().setName("cms_artwork_automation");
        startWorkflowAction.getStart_workflow().setVersion(1);
        startWorkflowAction.getStart_workflow().setInput(workflowInput);
        startWorkflowAction.setExpandInlineJSON(true);
        eventHandler.getActions().add(startWorkflowAction);
        eventHandler.setEvent(event);

        when(metadataService.getEventHandlersForEvent(event, true))
                .thenReturn(Collections.singletonList(eventHandler));
        when(executionService.addEventExecution(any())).thenReturn(true);
        when(queue.rePublishIfNoAck()).thenReturn(false);

        String id = UUID.randomUUID().toString();
        AtomicBoolean started = new AtomicBoolean(false);
        stubStartWorkflow(startWorkflowAction.getStart_workflow(), id, started);

        SimpleActionProcessor actionProcessor =
                new SimpleActionProcessor(workflowExecutor, parametersUtils, jsonUtils);
        DefaultEventProcessor eventProcessor = newEventProcessor(actionProcessor);
        eventProcessor.handle(queue, message);

        assertTrue(started.get());
    }

    @Test
    public void testEventProcessorWithRetriableError() {
        EventHandler eventHandler = new EventHandler();
        eventHandler.setName(UUID.randomUUID().toString());
        eventHandler.setActive(true);
        eventHandler.setEvent(event);

        Action completeTaskAction = new Action();
        completeTaskAction.setAction(Type.complete_task);
        completeTaskAction.setComplete_task(new TaskDetails());
        completeTaskAction.getComplete_task().setTaskRefName("task_x");
        completeTaskAction.getComplete_task().setWorkflowId(UUID.randomUUID().toString());
        completeTaskAction.getComplete_task().setOutput(new HashMap<>());
        eventHandler.getActions().add(completeTaskAction);

        when(queue.rePublishIfNoAck()).thenReturn(false);
        when(metadataService.getEventHandlersForEvent(event, true))
                .thenReturn(Collections.singletonList(eventHandler));
        when(executionService.addEventExecution(any())).thenReturn(true);
        when(actionProcessor.execute(any(), any(), any(), any()))
                .thenThrow(new TransientException("some retriable error"));

        DefaultEventProcessor eventProcessor = newEventProcessor(actionProcessor);
        eventProcessor.handle(queue, message);

        // transient failure: message is re-published for retry, never acked/nacked
        verify(queue, never()).ack(any());
        verify(queue, never()).nack(any());
        verify(queue, atLeastOnce()).publish(any());
    }

    @Test
    public void testEventProcessorWithNonRetriableError() {
        EventHandler eventHandler = new EventHandler();
        eventHandler.setName(UUID.randomUUID().toString());
        eventHandler.setActive(true);
        eventHandler.setEvent(event);

        Action completeTaskAction = new Action();
        completeTaskAction.setAction(Type.complete_task);
        completeTaskAction.setComplete_task(new TaskDetails());
        completeTaskAction.getComplete_task().setTaskRefName("task_x");
        completeTaskAction.getComplete_task().setWorkflowId(UUID.randomUUID().toString());
        completeTaskAction.getComplete_task().setOutput(new HashMap<>());
        eventHandler.getActions().add(completeTaskAction);

        when(metadataService.getEventHandlersForEvent(event, true))
                .thenReturn(Collections.singletonList(eventHandler));
        when(executionService.addEventExecution(any())).thenReturn(true);
        when(actionProcessor.execute(any(), any(), any(), any()))
                .thenThrow(new IllegalArgumentException("some non-retriable error"));

        DefaultEventProcessor eventProcessor = newEventProcessor(actionProcessor);
        eventProcessor.handle(queue, message);

        // non-retriable failure: message is acked (consumed), never re-published
        verify(queue, atMost(1)).ack(any());
        verify(queue, never()).publish(any());
    }

    @Test
    public void testExecuteInvalidAction() {
        AtomicInteger executeInvoked = new AtomicInteger(0);
        doAnswer(
                        (Answer<Map<String, Object>>)
                                invocation -> {
                                    executeInvoked.incrementAndGet();
                                    throw new UnsupportedOperationException("error");
                                })
                .when(actionProcessor)
                .execute(any(), any(), any(), any());

        DefaultEventProcessor eventProcessor = newEventProcessor(actionProcessor);

        EventExecution eventExecution = new EventExecution("id", "messageId");
        eventExecution.setName("handler");
        eventExecution.setStatus(EventExecution.Status.IN_PROGRESS);
        eventExecution.setEvent("event");
        Action action = new Action();
        eventExecution.setAction(Type.start_workflow);

        eventProcessor.execute(eventExecution, action, "payload");

        // unsupported action: executed once (no retry), execution marked FAILED
        assertEquals(1, executeInvoked.get());
        assertEquals(EventExecution.Status.FAILED, eventExecution.getStatus());
        assertNotNull(eventExecution.getOutput().get("exception"));
    }

    @Test
    public void testExecuteNonRetriableException() {
        AtomicInteger executeInvoked = new AtomicInteger(0);
        doAnswer(
                        (Answer<Map<String, Object>>)
                                invocation -> {
                                    executeInvoked.incrementAndGet();
                                    throw new IllegalArgumentException("some non-retriable error");
                                })
                .when(actionProcessor)
                .execute(any(), any(), any(), any());

        DefaultEventProcessor eventProcessor = newEventProcessor(actionProcessor);

        EventExecution eventExecution = new EventExecution("id", "messageId");
        eventExecution.setStatus(EventExecution.Status.IN_PROGRESS);
        eventExecution.setEvent("event");
        eventExecution.setName("handler");
        Action action = new Action();
        action.setAction(Type.start_workflow);
        eventExecution.setAction(Type.start_workflow);

        eventProcessor.execute(eventExecution, action, "payload");

        // non-retriable exception: executed once, execution marked FAILED
        assertEquals(1, executeInvoked.get());
        assertEquals(EventExecution.Status.FAILED, eventExecution.getStatus());
        assertNotNull(eventExecution.getOutput().get("exception"));
    }

    @Test
    public void testExecuteTransientException() {
        AtomicInteger executeInvoked = new AtomicInteger(0);
        doAnswer(
                        (Answer<Map<String, Object>>)
                                invocation -> {
                                    executeInvoked.incrementAndGet();
                                    throw new TransientException("some retriable error");
                                })
                .when(actionProcessor)
                .execute(any(), any(), any(), any());

        DefaultEventProcessor eventProcessor = newEventProcessor(actionProcessor);

        EventExecution eventExecution = new EventExecution("id", "messageId");
        eventExecution.setStatus(EventExecution.Status.IN_PROGRESS);
        eventExecution.setEvent("event");
        Action action = new Action();
        action.setAction(Type.start_workflow);

        eventProcessor.execute(eventExecution, action, "payload");

        // transient exception: retried per the retry template (3 attempts total),
        // and no "exception" output is recorded
        assertEquals(3, executeInvoked.get());
        assertNull(eventExecution.getOutput().get("exception"));
    }
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/test/java/com/netflix/conductor/core/events/TestSimpleActionProcessor.java | core/src/test/java/com/netflix/conductor/core/events/TestSimpleActionProcessor.java | /*
* Copyright 2022 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.events;
import java.util.HashMap;
import java.util.Map;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringRunner;
import com.netflix.conductor.common.config.TestObjectMapperConfiguration;
import com.netflix.conductor.common.metadata.events.EventHandler.Action;
import com.netflix.conductor.common.metadata.events.EventHandler.Action.Type;
import com.netflix.conductor.common.metadata.events.EventHandler.StartWorkflow;
import com.netflix.conductor.common.metadata.events.EventHandler.TaskDetails;
import com.netflix.conductor.common.metadata.tasks.TaskResult;
import com.netflix.conductor.common.metadata.tasks.TaskResult.Status;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.core.execution.StartWorkflowInput;
import com.netflix.conductor.core.execution.WorkflowExecutor;
import com.netflix.conductor.core.utils.ExternalPayloadStorageUtils;
import com.netflix.conductor.core.utils.JsonUtils;
import com.netflix.conductor.core.utils.ParametersUtils;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import com.fasterxml.jackson.databind.ObjectMapper;
import static org.junit.Assert.*;
import static org.mockito.ArgumentMatchers.*;
import static org.mockito.Mockito.*;
@ContextConfiguration(classes = {TestObjectMapperConfiguration.class})
@RunWith(SpringRunner.class)
public class TestSimpleActionProcessor {
private WorkflowExecutor workflowExecutor;
private ExternalPayloadStorageUtils externalPayloadStorageUtils;
private SimpleActionProcessor actionProcessor;
@Autowired private ObjectMapper objectMapper;
@Before
public void setup() {
externalPayloadStorageUtils = mock(ExternalPayloadStorageUtils.class);
workflowExecutor = mock(WorkflowExecutor.class);
actionProcessor =
new SimpleActionProcessor(
workflowExecutor,
new ParametersUtils(objectMapper),
new JsonUtils(objectMapper));
}
@SuppressWarnings({"unchecked", "rawtypes"})
@Test
public void testStartWorkflow_correlationId() throws Exception {
StartWorkflow startWorkflow = new StartWorkflow();
startWorkflow.setName("testWorkflow");
startWorkflow.getInput().put("testInput", "${testId}");
startWorkflow.setCorrelationId("${correlationId}");
Map<String, String> taskToDomain = new HashMap<>();
taskToDomain.put("*", "dev");
startWorkflow.setTaskToDomain(taskToDomain);
Action action = new Action();
action.setAction(Type.start_workflow);
action.setStart_workflow(startWorkflow);
Object payload =
objectMapper.readValue(
"{\"correlationId\":\"test-id\", \"testId\":\"test_1\"}", Object.class);
WorkflowDef workflowDef = new WorkflowDef();
workflowDef.setName("testWorkflow");
workflowDef.setVersion(1);
when(workflowExecutor.startWorkflow(any())).thenReturn("workflow_1");
Map<String, Object> output =
actionProcessor.execute(action, payload, "testEvent", "testMessage");
assertNotNull(output);
assertEquals("workflow_1", output.get("workflowId"));
ArgumentCaptor<StartWorkflowInput> startWorkflowInputArgumentCaptor =
ArgumentCaptor.forClass(StartWorkflowInput.class);
verify(workflowExecutor).startWorkflow(startWorkflowInputArgumentCaptor.capture());
StartWorkflowInput capturedValue = startWorkflowInputArgumentCaptor.getValue();
assertEquals("test_1", capturedValue.getWorkflowInput().get("testInput"));
assertEquals("test-id", capturedValue.getCorrelationId());
assertEquals(
"testMessage", capturedValue.getWorkflowInput().get("conductor.event.messageId"));
assertEquals("testEvent", capturedValue.getWorkflowInput().get("conductor.event.name"));
assertEquals(taskToDomain, capturedValue.getTaskToDomain());
}
@Test
public void testStartWorkflow_taskDomain() throws Exception {
StartWorkflow startWorkflow = new StartWorkflow();
startWorkflow.setName("testWorkflow");
startWorkflow.getInput().put("testInput", "${testId}");
Action action = new Action();
action.setAction(Type.start_workflow);
action.setStart_workflow(startWorkflow);
Object payload =
objectMapper.readValue(
"{ \"testId\": \"test_1\", \"taskToDomain\":{\"testTask\":\"testDomain\"} }",
Object.class);
Map<String, String> taskToDomain = new HashMap<>();
taskToDomain.put("testTask", "testDomain");
WorkflowDef workflowDef = new WorkflowDef();
workflowDef.setName("testWorkflow");
workflowDef.setVersion(1);
when(workflowExecutor.startWorkflow(any())).thenReturn("workflow_1");
Map<String, Object> output =
actionProcessor.execute(action, payload, "testEvent", "testMessage");
assertNotNull(output);
assertEquals("workflow_1", output.get("workflowId"));
ArgumentCaptor<StartWorkflowInput> startWorkflowInputArgumentCaptor =
ArgumentCaptor.forClass(StartWorkflowInput.class);
verify(workflowExecutor).startWorkflow(startWorkflowInputArgumentCaptor.capture());
StartWorkflowInput capturedValue = startWorkflowInputArgumentCaptor.getValue();
assertEquals("test_1", capturedValue.getWorkflowInput().get("testInput"));
assertEquals(taskToDomain, capturedValue.getTaskToDomain());
assertEquals(
"testMessage", capturedValue.getWorkflowInput().get("conductor.event.messageId"));
assertEquals("testEvent", capturedValue.getWorkflowInput().get("conductor.event.name"));
}
@SuppressWarnings({"unchecked", "rawtypes"})
@Test
public void testStartWorkflow() throws Exception {
StartWorkflow startWorkflow = new StartWorkflow();
startWorkflow.setName("testWorkflow");
startWorkflow.getInput().put("testInput", "${testId}");
Map<String, String> taskToDomain = new HashMap<>();
taskToDomain.put("*", "dev");
startWorkflow.setTaskToDomain(taskToDomain);
Action action = new Action();
action.setAction(Type.start_workflow);
action.setStart_workflow(startWorkflow);
Object payload = objectMapper.readValue("{\"testId\":\"test_1\"}", Object.class);
WorkflowDef workflowDef = new WorkflowDef();
workflowDef.setName("testWorkflow");
workflowDef.setVersion(1);
when(workflowExecutor.startWorkflow(any())).thenReturn("workflow_1");
Map<String, Object> output =
actionProcessor.execute(action, payload, "testEvent", "testMessage");
assertNotNull(output);
assertEquals("workflow_1", output.get("workflowId"));
ArgumentCaptor<StartWorkflowInput> startWorkflowInputArgumentCaptor =
ArgumentCaptor.forClass(StartWorkflowInput.class);
verify(workflowExecutor).startWorkflow(startWorkflowInputArgumentCaptor.capture());
StartWorkflowInput capturedArgument = startWorkflowInputArgumentCaptor.getValue();
assertEquals("test_1", capturedArgument.getWorkflowInput().get("testInput"));
assertNull(capturedArgument.getCorrelationId());
assertEquals(
"testMessage",
capturedArgument.getWorkflowInput().get("conductor.event.messageId"));
assertEquals("testEvent", capturedArgument.getWorkflowInput().get("conductor.event.name"));
assertEquals(taskToDomain, capturedArgument.getTaskToDomain());
}
@Test
public void testCompleteTask() throws Exception {
TaskDetails taskDetails = new TaskDetails();
taskDetails.setWorkflowId("${workflowId}");
taskDetails.setTaskRefName("testTask");
taskDetails.getOutput().put("someNEKey", "${Message.someNEKey}");
taskDetails.getOutput().put("someKey", "${Message.someKey}");
taskDetails.getOutput().put("someNullKey", "${Message.someNullKey}");
Action action = new Action();
action.setAction(Type.complete_task);
action.setComplete_task(taskDetails);
String payloadJson =
"{\"workflowId\":\"workflow_1\",\"Message\":{\"someKey\":\"someData\",\"someNullKey\":null}}";
Object payload = objectMapper.readValue(payloadJson, Object.class);
TaskModel task = new TaskModel();
task.setReferenceTaskName("testTask");
WorkflowModel workflow = new WorkflowModel();
workflow.getTasks().add(task);
when(workflowExecutor.getWorkflow(eq("workflow_1"), anyBoolean())).thenReturn(workflow);
doNothing().when(externalPayloadStorageUtils).verifyAndUpload(any(), any());
actionProcessor.execute(action, payload, "testEvent", "testMessage");
ArgumentCaptor<TaskResult> argumentCaptor = ArgumentCaptor.forClass(TaskResult.class);
verify(workflowExecutor).updateTask(argumentCaptor.capture());
assertEquals(Status.COMPLETED, argumentCaptor.getValue().getStatus());
assertEquals(
"testMessage",
argumentCaptor.getValue().getOutputData().get("conductor.event.messageId"));
assertEquals(
"testEvent", argumentCaptor.getValue().getOutputData().get("conductor.event.name"));
assertEquals("workflow_1", argumentCaptor.getValue().getOutputData().get("workflowId"));
assertEquals("testTask", argumentCaptor.getValue().getOutputData().get("taskRefName"));
assertEquals("someData", argumentCaptor.getValue().getOutputData().get("someKey"));
// Assert values not in message are evaluated to null
assertTrue("testTask", argumentCaptor.getValue().getOutputData().containsKey("someNEKey"));
// Assert null values from message are kept
assertTrue(
"testTask", argumentCaptor.getValue().getOutputData().containsKey("someNullKey"));
assertNull("testTask", argumentCaptor.getValue().getOutputData().get("someNullKey"));
}
@Test
public void testCompleteLoopOverTask() throws Exception {
TaskDetails taskDetails = new TaskDetails();
taskDetails.setWorkflowId("${workflowId}");
taskDetails.setTaskRefName("testTask");
taskDetails.getOutput().put("someNEKey", "${Message.someNEKey}");
taskDetails.getOutput().put("someKey", "${Message.someKey}");
taskDetails.getOutput().put("someNullKey", "${Message.someNullKey}");
Action action = new Action();
action.setAction(Type.complete_task);
action.setComplete_task(taskDetails);
String payloadJson =
"{\"workflowId\":\"workflow_1\", \"taskRefName\":\"testTask\", \"Message\":{\"someKey\":\"someData\",\"someNullKey\":null}}";
Object payload = objectMapper.readValue(payloadJson, Object.class);
TaskModel task = new TaskModel();
task.setIteration(1);
task.setReferenceTaskName("testTask__1");
WorkflowModel workflow = new WorkflowModel();
workflow.getTasks().add(task);
when(workflowExecutor.getWorkflow(eq("workflow_1"), anyBoolean())).thenReturn(workflow);
doNothing().when(externalPayloadStorageUtils).verifyAndUpload(any(), any());
actionProcessor.execute(action, payload, "testEvent", "testMessage");
ArgumentCaptor<TaskResult> argumentCaptor = ArgumentCaptor.forClass(TaskResult.class);
verify(workflowExecutor).updateTask(argumentCaptor.capture());
assertEquals(Status.COMPLETED, argumentCaptor.getValue().getStatus());
assertEquals(
"testMessage",
argumentCaptor.getValue().getOutputData().get("conductor.event.messageId"));
assertEquals(
"testEvent", argumentCaptor.getValue().getOutputData().get("conductor.event.name"));
assertEquals("workflow_1", argumentCaptor.getValue().getOutputData().get("workflowId"));
assertEquals("testTask", argumentCaptor.getValue().getOutputData().get("taskRefName"));
assertEquals("someData", argumentCaptor.getValue().getOutputData().get("someKey"));
// Assert values not in message are evaluated to null
assertTrue("testTask", argumentCaptor.getValue().getOutputData().containsKey("someNEKey"));
// Assert null values from message are kept
assertTrue(
"testTask", argumentCaptor.getValue().getOutputData().containsKey("someNullKey"));
assertNull("testTask", argumentCaptor.getValue().getOutputData().get("someNullKey"));
}
@Test
public void testCompleteTaskByTaskId() throws Exception {
TaskDetails taskDetails = new TaskDetails();
taskDetails.setWorkflowId("${workflowId}");
taskDetails.setTaskId("${taskId}");
Action action = new Action();
action.setAction(Type.complete_task);
action.setComplete_task(taskDetails);
Object payload =
objectMapper.readValue(
"{\"workflowId\":\"workflow_1\", \"taskId\":\"task_1\"}", Object.class);
TaskModel task = new TaskModel();
task.setTaskId("task_1");
task.setReferenceTaskName("testTask");
when(workflowExecutor.getTask(eq("task_1"))).thenReturn(task);
doNothing().when(externalPayloadStorageUtils).verifyAndUpload(any(), any());
actionProcessor.execute(action, payload, "testEvent", "testMessage");
ArgumentCaptor<TaskResult> argumentCaptor = ArgumentCaptor.forClass(TaskResult.class);
verify(workflowExecutor).updateTask(argumentCaptor.capture());
assertEquals(Status.COMPLETED, argumentCaptor.getValue().getStatus());
assertEquals(
"testMessage",
argumentCaptor.getValue().getOutputData().get("conductor.event.messageId"));
assertEquals(
"testEvent", argumentCaptor.getValue().getOutputData().get("conductor.event.name"));
assertEquals("workflow_1", argumentCaptor.getValue().getOutputData().get("workflowId"));
assertEquals("task_1", argumentCaptor.getValue().getOutputData().get("taskId"));
}
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/test/java/com/netflix/conductor/core/events/MockQueueProvider.java | core/src/test/java/com/netflix/conductor/core/events/MockQueueProvider.java | /*
* Copyright 2020 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.events;
import org.springframework.lang.NonNull;
import com.netflix.conductor.core.events.queue.ObservableQueue;
/**
 * Test-only {@link EventQueueProvider} that hands out {@link MockObservableQueue} instances.
 * The queue URI passed to {@link #getQueue(String)} doubles as the queue name.
 */
public class MockQueueProvider implements EventQueueProvider {

    // Queue "type" label propagated to every MockObservableQueue this provider creates.
    private final String type;

    public MockQueueProvider(String type) {
        this.type = type;
    }

    @Override
    public String getQueueType() {
        // NOTE(review): always reports "mock", regardless of the type given to the constructor.
        return "mock";
    }

    @Override
    @NonNull
    public ObservableQueue getQueue(String queueURI) {
        return new MockObservableQueue(queueURI, queueURI, type);
    }
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/test/java/com/netflix/conductor/core/events/TestGraalJSFeatures.java | core/src/test/java/com/netflix/conductor/core/events/TestGraalJSFeatures.java | /*
* Copyright 2025 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.events;
import java.util.*;
import org.junit.Test;
import com.netflix.conductor.core.exception.NonTransientException;
import com.netflix.conductor.core.execution.evaluators.ConsoleBridge;
import static org.junit.Assert.*;
/**
 * Exercises GraalJS engine features through {@link ScriptEvaluator}: ES6+ syntax, standard
 * built-ins, timeout protection, and the {@link ConsoleBridge} console shim. In every script
 * the input map is bound as {@code $}.
 */
public class TestGraalJSFeatures {

    @Test
    public void testES6ConstLet() {
        Map<String, Object> input = new HashMap<>();
        input.put("value", 42);
        String script =
                """
                (function() {
                    const x = $.value;
                    let y = x * 2;
                    return y;
                })()""";
        Object result = ScriptEvaluator.eval(script, input);
        assertEquals(84, ((Number) result).intValue());
    }

    @Test
    public void testArrowFunctions() {
        Map<String, Object> input = new HashMap<>();
        List<Integer> numbers = Arrays.asList(1, 2, 3, 4, 5);
        input.put("numbers", numbers);
        // Java List is usable directly with JS array methods.
        String script = "$.numbers.map(x => x * 2)";
        Object result = ScriptEvaluator.eval(script, input);
        assertTrue(result instanceof List);
        @SuppressWarnings("unchecked")
        List<Object> resultList = (List<Object>) result;
        assertEquals(5, resultList.size());
        assertEquals(2, ((Number) resultList.get(0)).intValue());
        assertEquals(10, ((Number) resultList.get(4)).intValue());
    }

    @Test
    public void testTemplateLiterals() {
        Map<String, Object> input = new HashMap<>();
        input.put("name", "Conductor");
        input.put("version", "3.0");
        String script = "`${$.name} v${$.version}`";
        Object result = ScriptEvaluator.eval(script, input);
        assertEquals("Conductor v3.0", result);
    }

    @Test
    public void testDestructuring() {
        Map<String, Object> input = new HashMap<>();
        Map<String, Object> user = new HashMap<>();
        user.put("name", "Alice");
        user.put("age", 30);
        input.put("user", user);
        String script =
                """
                (function() {
                    const { name, age } = $.user;
                    return name + ' is ' + age;
                })()""";
        Object result = ScriptEvaluator.eval(script, input);
        assertEquals("Alice is 30", result);
    }

    @Test
    public void testSpreadOperator() {
        Map<String, Object> input = new HashMap<>();
        List<Integer> arr1 = Arrays.asList(1, 2, 3);
        List<Integer> arr2 = Arrays.asList(4, 5, 6);
        input.put("arr1", arr1);
        input.put("arr2", arr2);
        String script = "[...$.arr1, ...$.arr2]";
        Object result = ScriptEvaluator.eval(script, input);
        assertTrue(result instanceof List);
        @SuppressWarnings("unchecked")
        List<Object> resultList = (List<Object>) result;
        assertEquals(6, resultList.size());
    }

    @Test
    public void testPromiseSupport() {
        Map<String, Object> input = new HashMap<>();
        input.put("value", 100);
        // GraalJS supports Promise
        String script =
                """
                (function() {
                    return Promise.resolve($.value).then(x => x * 2);
                })()""";
        Object result = ScriptEvaluator.eval(script, input);
        // The promise object itself is returned, not the resolved value
        // since we're not using async/await
        assertNotNull(result);
    }

    @Test
    public void testComplexObjectManipulation() {
        Map<String, Object> input = new HashMap<>();
        Map<String, Object> workflow = new HashMap<>();
        workflow.put("name", "test-workflow");
        workflow.put("version", 1);
        List<Map<String, Object>> tasks = new ArrayList<>();
        Map<String, Object> task1 = new HashMap<>();
        task1.put("name", "task1");
        task1.put("status", "COMPLETED");
        tasks.add(task1);
        Map<String, Object> task2 = new HashMap<>();
        task2.put("name", "task2");
        task2.put("status", "IN_PROGRESS");
        tasks.add(task2);
        workflow.put("tasks", tasks);
        input.put("workflow", workflow);
        // Only task1 is COMPLETED, so the joined result is just its name.
        String script =
                """
                $.workflow.tasks
                    .filter(t => t.status === 'COMPLETED')
                    .map(t => t.name)
                    .join(',')""";
        Object result = ScriptEvaluator.eval(script, input);
        assertEquals("task1", result);
    }

    @Test(expected = NonTransientException.class)
    public void testTimeoutProtection() {
        Map<String, Object> input = new HashMap<>();
        // This should timeout after 4 seconds (default)
        String script = "while(true) {}";
        ScriptEvaluator.eval(script, input);
    }

    @Test
    public void testConsoleBridge() {
        Map<String, Object> input = new HashMap<>();
        input.put("message", "Hello from GraalJS");
        ConsoleBridge console = new ConsoleBridge("test-task-id");
        String script =
                """
                (function() {
                    console.log('Starting execution');
                    console.info($.message);
                    console.error('This is an error');
                    return $.message;
                })()
                """;
        Object result = ScriptEvaluator.eval(script, input, console);
        assertEquals("Hello from GraalJS", result);
        // One log entry per console call, each tagged with its level.
        assertEquals(3, console.logs().size());
        assertTrue(console.logs().get(0).getLog().contains("[Log]"));
        assertTrue(console.logs().get(1).getLog().contains("[Info]"));
        assertTrue(console.logs().get(2).getLog().contains("[Error]"));
    }

    @Test
    public void testNullAndUndefinedHandling() {
        Map<String, Object> input = new HashMap<>();
        input.put("nullValue", null);
        String script1 = "$.nullValue === null";
        assertTrue((Boolean) ScriptEvaluator.eval(script1, input));
        // A key missing from the input map is seen as undefined, not null.
        String script2 = "$.undefinedValue === undefined";
        assertTrue((Boolean) ScriptEvaluator.eval(script2, input));
        // Nullish coalescing (ES2020).
        String script3 = "$.nullValue ?? 'default'";
        assertEquals("default", ScriptEvaluator.eval(script3, input));
    }

    @Test
    public void testArrayMethods() {
        Map<String, Object> input = new HashMap<>();
        List<Integer> numbers = Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9, 10);
        input.put("numbers", numbers);
        // Test filter + reduce
        String script = "$.numbers.filter(n => n % 2 === 0).reduce((a, b) => a + b, 0)";
        Object result = ScriptEvaluator.eval(script, input);
        assertEquals(30, ((Number) result).intValue()); // 2+4+6+8+10 = 30
        // Test some/every
        String script2 = "$.numbers.some(n => n > 5)";
        assertTrue((Boolean) ScriptEvaluator.eval(script2, input));
        String script3 = "$.numbers.every(n => n > 0)";
        assertTrue((Boolean) ScriptEvaluator.eval(script3, input));
    }

    @Test
    public void testObjectMethods() {
        Map<String, Object> input = new HashMap<>();
        Map<String, Object> obj = new HashMap<>();
        obj.put("a", 1);
        obj.put("b", 2);
        obj.put("c", 3);
        input.put("obj", obj);
        // Test that we can access object properties
        String script1 = "$.obj.a + $.obj.b + $.obj.c";
        Object result1 = ScriptEvaluator.eval(script1, input);
        assertEquals(6, ((Number) result1).intValue());
        // Test Object.keys works on JS objects we create
        String script2 =
                """
                (function() {
                    const obj = { a: 1, b: 2, c: 3 };
                    return Object.keys(obj).length;
                })()
                """;
        Object result2 = ScriptEvaluator.eval(script2, input);
        assertEquals(3, ((Number) result2).intValue());
    }

    @Test
    public void testStringMethods() {
        Map<String, Object> input = new HashMap<>();
        input.put("text", "hello world");
        String script1 = "$.text.toUpperCase()";
        assertEquals("HELLO WORLD", ScriptEvaluator.eval(script1, input));
        String script2 = "$.text.split(' ').reverse().join(' ')";
        assertEquals("world hello", ScriptEvaluator.eval(script2, input));
        String script3 = "$.text.includes('world')";
        assertTrue((Boolean) ScriptEvaluator.eval(script3, input));
        String script4 = "$.text.startsWith('hello')";
        assertTrue((Boolean) ScriptEvaluator.eval(script4, input));
    }

    @Test
    public void testMathOperations() {
        Map<String, Object> input = new HashMap<>();
        input.put("value", 16);
        String script1 = "Math.sqrt($.value)";
        Object result1 = ScriptEvaluator.eval(script1, input);
        assertEquals(4.0, ((Number) result1).doubleValue(), 0.001);
        String script2 = "Math.pow($.value, 2)";
        Object result2 = ScriptEvaluator.eval(script2, input);
        assertEquals(256.0, ((Number) result2).doubleValue(), 0.001);
        String script3 = "Math.max(1, $.value, 5)";
        Object result3 = ScriptEvaluator.eval(script3, input);
        assertEquals(16, ((Number) result3).intValue());
    }

    @Test
    public void testNestedObjectAccess() {
        Map<String, Object> input = new HashMap<>();
        Map<String, Object> level1 = new HashMap<>();
        Map<String, Object> level2 = new HashMap<>();
        Map<String, Object> level3 = new HashMap<>();
        level3.put("value", "deep");
        level2.put("level3", level3);
        level1.put("level2", level2);
        input.put("level1", level1);
        String script = "$.level1.level2.level3.value";
        assertEquals("deep", ScriptEvaluator.eval(script, input));
        // Test optional chaining (ES2020 feature)
        String script2 = "$.level1?.level2?.level3?.value";
        assertEquals("deep", ScriptEvaluator.eval(script2, input));
        String script3 = "$.level1?.missing?.value ?? 'not found'";
        assertEquals("not found", ScriptEvaluator.eval(script3, input));
    }

    @Test
    public void testJSONOperations() {
        Map<String, Object> input = new HashMap<>();
        input.put("name", "test");
        input.put("count", 42);
        // Test that we can access data
        String script1 = "$.name";
        assertEquals("test", ScriptEvaluator.eval(script1, input));
        // Test JSON operations with JS objects
        String script2 =
                """
                (function() {
                    const obj = { name: $.name, count: $.count };
                    const json = JSON.stringify(obj);
                    const parsed = JSON.parse(json);
                    return parsed.count;
                })()
                """;
        Object result2 = ScriptEvaluator.eval(script2, input);
        assertEquals(42, ((Number) result2).intValue());
        // Test JSON.parse
        String script3 = "JSON.parse('{\"value\":123}').value";
        Object result3 = ScriptEvaluator.eval(script3, input);
        assertEquals(123, ((Number) result3).intValue());
    }

    @Test
    public void testContextPooling() {
        // Test that context pooling can be enabled and works correctly
        // Note: Context pooling is controlled by environment variables
        // This test verifies the code paths work with pooling disabled (default)
        Map<String, Object> input = new HashMap<>();
        input.put("value", 42);
        // Multiple evaluations should work correctly without pooling
        for (int i = 0; i < 5; i++) {
            String script = "$.value * " + (i + 1);
            Object result = ScriptEvaluator.eval(script, input);
            assertEquals(42 * (i + 1), ((Number) result).intValue());
        }
    }

    @Test
    public void testScriptEvaluatorInitialization() {
        // Test that ScriptEvaluator initializes properly with defaults
        // This verifies the self-initializing behavior
        Map<String, Object> input = new HashMap<>();
        input.put("test", "value");
        // First evaluation should trigger initialization
        String script = "$.test";
        Object result = ScriptEvaluator.eval(script, input);
        assertEquals("value", result);
        // Subsequent evaluations should use the initialized state
        result = ScriptEvaluator.eval(script, input);
        assertEquals("value", result);
    }

    @Test
    public void testDeepCopyBehavior() {
        // Test that ScriptEvaluator works correctly with complex nested objects
        // Note: Deep copy protection is implemented in JavascriptEvaluator layer
        // This test verifies ScriptEvaluator can handle nested structures
        Map<String, Object> input = new HashMap<>();
        Map<String, Object> nested = new HashMap<>();
        nested.put("original", "value");
        input.put("data", nested);
        // Script that accesses nested data
        String script =
                """
                (function() {
                    return $.data.original + ' modified';
                })()
                """;
        Object result = ScriptEvaluator.eval(script, input);
        assertEquals("value modified", result);
        // Original input should still have its data intact
        assertEquals("value", nested.get("original"));
    }

    @Test
    public void testMultipleScriptExecutions() {
        // Test that multiple scripts can execute concurrently without interference
        Map<String, Object> input1 = new HashMap<>();
        input1.put("value", 10);
        Map<String, Object> input2 = new HashMap<>();
        input2.put("value", 20);
        String script1 = "$.value * 2";
        String script2 = "$.value * 3";
        Object result1 = ScriptEvaluator.eval(script1, input1);
        Object result2 = ScriptEvaluator.eval(script2, input2);
        assertEquals(20, ((Number) result1).intValue());
        assertEquals(60, ((Number) result2).intValue());
    }

    @Test
    public void testErrorMessageWithLineNumber() {
        // Test that error messages include line number information
        Map<String, Object> input = new HashMap<>();
        String script =
                """
                (function() {
                    const x = 1;
                    const y = 2;
                    throw new Error('Test error on line 4');
                })()
                """;
        try {
            ScriptEvaluator.eval(script, input);
            fail("Should have thrown TerminateWorkflowException");
        } catch (Exception e) {
            // Error message should contain line information
            // (checked on the exception itself or, failing that, on its cause).
            assertTrue(
                    "Error message should contain 'line'",
                    e.getMessage().toLowerCase().contains("line")
                            || e.getCause() != null
                                    && e.getCause().getMessage().toLowerCase().contains("line"));
        }
    }
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/test/java/com/netflix/conductor/core/events/TestScriptEval.java | core/src/test/java/com/netflix/conductor/core/events/TestScriptEval.java | /*
* Copyright 2025 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.events;
import java.util.HashMap;
import java.util.Map;
import org.junit.Test;
import static org.junit.Assert.*;
/** Unit tests for {@link ScriptEvaluator} boolean/value evaluation under GraalJS. */
public class TestScriptEval {

    /** Builds the "conductor app" payload shared by several tests. */
    private static Map<String, Object> conductorPayload() {
        Map<String, Object> app = new HashMap<>();
        app.put("name", "conductor");
        app.put("version", 2.0);
        app.put("license", "Apache 2.0");

        Map<String, Object> payload = new HashMap<>();
        payload.put("app", app);
        payload.put("author", "Netflix");
        payload.put("oss", true);
        return payload;
    }

    @Test
    public void testScript() throws Exception {
        Map<String, Object> payload = conductorPayload();

        // Expected: true, false, true, false.
        assertTrue(ScriptEvaluator.evalBool("$.app.name == 'conductor'", payload));
        assertFalse(ScriptEvaluator.evalBool("$.version > 3", payload));
        assertTrue(ScriptEvaluator.evalBool("$.oss", payload));
        assertFalse(ScriptEvaluator.evalBool("$.author == 'me'", payload));
    }

    @Test
    public void testES6Support() throws Exception {
        Map<String, Object> payload = conductorPayload();

        // GraalJS supports ES6 by default, no need for environment variable
        String script1 =
                """
                (function(){\s
                const variable = 1; // const support => es6\s
                return $.app.name == 'conductor';})();""";
        assertTrue(ScriptEvaluator.evalBool(script1, payload));
    }

    @Test
    public void testArrayAndObjectHandling() throws Exception {
        Map<String, Object> payload = new HashMap<>();
        payload.put("numbers", new int[] {1, 2, 3, 4, 5});

        // Java arrays are exposed with a length property and JS array methods.
        assertTrue(ScriptEvaluator.evalBool("$.numbers.length > 3", payload));

        Object sum = ScriptEvaluator.eval("$.numbers.reduce((a, b) => a + b, 0)", payload);
        assertEquals(15, ((Number) sum).intValue());
    }

    @Test
    public void testNullHandling() throws Exception {
        Map<String, Object> payload = new HashMap<>();
        payload.put("value", null);
        assertTrue(ScriptEvaluator.evalBool("$.value == null", payload));
    }
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/test/java/com/netflix/conductor/core/events/MockObservableQueue.java | core/src/test/java/com/netflix/conductor/core/events/MockObservableQueue.java | /*
* Copyright 2020 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.events;
import java.util.Comparator;
import java.util.List;
import java.util.Set;
import java.util.TreeSet;
import java.util.stream.Collectors;
import com.netflix.conductor.core.events.queue.Message;
import com.netflix.conductor.core.events.queue.ObservableQueue;
import rx.Observable;
/**
 * In-memory {@link ObservableQueue} for tests. Messages are held in a set ordered by message
 * id; {@link #publish(List)} adds and {@link #ack(List)} removes. Lifecycle methods are no-ops
 * and the queue always reports itself as not running.
 */
public class MockObservableQueue implements ObservableQueue {

    private final String uri;
    private final String name;
    private final String type;
    private final Set<Message> messages = new TreeSet<>(Comparator.comparing(Message::getId));

    public MockObservableQueue(String uri, String name, String type) {
        this.uri = uri;
        this.name = name;
        this.type = type;
    }

    @Override
    public Observable<Message> observe() {
        // Emits a snapshot of whatever is currently in the backing set.
        return Observable.from(messages);
    }

    public String getType() {
        return type;
    }

    @Override
    public String getName() {
        return name;
    }

    @Override
    public String getURI() {
        return uri;
    }

    @Override
    public List<String> ack(List<Message> acked) {
        // Collect the ids first, then drop the acked messages from the backing set.
        List<String> ackedIds = acked.stream().map(Message::getId).collect(Collectors.toList());
        messages.removeAll(acked);
        return ackedIds;
    }

    @Override
    public void publish(List<Message> toPublish) {
        this.messages.addAll(toPublish);
    }

    @Override
    public void setUnackTimeout(Message message, long unackTimeout) {
        // no-op: the mock does not track visibility timeouts
    }

    @Override
    public long size() {
        return messages.size();
    }

    @Override
    public String toString() {
        return "MockObservableQueue [uri=" + uri + ", name=" + name + ", type=" + type + "]";
    }

    @Override
    public void start() {
        // no-op
    }

    @Override
    public void stop() {
        // no-op
    }

    @Override
    public boolean isRunning() {
        return false;
    }
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/test/java/com/netflix/conductor/core/reconciliation/TestWorkflowSweeper.java | core/src/test/java/com/netflix/conductor/core/reconciliation/TestWorkflowSweeper.java | /*
* Copyright 2022 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.reconciliation;
import java.time.Duration;
import java.util.List;
import java.util.Optional;
import org.junit.Before;
import org.junit.Test;
import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.tasks.TaskType;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.core.config.ConductorProperties;
import com.netflix.conductor.core.dal.ExecutionDAOFacade;
import com.netflix.conductor.core.execution.WorkflowExecutor;
import com.netflix.conductor.dao.QueueDAO;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.TaskModel.Status;
import com.netflix.conductor.model.WorkflowModel;
import com.netflix.conductor.service.ExecutionLockService;
import static com.netflix.conductor.core.utils.Utils.DECIDER_QUEUE;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
public class TestWorkflowSweeper {
    // Mocked collaborators, wired into the sweeper under test in setUp().
    private ConductorProperties properties;
    private WorkflowExecutor workflowExecutor;
    private WorkflowRepairService workflowRepairService;
    private QueueDAO queueDAO;
    private ExecutionDAOFacade executionDAOFacade;
    private WorkflowSweeper workflowSweeper;
    private ExecutionLockService executionLockService;

    // Default decider-queue postpone offset used by most tests, in seconds.
    private int defaultPostPoneOffSetSeconds = 1800;
    // Upper bound on the postpone duration, in seconds. NOTE(review): the name has a typo
    // ("defaulMmax" -> "defaultMax"); left as-is because it is referenced throughout the class.
    private int defaulMmaxPostponeDurationSeconds = 2000000;
    @Before
    public void setUp() {
        // All collaborators are Mockito mocks; only the WorkflowSweeper under test is real.
        properties = mock(ConductorProperties.class);
        workflowExecutor = mock(WorkflowExecutor.class);
        queueDAO = mock(QueueDAO.class);
        workflowRepairService = mock(WorkflowRepairService.class);
        executionDAOFacade = mock(ExecutionDAOFacade.class);
        executionLockService = mock(ExecutionLockService.class);
        workflowSweeper =
                new WorkflowSweeper(
                        workflowExecutor,
                        Optional.of(workflowRepairService),
                        properties,
                        queueDAO,
                        executionDAOFacade,
                        executionLockService);
    }
@Test
public void testPostponeDurationForHumanTaskType() {
WorkflowModel workflowModel = new WorkflowModel();
workflowModel.setWorkflowId("1");
TaskModel taskModel = new TaskModel();
taskModel.setTaskId("task1");
taskModel.setTaskType(TaskType.TASK_TYPE_HUMAN);
taskModel.setStatus(Status.IN_PROGRESS);
workflowModel.setTasks(List.of(taskModel));
when(properties.getWorkflowOffsetTimeout())
.thenReturn(Duration.ofSeconds(defaultPostPoneOffSetSeconds));
when(properties.getMaxPostponeDurationSeconds())
.thenReturn(Duration.ofSeconds(defaulMmaxPostponeDurationSeconds));
workflowSweeper.unack(workflowModel, defaultPostPoneOffSetSeconds);
verify(queueDAO)
.setUnackTimeout(
DECIDER_QUEUE,
workflowModel.getWorkflowId(),
defaultPostPoneOffSetSeconds * 1000);
}
@Test
public void testPostponeDurationForWaitTaskType() {
WorkflowModel workflowModel = new WorkflowModel();
workflowModel.setWorkflowId("1");
TaskModel taskModel = new TaskModel();
taskModel.setTaskId("task1");
taskModel.setTaskType(TaskType.TASK_TYPE_WAIT);
taskModel.setStatus(Status.IN_PROGRESS);
workflowModel.setTasks(List.of(taskModel));
when(properties.getWorkflowOffsetTimeout())
.thenReturn(Duration.ofSeconds(defaultPostPoneOffSetSeconds));
when(properties.getMaxPostponeDurationSeconds())
.thenReturn(Duration.ofSeconds(defaulMmaxPostponeDurationSeconds));
workflowSweeper.unack(workflowModel, defaultPostPoneOffSetSeconds);
verify(queueDAO)
.setUnackTimeout(
DECIDER_QUEUE,
workflowModel.getWorkflowId(),
defaultPostPoneOffSetSeconds * 1000);
}
    @Test
    public void testPostponeDurationForWaitTaskTypeWithLongWaitTime() {
        // A WAIT task with an explicit wait timeout should postpone by that remaining time,
        // truncated down to whole seconds: 65845 ms -> 65 s -> 65000 ms.
        long waitTimeout = 65845;
        WorkflowModel workflowModel = new WorkflowModel();
        workflowModel.setWorkflowId("1");
        TaskModel taskModel = new TaskModel();
        taskModel.setTaskId("task1");
        taskModel.setTaskType(TaskType.TASK_TYPE_WAIT);
        taskModel.setStatus(Status.IN_PROGRESS);
        taskModel.setWaitTimeout(System.currentTimeMillis() + waitTimeout);
        workflowModel.setTasks(List.of(taskModel));
        when(properties.getWorkflowOffsetTimeout())
                .thenReturn(Duration.ofSeconds(defaultPostPoneOffSetSeconds));
        when(properties.getMaxPostponeDurationSeconds())
                .thenReturn(Duration.ofSeconds(defaulMmaxPostponeDurationSeconds));
        workflowSweeper.unack(workflowModel, defaultPostPoneOffSetSeconds);
        // (waitTimeout / 1000) * 1000 intentionally drops the sub-second remainder.
        verify(queueDAO)
                .setUnackTimeout(
                        DECIDER_QUEUE, workflowModel.getWorkflowId(), (waitTimeout / 1000) * 1000);
    }
    @Test
    public void testPostponeDurationForWaitTaskTypeWithLessOneSecondWaitTime() {
        // A sub-second wait timeout truncates to 0 ms: 180 / 1000 * 1000 == 0, i.e. the
        // workflow is immediately eligible for the next sweep.
        // NOTE(review): getMaxPostponeDurationSeconds is not stubbed here, unlike the other
        // tests — presumably not consulted on this path; confirm against WorkflowSweeper.
        long waitTimeout = 180;
        WorkflowModel workflowModel = new WorkflowModel();
        workflowModel.setWorkflowId("1");
        TaskModel taskModel = new TaskModel();
        taskModel.setTaskId("task1");
        taskModel.setTaskType(TaskType.TASK_TYPE_WAIT);
        taskModel.setStatus(Status.IN_PROGRESS);
        taskModel.setWaitTimeout(System.currentTimeMillis() + waitTimeout);
        workflowModel.setTasks(List.of(taskModel));
        when(properties.getWorkflowOffsetTimeout())
                .thenReturn(Duration.ofSeconds(defaultPostPoneOffSetSeconds));
        workflowSweeper.unack(workflowModel, defaultPostPoneOffSetSeconds);
        verify(queueDAO)
                .setUnackTimeout(
                        DECIDER_QUEUE, workflowModel.getWorkflowId(), (waitTimeout / 1000) * 1000);
    }
    @Test
    public void testPostponeDurationForWaitTaskTypeWithZeroWaitTime() {
        // Zero wait timeout: the postpone duration is 0 ms and the workflow is immediately
        // eligible for the next sweep.
        long waitTimeout = 0;
        WorkflowModel workflowModel = new WorkflowModel();
        workflowModel.setWorkflowId("1");
        TaskModel taskModel = new TaskModel();
        taskModel.setTaskId("task1");
        taskModel.setTaskType(TaskType.TASK_TYPE_WAIT);
        taskModel.setStatus(Status.IN_PROGRESS);
        taskModel.setWaitTimeout(System.currentTimeMillis() + waitTimeout);
        workflowModel.setTasks(List.of(taskModel));
        when(properties.getWorkflowOffsetTimeout())
                .thenReturn(Duration.ofSeconds(defaultPostPoneOffSetSeconds));
        workflowSweeper.unack(workflowModel, defaultPostPoneOffSetSeconds);
        verify(queueDAO)
                .setUnackTimeout(
                        DECIDER_QUEUE, workflowModel.getWorkflowId(), (waitTimeout / 1000) * 1000);
    }
@Test
public void testPostponeDurationForTaskInProgress() {
WorkflowModel workflowModel = new WorkflowModel();
workflowModel.setWorkflowId("1");
TaskModel taskModel = new TaskModel();
taskModel.setTaskId("task1");
taskModel.setTaskType(TaskType.TASK_TYPE_SIMPLE);
taskModel.setStatus(Status.IN_PROGRESS);
workflowModel.setTasks(List.of(taskModel));
when(properties.getWorkflowOffsetTimeout())
.thenReturn(Duration.ofSeconds(defaultPostPoneOffSetSeconds));
when(properties.getMaxPostponeDurationSeconds())
.thenReturn(Duration.ofSeconds(defaulMmaxPostponeDurationSeconds));
workflowSweeper.unack(workflowModel, defaultPostPoneOffSetSeconds);
verify(queueDAO)
.setUnackTimeout(
DECIDER_QUEUE,
workflowModel.getWorkflowId(),
defaultPostPoneOffSetSeconds * 1000);
}
@Test
public void testPostponeDurationForTaskInProgressWithResponseTimeoutSet() {
long responseTimeout = 200;
WorkflowModel workflowModel = new WorkflowModel();
workflowModel.setWorkflowId("1");
TaskModel taskModel = new TaskModel();
taskModel.setTaskId("task1");
taskModel.setTaskType(TaskType.TASK_TYPE_SIMPLE);
taskModel.setStatus(Status.IN_PROGRESS);
taskModel.setResponseTimeoutSeconds(responseTimeout);
workflowModel.setTasks(List.of(taskModel));
when(properties.getWorkflowOffsetTimeout())
.thenReturn(Duration.ofSeconds(defaultPostPoneOffSetSeconds));
when(properties.getMaxPostponeDurationSeconds())
.thenReturn(Duration.ofSeconds(defaulMmaxPostponeDurationSeconds));
workflowSweeper.unack(workflowModel, defaultPostPoneOffSetSeconds);
verify(queueDAO)
.setUnackTimeout(
DECIDER_QUEUE, workflowModel.getWorkflowId(), (responseTimeout + 1) * 1000);
}
@Test
public void
testPostponeDurationForTaskInProgressWithResponseTimeoutSetLongerThanMaxPostponeDuration() {
long responseTimeout = defaulMmaxPostponeDurationSeconds + 1;
WorkflowModel workflowModel = new WorkflowModel();
workflowModel.setWorkflowId("1");
TaskModel taskModel = new TaskModel();
taskModel.setTaskId("task1");
taskModel.setTaskType(TaskType.TASK_TYPE_SIMPLE);
taskModel.setStatus(Status.IN_PROGRESS);
taskModel.setResponseTimeoutSeconds(responseTimeout);
workflowModel.setTasks(List.of(taskModel));
when(properties.getWorkflowOffsetTimeout())
.thenReturn(Duration.ofSeconds(defaultPostPoneOffSetSeconds));
when(properties.getMaxPostponeDurationSeconds())
.thenReturn(Duration.ofSeconds(defaulMmaxPostponeDurationSeconds));
workflowSweeper.unack(workflowModel, defaultPostPoneOffSetSeconds);
verify(queueDAO)
.setUnackTimeout(
DECIDER_QUEUE,
workflowModel.getWorkflowId(),
defaulMmaxPostponeDurationSeconds * 1000L);
}
@Test
public void testPostponeDurationForTaskInScheduled() {
WorkflowModel workflowModel = new WorkflowModel();
workflowModel.setWorkflowId("1");
WorkflowDef workflowDef = new WorkflowDef();
workflowModel.setWorkflowDefinition(workflowDef);
TaskModel taskModel = new TaskModel();
taskModel.setTaskId("task1");
taskModel.setTaskType(TaskType.TASK_TYPE_SIMPLE);
taskModel.setStatus(Status.SCHEDULED);
taskModel.setReferenceTaskName("task1");
workflowModel.setTasks(List.of(taskModel));
when(properties.getWorkflowOffsetTimeout())
.thenReturn(Duration.ofSeconds(defaultPostPoneOffSetSeconds));
workflowSweeper.unack(workflowModel, defaultPostPoneOffSetSeconds);
verify(queueDAO)
.setUnackTimeout(
DECIDER_QUEUE,
workflowModel.getWorkflowId(),
defaultPostPoneOffSetSeconds * 1000);
}
@Test
public void testPostponeDurationForTaskInScheduledWithWorkflowTimeoutSet() {
long workflowTimeout = 1800;
WorkflowModel workflowModel = new WorkflowModel();
workflowModel.setWorkflowId("1");
WorkflowDef workflowDef = new WorkflowDef();
workflowDef.setTimeoutSeconds(workflowTimeout);
workflowModel.setWorkflowDefinition(workflowDef);
TaskModel taskModel = new TaskModel();
taskModel.setTaskId("task1");
taskModel.setTaskType(TaskType.TASK_TYPE_SIMPLE);
taskModel.setStatus(Status.SCHEDULED);
workflowModel.setTasks(List.of(taskModel));
when(properties.getWorkflowOffsetTimeout())
.thenReturn(Duration.ofSeconds(defaultPostPoneOffSetSeconds));
workflowSweeper.unack(workflowModel, defaultPostPoneOffSetSeconds);
verify(queueDAO)
.setUnackTimeout(
DECIDER_QUEUE, workflowModel.getWorkflowId(), (workflowTimeout + 1) * 1000);
}
@Test
public void testPostponeDurationForTaskInScheduledWithWorkflowTimeoutSetAndNoPollTimeout() {
long workflowTimeout = 1800;
WorkflowModel workflowModel = new WorkflowModel();
workflowModel.setWorkflowId("1");
WorkflowDef workflowDef = new WorkflowDef();
workflowDef.setTimeoutSeconds(workflowTimeout);
workflowModel.setWorkflowDefinition(workflowDef);
TaskDef taskDef = new TaskDef();
TaskModel taskModel = mock(TaskModel.class);
workflowModel.setTasks(List.of(taskModel));
when(taskModel.getTaskDefinition()).thenReturn(Optional.of(taskDef));
when(taskModel.getStatus()).thenReturn(Status.SCHEDULED);
when(properties.getWorkflowOffsetTimeout())
.thenReturn(Duration.ofSeconds(defaultPostPoneOffSetSeconds));
workflowSweeper.unack(workflowModel, defaultPostPoneOffSetSeconds);
verify(queueDAO)
.setUnackTimeout(
DECIDER_QUEUE, workflowModel.getWorkflowId(), (workflowTimeout + 1) * 1000);
}
@Test
public void testPostponeDurationForTaskInScheduledWithNoWorkflowTimeoutSetAndNoPollTimeout() {
WorkflowModel workflowModel = new WorkflowModel();
workflowModel.setWorkflowId("1");
WorkflowDef workflowDef = new WorkflowDef();
workflowModel.setWorkflowDefinition(workflowDef);
TaskDef taskDef = new TaskDef();
TaskModel taskModel = mock(TaskModel.class);
workflowModel.setTasks(List.of(taskModel));
when(taskModel.getTaskDefinition()).thenReturn(Optional.of(taskDef));
when(taskModel.getStatus()).thenReturn(Status.SCHEDULED);
when(properties.getWorkflowOffsetTimeout())
.thenReturn(Duration.ofSeconds(defaultPostPoneOffSetSeconds));
workflowSweeper.unack(workflowModel, defaultPostPoneOffSetSeconds);
verify(queueDAO)
.setUnackTimeout(
DECIDER_QUEUE,
workflowModel.getWorkflowId(),
defaultPostPoneOffSetSeconds * 1000);
}
@Test
public void testPostponeDurationForTaskInScheduledWithNoPollTimeoutSet() {
WorkflowModel workflowModel = new WorkflowModel();
workflowModel.setWorkflowId("1");
TaskDef taskDef = new TaskDef();
WorkflowDef workflowDef = new WorkflowDef();
workflowModel.setWorkflowDefinition(workflowDef);
TaskModel taskModel = mock(TaskModel.class);
workflowModel.setTasks(List.of(taskModel));
when(taskModel.getStatus()).thenReturn(Status.SCHEDULED);
when(taskModel.getTaskDefinition()).thenReturn(Optional.of(taskDef));
when(properties.getWorkflowOffsetTimeout())
.thenReturn(Duration.ofSeconds(defaultPostPoneOffSetSeconds));
workflowSweeper.unack(workflowModel, defaultPostPoneOffSetSeconds);
verify(queueDAO)
.setUnackTimeout(
DECIDER_QUEUE,
workflowModel.getWorkflowId(),
defaultPostPoneOffSetSeconds * 1000);
}
@Test
public void testPostponeDurationForTaskInScheduledWithPollTimeoutSet() {
int pollTimeout = 200;
WorkflowModel workflowModel = new WorkflowModel();
workflowModel.setWorkflowId("1");
TaskDef taskDef = new TaskDef();
taskDef.setPollTimeoutSeconds(pollTimeout);
TaskModel taskModel = mock(TaskModel.class);
;
workflowModel.setTasks(List.of(taskModel));
when(taskModel.getStatus()).thenReturn(Status.SCHEDULED);
when(taskModel.getTaskDefinition()).thenReturn(Optional.of(taskDef));
when(properties.getWorkflowOffsetTimeout())
.thenReturn(Duration.ofSeconds(defaultPostPoneOffSetSeconds));
workflowSweeper.unack(workflowModel, defaultPostPoneOffSetSeconds);
verify(queueDAO)
.setUnackTimeout(
DECIDER_QUEUE, workflowModel.getWorkflowId(), (pollTimeout + 1) * 1000);
}
@Test
public void testWorkflowOffsetJitter() {
long offset = 45;
for (int i = 0; i < 10; i++) {
long offsetWithJitter = workflowSweeper.workflowOffsetWithJitter(offset);
assertTrue(offsetWithJitter >= 30);
assertTrue(offsetWithJitter <= 60);
}
}
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/test/java/com/netflix/conductor/core/reconciliation/TestWorkflowRepairService.java | core/src/test/java/com/netflix/conductor/core/reconciliation/TestWorkflowRepairService.java | /*
* Copyright 2022 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.reconciliation;
import java.time.Duration;
import java.util.HashMap;
import java.util.Map;
import org.junit.Before;
import org.junit.Test;
import org.mockito.ArgumentCaptor;
import com.netflix.conductor.core.config.ConductorProperties;
import com.netflix.conductor.core.events.EventQueues;
import com.netflix.conductor.core.execution.WorkflowExecutor;
import com.netflix.conductor.core.execution.tasks.*;
import com.netflix.conductor.core.utils.ParametersUtils;
import com.netflix.conductor.dao.ExecutionDAO;
import com.netflix.conductor.dao.QueueDAO;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import com.fasterxml.jackson.databind.ObjectMapper;
import static com.netflix.conductor.common.metadata.tasks.TaskType.*;
import static org.junit.Assert.*;
import static org.mockito.ArgumentMatchers.anyLong;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.reset;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
/**
 * Unit tests for {@code WorkflowRepairService}: verify that tasks/workflows whose queue
 * messages are missing get re-queued, and that sync or async-complete system tasks are
 * deliberately left alone. All collaborators are Mockito mocks; assertions are made on
 * the boolean repair result and on interactions with the mocked {@code QueueDAO}.
 */
public class TestWorkflowRepairService {
    // Mocked collaborators, re-created for every test in setUp().
    private QueueDAO queueDAO;
    private ExecutionDAO executionDAO;
    private ConductorProperties properties;
    private WorkflowRepairService workflowRepairService;
    private SystemTaskRegistry systemTaskRegistry;
    @Before
    public void setUp() {
        executionDAO = mock(ExecutionDAO.class);
        queueDAO = mock(QueueDAO.class);
        properties = mock(ConductorProperties.class);
        systemTaskRegistry = mock(SystemTaskRegistry.class);
        workflowRepairService =
                new WorkflowRepairService(executionDAO, queueDAO, properties, systemTaskRegistry);
    }
    // A SCHEDULED simple task whose queue message is missing must be repaired by
    // pushing a fresh message.
    @Test
    public void verifyAndRepairSimpleTaskInScheduledState() {
        TaskModel task = new TaskModel();
        task.setTaskType("SIMPLE");
        task.setStatus(TaskModel.Status.SCHEDULED);
        task.setTaskId("abcd");
        task.setCallbackAfterSeconds(60);
        when(queueDAO.containsMessage(anyString(), anyString())).thenReturn(false);
        assertTrue(workflowRepairService.verifyAndRepairTask(task));
        // Verify that a new queue message is pushed for sync system tasks that fails queue contains
        // check.
        verify(queueDAO, times(1)).push(anyString(), anyString(), anyLong());
    }
    // An IN_PROGRESS simple task is owned by a worker; the repair service must not
    // touch the queue at all.
    @Test
    public void verifySimpleTaskInProgressState() {
        TaskModel task = new TaskModel();
        task.setTaskType("SIMPLE");
        task.setStatus(TaskModel.Status.IN_PROGRESS);
        task.setTaskId("abcd");
        task.setCallbackAfterSeconds(60);
        when(queueDAO.containsMessage(anyString(), anyString())).thenReturn(false);
        assertFalse(workflowRepairService.verifyAndRepairTask(task));
        // Verify that queue message is never pushed for simple task in IN_PROGRESS state
        verify(queueDAO, never()).containsMessage(anyString(), anyString());
        verify(queueDAO, never()).push(anyString(), anyString(), anyLong());
    }
    // An async (non-async-complete) custom system task must be repaired in both
    // SCHEDULED and IN_PROGRESS states when its queue message is missing.
    @Test
    public void verifyAndRepairSystemTask() {
        String taskType = "TEST_SYS_TASK";
        TaskModel task = new TaskModel();
        task.setTaskType(taskType);
        task.setStatus(TaskModel.Status.SCHEDULED);
        task.setTaskId("abcd");
        task.setCallbackAfterSeconds(60);
        when(systemTaskRegistry.isSystemTask("TEST_SYS_TASK")).thenReturn(true);
        when(systemTaskRegistry.get(taskType))
                .thenReturn(
                        // Minimal async system task stub: isAsync=true, isAsyncComplete=false.
                        new WorkflowSystemTask("TEST_SYS_TASK") {
                            @Override
                            public boolean isAsync() {
                                return true;
                            }
                            @Override
                            public boolean isAsyncComplete(TaskModel task) {
                                return false;
                            }
                            @Override
                            public void start(
                                    WorkflowModel workflow,
                                    TaskModel task,
                                    WorkflowExecutor executor) {
                                super.start(workflow, task, executor);
                            }
                        });
        when(queueDAO.containsMessage(anyString(), anyString())).thenReturn(false);
        assertTrue(workflowRepairService.verifyAndRepairTask(task));
        // Verify that a new queue message is pushed for tasks that fails queue contains check.
        verify(queueDAO, times(1)).push(anyString(), anyString(), anyLong());
        // Verify a system task in IN_PROGRESS state can be recovered.
        reset(queueDAO);
        task.setStatus(TaskModel.Status.IN_PROGRESS);
        assertTrue(workflowRepairService.verifyAndRepairTask(task));
        // Verify that a new queue message is pushed for async System task in IN_PROGRESS state that
        // fails queue contains check.
        verify(queueDAO, times(1)).push(anyString(), anyString(), anyLong());
    }
    // Synchronous system tasks (DECISION, SWITCH) are executed inline and never
    // queued, so the repair service must skip them entirely.
    @Test
    public void assertSyncSystemTasksAreNotCheckedAgainstQueue() {
        // Return a Switch task object to init WorkflowSystemTask registry.
        when(systemTaskRegistry.get(TASK_TYPE_DECISION)).thenReturn(new Decision());
        when(systemTaskRegistry.isSystemTask(TASK_TYPE_DECISION)).thenReturn(true);
        when(systemTaskRegistry.get(TASK_TYPE_SWITCH)).thenReturn(new Switch());
        when(systemTaskRegistry.isSystemTask(TASK_TYPE_SWITCH)).thenReturn(true);
        TaskModel task = new TaskModel();
        task.setTaskType(TASK_TYPE_DECISION);
        task.setStatus(TaskModel.Status.SCHEDULED);
        assertFalse(workflowRepairService.verifyAndRepairTask(task));
        // Verify that queue contains is never checked for sync system tasks
        verify(queueDAO, never()).containsMessage(anyString(), anyString());
        // Verify that queue message is never pushed for sync system tasks
        verify(queueDAO, never()).push(anyString(), anyString(), anyLong());
        task = new TaskModel();
        task.setTaskType(TASK_TYPE_SWITCH);
        task.setStatus(TaskModel.Status.SCHEDULED);
        assertFalse(workflowRepairService.verifyAndRepairTask(task));
        // Verify that queue contains is never checked for sync system tasks
        verify(queueDAO, never()).containsMessage(anyString(), anyString());
        // Verify that queue message is never pushed for sync system tasks
        verify(queueDAO, never()).push(anyString(), anyString(), anyLong());
    }
    // An EVENT task flagged asyncComplete and IN_PROGRESS completes via an external
    // event, not a queue message, so it must not be re-queued.
    @Test
    public void assertAsyncCompleteInProgressSystemTasksAreNotCheckedAgainstQueue() {
        TaskModel task = new TaskModel();
        task.setTaskType(TASK_TYPE_EVENT);
        task.setStatus(TaskModel.Status.IN_PROGRESS);
        task.setTaskId("abcd");
        task.setCallbackAfterSeconds(60);
        task.setInputData(Map.of("asyncComplete", true));
        WorkflowSystemTask workflowSystemTask =
                new Event(
                        mock(EventQueues.class),
                        mock(ParametersUtils.class),
                        mock(ObjectMapper.class));
        when(systemTaskRegistry.get(TASK_TYPE_EVENT)).thenReturn(workflowSystemTask);
        assertTrue(workflowSystemTask.isAsyncComplete(task));
        assertFalse(workflowRepairService.verifyAndRepairTask(task));
        // Verify that queue message is never pushed for async complete system tasks
        verify(queueDAO, never()).containsMessage(anyString(), anyString());
        verify(queueDAO, never()).push(anyString(), anyString(), anyLong());
    }
    // A SUB_WORKFLOW task still in SCHEDULED state has not started yet, so it IS
    // checked against the queue and repaired even though it is async-complete.
    @Test
    public void assertAsyncCompleteScheduledSystemTasksAreCheckedAgainstQueue() {
        TaskModel task = new TaskModel();
        task.setTaskType(TASK_TYPE_SUB_WORKFLOW);
        task.setStatus(TaskModel.Status.SCHEDULED);
        task.setTaskId("abcd");
        task.setCallbackAfterSeconds(60);
        WorkflowSystemTask workflowSystemTask = new SubWorkflow(new ObjectMapper());
        when(systemTaskRegistry.get(TASK_TYPE_SUB_WORKFLOW)).thenReturn(workflowSystemTask);
        when(queueDAO.containsMessage(anyString(), anyString())).thenReturn(false);
        assertTrue(workflowSystemTask.isAsyncComplete(task));
        assertTrue(workflowRepairService.verifyAndRepairTask(task));
        // Verify that queue message is never pushed for async complete system tasks
        verify(queueDAO, times(1)).containsMessage(anyString(), anyString());
        verify(queueDAO, times(1)).push(anyString(), anyString(), anyLong());
    }
    // A workflow with a parent must have its decider-queue message restored when
    // missing, so the parent can be notified.
    @Test
    public void verifyAndRepairParentWorkflow() {
        WorkflowModel workflow = new WorkflowModel();
        workflow.setWorkflowId("abcd");
        workflow.setParentWorkflowId("parentWorkflowId");
        when(properties.getWorkflowOffsetTimeout()).thenReturn(Duration.ofSeconds(10));
        when(executionDAO.getWorkflow("abcd", true)).thenReturn(workflow);
        when(queueDAO.containsMessage(anyString(), anyString())).thenReturn(false);
        workflowRepairService.verifyAndRepairWorkflowTasks("abcd");
        verify(queueDAO, times(1)).containsMessage(anyString(), anyString());
        verify(queueDAO, times(1)).push(anyString(), anyString(), anyLong());
    }
    // An IN_PROGRESS SUB_WORKFLOW whose child workflow is TERMINATED is repaired by
    // cancelling the task and copying the sub-workflow output — no queue activity.
    @Test
    public void assertInProgressSubWorkflowSystemTasksAreCheckedAndRepaired() {
        String subWorkflowId = "subWorkflowId";
        String taskId = "taskId";
        TaskModel task = new TaskModel();
        task.setTaskType(TASK_TYPE_SUB_WORKFLOW);
        task.setStatus(TaskModel.Status.IN_PROGRESS);
        task.setTaskId(taskId);
        task.setCallbackAfterSeconds(60);
        task.setSubWorkflowId(subWorkflowId);
        Map<String, Object> outputMap = new HashMap<>();
        outputMap.put("subWorkflowId", subWorkflowId);
        task.setOutputData(outputMap);
        WorkflowModel subWorkflow = new WorkflowModel();
        subWorkflow.setWorkflowId(subWorkflowId);
        subWorkflow.setStatus(WorkflowModel.Status.TERMINATED);
        subWorkflow.setOutput(Map.of("k1", "v1", "k2", "v2"));
        when(executionDAO.getWorkflow(subWorkflowId, false)).thenReturn(subWorkflow);
        assertTrue(workflowRepairService.verifyAndRepairTask(task));
        // Verify that queue message is never pushed for async complete system tasks
        verify(queueDAO, never()).containsMessage(anyString(), anyString());
        verify(queueDAO, never()).push(anyString(), anyString(), anyLong());
        // Verify
        ArgumentCaptor<TaskModel> argumentCaptor = ArgumentCaptor.forClass(TaskModel.class);
        verify(executionDAO, times(1)).updateTask(argumentCaptor.capture());
        assertEquals(taskId, argumentCaptor.getValue().getTaskId());
        assertEquals(subWorkflowId, argumentCaptor.getValue().getSubWorkflowId());
        assertEquals(TaskModel.Status.CANCELED, argumentCaptor.getValue().getStatus());
        assertNotNull(argumentCaptor.getValue().getOutputData());
        assertEquals(subWorkflowId, argumentCaptor.getValue().getOutputData().get("subWorkflowId"));
        assertEquals("v1", argumentCaptor.getValue().getOutputData().get("k1"));
        assertEquals("v2", argumentCaptor.getValue().getOutputData().get("k2"));
    }
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/test/java/com/netflix/conductor/core/sync/local/LocalOnlyLockTest.java | core/src/test/java/com/netflix/conductor/core/sync/local/LocalOnlyLockTest.java | /*
* Copyright 2020 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.sync.local;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import org.junit.After;
import org.junit.Ignore;
import org.junit.Test;
import org.springframework.boot.test.context.runner.ApplicationContextRunner;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
@Ignore
// Test always times out in CI environment
public class LocalOnlyLockTest {

    // Lock can be global since it uses global cache internally
    private final LocalOnlyLock localOnlyLock = new LocalOnlyLock();

    @After
    public void tearDown() {
        // Clean caches between tests as they are shared globally
        localOnlyLock.cache().invalidateAll();
        localOnlyLock.scheduledFutures().values().forEach(f -> f.cancel(false));
        localOnlyLock.scheduledFutures().clear();
    }

    /** Basic acquire/release/delete life cycle of a single lock. */
    @Test
    public void testLockUnlock() {
        final boolean a = localOnlyLock.acquireLock("a", 100, 10000, TimeUnit.MILLISECONDS);
        assertTrue(a);
        // assertEquals takes (expected, actual) — arguments were previously reversed,
        // which produced misleading failure messages.
        assertEquals(1, localOnlyLock.cache().estimatedSize());
        assertTrue(localOnlyLock.cache().get("a").isLocked());
        assertEquals(1, localOnlyLock.scheduledFutures().size());
        localOnlyLock.releaseLock("a");
        assertEquals(0, localOnlyLock.scheduledFutures().size());
        assertFalse(localOnlyLock.cache().get("a").isLocked());
        localOnlyLock.deleteLock("a");
        assertEquals(0, localOnlyLock.cache().estimatedSize());
    }

    /** A lock held by another thread cannot be re-acquired within the timeout. */
    @Test(timeout = 10 * 10_000)
    public void testLockTimeout() throws InterruptedException, ExecutionException {
        final ExecutorService executor = Executors.newFixedThreadPool(1);
        executor.submit(
                        () -> {
                            localOnlyLock.acquireLock("c", 100, 1000, TimeUnit.MILLISECONDS);
                        })
                .get();
        assertTrue(localOnlyLock.acquireLock("d", 100, 1000, TimeUnit.MILLISECONDS));
        assertFalse(localOnlyLock.acquireLock("c", 100, 1000, TimeUnit.MILLISECONDS));
        assertEquals(2, localOnlyLock.scheduledFutures().size());
        executor.submit(
                        () -> {
                            localOnlyLock.releaseLock("c");
                        })
                .get();
        localOnlyLock.releaseLock("d");
        assertEquals(0, localOnlyLock.scheduledFutures().size());
        // Shut down the helper executor so the test does not leak threads.
        executor.shutdown();
    }

    /** Releasing from a non-owning thread must be a safe no-op. */
    @Test(timeout = 10 * 10_000)
    public void testReleaseFromAnotherThread() throws InterruptedException, ExecutionException {
        final ExecutorService executor = Executors.newFixedThreadPool(1);
        executor.submit(
                        () -> {
                            localOnlyLock.acquireLock("c", 100, 10000, TimeUnit.MILLISECONDS);
                        })
                .get();
        // Releasing from another thread should not throw exception (it's caught internally)
        localOnlyLock.releaseLock("c");
        // The owning thread should still be able to release the lock
        executor.submit(
                        () -> {
                            localOnlyLock.releaseLock("c");
                        })
                .get();
        localOnlyLock.deleteLock("c");
        // Shut down the helper executor so the test does not leak threads.
        executor.shutdown();
    }

    /** After lease expiry plus an explicit release, the lock must be re-acquirable. */
    @Test(timeout = 10 * 10_000)
    public void testLockLeaseWithRelease() throws Exception {
        localOnlyLock.acquireLock("b", 1000, 1000, TimeUnit.MILLISECONDS);
        localOnlyLock.releaseLock("b");
        // Wait for lease to run out and also call release
        Thread.sleep(2000);
        localOnlyLock.acquireLock("b");
        assertTrue(localOnlyLock.cache().get("b").isLocked());
        localOnlyLock.releaseLock("b");
    }

    /** Releasing a never-acquired key repeatedly must not throw or lock anything. */
    @Test
    public void testRelease() {
        localOnlyLock.releaseLock("x54as4d2;23'4");
        localOnlyLock.releaseLock("x54as4d2;23'4");
        assertFalse(localOnlyLock.cache().get("x54as4d2;23'4").isLocked());
    }

    /** Locks whose lease expired in other threads must be re-acquirable here. */
    @Test(timeout = 10 * 10_000)
    public void testLockLeaseTime() throws InterruptedException {
        for (int i = 0; i < 10; i++) {
            final Thread thread =
                    new Thread(
                            () -> {
                                localOnlyLock.acquireLock("a", 1000, 100, TimeUnit.MILLISECONDS);
                            });
            thread.start();
            thread.join();
        }
        localOnlyLock.acquireLock("a");
        assertTrue(localOnlyLock.cache().get("a").isLocked());
        localOnlyLock.releaseLock("a");
        localOnlyLock.deleteLock("a");
    }

    /** The local_only lock type must be wired up by Spring auto-configuration. */
    @Test
    public void testLockConfiguration() {
        new ApplicationContextRunner()
                .withPropertyValues("conductor.workflow-execution-lock.type=local_only")
                .withUserConfiguration(LocalOnlyLockConfiguration.class)
                .run(
                        context -> {
                            LocalOnlyLock lock = context.getBean(LocalOnlyLock.class);
                            assertNotNull(lock);
                        });
    }
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/main/java/com/netflix/conductor/dao/RateLimitingDAO.java | core/src/main/java/com/netflix/conductor/dao/RateLimitingDAO.java | /*
* Copyright 2022 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.dao;
import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.model.TaskModel;
/** An abstraction to enable different Rate Limiting implementations */
public interface RateLimitingDAO {
    /**
     * Checks if the Task is rate limited or not based on the {@link
     * TaskModel#getRateLimitPerFrequency()} and {@link TaskModel#getRateLimitFrequencyInSeconds()}
     *
     * @param task the task which needs to be evaluated whether it is rateLimited or not
     * @param taskDef the task definition supplying the rate-limit configuration for the task
     * @return true if the {@link TaskModel} is rateLimited; false if the {@link TaskModel} is not
     *     rateLimited
     */
    boolean exceedsRateLimitPerFrequency(TaskModel task, TaskDef taskDef);
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/main/java/com/netflix/conductor/dao/PollDataDAO.java | core/src/main/java/com/netflix/conductor/dao/PollDataDAO.java | /*
* Copyright 2022 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.dao;
import java.util.List;
import com.netflix.conductor.common.metadata.tasks.PollData;
/** An abstraction to enable different PollData store implementations */
public interface PollDataDAO {

    /**
     * Records the most recent poll against a task queue.
     *
     * @param taskDefName name of the task as specified in the task definition
     * @param domain domain in which this task is being polled from
     * @param workerId the identifier of the worker polling for this task
     */
    void updateLastPollData(String taskDefName, String domain, String workerId);

    /**
     * Fetches the poll information recorded for a task within one domain.
     *
     * @param taskDefName name of the task as specified in the task definition
     * @param domain domain for which {@link PollData} is being requested
     * @return the {@link PollData} for the given task queue in the specified domain
     */
    PollData getPollData(String taskDefName, String domain);

    /**
     * Fetches the poll information recorded for a task across every domain.
     *
     * @param taskDefName name of the task as specified in the task definition
     * @return the {@link PollData} for the given task queue in all domains
     */
    List<PollData> getPollData(String taskDefName);

    /**
     * Fetches the poll information for all task types.
     *
     * <p>Optional operation: implementations that do not support it throw
     * {@link UnsupportedOperationException}.
     *
     * @return the {@link PollData} for all task types
     */
    default List<PollData> getAllPollData() {
        String implementation = this.getClass().getSimpleName();
        throw new UnsupportedOperationException(
                "The selected PollDataDAO ("
                        + implementation
                        + ") does not implement the getAllPollData() method");
    }
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/main/java/com/netflix/conductor/dao/ConcurrentExecutionLimitDAO.java | core/src/main/java/com/netflix/conductor/dao/ConcurrentExecutionLimitDAO.java | /*
* Copyright 2022 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.dao;
import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.model.TaskModel;
/**
 * A contract to support concurrency limits of tasks.
 *
 * @since v3.3.5.
 */
public interface ConcurrentExecutionLimitDAO {
    /**
     * Registers the given task against its concurrency limit accounting.
     *
     * <p>Optional operation: the default implementation always throws.
     *
     * @param task the task to be counted toward the concurrency limit
     * @throws UnsupportedOperationException if the implementation does not track limits
     */
    default void addTaskToLimit(TaskModel task) {
        throw new UnsupportedOperationException(
                getClass() + " does not support addTaskToLimit method.");
    }
    /**
     * Removes the given task from its concurrency limit accounting.
     *
     * <p>Optional operation: the default implementation always throws.
     *
     * @param task the task to be removed from the concurrency limit count
     * @throws UnsupportedOperationException if the implementation does not track limits
     */
    default void removeTaskFromLimit(TaskModel task) {
        throw new UnsupportedOperationException(
                getClass() + " does not support removeTaskFromLimit method.");
    }
    /**
     * Checks if the number of tasks in progress for the given taskDef will exceed the limit if the
     * task is scheduled to be in progress (given to the worker or for system tasks start() method
     * called)
     *
     * @param task The task to be executed. Limit is set in the Task's definition
     * @return true if by executing this task, the limit is breached. false otherwise.
     * @see TaskDef#concurrencyLimit()
     */
    boolean exceedsLimit(TaskModel task);
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/main/java/com/netflix/conductor/dao/IndexDAO.java | core/src/main/java/com/netflix/conductor/dao/IndexDAO.java | /*
* Copyright 2022 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.dao;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import com.netflix.conductor.common.metadata.events.EventExecution;
import com.netflix.conductor.common.metadata.tasks.TaskExecLog;
import com.netflix.conductor.common.run.SearchResult;
import com.netflix.conductor.common.run.TaskSummary;
import com.netflix.conductor.common.run.WorkflowSummary;
import com.netflix.conductor.core.events.queue.Message;
/**
 * DAO to index the workflow and task details for searching.
 *
 * <p>Most mutating operations come in a synchronous flavor and an {@code async*} flavor that
 * returns a {@link java.util.concurrent.CompletableFuture}; implementations decide how the async
 * variants are scheduled.
 */
public interface IndexDAO {

    /** Setup method in charge of initializing/populating the index. */
    void setup() throws Exception;

    /**
     * Indexes the given workflow summary synchronously.
     *
     * @param workflow Workflow to be indexed
     */
    void indexWorkflow(WorkflowSummary workflow);

    /**
     * Indexes the given workflow summary asynchronously.
     *
     * @param workflow Workflow to be indexed
     * @return CompletableFuture of type void
     */
    CompletableFuture<Void> asyncIndexWorkflow(WorkflowSummary workflow);

    /**
     * @param task Task to be indexed
     */
    void indexTask(TaskSummary task);

    /**
     * @param task Task to be indexed asynchronously
     * @return CompletableFuture of type void
     */
    CompletableFuture<Void> asyncIndexTask(TaskSummary task);

    /**
     * @param query SQL like query for workflow search parameters.
     * @param freeText Additional query in free text. Lucene syntax
     * @param start start index for pagination
     * @param count number of workflow ids to be returned
     * @param sort sort options
     * @return List of workflow ids for the matching query
     */
    SearchResult<String> searchWorkflows(
            String query, String freeText, int start, int count, List<String> sort);

    /**
     * @param query SQL like query for workflow search parameters.
     * @param freeText Additional query in free text. Lucene syntax
     * @param start start index for pagination
     * @param count number of workflow summaries to be returned
     * @param sort sort options
     * @return List of workflow summaries for the matching query
     */
    SearchResult<WorkflowSummary> searchWorkflowSummary(
            String query, String freeText, int start, int count, List<String> sort);

    /**
     * @param query SQL like query for task search parameters.
     * @param freeText Additional query in free text. Lucene syntax
     * @param start start index for pagination
     * @param count number of task ids to be returned
     * @param sort sort options
     * @return List of task ids for the matching query
     */
    SearchResult<String> searchTasks(
            String query, String freeText, int start, int count, List<String> sort);

    /**
     * @param query SQL like query for task search parameters.
     * @param freeText Additional query in free text. Lucene syntax
     * @param start start index for pagination
     * @param count number of task summaries to be returned
     * @param sort sort options
     * @return List of task summaries for the matching query
     */
    SearchResult<TaskSummary> searchTaskSummary(
            String query, String freeText, int start, int count, List<String> sort);

    /**
     * Remove the workflow index
     *
     * @param workflowId workflow to be removed
     */
    void removeWorkflow(String workflowId);

    /**
     * Remove the workflow index asynchronously
     *
     * @param workflowId workflow to be removed
     * @return CompletableFuture of type void
     */
    CompletableFuture<Void> asyncRemoveWorkflow(String workflowId);

    /**
     * Updates the index
     *
     * @param workflowInstanceId id of the workflow
     * @param keys keys to be updated
     * @param values values. Number of keys and values MUST match.
     */
    void updateWorkflow(String workflowInstanceId, String[] keys, Object[] values);

    /**
     * Updates the index asynchronously
     *
     * @param workflowInstanceId id of the workflow
     * @param keys keys to be updated
     * @param values values. Number of keys and values MUST match.
     * @return CompletableFuture of type void
     */
    CompletableFuture<Void> asyncUpdateWorkflow(
            String workflowInstanceId, String[] keys, Object[] values);

    /**
     * Remove the task index
     *
     * @param workflowId workflow containing task
     * @param taskId task to be removed
     */
    void removeTask(String workflowId, String taskId);

    /**
     * Remove the task index asynchronously
     *
     * @param workflowId workflow containing task
     * @param taskId task to be removed
     * @return CompletableFuture of type void
     */
    CompletableFuture<Void> asyncRemoveTask(String workflowId, String taskId);

    /**
     * Updates the index
     *
     * @param workflowId id of the workflow
     * @param taskId id of the task
     * @param keys keys to be updated
     * @param values values. Number of keys and values MUST match.
     */
    void updateTask(String workflowId, String taskId, String[] keys, Object[] values);

    /**
     * Updates the index asynchronously
     *
     * @param workflowId id of the workflow
     * @param taskId id of the task
     * @param keys keys to be updated
     * @param values values. Number of keys and values MUST match.
     * @return CompletableFuture of type void
     */
    CompletableFuture<Void> asyncUpdateTask(
            String workflowId, String taskId, String[] keys, Object[] values);

    /**
     * Retrieves a specific field from the index
     *
     * @param workflowInstanceId id of the workflow
     * @param key field to be retrieved
     * @return value of the field as string
     */
    String get(String workflowInstanceId, String key);

    /**
     * @param logs Task Execution logs to be indexed
     */
    void addTaskExecutionLogs(List<TaskExecLog> logs);

    /**
     * @param logs Task Execution logs to be indexed
     * @return CompletableFuture of type void
     */
    CompletableFuture<Void> asyncAddTaskExecutionLogs(List<TaskExecLog> logs);

    /**
     * @param taskId Id of the task for which to fetch the execution logs
     * @return Returns the task execution logs for given task id
     */
    List<TaskExecLog> getTaskExecutionLogs(String taskId);

    /**
     * @param eventExecution Event Execution to be indexed
     */
    void addEventExecution(EventExecution eventExecution);

    /**
     * @param event name of the event
     * @return Returns the event executions indexed for the given event
     */
    List<EventExecution> getEventExecutions(String event);

    /**
     * @param eventExecution Event Execution to be indexed asynchronously
     * @return CompletableFuture of type void
     */
    CompletableFuture<Void> asyncAddEventExecution(EventExecution eventExecution);

    /**
     * Adds an incoming external message into the index
     *
     * @param queue Name of the registered queue
     * @param msg Message
     */
    void addMessage(String queue, Message msg);

    /**
     * Adds an incoming external message into the index asynchronously
     *
     * @param queue Name of the registered queue
     * @param message {@link Message}
     * @return CompletableFuture of type Void
     */
    CompletableFuture<Void> asyncAddMessage(String queue, Message message);

    /**
     * @param queue Name of the registered queue
     * @return Returns the messages indexed for the given queue
     */
    List<Message> getMessages(String queue);

    /**
     * Search for Workflows completed or failed beyond archiveTtlDays
     *
     * @param indexName Name of the index to search
     * @param archiveTtlDays Archival Time to Live
     * @return List of workflow Ids matching the pattern
     */
    List<String> searchArchivableWorkflows(String indexName, long archiveTtlDays);

    /**
     * Get total workflow counts that matches the query
     *
     * @param query SQL like query for workflow search parameters.
     * @param freeText Additional query in free text. Lucene syntax
     * @return Number of matches for the query
     */
    long getWorkflowCount(String query, String freeText);
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/main/java/com/netflix/conductor/dao/ExecutionDAO.java | core/src/main/java/com/netflix/conductor/dao/ExecutionDAO.java | /*
* Copyright 2022 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.dao;
import java.util.List;
import com.netflix.conductor.common.metadata.events.EventExecution;
import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
/** Data access layer for storing workflow executions */
public interface ExecutionDAO {

    /**
     * @param taskName Name of the task
     * @param workflowId Workflow instance id
     * @return List of pending tasks (in_progress)
     */
    List<TaskModel> getPendingTasksByWorkflow(String taskName, String workflowId);

    /**
     * @param taskType Type of task
     * @param startKey start key for pagination
     * @param count number of tasks to return
     * @return List of tasks starting from startKey
     */
    List<TaskModel> getTasks(String taskType, String startKey, int count);

    /**
     * @param tasks tasks to be created
     * @return List of tasks that were created.
     *     <p><b>Note on the primary key constraint</b>
     *     <p>For a given task reference name and retryCount should be considered unique/primary
     *     key. Given two tasks with the same reference name and retryCount only one should be added
     *     to the database.
     */
    List<TaskModel> createTasks(List<TaskModel> tasks);

    /**
     * @param task Task to be updated
     */
    void updateTask(TaskModel task);

    /**
     * Checks if the number of tasks in progress for the given taskDef will exceed the limit if the
     * task is scheduled to be in progress (given to the worker or for system tasks start() method
     * called)
     *
     * @param task The task to be executed. Limit is set in the Task's definition
     * @return true if by executing this task, the limit is breached. false otherwise.
     * @see TaskDef#concurrencyLimit()
     * @deprecated Since v3.3.5. Use {@link ConcurrentExecutionLimitDAO#exceedsLimit(TaskModel)}.
     */
    @Deprecated
    default boolean exceedsInProgressLimit(TaskModel task) {
        // Fix: a space is required after getClass(), otherwise the message reads
        // "interface Xdoes not support exceedsInProgressLimit".
        throw new UnsupportedOperationException(
                getClass() + " does not support exceedsInProgressLimit");
    }

    /**
     * @param taskId id of the task to be removed.
     * @return true if the deletion is successful, false otherwise.
     */
    boolean removeTask(String taskId);

    /**
     * @param taskId Task instance id
     * @return Task
     */
    TaskModel getTask(String taskId);

    /**
     * @param taskIds Task instance ids
     * @return List of tasks
     */
    List<TaskModel> getTasks(List<String> taskIds);

    /**
     * @param taskType Type of the task for which to retrieve the list of pending tasks
     * @return List of pending tasks
     */
    List<TaskModel> getPendingTasksForTaskType(String taskType);

    /**
     * @param workflowId Workflow instance id
     * @return List of tasks for the given workflow instance id
     */
    List<TaskModel> getTasksForWorkflow(String workflowId);

    /**
     * @param workflow Workflow to be created
     * @return Id of the newly created workflow
     */
    String createWorkflow(WorkflowModel workflow);

    /**
     * @param workflow Workflow to be updated
     * @return Id of the updated workflow
     */
    String updateWorkflow(WorkflowModel workflow);

    /**
     * @param workflowId workflow instance id
     * @return true if the deletion is successful, false otherwise
     */
    boolean removeWorkflow(String workflowId);

    /**
     * Removes the workflow with ttl seconds
     *
     * @param workflowId workflow instance id
     * @param ttlSeconds time to live in seconds.
     * @return true if the removal is successful, false otherwise
     */
    boolean removeWorkflowWithExpiry(String workflowId, int ttlSeconds);

    /**
     * @param workflowType Workflow Type
     * @param workflowId workflow instance id
     */
    void removeFromPendingWorkflow(String workflowType, String workflowId);

    /**
     * @param workflowId workflow instance id
     * @return Workflow
     */
    WorkflowModel getWorkflow(String workflowId);

    /**
     * @param workflowId workflow instance id
     * @param includeTasks if set, includes the tasks (pending and completed) sorted by Task
     *     Sequence number in Workflow.
     * @return Workflow instance details
     */
    WorkflowModel getWorkflow(String workflowId, boolean includeTasks);

    /**
     * @param workflowName name of the workflow
     * @param version the workflow version
     * @return List of workflow ids which are running
     */
    List<String> getRunningWorkflowIds(String workflowName, int version);

    /**
     * @param workflowName Name of the workflow
     * @param version the workflow version
     * @return List of workflows that are running
     */
    List<WorkflowModel> getPendingWorkflowsByType(String workflowName, int version);

    /**
     * @param workflowName Name of the workflow
     * @return No. of running workflows
     */
    long getPendingWorkflowCount(String workflowName);

    /**
     * @param taskDefName Name of the task
     * @return Number of task currently in IN_PROGRESS status
     */
    long getInProgressTaskCount(String taskDefName);

    /**
     * @param workflowName Name of the workflow
     * @param startTime epoch time
     * @param endTime epoch time
     * @return List of workflows between start and end time
     */
    List<WorkflowModel> getWorkflowsByType(String workflowName, Long startTime, Long endTime);

    /**
     * @param workflowName workflow name
     * @param correlationId Correlation Id
     * @param includeTasks Option to includeTasks in results
     * @return List of workflows by correlation id
     */
    List<WorkflowModel> getWorkflowsByCorrelationId(
            String workflowName, String correlationId, boolean includeTasks);

    /**
     * @return true, if the DAO implementation is capable of searching across workflows false, if
     *     the DAO implementation cannot perform searches across workflows (and needs to use
     *     indexDAO)
     */
    boolean canSearchAcrossWorkflows();

    // Events

    /**
     * @param eventExecution Event Execution to be stored
     * @return true if the event was added. false otherwise when the event by id is already stored.
     */
    boolean addEventExecution(EventExecution eventExecution);

    /**
     * @param eventExecution Event execution to be updated
     */
    void updateEventExecution(EventExecution eventExecution);

    /**
     * @param eventExecution Event execution to be removed
     */
    void removeEventExecution(EventExecution eventExecution);
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/main/java/com/netflix/conductor/dao/QueueDAO.java | core/src/main/java/com/netflix/conductor/dao/QueueDAO.java | /*
* Copyright 2022 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.dao;
import java.util.List;
import java.util.Map;
import org.springframework.stereotype.Component;
import com.netflix.conductor.core.events.queue.Message;
/** DAO responsible for managing queuing for the tasks. */
@SuppressWarnings("SpringJavaInjectionPointsAutowiringInspection")
@Component
public interface QueueDAO {

    /**
     * @param queueName name of the queue
     * @param id message id
     * @param offsetTimeInSecond time in seconds, after which the message should be marked visible.
     *     (for timed queues)
     */
    void push(String queueName, String id, long offsetTimeInSecond);

    /**
     * @param queueName name of the queue
     * @param id message id
     * @param priority message priority (between 0 and 99)
     * @param offsetTimeInSecond time in seconds, after which the message should be marked visible.
     *     (for timed queues)
     */
    void push(String queueName, String id, int priority, long offsetTimeInSecond);

    /**
     * @param queueName Name of the queue
     * @param messages messages to be pushed.
     */
    void push(String queueName, List<Message> messages);

    /**
     * @param queueName Name of the queue
     * @param id message id
     * @param offsetTimeInSecond time in seconds, after which the message should be marked visible.
     *     (for timed queues)
     * @return true if the element was added to the queue. false otherwise indicating the element
     *     already exists in the queue.
     */
    boolean pushIfNotExists(String queueName, String id, long offsetTimeInSecond);

    /**
     * @param queueName Name of the queue
     * @param id message id
     * @param priority message priority (between 0 and 99)
     * @param offsetTimeInSecond time in seconds, after which the message should be marked visible.
     *     (for timed queues)
     * @return true if the element was added to the queue. false otherwise indicating the element
     *     already exists in the queue.
     */
    boolean pushIfNotExists(String queueName, String id, int priority, long offsetTimeInSecond);

    /**
     * @param queueName Name of the queue
     * @param count number of messages to be read from the queue
     * @param timeout timeout in milliseconds
     * @return list of message ids from the named queue
     */
    List<String> pop(String queueName, int count, int timeout);

    /**
     * @param queueName Name of the queue
     * @param count number of messages to be read from the queue
     * @param timeout timeout in milliseconds
     * @return list of messages from the named queue
     */
    List<Message> pollMessages(String queueName, int count, int timeout);

    /**
     * @param queueName Name of the queue
     * @param messageId Message id
     */
    void remove(String queueName, String messageId);

    /**
     * @param queueName Name of the queue
     * @return size of the queue
     */
    int getSize(String queueName);

    /**
     * @param queueName Name of the queue
     * @param messageId Message Id
     * @return true if the message was found and ack'ed
     */
    boolean ack(String queueName, String messageId);

    /**
     * Extend the lease of the unacknowledged message for longer period.
     *
     * @param queueName Name of the queue
     * @param messageId Message Id
     * @param unackTimeout timeout in milliseconds for which the unack lease should be extended.
     *     (replaces the current value with this value)
     * @return true if the message was updated with extended lease. false otherwise.
     */
    boolean setUnackTimeout(String queueName, String messageId, long unackTimeout);

    /**
     * Removes all messages from the queue.
     *
     * @param queueName Name of the queue
     */
    void flush(String queueName);

    /**
     * @return key : queue name, value: size of the queue
     */
    Map<String, Long> queuesDetail();

    /**
     * @return key : queue name, value: map of shard name to size and unack queue size
     */
    Map<String, Map<String, Map<String, Long>>> queuesDetailVerbose();

    /** Hook for implementations to reclaim unacknowledged messages; no-op by default. */
    default void processUnacks(String queueName) {}

    /**
     * Resets the offsetTime on a message to 0, without pulling out the message from the queue
     *
     * @param queueName name of the queue
     * @param id message id
     * @return true if the message is in queue and the change was successful else returns false
     */
    boolean resetOffsetTime(String queueName, String id);

    /**
     * Postpone a given message with postponeDurationInSeconds, so that the message won't be
     * available for further polls until specified duration. By default, the message is removed and
     * pushed backed with postponeDurationInSeconds to be backwards compatible.
     *
     * @param queueName name of the queue
     * @param messageId message id
     * @param priority message priority (between 0 and 99)
     * @param postponeDurationInSeconds duration in seconds by which the message is to be postponed
     * @return true if the message was postponed (the default implementation always returns true)
     */
    default boolean postpone(
            String queueName, String messageId, int priority, long postponeDurationInSeconds) {
        remove(queueName, messageId);
        push(queueName, messageId, priority, postponeDurationInSeconds);
        return true;
    }

    /**
     * Check if the message with given messageId exists in the Queue.
     *
     * @param queueName name of the queue
     * @param messageId message id
     * @return true if the queue contains the message, false otherwise
     * @throws UnsupportedOperationException if the implementation does not override this method
     */
    default boolean containsMessage(String queueName, String messageId) {
        throw new UnsupportedOperationException(
                "Please ensure your provided Queue implementation overrides and implements this method.");
    }
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/main/java/com/netflix/conductor/dao/MetadataDAO.java | core/src/main/java/com/netflix/conductor/dao/MetadataDAO.java | /*
* Copyright 2022 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.dao;
import java.util.List;
import java.util.Optional;
import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
/** Data access layer for the workflow metadata - task definitions and workflow definitions */
public interface MetadataDAO {

    /**
     * @param taskDef task definition to be created
     * @return the created task definition
     */
    TaskDef createTaskDef(TaskDef taskDef);

    /**
     * @param taskDef task definition to be updated.
     * @return the updated task definition
     */
    TaskDef updateTaskDef(TaskDef taskDef);

    /**
     * @param name Name of the task
     * @return Task Definition
     */
    TaskDef getTaskDef(String name);

    /**
     * @return All the task definitions
     */
    List<TaskDef> getAllTaskDefs();

    /**
     * @param name Name of the task definition to be removed
     */
    void removeTaskDef(String name);

    /**
     * @param def workflow definition to be created
     */
    void createWorkflowDef(WorkflowDef def);

    /**
     * @param def workflow definition to be updated
     */
    void updateWorkflowDef(WorkflowDef def);

    /**
     * @param name Name of the workflow
     * @return Workflow Definition with the highest version, if any
     */
    Optional<WorkflowDef> getLatestWorkflowDef(String name);

    /**
     * @param name Name of the workflow
     * @param version version
     * @return workflow definition, if present
     */
    Optional<WorkflowDef> getWorkflowDef(String name, int version);

    /**
     * @param name Name of the workflow definition to be removed
     * @param version Version of the workflow definition to be removed
     */
    void removeWorkflowDef(String name, Integer version);

    /**
     * @return List of all the workflow definitions
     */
    List<WorkflowDef> getAllWorkflowDefs();

    /**
     * @return List the latest versions of the workflow definitions
     */
    List<WorkflowDef> getAllWorkflowDefsLatestVersions();
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/main/java/com/netflix/conductor/dao/EventHandlerDAO.java | core/src/main/java/com/netflix/conductor/dao/EventHandlerDAO.java | /*
* Copyright 2022 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.dao;
import java.util.List;
import com.netflix.conductor.common.metadata.events.EventHandler;
/** An abstraction to enable different Event Handler store implementations */
public interface EventHandlerDAO {

    /**
     * @param eventHandler Event handler to be added.
     *     <p><em>NOTE:</em> Will throw an exception if an event handler already exists with the
     *     name
     */
    void addEventHandler(EventHandler eventHandler);

    /**
     * @param eventHandler Event handler to be updated.
     */
    void updateEventHandler(EventHandler eventHandler);

    /**
     * @param name name of the event handler to be removed from the system
     */
    void removeEventHandler(String name);

    /**
     * @return All the event handlers registered in the system
     */
    List<EventHandler> getAllEventHandlers();

    /**
     * @param event name of the event
     * @param activeOnly if true, returns only the active handlers
     * @return Returns the list of all the event handlers for a given event
     */
    List<EventHandler> getEventHandlersForEvent(String event, boolean activeOnly);
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/main/java/com/netflix/conductor/service/WorkflowTestService.java | core/src/main/java/com/netflix/conductor/service/WorkflowTestService.java | /*
* Copyright 2023 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.service;
import java.util.*;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.springframework.stereotype.Component;
import com.netflix.conductor.common.metadata.tasks.Task;
import com.netflix.conductor.common.metadata.tasks.TaskResult;
import com.netflix.conductor.common.metadata.tasks.TaskType;
import com.netflix.conductor.common.run.Workflow;
import com.netflix.conductor.common.run.WorkflowTestRequest;
import com.netflix.conductor.dao.ExecutionDAO;
import com.netflix.conductor.model.TaskModel;
@Component
/**
 * Service that executes a workflow end-to-end in "test mode": the workflow is started with a
 * random task-to-domain mapping so no real workers pick up its tasks, and each task is instead
 * completed from mock outputs supplied in the {@link WorkflowTestRequest}.
 */
public class WorkflowTestService {

    // Upper bound on decide/poll iterations to guarantee termination even if the
    // workflow never reaches a terminal state (e.g. bad mock data).
    private static final int MAX_LOOPS = 20_000;

    // Task types that are evaluated by the workflow engine itself (operators/system tasks);
    // these are never completed from mock data — the engine's decide() advances them.
    private static final Set<String> operators = new HashSet<>();

    static {
        operators.add(TaskType.TASK_TYPE_JOIN);
        operators.add(TaskType.TASK_TYPE_DO_WHILE);
        operators.add(TaskType.TASK_TYPE_SET_VARIABLE);
        operators.add(TaskType.TASK_TYPE_FORK);
        operators.add(TaskType.TASK_TYPE_INLINE);
        operators.add(TaskType.TASK_TYPE_TERMINATE);
        operators.add(TaskType.TASK_TYPE_DECISION);
        operators.add(TaskType.TASK_TYPE_DYNAMIC);
        operators.add(TaskType.TASK_TYPE_FORK_JOIN);
        operators.add(TaskType.TASK_TYPE_FORK_JOIN_DYNAMIC);
        operators.add(TaskType.TASK_TYPE_SWITCH);
        operators.add(TaskType.TASK_TYPE_SUB_WORKFLOW);
    }

    private final WorkflowService workflowService;
    private final ExecutionDAO executionDAO;
    private final ExecutionService workflowExecutionService;

    public WorkflowTestService(
            WorkflowService workflowService,
            ExecutionDAO executionDAO,
            ExecutionService workflowExecutionService) {
        this.workflowService = workflowService;
        this.executionDAO = executionDAO;
        this.workflowExecutionService = workflowExecutionService;
    }

    /**
     * Starts the workflow described by the request and drives it to completion using the mock
     * task outputs in the request.
     *
     * @param request test request containing the workflow name/version, inputs, mock outputs
     *     keyed by task reference name, and optional sub-workflow test requests
     * @return the (usually terminal) workflow execution
     */
    public Workflow testWorkflow(WorkflowTestRequest request) {
        // NOTE(review): these two self-assignments are no-ops unless the setters have side
        // effects — consider removing after confirming WorkflowTestRequest's setters are plain.
        request.setName(request.getName());
        request.setVersion(request.getVersion());
        String domain = UUID.randomUUID().toString();
        // Ensure the workflows started for the testing are not picked by any workers
        request.getTaskToDomain().put("*", domain);
        String workflowId = workflowService.startWorkflow(request);
        return testWorkflow(request, workflowId);
    }

    /**
     * Drives an already-started workflow: repeatedly polls its state and completes each running
     * non-operator task from the mock data until the workflow reaches a terminal state, the mock
     * data is exhausted, a running task has no mock output, or MAX_LOOPS is exceeded.
     *
     * @param request test request whose taskRefToMockOutput is consumed (mutated) as tasks complete
     * @param workflowId id of the running workflow instance to drive
     * @return the workflow execution at the point the loop stopped
     */
    private Workflow testWorkflow(WorkflowTestRequest request, String workflowId) {
        Map<String, List<WorkflowTestRequest.TaskMock>> mockData = request.getTaskRefToMockOutput();
        Workflow workflow;
        int loopCount = 0;
        do {
            loopCount++;
            workflow = workflowService.getExecutionStatus(workflowId, true);
            if (loopCount > MAX_LOOPS) {
                // Short circuit to avoid large loops
                return workflow;
            }
            // Non-operator tasks that are running but have no mock output — we cannot make
            // progress on these, so stop driving and return the current state.
            List<String> runningTasksMissingInput =
                    workflow.getTasks().stream()
                            .filter(task -> !operators.contains(task.getTaskType()))
                            .filter(t -> !t.getStatus().isTerminal())
                            .filter(t2 -> !mockData.containsKey(t2.getReferenceTaskName()))
                            .map(task -> task.getReferenceTaskName())
                            .collect(Collectors.toList());
            if (!runningTasksMissingInput.isEmpty()) {
                break;
            }
            Stream<Task> runningTasks =
                    workflow.getTasks().stream().filter(t -> !t.getStatus().isTerminal());
            runningTasks.forEach(
                    running -> {
                        // Sub-workflows are driven recursively with their own test request.
                        if (running.getTaskType().equals(TaskType.SUB_WORKFLOW.name())) {
                            String subWorkflowId = running.getSubWorkflowId();
                            WorkflowTestRequest subWorkflowTestRequest =
                                    request.getSubWorkflowTestRequest()
                                            .get(running.getReferenceTaskName());
                            if (subWorkflowId != null && subWorkflowTestRequest != null) {
                                testWorkflow(subWorkflowTestRequest, subWorkflowId);
                            }
                        }
                        String refName = running.getReferenceTaskName();
                        List<WorkflowTestRequest.TaskMock> taskMock = mockData.get(refName);
                        if (taskMock == null
                                || taskMock.isEmpty()
                                || operators.contains(running.getTaskType())) {
                            // Operator task or exhausted mocks: let the engine advance the
                            // workflow via decide() instead of completing the task ourselves.
                            mockData.remove(refName);
                            workflowService.decideWorkflow(workflowId);
                        } else {
                            // Consume the next mock result for this task (one per attempt).
                            WorkflowTestRequest.TaskMock task = taskMock.remove(0);
                            if (task.getExecutionTime() > 0 || task.getQueueWaitTime() > 0) {
                                // Simulated timing requested: write the task record directly so
                                // scheduled/start times reflect the mocked durations, then decide.
                                TaskModel existing = executionDAO.getTask(running.getTaskId());
                                existing.setScheduledTime(
                                        System.currentTimeMillis()
                                                - (task.getExecutionTime()
                                                        + task.getQueueWaitTime()));
                                existing.setStartTime(
                                        System.currentTimeMillis() - task.getExecutionTime());
                                existing.setStatus(
                                        TaskModel.Status.valueOf(task.getStatus().name()));
                                existing.getOutputData().putAll(task.getOutput());
                                executionDAO.updateTask(existing);
                                workflowService.decideWorkflow(workflowId);
                            } else {
                                // No timing simulation: complete the task through the normal
                                // task-update path, as a worker would.
                                TaskResult taskResult = new TaskResult(running);
                                taskResult.setStatus(task.getStatus());
                                taskResult.getOutputData().putAll(task.getOutput());
                                workflowExecutionService.updateTask(taskResult);
                            }
                        }
                    });
        } while (!workflow.getStatus().isTerminal() && !mockData.isEmpty());
        return workflow;
    }
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/main/java/com/netflix/conductor/service/TaskService.java | core/src/main/java/com/netflix/conductor/service/TaskService.java | /*
* Copyright 2022 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.service;
import java.util.List;
import java.util.Map;
import org.springframework.validation.annotation.Validated;
import com.netflix.conductor.common.metadata.tasks.PollData;
import com.netflix.conductor.common.metadata.tasks.Task;
import com.netflix.conductor.common.metadata.tasks.TaskExecLog;
import com.netflix.conductor.common.metadata.tasks.TaskResult;
import com.netflix.conductor.common.run.ExternalStorageLocation;
import com.netflix.conductor.common.run.SearchResult;
import com.netflix.conductor.common.run.TaskSummary;
import com.netflix.conductor.model.TaskModel;
import jakarta.validation.Valid;
import jakarta.validation.constraints.NotEmpty;
import jakarta.validation.constraints.NotNull;
@Validated
public interface TaskService {

    /**
     * Poll for a pending task of a certain type.
     *
     * @param taskType Task name
     * @param workerId Id of the worker polling for the task
     * @param domain Domain the task is polled in (may be null)
     * @return polled {@link Task}
     */
    Task poll(
            @NotEmpty(message = "TaskType cannot be null or empty.") String taskType,
            String workerId,
            String domain);

    /**
     * Batch poll for pending tasks of a certain type.
     *
     * @param taskType Task name
     * @param workerId Id of the worker polling for the tasks
     * @param domain Domain the tasks are polled in (may be null)
     * @param count Maximum number of tasks to return
     * @param timeout Timeout for polling in milliseconds
     * @return list of {@link Task}
     */
    List<Task> batchPoll(
            @NotEmpty(message = "TaskType cannot be null or empty.") String taskType,
            String workerId,
            String domain,
            Integer count,
            Integer timeout);

    /**
     * Get in progress tasks. The results are paginated.
     *
     * @param taskType Task name
     * @param startKey Start index of pagination
     * @param count Number of entries per page
     * @return list of {@link Task}
     */
    List<Task> getTasks(
            @NotEmpty(message = "TaskType cannot be null or empty.") String taskType,
            String startKey,
            Integer count);

    /**
     * Get the in progress task for a given workflow id and task reference name.
     *
     * @param workflowId Id of the workflow
     * @param taskReferenceName Task reference name.
     * @return instance of {@link Task}
     */
    Task getPendingTaskForWorkflow(
            @NotEmpty(message = "WorkflowId cannot be null or empty.") String workflowId,
            @NotEmpty(message = "TaskReferenceName cannot be null or empty.")
                    String taskReferenceName);

    /**
     * Updates a task.
     *
     * @param taskResult Instance of {@link TaskResult}
     * @return the updated task.
     */
    TaskModel updateTask(
            @NotNull(message = "TaskResult cannot be null or empty.") @Valid TaskResult taskResult);

    /**
     * Ack that the task is received.
     *
     * @param taskId Id of the task
     * @param workerId Id of the worker
     * @return {@code "true"} or {@code "false"} indicating whether the task was received
     */
    String ackTaskReceived(
            @NotEmpty(message = "TaskId cannot be null or empty.") String taskId, String workerId);

    /**
     * Ack that the task is received.
     *
     * @param taskId Id of the task
     * @return {@code true} if the task was received, {@code false} otherwise
     */
    boolean ackTaskReceived(@NotEmpty(message = "TaskId cannot be null or empty.") String taskId);

    /**
     * Log task execution details.
     *
     * @param taskId Id of the task
     * @param log Details you want to log
     */
    void log(@NotEmpty(message = "TaskId cannot be null or empty.") String taskId, String log);

    /**
     * Get task execution logs.
     *
     * @param taskId Id of the task.
     * @return list of {@link TaskExecLog}
     */
    List<TaskExecLog> getTaskLogs(
            @NotEmpty(message = "TaskId cannot be null or empty.") String taskId);

    /**
     * Get task by Id.
     *
     * @param taskId Id of the task.
     * @return instance of {@link Task}
     */
    Task getTask(@NotEmpty(message = "TaskId cannot be null or empty.") String taskId);

    /**
     * Remove a task from its task type queue.
     *
     * @param taskType Task name
     * @param taskId Id of the task
     */
    void removeTaskFromQueue(
            @NotEmpty(message = "TaskType cannot be null or empty.") String taskType,
            @NotEmpty(message = "TaskId cannot be null or empty.") String taskId);

    /**
     * Remove a task from its task type queue.
     *
     * @param taskId Id of the task
     */
    void removeTaskFromQueue(@NotEmpty(message = "TaskId cannot be null or empty.") String taskId);

    /**
     * Get task type queue sizes.
     *
     * @param taskTypes List of task types.
     * @return map of task type as key and queue size as value.
     */
    Map<String, Integer> getTaskQueueSizes(List<String> taskTypes);

    /**
     * Get the queue size for a task type. The input can optionally include <code>domain</code>,
     * <code>isolationGroupId</code> and <code>executionNamespace</code>.
     *
     * @param taskType Task name
     * @param domain Domain of the queue (optional)
     * @param isolationGroupId Isolation group id of the queue (optional)
     * @param executionNamespace Execution namespace of the queue (optional)
     * @return size of the matching queue
     */
    Integer getTaskQueueSize(
            String taskType, String domain, String isolationGroupId, String executionNamespace);

    /**
     * Get the details about each queue.
     *
     * @return map of queue details.
     */
    Map<String, Map<String, Map<String, Long>>> allVerbose();

    /**
     * Get the details about each queue.
     *
     * @return map of details about each queue.
     */
    Map<String, Long> getAllQueueDetails();

    /**
     * Get the last poll data for a given task type.
     *
     * @param taskType Task name
     * @return list of {@link PollData}
     */
    List<PollData> getPollData(
            @NotEmpty(message = "TaskType cannot be null or empty.") String taskType);

    /**
     * Get the last poll data for all task types.
     *
     * @return list of {@link PollData}
     */
    List<PollData> getAllPollData();

    /**
     * Requeue pending tasks.
     *
     * @param taskType Task name.
     * @return number of tasks requeued, as a string.
     */
    String requeuePendingTask(
            @NotEmpty(message = "TaskType cannot be null or empty.") String taskType);

    /**
     * Search for tasks based on payload and other parameters. Use sort options as ASC or DESC e.g.
     * sort=name or sort=workflowId. If order is not specified, defaults to ASC.
     *
     * @param start Start index of pagination
     * @param size Number of entries
     * @param sort Sorting type ASC|DESC
     * @param freeText Text you want to search
     * @param query Query you want to search
     * @return instance of {@link SearchResult}
     */
    SearchResult<TaskSummary> search(
            int start, int size, String sort, String freeText, String query);

    /**
     * Search for tasks based on payload and other parameters. Use sort options as ASC or DESC e.g.
     * sort=name or sort=workflowId. If order is not specified, defaults to ASC.
     *
     * @param start Start index of pagination
     * @param size Number of entries
     * @param sort Sorting type ASC|DESC
     * @param freeText Text you want to search
     * @param query Query you want to search
     * @return instance of {@link SearchResult}
     */
    SearchResult<Task> searchV2(int start, int size, String sort, String freeText, String query);

    /**
     * Get the external storage location where the task output payload is stored/to be stored
     *
     * @param path the path for which the external storage location is to be populated
     * @param operation the operation to be performed (read or write)
     * @param payloadType the type of payload (input or output)
     * @return {@link ExternalStorageLocation} containing the uri and the path to the payload is
     *     stored in external storage
     */
    ExternalStorageLocation getExternalStorageLocation(
            String path, String operation, String payloadType);

    /**
     * Updates the task identified by the given workflow id and task reference name.
     *
     * @param workflowId Id of the workflow containing the task
     * @param taskRefName Reference name of the task to update
     * @param status New status for the task
     * @param workerId Id of the worker performing the update
     * @param output Output data to set on the task
     * @return result of the update; presumably the updated task's id — verify against the
     *     implementation
     */
    String updateTask(
            String workflowId,
            String taskRefName,
            TaskResult.Status status,
            String workerId,
            Map<String, Object> output);
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/main/java/com/netflix/conductor/service/WorkflowServiceImpl.java | core/src/main/java/com/netflix/conductor/service/WorkflowServiceImpl.java | /*
* Copyright 2022 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.service;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import org.springframework.stereotype.Service;
import com.netflix.conductor.annotations.Audit;
import com.netflix.conductor.annotations.Trace;
import com.netflix.conductor.common.metadata.workflow.RerunWorkflowRequest;
import com.netflix.conductor.common.metadata.workflow.SkipTaskRequest;
import com.netflix.conductor.common.metadata.workflow.StartWorkflowRequest;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.run.ExternalStorageLocation;
import com.netflix.conductor.common.run.SearchResult;
import com.netflix.conductor.common.run.Workflow;
import com.netflix.conductor.common.run.WorkflowSummary;
import com.netflix.conductor.core.exception.NotFoundException;
import com.netflix.conductor.core.execution.StartWorkflowInput;
import com.netflix.conductor.core.execution.WorkflowExecutor;
import com.netflix.conductor.core.utils.Utils;
@Audit
@Trace
@Service
public class WorkflowServiceImpl implements WorkflowService {

    private final WorkflowExecutor workflowExecutor;
    private final ExecutionService executionService;
    private final MetadataService metadataService;

    public WorkflowServiceImpl(
            WorkflowExecutor workflowExecutor,
            ExecutionService executionService,
            MetadataService metadataService) {
        this.workflowExecutor = workflowExecutor;
        this.executionService = executionService;
        this.metadataService = metadataService;
    }

    /**
     * Start a new workflow with StartWorkflowRequest, which allows task to be executed in a domain.
     *
     * @param startWorkflowRequest StartWorkflow request for the workflow you want to start.
     * @return the id of the workflow instance that can be used for tracking.
     */
    public String startWorkflow(StartWorkflowRequest startWorkflowRequest) {
        return workflowExecutor.startWorkflow(new StartWorkflowInput(startWorkflowRequest));
    }

    /**
     * Start a new workflow with the given definition, which allows task to be executed in a domain.
     *
     * @param name Name of the workflow you want to start.
     * @param version Version of the workflow you want to start.
     * @param correlationId CorrelationID of the workflow you want to start.
     * @param priority Priority of the workflow you want to start.
     * @param input Input to the workflow you want to start.
     * @param externalInputPayloadStoragePath the relative path in external storage where input
     *     payload is located
     * @param taskToDomain the task to domain mapping
     * @param workflowDef workflow definition
     * @return the id of the workflow instance that can be used for tracking.
     */
    public String startWorkflow(
            String name,
            Integer version,
            String correlationId,
            Integer priority,
            Map<String, Object> input,
            String externalInputPayloadStoragePath,
            Map<String, String> taskToDomain,
            WorkflowDef workflowDef) {
        StartWorkflowInput startWorkflowInput = new StartWorkflowInput();
        startWorkflowInput.setName(name);
        startWorkflowInput.setVersion(version);
        startWorkflowInput.setCorrelationId(correlationId);
        startWorkflowInput.setPriority(priority);
        startWorkflowInput.setWorkflowInput(input);
        startWorkflowInput.setExternalInputPayloadStoragePath(externalInputPayloadStoragePath);
        startWorkflowInput.setTaskToDomain(taskToDomain);
        startWorkflowInput.setWorkflowDefinition(workflowDef);
        return workflowExecutor.startWorkflow(startWorkflowInput);
    }

    /**
     * Start a new workflow. Returns the ID of the workflow instance that can be later used for
     * tracking.
     *
     * @param name Name of the workflow you want to start.
     * @param version Version of the workflow you want to start.
     * @param correlationId CorrelationID of the workflow you want to start.
     * @param priority Priority of the workflow you want to start.
     * @param input Input to the workflow you want to start.
     * @return the id of the workflow instance that can be used for tracking.
     * @throws NotFoundException if no workflow definition exists for the given name and version
     */
    public String startWorkflow(
            String name,
            Integer version,
            String correlationId,
            Integer priority,
            Map<String, Object> input) {
        WorkflowDef workflowDef = metadataService.getWorkflowDef(name, version);
        if (workflowDef == null) {
            throw new NotFoundException(
                    "No such workflow found by name: %s, version: %d", name, version);
        }
        StartWorkflowInput startWorkflowInput = new StartWorkflowInput();
        startWorkflowInput.setName(workflowDef.getName());
        startWorkflowInput.setVersion(workflowDef.getVersion());
        startWorkflowInput.setCorrelationId(correlationId);
        startWorkflowInput.setPriority(priority);
        startWorkflowInput.setWorkflowInput(input);
        return workflowExecutor.startWorkflow(startWorkflowInput);
    }

    /**
     * Lists workflows for the given correlation id.
     *
     * @param name Name of the workflow.
     * @param correlationId CorrelationID of the workflows to look up.
     * @param includeClosed if true, includes workflows that are not running.
     * @param includeTasks if true, includes tasks associated with the workflows.
     * @return a list of {@link Workflow}
     */
    public List<Workflow> getWorkflows(
            String name, String correlationId, boolean includeClosed, boolean includeTasks) {
        return executionService.getWorkflowInstances(
                name, correlationId, includeClosed, includeTasks);
    }

    /**
     * Lists workflows for the given correlation ids.
     *
     * @param name Name of the workflow.
     * @param includeClosed if true, includes workflows that are not running.
     * @param includeTasks if true, includes tasks associated with the workflows.
     * @param correlationIds CorrelationIDs of the workflows to look up.
     * @return a {@link Map} of correlation id as key and a list of {@link Workflow} as value
     */
    public Map<String, List<Workflow>> getWorkflows(
            String name, boolean includeClosed, boolean includeTasks, List<String> correlationIds) {
        Map<String, List<Workflow>> workflowMap = new HashMap<>();
        for (String correlationId : correlationIds) {
            List<Workflow> workflows =
                    executionService.getWorkflowInstances(
                            name, correlationId, includeClosed, includeTasks);
            workflowMap.put(correlationId, workflows);
        }
        return workflowMap;
    }

    /**
     * Gets the workflow by workflow id.
     *
     * @param workflowId id of the workflow.
     * @param includeTasks if true, includes tasks associated with the workflow.
     * @return an instance of {@link Workflow}
     * @throws NotFoundException if no workflow exists with the given id
     */
    public Workflow getExecutionStatus(String workflowId, boolean includeTasks) {
        Workflow workflow = executionService.getExecutionStatus(workflowId, includeTasks);
        if (workflow == null) {
            throw new NotFoundException("Workflow with id: %s not found.", workflowId);
        }
        return workflow;
    }

    /**
     * Removes the workflow from the system.
     *
     * @param workflowId WorkflowID of the workflow you want to remove from system.
     * @param archiveWorkflow Archives the workflow and associated tasks instead of removing them.
     */
    public void deleteWorkflow(String workflowId, boolean archiveWorkflow) {
        executionService.removeWorkflow(workflowId, archiveWorkflow);
    }

    /**
     * Terminate workflow execution, and then remove it from the system. Acts as terminate and
     * remove combined.
     *
     * @param workflowId WorkflowId of the workflow
     * @param reason Reason for terminating the workflow.
     * @param archiveWorkflow Archives the workflow and associated tasks instead of removing them.
     */
    public void terminateRemove(String workflowId, String reason, boolean archiveWorkflow) {
        workflowExecutor.terminateWorkflow(workflowId, reason);
        executionService.removeWorkflow(workflowId, archiveWorkflow);
    }

    /**
     * Retrieves all the running workflows.
     *
     * @param workflowName Name of the workflow.
     * @param version Version of the workflow. If null, the latest registered version is used.
     * @param startTime Start time of the workflow.
     * @param endTime End time of the workflow.
     * @return a list of workflow Ids.
     */
    public List<String> getRunningWorkflows(
            String workflowName, Integer version, Long startTime, Long endTime) {
        // Only query by the time window when both bounds are supplied and non-zero;
        // otherwise fall back to listing running workflows by name and version.
        if (Optional.ofNullable(startTime).orElse(0L) != 0
                && Optional.ofNullable(endTime).orElse(0L) != 0) {
            return workflowExecutor.getWorkflows(workflowName, version, startTime, endTime);
        } else {
            // A null version means "latest": resolve it from the workflow definition.
            version =
                    Optional.ofNullable(version)
                            .orElseGet(
                                    () -> {
                                        WorkflowDef workflowDef =
                                                metadataService.getWorkflowDef(workflowName, null);
                                        return workflowDef.getVersion();
                                    });
            return workflowExecutor.getRunningWorkflowIds(workflowName, version);
        }
    }

    /**
     * Starts the decision task for a workflow.
     *
     * @param workflowId WorkflowId of the workflow.
     */
    public void decideWorkflow(String workflowId) {
        workflowExecutor.decide(workflowId);
    }

    /**
     * Pauses the workflow given a workflowId.
     *
     * @param workflowId WorkflowId of the workflow.
     */
    public void pauseWorkflow(String workflowId) {
        workflowExecutor.pauseWorkflow(workflowId);
    }

    /**
     * Resumes the workflow.
     *
     * @param workflowId WorkflowId of the workflow.
     */
    public void resumeWorkflow(String workflowId) {
        workflowExecutor.resumeWorkflow(workflowId);
    }

    /**
     * Skips a given task from a current running workflow.
     *
     * @param workflowId WorkflowId of the workflow.
     * @param taskReferenceName The task reference name.
     * @param skipTaskRequest {@link SkipTaskRequest} for task you want to skip.
     */
    public void skipTaskFromWorkflow(
            String workflowId, String taskReferenceName, SkipTaskRequest skipTaskRequest) {
        workflowExecutor.skipTaskFromWorkflow(workflowId, taskReferenceName, skipTaskRequest);
    }

    /**
     * Reruns the workflow from a specific task.
     *
     * @param workflowId WorkflowId of the workflow you want to rerun.
     * @param request {@link RerunWorkflowRequest} for the workflow.
     * @return WorkflowId of the rerun workflow.
     */
    public String rerunWorkflow(String workflowId, RerunWorkflowRequest request) {
        request.setReRunFromWorkflowId(workflowId);
        return workflowExecutor.rerun(request);
    }

    /**
     * Restarts a completed workflow.
     *
     * @param workflowId WorkflowId of the workflow.
     * @param useLatestDefinitions if true, use the latest workflow and task definitions upon
     *     restart
     */
    public void restartWorkflow(String workflowId, boolean useLatestDefinitions) {
        workflowExecutor.restart(workflowId, useLatestDefinitions);
    }

    /**
     * Retries the last failed task.
     *
     * @param workflowId WorkflowId of the workflow.
     * @param resumeSubworkflowTasks if true, also resumes tasks in sub-workflows.
     */
    public void retryWorkflow(String workflowId, boolean resumeSubworkflowTasks) {
        workflowExecutor.retry(workflowId, resumeSubworkflowTasks);
    }

    /**
     * Resets callback times of all non-terminal SIMPLE tasks to 0.
     *
     * @param workflowId WorkflowId of the workflow.
     */
    public void resetWorkflow(String workflowId) {
        workflowExecutor.resetCallbacksForWorkflow(workflowId);
    }

    /**
     * Terminate workflow execution.
     *
     * @param workflowId WorkflowId of the workflow.
     * @param reason Reason for terminating the workflow.
     */
    public void terminateWorkflow(String workflowId, String reason) {
        workflowExecutor.terminateWorkflow(workflowId, reason);
    }

    /**
     * Search for workflows based on payload and given parameters. Use sort options as sort ASC or
     * DESC e.g. sort=name or sort=workflowId:DESC. If order is not specified, defaults to ASC.
     *
     * @param start Start index of pagination
     * @param size Number of entries
     * @param sort Sorting type ASC|DESC
     * @param freeText Text you want to search
     * @param query Query you want to search
     * @return instance of {@link SearchResult}
     */
    public SearchResult<WorkflowSummary> searchWorkflows(
            int start, int size, String sort, String freeText, String query) {
        return executionService.search(
                query, freeText, start, size, Utils.convertStringToList(sort));
    }

    /**
     * Search for workflows based on payload and given parameters. Use sort options as sort ASC or
     * DESC e.g. sort=name or sort=workflowId:DESC. If order is not specified, defaults to ASC.
     *
     * @param start Start index of pagination
     * @param size Number of entries
     * @param sort Sorting type ASC|DESC
     * @param freeText Text you want to search
     * @param query Query you want to search
     * @return instance of {@link SearchResult}
     */
    public SearchResult<Workflow> searchWorkflowsV2(
            int start, int size, String sort, String freeText, String query) {
        return executionService.searchV2(
                query, freeText, start, size, Utils.convertStringToList(sort));
    }

    /**
     * Search for workflows based on payload and given parameters. Use sort options as sort ASC or
     * DESC e.g. sort=name or sort=workflowId:DESC. If order is not specified, defaults to ASC.
     *
     * @param start Start index of pagination
     * @param size Number of entries
     * @param sort list of sorting options, separated by "|" delimiter
     * @param freeText Text you want to search
     * @param query Query you want to search
     * @return instance of {@link SearchResult}
     */
    public SearchResult<WorkflowSummary> searchWorkflows(
            int start, int size, List<String> sort, String freeText, String query) {
        return executionService.search(query, freeText, start, size, sort);
    }

    /**
     * Search for workflows based on payload and given parameters. Use sort options as sort ASC or
     * DESC e.g. sort=name or sort=workflowId:DESC. If order is not specified, defaults to ASC.
     *
     * @param start Start index of pagination
     * @param size Number of entries
     * @param sort list of sorting options, separated by "|" delimiter
     * @param freeText Text you want to search
     * @param query Query you want to search
     * @return instance of {@link SearchResult}
     */
    public SearchResult<Workflow> searchWorkflowsV2(
            int start, int size, List<String> sort, String freeText, String query) {
        return executionService.searchV2(query, freeText, start, size, sort);
    }

    /**
     * Search for workflows based on task parameters. Use sort options as sort ASC or DESC e.g.
     * sort=name or sort=workflowId:DESC. If order is not specified, defaults to ASC.
     *
     * @param start Start index of pagination
     * @param size Number of entries
     * @param sort Sorting type ASC|DESC
     * @param freeText Text you want to search
     * @param query Query you want to search
     * @return instance of {@link SearchResult}
     */
    public SearchResult<WorkflowSummary> searchWorkflowsByTasks(
            int start, int size, String sort, String freeText, String query) {
        return executionService.searchWorkflowByTasks(
                query, freeText, start, size, Utils.convertStringToList(sort));
    }

    /**
     * Search for workflows based on task parameters. Use sort options as sort ASC or DESC e.g.
     * sort=name or sort=workflowId:DESC. If order is not specified, defaults to ASC.
     *
     * @param start Start index of pagination
     * @param size Number of entries
     * @param sort Sorting type ASC|DESC
     * @param freeText Text you want to search
     * @param query Query you want to search
     * @return instance of {@link SearchResult}
     */
    public SearchResult<Workflow> searchWorkflowsByTasksV2(
            int start, int size, String sort, String freeText, String query) {
        return executionService.searchWorkflowByTasksV2(
                query, freeText, start, size, Utils.convertStringToList(sort));
    }

    /**
     * Search for workflows based on task parameters. Use sort options as sort ASC or DESC e.g.
     * sort=name or sort=workflowId:DESC. If order is not specified, defaults to ASC.
     *
     * @param start Start index of pagination
     * @param size Number of entries
     * @param sort list of sorting options, separated by "|" delimiter
     * @param freeText Text you want to search
     * @param query Query you want to search
     * @return instance of {@link SearchResult}
     */
    public SearchResult<WorkflowSummary> searchWorkflowsByTasks(
            int start, int size, List<String> sort, String freeText, String query) {
        return executionService.searchWorkflowByTasks(query, freeText, start, size, sort);
    }

    /**
     * Search for workflows based on task parameters. Use sort options as sort ASC or DESC e.g.
     * sort=name or sort=workflowId:DESC. If order is not specified, defaults to ASC.
     *
     * @param start Start index of pagination
     * @param size Number of entries
     * @param sort list of sorting options, separated by "|" delimiter
     * @param freeText Text you want to search
     * @param query Query you want to search
     * @return instance of {@link SearchResult}
     */
    public SearchResult<Workflow> searchWorkflowsByTasksV2(
            int start, int size, List<String> sort, String freeText, String query) {
        return executionService.searchWorkflowByTasksV2(query, freeText, start, size, sort);
    }

    /**
     * Get the external storage location where the workflow input payload is stored/to be stored
     *
     * @param path the path for which the external storage location is to be populated
     * @param operation the operation to be performed (read or write)
     * @param type the type of payload (input or output)
     * @return {@link ExternalStorageLocation} containing the uri and the path to the payload is
     *     stored in external storage
     */
    public ExternalStorageLocation getExternalStorageLocation(
            String path, String operation, String type) {
        return executionService.getExternalStorageLocation(path, operation, type);
    }
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/main/java/com/netflix/conductor/service/MetadataServiceImpl.java | core/src/main/java/com/netflix/conductor/service/MetadataServiceImpl.java | /*
* Copyright 2022 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.service;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.TreeSet;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Service;
import com.netflix.conductor.common.constraints.OwnerEmailMandatoryConstraint;
import com.netflix.conductor.common.metadata.events.EventHandler;
import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowDefSummary;
import com.netflix.conductor.common.model.BulkResponse;
import com.netflix.conductor.core.WorkflowContext;
import com.netflix.conductor.core.config.ConductorProperties;
import com.netflix.conductor.core.exception.NotFoundException;
import com.netflix.conductor.dao.EventHandlerDAO;
import com.netflix.conductor.dao.MetadataDAO;
import com.netflix.conductor.validations.ValidationContext;
@Service
public class MetadataServiceImpl implements MetadataService {

    private static final Logger LOGGER = LoggerFactory.getLogger(MetadataServiceImpl.class);

    private final MetadataDAO metadataDAO;
    private final EventHandlerDAO eventHandlerDAO;

    public MetadataServiceImpl(
            MetadataDAO metadataDAO,
            EventHandlerDAO eventHandlerDAO,
            ConductorProperties properties) {
        this.metadataDAO = metadataDAO;
        this.eventHandlerDAO = eventHandlerDAO;
        ValidationContext.initialize(metadataDAO);
        OwnerEmailMandatoryConstraint.WorkflowTaskValidValidator.setOwnerEmailMandatory(
                properties.isOwnerEmailMandatory());
    }

    /**
     * Registers the given task definitions, stamping each with the current client app and creation
     * time. Any pre-existing update audit fields are cleared.
     *
     * @param taskDefinitions Task definitions to register
     */
    public void registerTaskDef(List<TaskDef> taskDefinitions) {
        for (TaskDef taskDefinition : taskDefinitions) {
            taskDefinition.setCreatedBy(WorkflowContext.get().getClientApp());
            taskDefinition.setCreateTime(System.currentTimeMillis());
            taskDefinition.setUpdatedBy(null);
            taskDefinition.setUpdateTime(null);
            metadataDAO.createTaskDef(taskDefinition);
        }
    }

    @Override
    public void validateWorkflowDef(WorkflowDef workflowDef) {
        // do nothing, WorkflowDef is annotated with @Valid and calling this method will validate it
    }

    /**
     * Updates an existing task definition, preserving the original creation audit fields.
     *
     * @param taskDefinition Task definition to be updated
     * @throws NotFoundException if no task definition exists with the given name
     */
    public void updateTaskDef(TaskDef taskDefinition) {
        TaskDef existing = metadataDAO.getTaskDef(taskDefinition.getName());
        if (existing == null) {
            throw new NotFoundException("No such task by name %s", taskDefinition.getName());
        }
        taskDefinition.setUpdatedBy(WorkflowContext.get().getClientApp());
        taskDefinition.setUpdateTime(System.currentTimeMillis());
        // Carry over the original creation metadata so the audit trail is not lost.
        taskDefinition.setCreateTime(existing.getCreateTime());
        taskDefinition.setCreatedBy(existing.getCreatedBy());
        metadataDAO.updateTaskDef(taskDefinition);
    }

    /**
     * Removes a task definition.
     *
     * @param taskType Name of the task definition to remove
     */
    public void unregisterTaskDef(String taskType) {
        metadataDAO.removeTaskDef(taskType);
    }

    /**
     * @return List of all the registered task definitions
     */
    public List<TaskDef> getTaskDefs() {
        return metadataDAO.getAllTaskDefs();
    }

    /**
     * @param taskType Task to retrieve
     * @return Task definition
     * @throws NotFoundException if no task definition exists with the given name
     */
    public TaskDef getTaskDef(String taskType) {
        TaskDef taskDef = metadataDAO.getTaskDef(taskType);
        if (taskDef == null) {
            throw new NotFoundException("No such taskType found by name: %s", taskType);
        }
        return taskDef;
    }

    /**
     * Updates a workflow definition, stamping it with the current time.
     *
     * @param workflowDef Workflow definition to be updated
     */
    public void updateWorkflowDef(WorkflowDef workflowDef) {
        workflowDef.setUpdateTime(System.currentTimeMillis());
        metadataDAO.updateWorkflowDef(workflowDef);
    }

    /**
     * Bulk-updates workflow definitions; each definition succeeds or fails independently.
     *
     * @param workflowDefList Workflow definitions to be updated.
     * @return a {@link BulkResponse} recording the outcome per workflow definition name
     */
    public BulkResponse<String> updateWorkflowDef(List<WorkflowDef> workflowDefList) {
        BulkResponse<String> bulkResponse = new BulkResponse<>();
        for (WorkflowDef workflowDef : workflowDefList) {
            try {
                updateWorkflowDef(workflowDef);
                bulkResponse.appendSuccessResponse(workflowDef.getName());
            } catch (Exception e) {
                LOGGER.error("bulk update workflow def failed, name {} ", workflowDef.getName(), e);
                bulkResponse.appendFailedResponse(workflowDef.getName(), e.getMessage());
            }
        }
        return bulkResponse;
    }

    /**
     * @param name Name of the workflow to retrieve
     * @param version Optional. Version. If null, then retrieves the latest
     * @return Workflow definition
     * @throws NotFoundException if no matching workflow definition exists
     */
    public WorkflowDef getWorkflowDef(String name, Integer version) {
        Optional<WorkflowDef> workflowDef;
        if (version == null) {
            workflowDef = metadataDAO.getLatestWorkflowDef(name);
        } else {
            workflowDef = metadataDAO.getWorkflowDef(name, version);
        }
        return workflowDef.orElseThrow(
                () ->
                        new NotFoundException(
                                "No such workflow found by name: %s, version: %d", name, version));
    }

    /**
     * @param name Name of the workflow to retrieve
     * @return Latest version of the workflow definition, if any
     */
    public Optional<WorkflowDef> getLatestWorkflow(String name) {
        return metadataDAO.getLatestWorkflowDef(name);
    }

    /**
     * @return all registered workflow definitions, across all versions
     */
    public List<WorkflowDef> getWorkflowDefs() {
        return metadataDAO.getAllWorkflowDefs();
    }

    /**
     * Registers a new workflow definition, stamping it with the current time.
     *
     * @param workflowDef Workflow definition to register
     */
    public void registerWorkflowDef(WorkflowDef workflowDef) {
        workflowDef.setCreateTime(System.currentTimeMillis());
        metadataDAO.createWorkflowDef(workflowDef);
    }

    /**
     * @param name Name of the workflow definition to be removed
     * @param version Version of the workflow definition to be removed
     */
    public void unregisterWorkflowDef(String name, Integer version) {
        metadataDAO.removeWorkflowDef(name, version);
    }

    /**
     * @param eventHandler Event handler to be added. Will throw an exception if an event handler
     *     already exists with the name
     */
    public void addEventHandler(EventHandler eventHandler) {
        eventHandlerDAO.addEventHandler(eventHandler);
    }

    /**
     * @param eventHandler Event handler to be updated.
     */
    public void updateEventHandler(EventHandler eventHandler) {
        eventHandlerDAO.updateEventHandler(eventHandler);
    }

    /**
     * @param name Removes the event handler from the system
     */
    public void removeEventHandlerStatus(String name) {
        eventHandlerDAO.removeEventHandler(name);
    }

    /**
     * @return All the event handlers registered in the system
     */
    public List<EventHandler> getAllEventHandlers() {
        return eventHandlerDAO.getAllEventHandlers();
    }

    /**
     * @param event name of the event
     * @param activeOnly if true, returns only the active handlers
     * @return Returns the list of all the event handlers for a given event
     */
    public List<EventHandler> getEventHandlersForEvent(String event, boolean activeOnly) {
        return eventHandlerDAO.getEventHandlersForEvent(event, activeOnly);
    }

    @Override
    public List<WorkflowDef> getWorkflowDefsLatestVersions() {
        return metadataDAO.getAllWorkflowDefsLatestVersions();
    }

    /**
     * Groups all registered workflow definitions by name.
     *
     * @return map of workflow name to the sorted set of its version summaries
     */
    public Map<String, ? extends Iterable<WorkflowDefSummary>> getWorkflowNamesAndVersions() {
        List<WorkflowDef> workflowDefs = metadataDAO.getAllWorkflowDefs();
        Map<String, TreeSet<WorkflowDefSummary>> summariesByName = new HashMap<>();
        for (WorkflowDef def : workflowDefs) {
            // computeIfAbsent replaces the putIfAbsent + get pattern; TreeSet keeps
            // the summaries for each workflow name sorted.
            summariesByName
                    .computeIfAbsent(def.getName(), name -> new TreeSet<>())
                    .add(fromWorkflowDef(def));
        }
        return summariesByName;
    }

    /** Builds a lightweight summary (name, version, create time) from a workflow definition. */
    private WorkflowDefSummary fromWorkflowDef(WorkflowDef def) {
        WorkflowDefSummary summary = new WorkflowDefSummary();
        summary.setName(def.getName());
        summary.setVersion(def.getVersion());
        summary.setCreateTime(def.getCreateTime());
        return summary;
    }
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/main/java/com/netflix/conductor/service/ExecutionLockService.java | core/src/main/java/com/netflix/conductor/service/ExecutionLockService.java | /*
* Copyright 2022 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.service;
import java.util.concurrent.TimeUnit;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Service;
import com.netflix.conductor.annotations.Trace;
import com.netflix.conductor.core.config.ConductorProperties;
import com.netflix.conductor.core.sync.Lock;
import com.netflix.conductor.metrics.Monitors;
@Service
@Trace
public class ExecutionLockService {

    private static final Logger LOGGER = LoggerFactory.getLogger(ExecutionLockService.class);

    private final ConductorProperties properties;
    private final Lock lock;
    private final long lockLeaseTime;
    private final long lockTimeToTry;

    public ExecutionLockService(ConductorProperties properties, Lock lock) {
        this.properties = properties;
        this.lock = lock;
        // Cache the configured durations in milliseconds so they are not re-derived per call.
        this.lockLeaseTime = properties.getLockLeaseTime().toMillis();
        this.lockTimeToTry = properties.getLockTimeToTry().toMillis();
    }

    /**
     * Tries to acquire lock with reasonable timeToTry duration and lease time. Exits if a lock
     * cannot be acquired. Considering that the workflow decide can be triggered through multiple
     * entry points, and periodically through the sweeper service, do not block on acquiring the
     * lock, as the order of execution of decides on a workflow doesn't matter.
     *
     * @param lockId id of the lock to acquire
     * @return true if the lock was acquired (or locking is disabled), false otherwise
     */
    public boolean acquireLock(String lockId) {
        return acquireLock(lockId, lockTimeToTry, lockLeaseTime);
    }

    /**
     * Same as {@link #acquireLock(String)} with an explicit time-to-try.
     *
     * @param lockId id of the lock to acquire
     * @param timeToTryMs how long to attempt acquisition, in milliseconds
     * @return true if the lock was acquired (or locking is disabled), false otherwise
     */
    public boolean acquireLock(String lockId, long timeToTryMs) {
        return acquireLock(lockId, timeToTryMs, lockLeaseTime);
    }

    /**
     * Attempts a non-blocking lock acquisition with explicit timing parameters.
     *
     * @param lockId id of the lock to acquire
     * @param timeToTryMs how long to attempt acquisition, in milliseconds
     * @param leaseTimeMs how long the lock is held before expiring, in milliseconds
     * @return true if the lock was acquired (or locking is disabled), false otherwise
     */
    public boolean acquireLock(String lockId, long timeToTryMs, long leaseTimeMs) {
        // When execution locking is disabled, behave as if the lock was granted.
        if (!properties.isWorkflowExecutionLockEnabled()) {
            return true;
        }
        boolean acquired =
                lock.acquireLock(lockId, timeToTryMs, leaseTimeMs, TimeUnit.MILLISECONDS);
        if (!acquired) {
            LOGGER.debug(
                    "Thread {} failed to acquire lock to lockId {}.",
                    Thread.currentThread().getId(),
                    lockId);
            Monitors.recordAcquireLockUnsuccessful();
            return false;
        }
        LOGGER.debug(
                "Thread {} acquired lock to lockId {}.",
                Thread.currentThread().getId(),
                lockId);
        return true;
    }

    /**
     * Blocks until it gets the lock for workflowId
     *
     * @param lockId id of the lock to wait for
     */
    public void waitForLock(String lockId) {
        if (!properties.isWorkflowExecutionLockEnabled()) {
            return;
        }
        lock.acquireLock(lockId);
        LOGGER.debug(
                "Thread {} acquired lock to lockId {}.",
                Thread.currentThread().getId(),
                lockId);
    }

    /**
     * Releases a previously acquired lock. No-op when locking is disabled.
     *
     * @param lockId id of the lock to release
     */
    public void releaseLock(String lockId) {
        if (!properties.isWorkflowExecutionLockEnabled()) {
            return;
        }
        lock.releaseLock(lockId);
        LOGGER.debug(
                "Thread {} released lock to lockId {}.",
                Thread.currentThread().getId(),
                lockId);
    }

    /**
     * Deletes the lock entry entirely. No-op when locking is disabled.
     *
     * @param lockId id of the lock to delete
     */
    public void deleteLock(String lockId) {
        if (!properties.isWorkflowExecutionLockEnabled()) {
            return;
        }
        lock.deleteLock(lockId);
        LOGGER.debug("Thread {} deleted lockId {}.", Thread.currentThread().getId(), lockId);
    }
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/main/java/com/netflix/conductor/service/AdminServiceImpl.java | core/src/main/java/com/netflix/conductor/service/AdminServiceImpl.java | /*
* Copyright 2022 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.service;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import org.springframework.boot.info.BuildProperties;
import org.springframework.stereotype.Service;
import com.netflix.conductor.annotations.Audit;
import com.netflix.conductor.annotations.Trace;
import com.netflix.conductor.common.metadata.tasks.Task;
import com.netflix.conductor.core.config.ConductorProperties;
import com.netflix.conductor.core.events.EventQueueManager;
import com.netflix.conductor.core.reconciliation.WorkflowRepairService;
import com.netflix.conductor.core.utils.Utils;
import com.netflix.conductor.dao.QueueDAO;
@Audit
@Trace
@Service
public class AdminServiceImpl implements AdminService {

    private final ConductorProperties properties;
    private final ExecutionService executionService;
    private final QueueDAO queueDAO;
    private final WorkflowRepairService workflowRepairService;
    private final EventQueueManager eventQueueManager;
    private final BuildProperties buildProperties;

    public AdminServiceImpl(
            ConductorProperties properties,
            ExecutionService executionService,
            QueueDAO queueDAO,
            Optional<WorkflowRepairService> workflowRepairService,
            Optional<EventQueueManager> eventQueueManager,
            Optional<BuildProperties> buildProperties) {
        this.properties = properties;
        this.executionService = executionService;
        this.queueDAO = queueDAO;
        // These collaborators are optional beans; absent ones are stored as null
        // and checked at each call site.
        this.workflowRepairService = workflowRepairService.orElse(null);
        this.eventQueueManager = eventQueueManager.orElse(null);
        this.buildProperties = buildProperties.orElse(null);
    }

    /**
     * Get all the configuration parameters, merged with the build properties when available.
     *
     * @return all the configuration parameters.
     */
    public Map<String, Object> getAllConfig() {
        // Copy defensively: calling putAll on the map returned by properties.getAll()
        // would otherwise mutate whatever backing map that method exposes.
        Map<String, Object> configs = new HashMap<>(properties.getAll());
        configs.putAll(getBuildProperties());
        return configs;
    }

    /**
     * Get all build properties (version and build date).
     *
     * @return the build properties, or an empty map when build info is unavailable.
     */
    private Map<String, Object> getBuildProperties() {
        if (buildProperties == null) return Collections.emptyMap();
        Map<String, Object> buildProps = new HashMap<>();
        buildProps.put("version", buildProperties.getVersion());
        buildProps.put("buildDate", buildProperties.getTime());
        return buildProps;
    }

    /**
     * Get the list of pending tasks for a given task type.
     *
     * @param taskType Name of the task
     * @param start Start index of pagination
     * @param count Number of entries
     * @return list of pending {@link Task}
     */
    public List<Task> getListOfPendingTask(String taskType, Integer start, Integer count) {
        List<Task> tasks = executionService.getPendingTasksForTaskType(taskType);
        int size = tasks.size();
        // Clamp the page window to [0, size]. Long arithmetic guards against int
        // overflow of start + count, which previously could produce a negative
        // toIndex and make subList throw; a negative start also no longer throws.
        int from = Math.min(Math.max(start, 0), size);
        long end = (long) from + Math.max(count, 0);
        int to = (int) Math.min(size, end);
        return tasks.subList(from, to);
    }

    @Override
    public boolean verifyAndRepairWorkflowConsistency(String workflowId) {
        if (workflowRepairService == null) {
            throw new IllegalStateException(
                    WorkflowRepairService.class.getSimpleName() + " is disabled.");
        }
        return workflowRepairService.verifyAndRepairWorkflow(workflowId, true);
    }

    /**
     * Queue up the workflow for sweep.
     *
     * @param workflowId Id of the workflow
     * @return the id of the workflow instance that can be use for tracking.
     */
    public String requeueSweep(String workflowId) {
        boolean pushed =
                queueDAO.pushIfNotExists(
                        Utils.DECIDER_QUEUE,
                        workflowId,
                        properties.getWorkflowOffsetTimeout().getSeconds());
        // Result encodes whether the push actually happened, e.g. "true.<workflowId>".
        return pushed + "." + workflowId;
    }

    /**
     * Get registered queues.
     *
     * @param verbose `true|false` for verbose logs
     * @return map of event queues (sizes when verbose, queue objects otherwise)
     */
    public Map<String, ?> getEventQueues(boolean verbose) {
        if (eventQueueManager == null) {
            throw new IllegalStateException("Event processing is DISABLED");
        }
        return (verbose ? eventQueueManager.getQueueSizes() : eventQueueManager.getQueues());
    }
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/main/java/com/netflix/conductor/service/EventService.java | core/src/main/java/com/netflix/conductor/service/EventService.java | /*
* Copyright 2022 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.service;
import java.util.List;
import org.springframework.validation.annotation.Validated;
import com.netflix.conductor.common.metadata.events.EventHandler;
import jakarta.validation.Valid;
import jakarta.validation.constraints.NotEmpty;
import jakarta.validation.constraints.NotNull;
@Validated
public interface EventService {

    /**
     * Add a new event handler.
     *
     * @param eventHandler Instance of {@link EventHandler} to register; validated before use
     */
    void addEventHandler(
            @NotNull(message = "EventHandler cannot be null.") @Valid EventHandler eventHandler);

    /**
     * Update an existing event handler.
     *
     * @param eventHandler Instance of {@link EventHandler} carrying the updated definition
     */
    void updateEventHandler(
            @NotNull(message = "EventHandler cannot be null.") @Valid EventHandler eventHandler);

    /**
     * Remove an event handler.
     *
     * @param name Event name (NOTE(review): the validation message says "EventHandler name" —
     *     confirm against the implementation whether this is the event or the handler name)
     */
    void removeEventHandlerStatus(
            @NotEmpty(message = "EventHandler name cannot be null or empty.") String name);

    /**
     * Get all the event handlers.
     *
     * @return list of all registered {@link EventHandler}s
     */
    List<EventHandler> getEventHandlers();

    /**
     * Get event handlers for a given event.
     *
     * @param event Event Name
     * @param activeOnly `true|false`; when true, only active handlers are returned
     * @return list of matching {@link EventHandler}s
     */
    List<EventHandler> getEventHandlersForEvent(
            @NotEmpty(message = "Event cannot be null or empty.") String event, boolean activeOnly);
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/main/java/com/netflix/conductor/service/MetadataService.java | core/src/main/java/com/netflix/conductor/service/MetadataService.java | /*
* Copyright 2022 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.service;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import org.springframework.validation.annotation.Validated;
import com.netflix.conductor.common.metadata.events.EventHandler;
import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowDefSummary;
import com.netflix.conductor.common.model.BulkResponse;
import jakarta.validation.Valid;
import jakarta.validation.constraints.NotEmpty;
import jakarta.validation.constraints.NotNull;
import jakarta.validation.constraints.Size;
@Validated
public interface MetadataService {

    /**
     * Registers the given task definitions.
     *
     * @param taskDefinitions Task Definitions to register; must contain at least one entry
     */
    void registerTaskDef(
            @NotNull(message = "TaskDefList cannot be empty or null")
                    @Size(min = 1, message = "TaskDefList is empty")
                    List<@Valid TaskDef> taskDefinitions);

    /**
     * Updates an existing task definition.
     *
     * @param taskDefinition Task Definition to be updated
     */
    void updateTaskDef(@NotNull(message = "TaskDef cannot be null") @Valid TaskDef taskDefinition);

    /**
     * Removes a task definition.
     *
     * @param taskType name of the task definition to remove
     */
    void unregisterTaskDef(@NotEmpty(message = "TaskName cannot be null or empty") String taskType);

    /**
     * @return List of all the registered tasks
     */
    List<TaskDef> getTaskDefs();

    /**
     * @param taskType Task to retrieve
     * @return Task Definition
     */
    TaskDef getTaskDef(@NotEmpty(message = "TaskType cannot be null or empty") String taskType);

    /**
     * @param def Workflow definition to be updated
     */
    void updateWorkflowDef(@NotNull(message = "WorkflowDef cannot be null") @Valid WorkflowDef def);

    /**
     * Bulk-updates workflow definitions.
     *
     * @param workflowDefList Workflow definitions to be updated; must contain at least one entry
     * @return per-item result of the bulk operation
     */
    BulkResponse<String> updateWorkflowDef(
            @NotNull(message = "WorkflowDef list name cannot be null or empty")
                    @Size(min = 1, message = "WorkflowDefList is empty")
                    List<@NotNull(message = "WorkflowDef cannot be null") @Valid WorkflowDef>
                            workflowDefList);

    /**
     * @param name Name of the workflow to retrieve
     * @param version Optional. Version. If null, then retrieves the latest
     * @return Workflow definition
     */
    WorkflowDef getWorkflowDef(
            @NotEmpty(message = "Workflow name cannot be null or empty") String name,
            Integer version);

    /**
     * @param name Name of the workflow to retrieve
     * @return Latest version of the workflow definition, empty if the workflow is unknown
     */
    Optional<WorkflowDef> getLatestWorkflow(
            @NotEmpty(message = "Workflow name cannot be null or empty") String name);

    /**
     * @return Returns all workflow defs (all versions)
     */
    List<WorkflowDef> getWorkflowDefs();

    /**
     * @return Returns workflow names and versions only (no definition bodies)
     */
    Map<String, ? extends Iterable<WorkflowDefSummary>> getWorkflowNamesAndVersions();

    /**
     * Registers a new workflow definition.
     *
     * @param workflowDef workflow definition to register
     */
    void registerWorkflowDef(
            @NotNull(message = "WorkflowDef cannot be null") @Valid WorkflowDef workflowDef);

    /**
     * Validates a {@link WorkflowDef}.
     *
     * @param workflowDef The {@link WorkflowDef} object.
     */
    default void validateWorkflowDef(
            @NotNull(message = "WorkflowDef cannot be null") @Valid WorkflowDef workflowDef) {
        // do nothing, WorkflowDef is annotated with @Valid and calling this method will validate it
    }

    /**
     * @param name Name of the workflow definition to be removed
     * @param version Version of the workflow definition to be removed
     */
    void unregisterWorkflowDef(
            @NotEmpty(message = "Workflow name cannot be null or empty") String name,
            @NotNull(message = "Version cannot be null") Integer version);

    /**
     * @param eventHandler Event handler to be added. Will throw an exception if an event handler
     *     already exists with the name
     */
    void addEventHandler(
            @NotNull(message = "EventHandler cannot be null") @Valid EventHandler eventHandler);

    /**
     * @param eventHandler Event handler to be updated.
     */
    void updateEventHandler(
            @NotNull(message = "EventHandler cannot be null") @Valid EventHandler eventHandler);

    /**
     * @param name Removes the event handler from the system
     */
    void removeEventHandlerStatus(
            @NotEmpty(message = "EventName cannot be null or empty") String name);

    /**
     * @return All the event handlers registered in the system
     */
    List<EventHandler> getAllEventHandlers();

    /**
     * @param event name of the event
     * @param activeOnly if true, returns only the active handlers
     * @return Returns the list of all the event handlers for a given event
     */
    List<EventHandler> getEventHandlersForEvent(
            @NotEmpty(message = "EventName cannot be null or empty") String event,
            boolean activeOnly);

    /**
     * @return the latest version of every registered workflow definition
     */
    List<WorkflowDef> getWorkflowDefsLatestVersions();
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/main/java/com/netflix/conductor/service/ExecutionService.java | core/src/main/java/com/netflix/conductor/service/ExecutionService.java | /*
* Copyright 2022 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.service;
import java.util.*;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Service;
import com.netflix.conductor.annotations.Trace;
import com.netflix.conductor.common.metadata.events.EventExecution;
import com.netflix.conductor.common.metadata.tasks.*;
import com.netflix.conductor.common.run.*;
import com.netflix.conductor.common.utils.ExternalPayloadStorage;
import com.netflix.conductor.common.utils.ExternalPayloadStorage.Operation;
import com.netflix.conductor.common.utils.ExternalPayloadStorage.PayloadType;
import com.netflix.conductor.common.utils.TaskUtils;
import com.netflix.conductor.core.config.ConductorProperties;
import com.netflix.conductor.core.dal.ExecutionDAOFacade;
import com.netflix.conductor.core.events.queue.Message;
import com.netflix.conductor.core.exception.NotFoundException;
import com.netflix.conductor.core.execution.WorkflowExecutor;
import com.netflix.conductor.core.execution.tasks.SystemTaskRegistry;
import com.netflix.conductor.core.listener.TaskStatusListener;
import com.netflix.conductor.core.utils.QueueUtils;
import com.netflix.conductor.core.utils.Utils;
import com.netflix.conductor.dao.QueueDAO;
import com.netflix.conductor.metrics.Monitors;
import com.netflix.conductor.model.TaskModel;
@Trace
@Service
public class ExecutionService {

    private static final Logger LOGGER = LoggerFactory.getLogger(ExecutionService.class);

    private final WorkflowExecutor workflowExecutor;
    private final ExecutionDAOFacade executionDAOFacade;
    private final QueueDAO queueDAO;
    private final ExternalPayloadStorage externalPayloadStorage;
    private final SystemTaskRegistry systemTaskRegistry;
    private final TaskStatusListener taskStatusListener;

    private final long queueTaskMessagePostponeSecs;

    private static final int MAX_POLL_TIMEOUT_MS = 5000;
    private static final int POLL_COUNT_ONE = 1;
    private static final int POLLING_TIMEOUT_IN_MS = 100;

    public ExecutionService(
            WorkflowExecutor workflowExecutor,
            ExecutionDAOFacade executionDAOFacade,
            QueueDAO queueDAO,
            ConductorProperties properties,
            ExternalPayloadStorage externalPayloadStorage,
            SystemTaskRegistry systemTaskRegistry,
            TaskStatusListener taskStatusListener) {
        this.workflowExecutor = workflowExecutor;
        this.executionDAOFacade = executionDAOFacade;
        this.queueDAO = queueDAO;
        this.externalPayloadStorage = externalPayloadStorage;
        this.queueTaskMessagePostponeSecs =
                properties.getTaskExecutionPostponeDuration().getSeconds();
        this.systemTaskRegistry = systemTaskRegistry;
        this.taskStatusListener = taskStatusListener;
    }

    /** Polls a single task of the given type, in the default domain. */
    public Task poll(String taskType, String workerId) {
        return poll(taskType, workerId, null);
    }

    /** Polls a single task of the given type in the given domain, or null when none is ready. */
    public Task poll(String taskType, String workerId, String domain) {
        List<Task> tasks = poll(taskType, workerId, domain, 1, 100);
        if (tasks.isEmpty()) {
            return null;
        }
        return tasks.get(0);
    }

    /** Batch poll in the default domain. */
    public List<Task> poll(String taskType, String workerId, int count, int timeoutInMilliSecond) {
        return poll(taskType, workerId, null, count, timeoutInMilliSecond);
    }

    /**
     * Batch-polls up to {@code count} tasks from the queue for the given task type/domain,
     * marks them IN_PROGRESS, assigns the worker, and acks them. Tasks that are terminal,
     * missing, rate-limited, or over the in-progress limit are removed/postponed instead.
     *
     * @param taskType task type (queue) to poll
     * @param workerId id of the polling worker, recorded on the task
     * @param domain optional domain qualifier for the queue name
     * @param count maximum number of tasks to return
     * @param timeoutInMilliSecond long-poll timeout; must not exceed 5000 ms
     * @return tasks handed to the worker (possibly empty)
     */
    public List<Task> poll(
            String taskType, String workerId, String domain, int count, int timeoutInMilliSecond) {
        if (timeoutInMilliSecond > MAX_POLL_TIMEOUT_MS) {
            throw new IllegalArgumentException(
                    "Long Poll Timeout value cannot be more than 5 seconds");
        }
        String queueName = QueueUtils.getQueueName(taskType, domain, null, null);

        List<String> taskIds = new LinkedList<>();
        List<Task> tasks = new LinkedList<>();
        try {
            taskIds = queueDAO.pop(queueName, count, timeoutInMilliSecond);
        } catch (Exception e) {
            LOGGER.error(
                    "Error polling for task: {} from worker: {} in domain: {}, count: {}",
                    taskType,
                    workerId,
                    domain,
                    count,
                    e);
            Monitors.error(this.getClass().getCanonicalName(), "taskPoll");
            Monitors.recordTaskPollError(taskType, domain, e.getClass().getSimpleName());
        }

        for (String taskId : taskIds) {
            try {
                TaskModel taskModel = executionDAOFacade.getTaskModel(taskId);
                if (taskModel == null || taskModel.getStatus().isTerminal()) {
                    // Remove taskId(s) without a valid Task/terminal state task from the queue
                    queueDAO.remove(queueName, taskId);
                    LOGGER.debug("Removed task: {} from the queue: {}", taskId, queueName);
                    continue;
                }

                if (executionDAOFacade.exceedsInProgressLimit(taskModel)) {
                    // Postpone this message, so that it would be available for poll again.
                    queueDAO.postpone(
                            queueName,
                            taskId,
                            taskModel.getWorkflowPriority(),
                            queueTaskMessagePostponeSecs);
                    LOGGER.debug(
                            "Postponed task: {} in queue: {} by {} seconds",
                            taskId,
                            queueName,
                            queueTaskMessagePostponeSecs);
                    continue;
                }
                TaskDef taskDef =
                        taskModel.getTaskDefinition().isPresent()
                                ? taskModel.getTaskDefinition().get()
                                : null;
                if (taskModel.getRateLimitPerFrequency() > 0
                        && executionDAOFacade.exceedsRateLimitPerFrequency(taskModel, taskDef)) {
                    // Postpone this message, so that it would be available for poll again.
                    queueDAO.postpone(
                            queueName,
                            taskId,
                            taskModel.getWorkflowPriority(),
                            queueTaskMessagePostponeSecs);
                    LOGGER.debug(
                            "RateLimit Execution limited for {}:{}, limit:{}",
                            taskId,
                            taskModel.getTaskDefName(),
                            taskModel.getRateLimitPerFrequency());
                    continue;
                }

                taskModel.setStatus(TaskModel.Status.IN_PROGRESS);
                if (taskModel.getStartTime() == 0) {
                    taskModel.setStartTime(System.currentTimeMillis());
                    Monitors.recordQueueWaitTime(
                            taskModel.getTaskDefName(), taskModel.getQueueWaitTime());
                }
                taskModel.setCallbackAfterSeconds(
                        0); // reset callbackAfterSeconds when giving the task to the worker
                taskModel.setWorkerId(workerId);
                taskModel.incrementPollCount();
                executionDAOFacade.updateTask(taskModel);
                tasks.add(taskModel.toTask());
            } catch (Exception e) {
                // db operation failed for dequeued message, re-enqueue with a delay
                LOGGER.warn(
                        "DB operation failed for task: {}, postponing task in queue", taskId, e);
                Monitors.recordTaskPollError(taskType, domain, e.getClass().getSimpleName());
                queueDAO.postpone(queueName, taskId, 0, queueTaskMessagePostponeSecs);
            }
        }

        // Notify the listener for every popped task that is now IN_PROGRESS; listener
        // failures are logged and must not fail the poll.
        taskIds.stream()
                .map(executionDAOFacade::getTaskModel)
                .filter(Objects::nonNull)
                .filter(task -> TaskModel.Status.IN_PROGRESS.equals(task.getStatus()))
                .forEach(
                        task -> {
                            try {
                                taskStatusListener.onTaskInProgress(task);
                            } catch (Exception e) {
                                String errorMsg =
                                        String.format(
                                                "Error while notifying TaskStatusListener: %s for workflow: %s",
                                                task.getTaskId(), task.getWorkflowInstanceId());
                                LOGGER.error(errorMsg, e);
                            }
                        });
        executionDAOFacade.updateTaskLastPoll(taskType, domain, workerId);
        Monitors.recordTaskPoll(queueName);
        tasks.forEach(this::ackTaskReceived);
        return tasks;
    }

    /**
     * Polls one task (short 100 ms timeout) and returns it, or null when none is available.
     * NOTE(review): despite the name, this performs a real poll rather than reading last-poll
     * metadata, and the returned task was already acked inside poll(); the second ack below
     * is redundant but kept for backward compatibility.
     */
    public Task getLastPollTask(String taskType, String workerId, String domain) {
        List<Task> tasks = poll(taskType, workerId, domain, POLL_COUNT_ONE, POLLING_TIMEOUT_IN_MS);
        if (tasks.isEmpty()) {
            LOGGER.debug(
                    "No Task available for the poll: /tasks/poll/{}?{}&{}",
                    taskType,
                    workerId,
                    domain);
            return null;
        }
        Task task = tasks.get(0);
        ackTaskReceived(task);
        LOGGER.debug(
                "The Task {} being returned for /tasks/poll/{}?{}&{}",
                task,
                taskType,
                workerId,
                domain);
        return task;
    }

    /** Returns the last-poll data recorded for the given task type. */
    public List<PollData> getPollData(String taskType) {
        return executionDAOFacade.getTaskPollData(taskType);
    }

    /**
     * Returns poll data for all task types. Falls back to iterating queue names when the
     * underlying DAO does not support a bulk fetch.
     */
    public List<PollData> getAllPollData() {
        try {
            return executionDAOFacade.getAllPollData();
        } catch (UnsupportedOperationException uoe) {
            List<PollData> allPollData = new ArrayList<>();
            Map<String, Long> queueSizes = queueDAO.queuesDetail();
            queueSizes
                    .keySet()
                    .forEach(
                            queueName -> {
                                try {
                                    if (!queueName.contains(QueueUtils.DOMAIN_SEPARATOR)) {
                                        allPollData.addAll(
                                                getPollData(
                                                        QueueUtils.getQueueNameWithoutDomain(
                                                                queueName)));
                                    }
                                } catch (Exception e) {
                                    LOGGER.error("Unable to fetch all poll data!", e);
                                }
                            });
            return allPollData;
        }
    }

    /** Terminates the workflow with the given reason. */
    public void terminateWorkflow(String workflowId, String reason) {
        workflowExecutor.terminateWorkflow(workflowId, reason);
    }

    /** Applies a worker's task result and returns the updated task model. */
    public TaskModel updateTask(TaskResult taskResult) {
        return workflowExecutor.updateTask(taskResult);
    }

    /** Returns up to {@code count} tasks of the given type starting at {@code startKey}. */
    public List<Task> getTasks(String taskType, String startKey, int count) {
        return executionDAOFacade.getTasksByName(taskType, startKey, count);
    }

    /** Returns the task with the given id, or null when not found. */
    public Task getTask(String taskId) {
        return executionDAOFacade.getTask(taskId);
    }

    /**
     * Finds the non-terminal task in the workflow with the given reference name, also matching
     * loop-iteration-suffixed reference names (e.g. inside DO_WHILE).
     */
    public Task getPendingTaskForWorkflow(String taskReferenceName, String workflowId) {
        List<TaskModel> tasks = executionDAOFacade.getTaskModelsForWorkflow(workflowId);
        Stream<TaskModel> taskStream =
                tasks.stream().filter(task -> !task.getStatus().isTerminal());
        Optional<TaskModel> found =
                taskStream
                        .filter(task -> task.getReferenceTaskName().equals(taskReferenceName))
                        .findFirst();
        if (found.isPresent()) {
            return found.get().toTask();
        }
        // If no task is found, let's check if there is one inside an iteration
        found =
                tasks.stream()
                        .filter(task -> !task.getStatus().isTerminal())
                        .filter(
                                task ->
                                        TaskUtils.removeIterationFromTaskRefName(
                                                        task.getReferenceTaskName())
                                                .equals(taskReferenceName))
                        .findFirst();
        return found.map(TaskModel::toTask).orElse(null);
    }

    /**
     * This method removes the task from the un-acked Queue
     *
     * @param taskId: the taskId that needs to be updated and removed from the unacked queue
     * @return True in case of successful removal of the taskId from the un-acked queue
     */
    public boolean ackTaskReceived(String taskId) {
        return Optional.ofNullable(getTask(taskId)).map(this::ackTaskReceived).orElse(false);
    }

    /** Acks the given task in its queue; returns true on successful ack. */
    public boolean ackTaskReceived(Task task) {
        return queueDAO.ack(QueueUtils.getQueueName(task), task.getTaskId());
    }

    /** Returns queue size per task definition name. */
    public Map<String, Integer> getTaskQueueSizes(List<String> taskDefNames) {
        Map<String, Integer> sizes = new HashMap<>();
        for (String taskDefName : taskDefNames) {
            sizes.put(taskDefName, getTaskQueueSize(taskDefName));
        }
        return sizes;
    }

    /** Returns the size of the named queue. */
    public Integer getTaskQueueSize(String queueName) {
        return queueDAO.getSize(queueName);
    }

    /**
     * Removes the task from its queue.
     *
     * @throws NotFoundException when no task exists with the given id
     */
    public void removeTaskFromQueue(String taskId) {
        Task task = getTask(taskId);
        if (task == null) {
            throw new NotFoundException("No such task found by taskId: %s", taskId);
        }
        queueDAO.remove(QueueUtils.getQueueName(task), taskId);
    }

    /**
     * Re-enqueues all pending (non-terminal, non-system) tasks of the given type.
     *
     * @return the number of tasks actually re-queued
     */
    public int requeuePendingTasks(String taskType) {
        int count = 0;
        List<Task> tasks = getPendingTasksForTaskType(taskType);
        for (Task pending : tasks) {
            if (systemTaskRegistry.isSystemTask(pending.getTaskType())) {
                continue;
            }
            if (pending.getStatus().isTerminal()) {
                continue;
            }
            LOGGER.debug(
                    "Requeuing Task: {} of taskType: {} in Workflow: {}",
                    pending.getTaskId(),
                    pending.getTaskType(),
                    pending.getWorkflowInstanceId());
            boolean pushed = requeue(pending);
            if (pushed) {
                count++;
            }
        }
        return count;
    }

    /**
     * Removes and re-pushes a single task, preserving any remaining callback delay
     * (reduced by the time already elapsed since the task's last update).
     */
    private boolean requeue(Task pending) {
        long callback = pending.getCallbackAfterSeconds();
        if (callback < 0) {
            callback = 0;
        }
        queueDAO.remove(QueueUtils.getQueueName(pending), pending.getTaskId());
        long now = System.currentTimeMillis();
        callback = callback - ((now - pending.getUpdateTime()) / 1000);
        if (callback < 0) {
            callback = 0;
        }
        return queueDAO.pushIfNotExists(
                QueueUtils.getQueueName(pending),
                pending.getTaskId(),
                pending.getWorkflowPriority(),
                callback);
    }

    /**
     * Returns workflow instances by name and correlation id, optionally including closed
     * workflows and their tasks.
     */
    public List<Workflow> getWorkflowInstances(
            String workflowName,
            String correlationId,
            boolean includeClosed,
            boolean includeTasks) {
        List<Workflow> workflows =
                executionDAOFacade.getWorkflowsByCorrelationId(workflowName, correlationId, false);

        return workflows.stream()
                .parallel()
                .filter(
                        workflow -> {
                            if (includeClosed
                                    || workflow.getStatus()
                                            .equals(Workflow.WorkflowStatus.RUNNING)) {
                                // including tasks for subset of workflows to increase performance
                                if (includeTasks) {
                                    List<Task> tasks =
                                            executionDAOFacade.getTasksForWorkflow(
                                                    workflow.getWorkflowId());
                                    tasks.sort(Comparator.comparingInt(Task::getSeq));
                                    workflow.setTasks(tasks);
                                }
                                return true;
                            } else {
                                return false;
                            }
                        })
                .collect(Collectors.toList());
    }

    /** Returns the workflow with the given id, optionally including its tasks. */
    public Workflow getExecutionStatus(String workflowId, boolean includeTasks) {
        return executionDAOFacade.getWorkflow(workflowId, includeTasks);
    }

    /** Returns ids of running workflows for the given name/version. */
    public List<String> getRunningWorkflows(String workflowName, int version) {
        return executionDAOFacade.getRunningWorkflowIds(workflowName, version);
    }

    /** Removes (and optionally archives) a workflow. */
    public void removeWorkflow(String workflowId, boolean archiveWorkflow) {
        executionDAOFacade.removeWorkflow(workflowId, archiveWorkflow);
    }

    /** Searches workflow summaries by query/free text. */
    public SearchResult<WorkflowSummary> search(
            String query, String freeText, int start, int size, List<String> sortOptions) {
        return executionDAOFacade.searchWorkflowSummary(query, freeText, start, size, sortOptions);
    }

    /**
     * Searches workflows and resolves each hit to the full Workflow object. Hits whose
     * workflow can no longer be fetched are dropped and subtracted from the total.
     */
    public SearchResult<Workflow> searchV2(
            String query, String freeText, int start, int size, List<String> sortOptions) {
        SearchResult<String> result =
                executionDAOFacade.searchWorkflows(query, freeText, start, size, sortOptions);
        List<Workflow> workflows =
                result.getResults().stream()
                        .parallel()
                        .map(
                                workflowId -> {
                                    try {
                                        return executionDAOFacade.getWorkflow(workflowId, false);
                                    } catch (Exception e) {
                                        LOGGER.error(
                                                "Error fetching workflow by id: {}", workflowId, e);
                                        return null;
                                    }
                                })
                        .filter(Objects::nonNull)
                        .collect(Collectors.toList());
        int missing = result.getResults().size() - workflows.size();
        long totalHits = result.getTotalHits() - missing;
        return new SearchResult<>(totalHits, workflows);
    }

    /**
     * Searches task summaries and maps each to its owning workflow's summary. Unresolvable
     * hits are dropped and subtracted from the total.
     */
    public SearchResult<WorkflowSummary> searchWorkflowByTasks(
            String query, String freeText, int start, int size, List<String> sortOptions) {
        SearchResult<TaskSummary> taskSummarySearchResult =
                searchTaskSummary(query, freeText, start, size, sortOptions);
        List<WorkflowSummary> workflowSummaries =
                taskSummarySearchResult.getResults().stream()
                        .parallel()
                        .map(
                                taskSummary -> {
                                    try {
                                        String workflowId = taskSummary.getWorkflowId();
                                        return new WorkflowSummary(
                                                executionDAOFacade.getWorkflow(workflowId, false));
                                    } catch (Exception e) {
                                        LOGGER.error(
                                                "Error fetching workflow by id: {}",
                                                taskSummary.getWorkflowId(),
                                                e);
                                        return null;
                                    }
                                })
                        .filter(Objects::nonNull)
                        .distinct()
                        .collect(Collectors.toList());
        int missing = taskSummarySearchResult.getResults().size() - workflowSummaries.size();
        long totalHits = taskSummarySearchResult.getTotalHits() - missing;
        return new SearchResult<>(totalHits, workflowSummaries);
    }

    /**
     * Searches tasks and maps each to its owning full Workflow object. Unresolvable hits
     * are dropped and subtracted from the total.
     */
    public SearchResult<Workflow> searchWorkflowByTasksV2(
            String query, String freeText, int start, int size, List<String> sortOptions) {
        SearchResult<TaskSummary> taskSummarySearchResult =
                searchTasks(query, freeText, start, size, sortOptions);
        List<Workflow> workflows =
                taskSummarySearchResult.getResults().stream()
                        .parallel()
                        .map(
                                taskSummary -> {
                                    try {
                                        String workflowId = taskSummary.getWorkflowId();
                                        return executionDAOFacade.getWorkflow(workflowId, false);
                                    } catch (Exception e) {
                                        LOGGER.error(
                                                "Error fetching workflow by id: {}",
                                                taskSummary.getWorkflowId(),
                                                e);
                                        return null;
                                    }
                                })
                        .filter(Objects::nonNull)
                        .distinct()
                        .collect(Collectors.toList());
        int missing = taskSummarySearchResult.getResults().size() - workflows.size();
        long totalHits = taskSummarySearchResult.getTotalHits() - missing;
        return new SearchResult<>(totalHits, workflows);
    }

    /**
     * Searches task ids and resolves each into a {@link TaskSummary}. Unresolvable hits
     * are dropped and subtracted from the total.
     */
    public SearchResult<TaskSummary> searchTasks(
            String query, String freeText, int start, int size, List<String> sortOptions) {
        SearchResult<String> result =
                executionDAOFacade.searchTasks(query, freeText, start, size, sortOptions);
        List<TaskSummary> workflows =
                result.getResults().stream()
                        .parallel()
                        .map(
                                task -> {
                                    try {
                                        return new TaskSummary(executionDAOFacade.getTask(task));
                                    } catch (Exception e) {
                                        LOGGER.error("Error fetching task by id: {}", task, e);
                                        return null;
                                    }
                                })
                        .filter(Objects::nonNull)
                        .collect(Collectors.toList());
        int missing = result.getResults().size() - workflows.size();
        long totalHits = result.getTotalHits() - missing;
        return new SearchResult<>(totalHits, workflows);
    }

    /** Searches task summaries directly via the DAO facade. */
    public SearchResult<TaskSummary> searchTaskSummary(
            String query, String freeText, int start, int size, List<String> sortOptions) {
        return executionDAOFacade.searchTaskSummary(query, freeText, start, size, sortOptions);
    }

    /** Searches task summaries using a comma-separated sort string. */
    public SearchResult<TaskSummary> getSearchTasks(
            String query,
            String freeText,
            int start,
            /*@Max(value = MAX_SEARCH_SIZE, message = "Cannot return more than {value} workflows." +
            " Please use pagination.")*/ int size,
            String sortString) {
        return searchTaskSummary(
                query, freeText, start, size, Utils.convertStringToList(sortString));
    }

    /**
     * Searches task ids and resolves each into a full {@link Task}. Unresolvable hits are
     * dropped and subtracted from the total.
     */
    public SearchResult<Task> getSearchTasksV2(
            String query, String freeText, int start, int size, String sortString) {
        SearchResult<String> result =
                executionDAOFacade.searchTasks(
                        query, freeText, start, size, Utils.convertStringToList(sortString));
        List<Task> tasks =
                result.getResults().stream()
                        .parallel()
                        .map(
                                task -> {
                                    try {
                                        return executionDAOFacade.getTask(task);
                                    } catch (Exception e) {
                                        LOGGER.error("Error fetching task by id: {}", task, e);
                                        return null;
                                    }
                                })
                        .filter(Objects::nonNull)
                        .collect(Collectors.toList());
        int missing = result.getResults().size() - tasks.size();
        long totalHits = result.getTotalHits() - missing;
        return new SearchResult<>(totalHits, tasks);
    }

    /** Returns all pending (in-progress) tasks of the given type. */
    public List<Task> getPendingTasksForTaskType(String taskType) {
        return executionDAOFacade.getPendingTasksForTaskType(taskType);
    }

    /** Persists an event execution; returns true when it was added. */
    public boolean addEventExecution(EventExecution eventExecution) {
        return executionDAOFacade.addEventExecution(eventExecution);
    }

    /** Removes an event execution record. */
    public void removeEventExecution(EventExecution eventExecution) {
        executionDAOFacade.removeEventExecution(eventExecution);
    }

    /** Updates an event execution record. */
    public void updateEventExecution(EventExecution eventExecution) {
        executionDAOFacade.updateEventExecution(eventExecution);
    }

    /**
     * @param queue Name of the registered queueDAO
     * @param msg Message
     */
    public void addMessage(String queue, Message msg) {
        executionDAOFacade.addMessage(queue, msg);
    }

    /**
     * Adds task logs
     *
     * @param taskId Id of the task
     * @param log logs
     */
    public void log(String taskId, String log) {
        TaskExecLog executionLog = new TaskExecLog();
        executionLog.setTaskId(taskId);
        executionLog.setLog(log);
        executionLog.setCreatedTime(System.currentTimeMillis());
        executionDAOFacade.addTaskExecLog(Collections.singletonList(executionLog));
    }

    /**
     * @param taskId Id of the task for which to retrieve logs
     * @return Execution Logs (logged by the worker)
     */
    public List<TaskExecLog> getTaskLogs(String taskId) {
        return executionDAOFacade.getTaskExecutionLogs(taskId);
    }

    /**
     * Get external uri for the payload
     *
     * @param path the path for which the external storage location is to be populated
     * @param operation the type of {@link Operation} to be performed
     * @param type the {@link PayloadType} at the external uri
     * @return the external uri at which the payload is stored/to be stored
     * @throws IllegalArgumentException when operation or type is not a valid enum name
     */
    public ExternalStorageLocation getExternalStorageLocation(
            String path, String operation, String type) {
        try {
            ExternalPayloadStorage.Operation payloadOperation =
                    ExternalPayloadStorage.Operation.valueOf(StringUtils.upperCase(operation));
            ExternalPayloadStorage.PayloadType payloadType =
                    ExternalPayloadStorage.PayloadType.valueOf(StringUtils.upperCase(type));
            return externalPayloadStorage.getLocation(payloadOperation, payloadType, path);
        } catch (Exception e) {
            String errorMsg =
                    String.format(
                            "Invalid input - Operation: %s, PayloadType: %s", operation, type);
            // Log the original exception and chain it as the cause, so the actual
            // failure (bad enum name vs. storage error) is not swallowed.
            LOGGER.error(errorMsg, e);
            throw new IllegalArgumentException(errorMsg, e);
        }
    }
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/main/java/com/netflix/conductor/service/WorkflowBulkService.java | core/src/main/java/com/netflix/conductor/service/WorkflowBulkService.java | /*
* Copyright 2022 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.service;
import java.util.List;
import org.springframework.validation.annotation.Validated;
import com.netflix.conductor.common.model.BulkResponse;
import com.netflix.conductor.model.WorkflowModel;
import jakarta.validation.constraints.NotEmpty;
import jakarta.validation.constraints.Size;
@Validated
public interface WorkflowBulkService {

    /** Maximum number of workflow ids accepted by any single bulk request. */
    int MAX_REQUEST_ITEMS = 1000;

    /**
     * Pauses the given workflows.
     *
     * @param workflowIds ids of the workflows to pause (at most {@link #MAX_REQUEST_ITEMS})
     * @return per-workflow success/failure results
     */
    BulkResponse<String> pauseWorkflow(
            @NotEmpty(message = "WorkflowIds list cannot be null.")
                    @Size(
                            max = MAX_REQUEST_ITEMS,
                            message =
                                    "Cannot process more than {max} workflows. Please use multiple requests.")
                    List<String> workflowIds);

    /**
     * Resumes the given (paused) workflows.
     *
     * @param workflowIds ids of the workflows to resume (at most {@link #MAX_REQUEST_ITEMS})
     * @return per-workflow success/failure results
     */
    BulkResponse<String> resumeWorkflow(
            @NotEmpty(message = "WorkflowIds list cannot be null.")
                    @Size(
                            max = MAX_REQUEST_ITEMS,
                            message =
                                    "Cannot process more than {max} workflows. Please use multiple requests.")
                    List<String> workflowIds);

    /**
     * Restarts the given workflows.
     *
     * @param workflowIds ids of the workflows to restart (at most {@link #MAX_REQUEST_ITEMS})
     * @param useLatestDefinitions when true, restart against the latest workflow/task definitions
     * @return per-workflow success/failure results
     */
    BulkResponse<String> restart(
            @NotEmpty(message = "WorkflowIds list cannot be null.")
                    @Size(
                            max = MAX_REQUEST_ITEMS,
                            message =
                                    "Cannot process more than {max} workflows. Please use multiple requests.")
                    List<String> workflowIds,
            boolean useLatestDefinitions);

    /**
     * Retries the last failed task in each of the given workflows.
     *
     * @param workflowIds ids of the workflows to retry (at most {@link #MAX_REQUEST_ITEMS})
     * @return per-workflow success/failure results
     */
    BulkResponse<String> retry(
            @NotEmpty(message = "WorkflowIds list cannot be null.")
                    @Size(
                            max = MAX_REQUEST_ITEMS,
                            message =
                                    "Cannot process more than {max} workflows. Please use multiple requests.")
                    List<String> workflowIds);

    /**
     * Terminates the given workflows.
     *
     * @param workflowIds ids of the workflows to terminate (at most {@link #MAX_REQUEST_ITEMS})
     * @param reason reason recorded for the termination
     * @return per-workflow success/failure results
     */
    BulkResponse<String> terminate(
            @NotEmpty(message = "WorkflowIds list cannot be null.")
                    @Size(
                            max = MAX_REQUEST_ITEMS,
                            message =
                                    "Cannot process more than {max} workflows. Please use multiple requests.")
                    List<String> workflowIds,
            String reason);

    /**
     * Deletes the given workflows from the system.
     *
     * @param workflowIds ids of the workflows to delete (at most {@link #MAX_REQUEST_ITEMS})
     * @param archiveWorkflow when true, archive the workflows and tasks instead of removing them
     * @return per-workflow success/failure results
     */
    BulkResponse<String> deleteWorkflow(
            @NotEmpty(message = "WorkflowIds list cannot be null.")
                    @Size(
                            max = MAX_REQUEST_ITEMS,
                            message =
                                    "Cannot process more than {max} workflows. Please use multiple requests.")
                    List<String> workflowIds,
            boolean archiveWorkflow);

    /**
     * Terminates and then removes the given workflows (terminate + delete combined).
     *
     * @param workflowIds ids of the workflows (at most {@link #MAX_REQUEST_ITEMS})
     * @param reason reason recorded for the termination
     * @param archiveWorkflow when true, archive the workflows and tasks instead of removing them
     * @return per-workflow success/failure results
     */
    BulkResponse<String> terminateRemove(
            @NotEmpty(message = "WorkflowIds list cannot be null.")
                    @Size(
                            max = MAX_REQUEST_ITEMS,
                            message =
                                    "Cannot process more than {max} workflows. Please use multiple requests.")
                    List<String> workflowIds,
            String reason,
            boolean archiveWorkflow);

    /**
     * Fetches the given workflows by id.
     *
     * @param workflowIds ids of the workflows (at most {@link #MAX_REQUEST_ITEMS})
     * @param includeTasks when true, include the tasks of each workflow
     * @return the matching {@link WorkflowModel}s
     */
    BulkResponse<WorkflowModel> searchWorkflow(
            @NotEmpty(message = "WorkflowIds list cannot be null.")
                    @Size(
                            max = MAX_REQUEST_ITEMS,
                            message =
                                    "Cannot process more than {max} workflows. Please use multiple requests.")
                    List<String> workflowIds,
            boolean includeTasks);
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/main/java/com/netflix/conductor/service/EventServiceImpl.java | core/src/main/java/com/netflix/conductor/service/EventServiceImpl.java | /*
* Copyright 2022 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.service;
import java.util.List;
import org.springframework.stereotype.Service;
import com.netflix.conductor.common.metadata.events.EventHandler;
import com.netflix.conductor.core.events.EventQueues;
@Service
public class EventServiceImpl implements EventService {

    private final MetadataService metadataService;

    public EventServiceImpl(MetadataService metadataService, EventQueues eventQueues) {
        // NOTE(review): eventQueues is injected but never stored or used here — presumably kept
        // for dependency wiring/backwards compatibility; confirm before removing it.
        this.metadataService = metadataService;
    }

    /**
     * Registers a new event handler.
     *
     * @param eventHandler the {@link EventHandler} to add
     */
    public void addEventHandler(EventHandler eventHandler) {
        this.metadataService.addEventHandler(eventHandler);
    }

    /**
     * Updates an already registered event handler.
     *
     * @param eventHandler the {@link EventHandler} to update
     */
    public void updateEventHandler(EventHandler eventHandler) {
        this.metadataService.updateEventHandler(eventHandler);
    }

    /**
     * Removes the event handler with the given name.
     *
     * @param name the event name
     */
    public void removeEventHandlerStatus(String name) {
        this.metadataService.removeEventHandlerStatus(name);
    }

    /**
     * Lists every registered event handler.
     *
     * @return all known {@link EventHandler}s
     */
    public List<EventHandler> getEventHandlers() {
        return this.metadataService.getAllEventHandlers();
    }

    /**
     * Lists the event handlers registered for a particular event.
     *
     * @param event the event name
     * @param activeOnly when true, only active handlers are returned
     * @return the matching {@link EventHandler}s
     */
    public List<EventHandler> getEventHandlersForEvent(String event, boolean activeOnly) {
        return this.metadataService.getEventHandlersForEvent(event, activeOnly);
    }
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/main/java/com/netflix/conductor/service/WorkflowService.java | core/src/main/java/com/netflix/conductor/service/WorkflowService.java | /*
* Copyright 2022 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.service;
import java.util.List;
import java.util.Map;
import org.springframework.validation.annotation.Validated;
import com.netflix.conductor.common.metadata.workflow.RerunWorkflowRequest;
import com.netflix.conductor.common.metadata.workflow.SkipTaskRequest;
import com.netflix.conductor.common.metadata.workflow.StartWorkflowRequest;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.run.ExternalStorageLocation;
import com.netflix.conductor.common.run.SearchResult;
import com.netflix.conductor.common.run.Workflow;
import com.netflix.conductor.common.run.WorkflowSummary;
import jakarta.validation.Valid;
import jakarta.validation.constraints.Max;
import jakarta.validation.constraints.Min;
import jakarta.validation.constraints.NotEmpty;
import jakarta.validation.constraints.NotNull;
@Validated
public interface WorkflowService {

    /**
     * Start a new workflow with StartWorkflowRequest, which allows task to be executed in a domain.
     *
     * @param startWorkflowRequest StartWorkflow request for the workflow you want to start.
     * @return the id of the workflow instance that can be used for tracking.
     */
    String startWorkflow(
            @NotNull(message = "StartWorkflowRequest cannot be null") @Valid
                    StartWorkflowRequest startWorkflowRequest);

    /**
     * Start a new workflow. Returns the ID of the workflow instance that can be later used for
     * tracking.
     *
     * @param name Name of the workflow you want to start.
     * @param version Version of the workflow you want to start.
     * @param correlationId CorrelationID of the workflow you want to start.
     * @param priority Priority of the workflow you want to start (0-99).
     * @param input Input to the workflow you want to start.
     * @return the id of the workflow instance that can be used for tracking.
     */
    String startWorkflow(
            @NotEmpty(message = "Workflow name cannot be null or empty") String name,
            Integer version,
            String correlationId,
            @Min(value = 0, message = "0 is the minimum priority value")
                    @Max(value = 99, message = "99 is the maximum priority value")
                    Integer priority,
            Map<String, Object> input);

    /**
     * Start a new workflow. Returns the ID of the workflow instance that can be later used for
     * tracking.
     *
     * @param name Name of the workflow you want to start.
     * @param version Version of the workflow you want to start.
     * @param correlationId CorrelationID of the workflow you want to start.
     * @param priority Priority of the workflow you want to start.
     * @param input Input to the workflow you want to start.
     * @param externalInputPayloadStoragePath path in external storage where the workflow input
     *     payload is stored (alternative to passing {@code input} inline)
     * @param taskToDomain mapping of task reference names to the domain to execute them in
     * @param workflowDef workflow definition to run; when null, presumably the registered
     *     definition for name/version is used — confirm with the implementation
     * @return the id of the workflow instance that can be used for tracking.
     */
    String startWorkflow(
            String name,
            Integer version,
            String correlationId,
            Integer priority,
            Map<String, Object> input,
            String externalInputPayloadStoragePath,
            Map<String, String> taskToDomain,
            WorkflowDef workflowDef);

    /**
     * Lists workflows for the given correlation id.
     *
     * @param name Name of the workflow.
     * @param correlationId CorrelationID of the workflows you want to list.
     * @param includeClosed if true, include workflows which are not running.
     * @param includeTasks if true, include tasks associated with the workflows.
     * @return a list of {@link Workflow}
     */
    List<Workflow> getWorkflows(
            @NotEmpty(message = "Workflow name cannot be null or empty") String name,
            String correlationId,
            boolean includeClosed,
            boolean includeTasks);

    /**
     * Lists workflows for the given correlation ids.
     *
     * @param name Name of the workflow.
     * @param includeClosed if true, include workflows which are not running.
     * @param includeTasks if true, include tasks associated with the workflows.
     * @param correlationIds the correlation ids to look up.
     * @return a {@link Map} of correlation id to its list of {@link Workflow}s
     */
    Map<String, List<Workflow>> getWorkflows(
            @NotEmpty(message = "Workflow name cannot be null or empty") String name,
            boolean includeClosed,
            boolean includeTasks,
            List<String> correlationIds);

    /**
     * Gets the workflow by workflow Id.
     *
     * @param workflowId Id of the workflow.
     * @param includeTasks Includes tasks associated with workflow.
     * @return an instance of {@link Workflow}
     */
    Workflow getExecutionStatus(
            @NotEmpty(message = "WorkflowId cannot be null or empty.") String workflowId,
            boolean includeTasks);

    /**
     * Removes the workflow from the system.
     *
     * @param workflowId WorkflowID of the workflow you want to remove from system.
     * @param archiveWorkflow Archives the workflow and associated tasks instead of removing them.
     */
    void deleteWorkflow(
            @NotEmpty(message = "WorkflowId cannot be null or empty.") String workflowId,
            boolean archiveWorkflow);

    /**
     * Retrieves all the running workflows.
     *
     * @param workflowName Name of the workflow.
     * @param version Version of the workflow.
     * @param startTime start time of the workflow.
     * @param endTime end time of the workflow.
     * @return a list of workflow Ids.
     */
    List<String> getRunningWorkflows(
            @NotEmpty(message = "Workflow name cannot be null or empty.") String workflowName,
            Integer version,
            Long startTime,
            Long endTime);

    /**
     * Starts the decision task for a workflow.
     *
     * @param workflowId WorkflowId of the workflow.
     */
    void decideWorkflow(
            @NotEmpty(message = "WorkflowId cannot be null or empty.") String workflowId);

    /**
     * Pauses the workflow given a workflowId.
     *
     * @param workflowId WorkflowId of the workflow.
     */
    void pauseWorkflow(
            @NotEmpty(message = "WorkflowId cannot be null or empty.") String workflowId);

    /**
     * Resumes the workflow.
     *
     * @param workflowId WorkflowId of the workflow.
     */
    void resumeWorkflow(
            @NotEmpty(message = "WorkflowId cannot be null or empty.") String workflowId);

    /**
     * Skips a given task from a current running workflow.
     *
     * @param workflowId WorkflowId of the workflow.
     * @param taskReferenceName The task reference name.
     * @param skipTaskRequest {@link SkipTaskRequest} for task you want to skip.
     */
    void skipTaskFromWorkflow(
            @NotEmpty(message = "WorkflowId name cannot be null or empty.") String workflowId,
            @NotEmpty(message = "TaskReferenceName cannot be null or empty.")
                    String taskReferenceName,
            SkipTaskRequest skipTaskRequest);

    /**
     * Reruns the workflow from a specific task.
     *
     * @param workflowId WorkflowId of the workflow you want to rerun.
     * @param request {@link RerunWorkflowRequest} for the workflow.
     * @return WorkflowId of the rerun workflow.
     */
    String rerunWorkflow(
            @NotEmpty(message = "WorkflowId cannot be null or empty.") String workflowId,
            @NotNull(message = "RerunWorkflowRequest cannot be null.")
                    RerunWorkflowRequest request);

    /**
     * Restarts a completed workflow.
     *
     * @param workflowId WorkflowId of the workflow.
     * @param useLatestDefinitions if true, use the latest workflow and task definitions upon
     *     restart
     */
    void restartWorkflow(
            @NotEmpty(message = "WorkflowId cannot be null or empty.") String workflowId,
            boolean useLatestDefinitions);

    /**
     * Retries the last failed task.
     *
     * @param workflowId WorkflowId of the workflow.
     * @param resumeSubworkflowTasks if true, also resume tasks within sub-workflows
     */
    void retryWorkflow(
            @NotEmpty(message = "WorkflowId cannot be null or empty.") String workflowId,
            boolean resumeSubworkflowTasks);

    /**
     * Resets callback times of all non-terminal SIMPLE tasks to 0.
     *
     * @param workflowId WorkflowId of the workflow.
     */
    void resetWorkflow(
            @NotEmpty(message = "WorkflowId cannot be null or empty.") String workflowId);

    /**
     * Terminate workflow execution.
     *
     * @param workflowId WorkflowId of the workflow.
     * @param reason Reason for terminating the workflow.
     */
    void terminateWorkflow(
            @NotEmpty(message = "WorkflowId cannot be null or empty.") String workflowId,
            String reason);

    /**
     * Terminate workflow execution, and then remove it from the system. Acts as terminate and
     * remove combined.
     *
     * @param workflowId WorkflowId of the workflow
     * @param reason Reason for terminating the workflow.
     * @param archiveWorkflow Archives the workflow and associated tasks instead of removing them.
     */
    void terminateRemove(
            @NotEmpty(message = "WorkflowId cannot be null or empty.") String workflowId,
            String reason,
            boolean archiveWorkflow);

    /**
     * Search for workflows based on payload and given parameters. Use sort options as ASC or
     * DESC e.g. sort=name or sort=workflowId:DESC. If order is not specified, defaults to ASC.
     *
     * @param start Start index of pagination
     * @param size Number of entries
     * @param sort Sorting type ASC|DESC
     * @param freeText Text you want to search
     * @param query Query you want to search
     * @return instance of {@link SearchResult}
     */
    SearchResult<WorkflowSummary> searchWorkflows(
            int start,
            @Max(
                            value = 5_000,
                            message =
                                    "Cannot return more than {value} workflows. Please use pagination.")
                    int size,
            String sort,
            String freeText,
            String query);

    /**
     * Search for workflows based on payload and given parameters. Use sort options as ASC or
     * DESC e.g. sort=name or sort=workflowId:DESC. If order is not specified, defaults to ASC.
     *
     * @param start Start index of pagination
     * @param size Number of entries
     * @param sort Sorting type ASC|DESC
     * @param freeText Text you want to search
     * @param query Query you want to search
     * @return instance of {@link SearchResult}
     */
    SearchResult<Workflow> searchWorkflowsV2(
            int start,
            @Max(
                            value = 5_000,
                            message =
                                    "Cannot return more than {value} workflows. Please use pagination.")
                    int size,
            String sort,
            String freeText,
            String query);

    /**
     * Search for workflows based on payload and given parameters. Use sort options as ASC or
     * DESC e.g. sort=name or sort=workflowId:DESC. If order is not specified, defaults to ASC.
     *
     * @param start Start index of pagination
     * @param size Number of entries
     * @param sort list of sorting options, separated by "|" delimiter
     * @param freeText Text you want to search
     * @param query Query you want to search
     * @return instance of {@link SearchResult}
     */
    SearchResult<WorkflowSummary> searchWorkflows(
            int start,
            @Max(
                            value = 5_000,
                            message =
                                    "Cannot return more than {value} workflows. Please use pagination.")
                    int size,
            List<String> sort,
            String freeText,
            String query);

    /**
     * Search for workflows based on payload and given parameters. Use sort options as ASC or
     * DESC e.g. sort=name or sort=workflowId:DESC. If order is not specified, defaults to ASC.
     *
     * @param start Start index of pagination
     * @param size Number of entries
     * @param sort list of sorting options, separated by "|" delimiter
     * @param freeText Text you want to search
     * @param query Query you want to search
     * @return instance of {@link SearchResult}
     */
    SearchResult<Workflow> searchWorkflowsV2(
            int start,
            @Max(
                            value = 5_000,
                            message =
                                    "Cannot return more than {value} workflows. Please use pagination.")
                    int size,
            List<String> sort,
            String freeText,
            String query);

    /**
     * Search for workflows based on task parameters. Use sort options as ASC or DESC e.g.
     * sort=name or sort=workflowId:DESC. If order is not specified, defaults to ASC.
     *
     * @param start Start index of pagination
     * @param size Number of entries
     * @param sort Sorting type ASC|DESC
     * @param freeText Text you want to search
     * @param query Query you want to search
     * @return instance of {@link SearchResult}
     */
    SearchResult<WorkflowSummary> searchWorkflowsByTasks(
            int start, int size, String sort, String freeText, String query);

    /**
     * Search for workflows based on task parameters. Use sort options as ASC or DESC e.g.
     * sort=name or sort=workflowId:DESC. If order is not specified, defaults to ASC.
     *
     * @param start Start index of pagination
     * @param size Number of entries
     * @param sort Sorting type ASC|DESC
     * @param freeText Text you want to search
     * @param query Query you want to search
     * @return instance of {@link SearchResult}
     */
    SearchResult<Workflow> searchWorkflowsByTasksV2(
            int start, int size, String sort, String freeText, String query);

    /**
     * Search for workflows based on task parameters. Use sort options as ASC or DESC e.g.
     * sort=name or sort=workflowId:DESC. If order is not specified, defaults to ASC.
     *
     * @param start Start index of pagination
     * @param size Number of entries
     * @param sort list of sorting options, separated by "|" delimiter
     * @param freeText Text you want to search
     * @param query Query you want to search
     * @return instance of {@link SearchResult}
     */
    SearchResult<WorkflowSummary> searchWorkflowsByTasks(
            int start, int size, List<String> sort, String freeText, String query);

    /**
     * Search for workflows based on task parameters. Use sort options as ASC or DESC e.g.
     * sort=name or sort=workflowId:DESC. If order is not specified, defaults to ASC.
     *
     * @param start Start index of pagination
     * @param size Number of entries
     * @param sort list of sorting options, separated by "|" delimiter
     * @param freeText Text you want to search
     * @param query Query you want to search
     * @return instance of {@link SearchResult}
     */
    SearchResult<Workflow> searchWorkflowsByTasksV2(
            int start, int size, List<String> sort, String freeText, String query);

    /**
     * Get the external storage location where the workflow input payload is stored/to be stored
     *
     * @param path the path for which the external storage location is to be populated
     * @param operation the operation to be performed (read or write)
     * @param payloadType the type of payload (input or output)
     * @return {@link ExternalStorageLocation} containing the uri and the path to the payload is
     *     stored in external storage
     */
    ExternalStorageLocation getExternalStorageLocation(
            String path, String operation, String payloadType);
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/main/java/com/netflix/conductor/service/AdminService.java | core/src/main/java/com/netflix/conductor/service/AdminService.java | /*
* Copyright 2022 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.service;
import java.util.List;
import java.util.Map;
import org.springframework.validation.annotation.Validated;
import com.netflix.conductor.common.metadata.tasks.Task;
import jakarta.validation.constraints.NotEmpty;
@Validated
public interface AdminService {

    /**
     * Queue up all the running workflows for sweep.
     *
     * @param workflowId Id of the workflow
     * @return the id of the workflow instance that can be used for tracking.
     */
    String requeueSweep(
            @NotEmpty(message = "WorkflowId cannot be null or empty.") String workflowId);

    /**
     * Get all the configuration parameters.
     *
     * @return all the configuration parameters.
     */
    Map<String, Object> getAllConfig();

    /**
     * Get the list of pending tasks for a given task type, paginated.
     *
     * @param taskType Name of the task
     * @param start Start index of pagination
     * @param count Number of entries
     * @return list of pending {@link Task}
     */
    List<Task> getListOfPendingTask(
            @NotEmpty(message = "TaskType cannot be null or empty.") String taskType,
            Integer start,
            Integer count);

    /**
     * Verify that the Workflow is consistent, and run repairs as needed.
     *
     * @param workflowId id of the workflow to be verified
     * @return true, if repair was successful
     */
    boolean verifyAndRepairWorkflowConsistency(
            @NotEmpty(message = "WorkflowId cannot be null or empty.") String workflowId);

    /**
     * Get registered queues.
     *
     * @param verbose when true, return verbose queue details
     * @return map of event queues
     */
    Map<String, ?> getEventQueues(boolean verbose);
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/main/java/com/netflix/conductor/service/TaskServiceImpl.java | core/src/main/java/com/netflix/conductor/service/TaskServiceImpl.java | /*
* Copyright 2022 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.service;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.stream.Collectors;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Service;
import com.netflix.conductor.annotations.Audit;
import com.netflix.conductor.annotations.Trace;
import com.netflix.conductor.common.metadata.tasks.PollData;
import com.netflix.conductor.common.metadata.tasks.Task;
import com.netflix.conductor.common.metadata.tasks.TaskExecLog;
import com.netflix.conductor.common.metadata.tasks.TaskResult;
import com.netflix.conductor.common.run.ExternalStorageLocation;
import com.netflix.conductor.common.run.SearchResult;
import com.netflix.conductor.common.run.TaskSummary;
import com.netflix.conductor.core.utils.QueueUtils;
import com.netflix.conductor.dao.QueueDAO;
import com.netflix.conductor.metrics.Monitors;
import com.netflix.conductor.model.TaskModel;
@Audit
@Trace
@Service
public class TaskServiceImpl implements TaskService {
private static final Logger LOGGER = LoggerFactory.getLogger(TaskServiceImpl.class);
private final ExecutionService executionService;
private final QueueDAO queueDAO;
    /** Creates the task service backed by the given execution service and queue DAO. */
    public TaskServiceImpl(ExecutionService executionService, QueueDAO queueDAO) {
        this.executionService = executionService;
        this.queueDAO = queueDAO;
    }
/**
* Poll for a task of a certain type.
*
* @param taskType Task name
* @param workerId id of the workflow
* @param domain Domain of the workflow
* @return polled {@link Task}
*/
public Task poll(String taskType, String workerId, String domain) {
LOGGER.debug("Task being polled: /tasks/poll/{}?{}&{}", taskType, workerId, domain);
Task task = executionService.getLastPollTask(taskType, workerId, domain);
if (task != null) {
LOGGER.debug(
"The Task {} being returned for /tasks/poll/{}?{}&{}",
task,
taskType,
workerId,
domain);
}
Monitors.recordTaskPollCount(taskType, domain, 1);
return task;
}
/**
* Batch Poll for a task of a certain type.
*
* @param taskType Task Name
* @param workerId id of the workflow
* @param domain Domain of the workflow
* @param count Number of tasks
* @param timeout Timeout for polling in milliseconds
* @return list of {@link Task}
*/
public List<Task> batchPoll(
String taskType, String workerId, String domain, Integer count, Integer timeout) {
LOGGER.debug(
"Tasks being batch polled: /tasks/poll/batch/{}?{}&{}&{}&{}",
taskType,
workerId,
domain,
count,
timeout);
List<Task> polledTasks = executionService.poll(taskType, workerId, domain, count, timeout);
LOGGER.debug(
"The Tasks {} being returned for /tasks/poll/batch/{}?{}&{}&{}&{}",
polledTasks.stream().map(Task::getTaskId).collect(Collectors.toList()),
taskType,
workerId,
domain,
count,
timeout);
Monitors.recordTaskPollCount(taskType, domain, polledTasks.size());
return polledTasks;
}
/**
* Get in progress tasks. The results are paginated.
*
* @param taskType Task Name
* @param startKey Start index of pagination
* @param count Number of entries
* @return list of {@link Task}
*/
public List<Task> getTasks(String taskType, String startKey, Integer count) {
return executionService.getTasks(taskType, startKey, count);
}
/**
* Get in progress task for a given workflow id.
*
* @param workflowId id of the workflow
* @param taskReferenceName Task reference name.
* @return instance of {@link Task}
*/
public Task getPendingTaskForWorkflow(String workflowId, String taskReferenceName) {
return executionService.getPendingTaskForWorkflow(taskReferenceName, workflowId);
}
/**
* Updates a task.
*
* @param taskResult Instance of {@link TaskResult}
* @return the updated task.
*/
public TaskModel updateTask(TaskResult taskResult) {
LOGGER.debug(
"Update Task: {} with callback time: {}",
taskResult,
taskResult.getCallbackAfterSeconds());
return executionService.updateTask(taskResult);
}
@Override
public String updateTask(
String workflowId,
String taskRefName,
TaskResult.Status status,
String workerId,
Map<String, Object> output) {
Task pending = getPendingTaskForWorkflow(workflowId, taskRefName);
if (pending == null) {
return null;
}
TaskResult taskResult = new TaskResult(pending);
taskResult.setStatus(status);
taskResult.getOutputData().putAll(output);
if (StringUtils.isNotBlank(workerId)) {
taskResult.setWorkerId(workerId);
}
TaskModel updatedTask = updateTask(taskResult);
if (updatedTask != null) {
return updatedTask.getTaskId();
}
return null;
}
/**
* Ack Task is received.
*
* @param taskId id of the task
* @param workerId id of the worker
* @return `true|false` if task is received or not
*/
public String ackTaskReceived(String taskId, String workerId) {
LOGGER.debug("Ack received for task: {} from worker: {}", taskId, workerId);
return String.valueOf(ackTaskReceived(taskId));
}
/**
* Ack Task is received.
*
* @param taskId id of the task
* @return `true|false` if task is received or not
*/
public boolean ackTaskReceived(String taskId) {
LOGGER.debug("Ack received for task: {}", taskId);
AtomicBoolean ackResult = new AtomicBoolean(false);
try {
ackResult.set(executionService.ackTaskReceived(taskId));
} catch (Exception e) {
// Fail the task and let decide reevaluate the workflow, thereby preventing workflow
// being stuck from transient ack errors.
String errorMsg = String.format("Error when trying to ack task %s", taskId);
LOGGER.error(errorMsg, e);
Task task = executionService.getTask(taskId);
Monitors.recordAckTaskError(task.getTaskType());
failTask(task, errorMsg);
ackResult.set(false);
}
return ackResult.get();
}
/** Updates the task with FAILED status; On exception, fails the workflow. */
private void failTask(Task task, String errorMsg) {
try {
TaskResult taskResult = new TaskResult();
taskResult.setStatus(TaskResult.Status.FAILED);
taskResult.setTaskId(task.getTaskId());
taskResult.setWorkflowInstanceId(task.getWorkflowInstanceId());
taskResult.setReasonForIncompletion(errorMsg);
executionService.updateTask(taskResult);
} catch (Exception e) {
LOGGER.error(
"Unable to fail task: {} in workflow: {}",
task.getTaskId(),
task.getWorkflowInstanceId(),
e);
executionService.terminateWorkflow(
task.getWorkflowInstanceId(), "Failed to ack task: " + task.getTaskId());
}
}
/**
* Log Task Execution Details.
*
* @param taskId id of the task
* @param log Details you want to log
*/
public void log(String taskId, String log) {
executionService.log(taskId, log);
}
    /**
     * Get Task Execution Logs previously recorded via {@code log(taskId, ...)}.
     *
     * @param taskId id of the task.
     * @return list of {@link TaskExecLog}
     */
    public List<TaskExecLog> getTaskLogs(String taskId) {
        return executionService.getTaskLogs(taskId);
    }
    /**
     * Get task by Id.
     *
     * @param taskId id of the task.
     * @return instance of {@link Task}; may be null if no such task exists — TODO confirm
     *     against executionService contract
     */
    public Task getTask(String taskId) {
        return executionService.getTask(taskId);
    }
    /**
     * Remove Task from a Task type queue.
     *
     * <p>NOTE(review): {@code taskType} is not used — removal is keyed solely by {@code taskId}.
     * The parameter is retained for API compatibility.
     *
     * @param taskType Task Name (unused)
     * @param taskId ID of the task
     */
    public void removeTaskFromQueue(String taskType, String taskId) {
        executionService.removeTaskFromQueue(taskId);
    }
    /**
     * Remove Task from its Task type queue, identified by task id alone.
     *
     * @param taskId ID of the task
     */
    public void removeTaskFromQueue(String taskId) {
        executionService.removeTaskFromQueue(taskId);
    }
    /**
     * Get Task type queue sizes.
     *
     * @param taskTypes List of task types.
     * @return map of task type as Key and queue size as value.
     */
    public Map<String, Integer> getTaskQueueSizes(List<String> taskTypes) {
        return executionService.getTaskQueueSizes(taskTypes);
    }
@Override
public Integer getTaskQueueSize(
String taskType, String domain, String isolationGroupId, String executionNamespace) {
String queueName =
QueueUtils.getQueueName(
taskType,
StringUtils.trimToNull(domain),
StringUtils.trimToNull(isolationGroupId),
StringUtils.trimToNull(executionNamespace));
return executionService.getTaskQueueSize(queueName);
}
    /**
     * Get the details about each queue.
     *
     * @return nested map of queue details, keyed by queue name; the inner map semantics are
     *     defined by {@code queueDAO.queuesDetailVerbose()} — not visible here.
     */
    public Map<String, Map<String, Map<String, Long>>> allVerbose() {
        return queueDAO.queuesDetailVerbose();
    }
/**
* Get the details about each queue.
*
* @return map of details about each queue.
*/
public Map<String, Long> getAllQueueDetails() {
return queueDAO.queuesDetail().entrySet().stream()
.sorted(Entry.comparingByKey())
.collect(
Collectors.toMap(
Entry::getKey,
Entry::getValue,
(v1, v2) -> v1,
LinkedHashMap::new));
}
    /**
     * Get the last poll data for a given task type.
     *
     * @param taskType Task Name
     * @return list of {@link PollData}
     */
    public List<PollData> getPollData(String taskType) {
        return executionService.getPollData(taskType);
    }
    /**
     * Get the last poll data for all task types.
     *
     * @return list of {@link PollData}
     */
    public List<PollData> getAllPollData() {
        return executionService.getAllPollData();
    }
    /**
     * Requeue pending tasks of the given task type.
     *
     * @param taskType Task name.
     * @return number of tasks requeued, rendered as a String (the numeric count comes from
     *     {@code executionService.requeuePendingTasks}).
     */
    public String requeuePendingTask(String taskType) {
        return String.valueOf(executionService.requeuePendingTasks(taskType));
    }
    /**
     * Search for tasks based in payload and other parameters. Use sort options as ASC or DESC e.g.
     * sort=name or sort=workflowId. If order is not specified, defaults to ASC.
     *
     * <p>Note the delegation reorders arguments: this method's (start, size, sort, freeText,
     * query) maps to executionService's (query, freeText, start, size, sort).
     *
     * @param start Start index of pagination
     * @param size Number of entries
     * @param sort Sorting type ASC|DESC
     * @param freeText Text you want to search
     * @param query Query you want to search
     * @return instance of {@link SearchResult} of {@link TaskSummary}
     */
    public SearchResult<TaskSummary> search(
            int start, int size, String sort, String freeText, String query) {
        return executionService.getSearchTasks(query, freeText, start, size, sort);
    }
    /**
     * Search for tasks based in payload and other parameters, returning full {@link Task} objects
     * rather than summaries. Use sort options as ASC or DESC e.g. sort=name or sort=workflowId.
     * If order is not specified, defaults to ASC.
     *
     * <p>Note the delegation reorders arguments: this method's (start, size, sort, freeText,
     * query) maps to executionService's (query, freeText, start, size, sort).
     *
     * @param start Start index of pagination
     * @param size Number of entries
     * @param sort Sorting type ASC|DESC
     * @param freeText Text you want to search
     * @param query Query you want to search
     * @return instance of {@link SearchResult} of {@link Task}
     */
    public SearchResult<Task> searchV2(
            int start, int size, String sort, String freeText, String query) {
        return executionService.getSearchTasksV2(query, freeText, start, size, sort);
    }
    /**
     * Get the external storage location where the task output payload is stored/to be stored
     *
     * @param path the path for which the external storage location is to be populated
     * @param operation the operation to be performed (read or write)
     * @param type the type of payload (input or output)
     * @return {@link ExternalStorageLocation} containing the uri and the path to the payload is
     *     stored in external storage
     */
    public ExternalStorageLocation getExternalStorageLocation(
            String path, String operation, String type) {
        return executionService.getExternalStorageLocation(path, operation, type);
    }
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/main/java/com/netflix/conductor/service/WorkflowBulkServiceImpl.java | core/src/main/java/com/netflix/conductor/service/WorkflowBulkServiceImpl.java | /*
* Copyright 2022 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.service;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Service;
import com.netflix.conductor.annotations.Audit;
import com.netflix.conductor.annotations.Trace;
import com.netflix.conductor.common.model.BulkResponse;
import com.netflix.conductor.core.execution.WorkflowExecutor;
import com.netflix.conductor.model.WorkflowModel;
@Audit
@Trace
@Service
public class WorkflowBulkServiceImpl implements WorkflowBulkService {

    // Fixed: the logger was previously created with WorkflowBulkService.class (the interface),
    // mis-attributing log records; use the implementation class.
    private static final Logger LOGGER = LoggerFactory.getLogger(WorkflowBulkServiceImpl.class);

    private final WorkflowExecutor workflowExecutor;
    private final WorkflowService workflowService;

    public WorkflowBulkServiceImpl(
            WorkflowExecutor workflowExecutor, WorkflowService workflowService) {
        this.workflowExecutor = workflowExecutor;
        this.workflowService = workflowService;
    }

    /**
     * Pause the list of workflows.
     *
     * @param workflowIds - list of workflow Ids to perform pause operation on
     * @return bulk response object containing a list of succeeded workflows and a list of failed
     *     ones with errors
     */
    public BulkResponse<String> pauseWorkflow(List<String> workflowIds) {
        BulkResponse<String> bulkResponse = new BulkResponse<>();
        for (String workflowId : workflowIds) {
            applyOperation(
                    bulkResponse,
                    workflowId,
                    "pauseWorkflow",
                    () -> workflowExecutor.pauseWorkflow(workflowId));
        }
        return bulkResponse;
    }

    /**
     * Resume the list of workflows.
     *
     * @param workflowIds - list of workflow Ids to perform resume operation on
     * @return bulk response object containing a list of succeeded workflows and a list of failed
     *     ones with errors
     */
    public BulkResponse<String> resumeWorkflow(List<String> workflowIds) {
        BulkResponse<String> bulkResponse = new BulkResponse<>();
        for (String workflowId : workflowIds) {
            applyOperation(
                    bulkResponse,
                    workflowId,
                    "resumeWorkflow",
                    () -> workflowExecutor.resumeWorkflow(workflowId));
        }
        return bulkResponse;
    }

    /**
     * Restart the list of workflows.
     *
     * @param workflowIds - list of workflow Ids to perform restart operation on
     * @param useLatestDefinitions if true, use latest workflow and task definitions upon restart
     * @return bulk response object containing a list of succeeded workflows and a list of failed
     *     ones with errors
     */
    public BulkResponse<String> restart(List<String> workflowIds, boolean useLatestDefinitions) {
        BulkResponse<String> bulkResponse = new BulkResponse<>();
        for (String workflowId : workflowIds) {
            applyOperation(
                    bulkResponse,
                    workflowId,
                    "restart",
                    () -> workflowExecutor.restart(workflowId, useLatestDefinitions));
        }
        return bulkResponse;
    }

    /**
     * Retry the last failed task for each workflow from the list.
     *
     * @param workflowIds - list of workflow Ids to perform retry operation on
     * @return bulk response object containing a list of succeeded workflows and a list of failed
     *     ones with errors
     */
    public BulkResponse<String> retry(List<String> workflowIds) {
        BulkResponse<String> bulkResponse = new BulkResponse<>();
        for (String workflowId : workflowIds) {
            applyOperation(
                    bulkResponse,
                    workflowId,
                    "retry",
                    () -> workflowExecutor.retry(workflowId, false));
        }
        return bulkResponse;
    }

    /**
     * Terminate workflows execution.
     *
     * @param workflowIds - list of workflow Ids to perform terminate operation on
     * @param reason - description to be specified for the terminated workflow for future
     *     references.
     * @return bulk response object containing a list of succeeded workflows and a list of failed
     *     ones with errors
     */
    public BulkResponse<String> terminate(List<String> workflowIds, String reason) {
        BulkResponse<String> bulkResponse = new BulkResponse<>();
        for (String workflowId : workflowIds) {
            applyOperation(
                    bulkResponse,
                    workflowId,
                    "terminate",
                    () -> workflowExecutor.terminateWorkflow(workflowId, reason));
        }
        return bulkResponse;
    }

    /**
     * Removes a list of workflows from the system.
     *
     * @param workflowIds List of WorkflowIDs of the workflows you want to remove from system.
     * @param archiveWorkflow Archives the workflow and associated tasks instead of removing them.
     * @return bulk response object containing a list of succeeded workflows and a list of failed
     *     ones with errors
     */
    public BulkResponse<String> deleteWorkflow(List<String> workflowIds, boolean archiveWorkflow) {
        BulkResponse<String> bulkResponse = new BulkResponse<>();
        for (String workflowId : workflowIds) {
            applyOperation(
                    bulkResponse,
                    workflowId,
                    "delete",
                    () ->
                            workflowService.deleteWorkflow(
                                    workflowId,
                                    archiveWorkflow)); // TODO: change this to method that cancels
            // then deletes
        }
        return bulkResponse;
    }

    /**
     * Terminates execution for workflows in a list, then removes each workflow.
     *
     * @param workflowIds List of workflow IDs to terminate and delete.
     * @param reason Reason for terminating the workflow.
     * @param archiveWorkflow Archives the workflow and associated tasks instead of removing them.
     * @return bulk response object containing a list of succeeded workflows and a list of failed
     *     ones with errors
     */
    public BulkResponse<String> terminateRemove(
            List<String> workflowIds, String reason, boolean archiveWorkflow) {
        BulkResponse<String> bulkResponse = new BulkResponse<>();
        for (String workflowId : workflowIds) {
            // NOTE(review): historical behavior preserved — each workflowId may appear twice in
            // the response (once for terminate, once for delete), and delete is attempted even
            // when terminate failed.
            applyOperation(
                    bulkResponse,
                    workflowId,
                    "terminate",
                    () -> workflowExecutor.terminateWorkflow(workflowId, reason));
            applyOperation(
                    bulkResponse,
                    workflowId,
                    "delete",
                    () -> workflowService.deleteWorkflow(workflowId, archiveWorkflow));
        }
        return bulkResponse;
    }

    /**
     * Fetch workflow details for given workflowIds.
     *
     * @param workflowIds List of workflow IDs to fetch.
     * @param includeTasks includes tasks from workflow
     * @return bulk response object containing a list of workflow details
     */
    @Override
    public BulkResponse<WorkflowModel> searchWorkflow(
            List<String> workflowIds, boolean includeTasks) {
        BulkResponse<WorkflowModel> bulkResponse = new BulkResponse<>();
        for (String workflowId : workflowIds) {
            try {
                WorkflowModel workflowModel =
                        workflowExecutor.getWorkflow(workflowId, includeTasks);
                bulkResponse.appendSuccessResponse(workflowModel);
            } catch (Exception e) {
                LOGGER.error(
                        "bulk search exception, workflowId {}, message: {} ",
                        workflowId,
                        e.getMessage(),
                        e);
                bulkResponse.appendFailedResponse(workflowId, e.getMessage());
            }
        }
        return bulkResponse;
    }

    /**
     * Runs {@code action} for a single workflow, appending a success entry on completion or a
     * failed entry (with the exception message) on error. Failures are logged and never
     * propagated, so one bad workflow does not abort the rest of the batch. The rendered log
     * message is identical to the per-method messages used previously.
     *
     * @param bulkResponse response accumulator to append to
     * @param workflowId id of the workflow being operated on
     * @param operation short operation name used in the log message
     * @param action the operation to perform
     */
    private void applyOperation(
            BulkResponse<String> bulkResponse,
            String workflowId,
            String operation,
            Runnable action) {
        try {
            action.run();
            bulkResponse.appendSuccessResponse(workflowId);
        } catch (Exception e) {
            LOGGER.error(
                    "bulk {} exception, workflowId {}, message: {} ",
                    operation,
                    workflowId,
                    e.getMessage(),
                    e);
            bulkResponse.appendFailedResponse(workflowId, e.getMessage());
        }
    }
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/main/java/com/netflix/conductor/annotations/Audit.java | core/src/main/java/com/netflix/conductor/annotations/Audit.java | /*
* Copyright 2022 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.annotations;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
import static java.lang.annotation.ElementType.TYPE;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
/**
 * Marks a service type for custom audit handling. Runtime-retained, type-level marker; the
 * auditing behavior itself is provided by whatever aspect/processor consumes this annotation —
 * not visible here.
 */
@Target({TYPE})
@Retention(RUNTIME)
public @interface Audit {}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/main/java/com/netflix/conductor/annotations/VisibleForTesting.java | core/src/main/java/com/netflix/conductor/annotations/VisibleForTesting.java | /*
* Copyright 2022 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.annotations;
import java.lang.annotation.*;
/**
 * Annotates a program element that exists, or is more widely visible than otherwise necessary,
 * only for use in test code.
 *
 * <p>Class-file retention: the marker is kept in the compiled class but is not visible via
 * runtime reflection.
 */
@Retention(RetentionPolicy.CLASS)
@Target({ElementType.FIELD, ElementType.TYPE, ElementType.METHOD})
@Documented
public @interface VisibleForTesting {}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/main/java/com/netflix/conductor/annotations/Trace.java | core/src/main/java/com/netflix/conductor/annotations/Trace.java | /*
* Copyright 2022 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.annotations;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
import static java.lang.annotation.ElementType.TYPE;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
/**
 * Type-level marker annotation, retained at runtime. NOTE(review): the tracing semantics are
 * defined by whatever aspect/instrumentation consumes this annotation — not visible here.
 */
@Target({TYPE})
@Retention(RUNTIME)
public @interface Trace {}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/main/java/com/netflix/conductor/model/WorkflowModel.java | core/src/main/java/com/netflix/conductor/model/WorkflowModel.java | /*
* Copyright 2022 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.model;
import java.util.*;
import java.util.stream.Collectors;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.BeanUtils;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.run.Workflow;
import com.netflix.conductor.core.utils.Utils;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
/**
 * Internal (engine-side) representation of a workflow execution. Mirrors the API-facing
 * {@link Workflow} (see {@link #toWorkflow()}) and additionally supports externalizing large
 * input/output payloads to external storage via {@link #externalizeInput(String)} /
 * {@link #externalizeOutput(String)}.
 */
public class WorkflowModel {
    /**
     * Workflow lifecycle states. Each state carries two flags: whether it is terminal (no
     * further transitions expected) and whether it counts as successful.
     */
    public enum Status {
        RUNNING(false, false),
        COMPLETED(true, true),
        FAILED(true, false),
        TIMED_OUT(true, false),
        TERMINATED(true, false),
        PAUSED(false, true);
        private final boolean terminal;
        private final boolean successful;
        Status(boolean terminal, boolean successful) {
            this.terminal = terminal;
            this.successful = successful;
        }
        public boolean isTerminal() {
            return terminal;
        }
        public boolean isSuccessful() {
            return successful;
        }
    }
    private Status status = Status.RUNNING;
    private long endTime;
    private String workflowId;
    // Parent linkage when this execution was started as a sub-workflow.
    private String parentWorkflowId;
    private String parentWorkflowTaskId;
    private List<TaskModel> tasks = new LinkedList<>();
    private String correlationId;
    private String reRunFromWorkflowId;
    private String reasonForIncompletion;
    private String event;
    private Map<String, String> taskToDomain = new HashMap<>();
    @JsonInclude(JsonInclude.Include.NON_EMPTY)
    private Set<String> failedReferenceTaskNames = new HashSet<>();
    @JsonInclude(JsonInclude.Include.NON_EMPTY)
    private Set<String> failedTaskNames = new HashSet<>();
    private WorkflowDef workflowDefinition;
    // Set when the corresponding payload has been moved to external storage.
    private String externalInputPayloadStoragePath;
    private String externalOutputPayloadStoragePath;
    private int priority;
    private Map<String, Object> variables = new HashMap<>();
    private long lastRetriedTime;
    private String ownerApp;
    private Long createTime;
    private Long updatedTime;
    private String createdBy;
    private String updatedBy;
    // Capture the failed taskId if the workflow execution failed because of task failure
    private String failedTaskId;
    // Status before the most recent status change; maintained by setStatus.
    private Status previousStatus;
    // input/output hold the in-memory payloads; inputPayload/outputPayload stage data read back
    // from external storage (see internalizeInput/internalizeOutput). All are excluded from
    // default JSON serialization; see the deprecated getRawInput/getRawOutput accessors.
    @JsonIgnore private Map<String, Object> input = new HashMap<>();
    @JsonIgnore private Map<String, Object> output = new HashMap<>();
    @JsonIgnore private Map<String, Object> inputPayload = new HashMap<>();
    @JsonIgnore private Map<String, Object> outputPayload = new HashMap<>();
    public Status getPreviousStatus() {
        return previousStatus;
    }
    public void setPreviousStatus(Status status) {
        this.previousStatus = status;
    }
    public Status getStatus() {
        return status;
    }
    /**
     * Sets the workflow status, recording the prior value in {@link #getPreviousStatus()}
     * whenever the status actually changes.
     */
    public void setStatus(Status status) {
        // update previous status if current status changed
        if (this.status != status) {
            setPreviousStatus(this.status);
        }
        this.status = status;
    }
    public long getEndTime() {
        return endTime;
    }
    public void setEndTime(long endTime) {
        this.endTime = endTime;
    }
    public String getWorkflowId() {
        return workflowId;
    }
    public void setWorkflowId(String workflowId) {
        this.workflowId = workflowId;
    }
    public String getParentWorkflowId() {
        return parentWorkflowId;
    }
    public void setParentWorkflowId(String parentWorkflowId) {
        this.parentWorkflowId = parentWorkflowId;
    }
    public String getParentWorkflowTaskId() {
        return parentWorkflowTaskId;
    }
    public void setParentWorkflowTaskId(String parentWorkflowTaskId) {
        this.parentWorkflowTaskId = parentWorkflowTaskId;
    }
    public List<TaskModel> getTasks() {
        return tasks;
    }
    public void setTasks(List<TaskModel> tasks) {
        this.tasks = tasks;
    }
    /**
     * Returns the effective workflow input. If a payload was staged from external storage (see
     * {@link #internalizeInput(Map)}), it is merged into {@code input} on first read and the
     * staging copy is cleared — note this getter mutates internal state in that case.
     */
    @JsonIgnore
    public Map<String, Object> getInput() {
        if (!inputPayload.isEmpty() && !input.isEmpty()) {
            input.putAll(inputPayload);
            inputPayload = new HashMap<>();
            return input;
        } else if (inputPayload.isEmpty()) {
            return input;
        } else {
            return inputPayload;
        }
    }
    /** Sets the workflow input; a null argument is normalized to an empty map. */
    @JsonIgnore
    public void setInput(Map<String, Object> input) {
        if (input == null) {
            input = new HashMap<>();
        }
        this.input = input;
    }
    /**
     * Returns the effective workflow output, merging any staged external payload into
     * {@code output} on first read (mirrors {@link #getInput()}).
     */
    @JsonIgnore
    public Map<String, Object> getOutput() {
        if (!outputPayload.isEmpty() && !output.isEmpty()) {
            output.putAll(outputPayload);
            outputPayload = new HashMap<>();
            return output;
        } else if (outputPayload.isEmpty()) {
            return output;
        } else {
            return outputPayload;
        }
    }
    /** Sets the workflow output; a null argument is normalized to an empty map. */
    @JsonIgnore
    public void setOutput(Map<String, Object> output) {
        if (output == null) {
            output = new HashMap<>();
        }
        this.output = output;
    }
    /**
     * @deprecated Used only for JSON serialization and deserialization.
     */
    @Deprecated
    @JsonProperty("input")
    public Map<String, Object> getRawInput() {
        return input;
    }
    /**
     * @deprecated Used only for JSON serialization and deserialization.
     */
    @Deprecated
    @JsonProperty("input")
    public void setRawInput(Map<String, Object> input) {
        setInput(input);
    }
    /**
     * @deprecated Used only for JSON serialization and deserialization.
     */
    @Deprecated
    @JsonProperty("output")
    public Map<String, Object> getRawOutput() {
        return output;
    }
    /**
     * @deprecated Used only for JSON serialization and deserialization.
     */
    @Deprecated
    @JsonProperty("output")
    public void setRawOutput(Map<String, Object> output) {
        setOutput(output);
    }
    public String getCorrelationId() {
        return correlationId;
    }
    public void setCorrelationId(String correlationId) {
        this.correlationId = correlationId;
    }
    public String getReRunFromWorkflowId() {
        return reRunFromWorkflowId;
    }
    public void setReRunFromWorkflowId(String reRunFromWorkflowId) {
        this.reRunFromWorkflowId = reRunFromWorkflowId;
    }
    public String getReasonForIncompletion() {
        return reasonForIncompletion;
    }
    public void setReasonForIncompletion(String reasonForIncompletion) {
        this.reasonForIncompletion = reasonForIncompletion;
    }
    public String getEvent() {
        return event;
    }
    public void setEvent(String event) {
        this.event = event;
    }
    public Map<String, String> getTaskToDomain() {
        return taskToDomain;
    }
    public void setTaskToDomain(Map<String, String> taskToDomain) {
        this.taskToDomain = taskToDomain;
    }
    public Set<String> getFailedReferenceTaskNames() {
        return failedReferenceTaskNames;
    }
    public void setFailedReferenceTaskNames(Set<String> failedReferenceTaskNames) {
        this.failedReferenceTaskNames = failedReferenceTaskNames;
    }
    public Set<String> getFailedTaskNames() {
        return failedTaskNames;
    }
    public void setFailedTaskNames(Set<String> failedTaskNames) {
        this.failedTaskNames = failedTaskNames;
    }
    public WorkflowDef getWorkflowDefinition() {
        return workflowDefinition;
    }
    public void setWorkflowDefinition(WorkflowDef workflowDefinition) {
        this.workflowDefinition = workflowDefinition;
    }
    public String getExternalInputPayloadStoragePath() {
        return externalInputPayloadStoragePath;
    }
    public void setExternalInputPayloadStoragePath(String externalInputPayloadStoragePath) {
        this.externalInputPayloadStoragePath = externalInputPayloadStoragePath;
    }
    public String getExternalOutputPayloadStoragePath() {
        return externalOutputPayloadStoragePath;
    }
    public void setExternalOutputPayloadStoragePath(String externalOutputPayloadStoragePath) {
        this.externalOutputPayloadStoragePath = externalOutputPayloadStoragePath;
    }
    public int getPriority() {
        return priority;
    }
    /**
     * Sets the workflow priority.
     *
     * @throws IllegalArgumentException if priority is outside the 0-99 inclusive range
     */
    public void setPriority(int priority) {
        if (priority < 0 || priority > 99) {
            throw new IllegalArgumentException("priority MUST be between 0 and 99 (inclusive)");
        }
        this.priority = priority;
    }
    public Map<String, Object> getVariables() {
        return variables;
    }
    public void setVariables(Map<String, Object> variables) {
        this.variables = variables;
    }
    public long getLastRetriedTime() {
        return lastRetriedTime;
    }
    public void setLastRetriedTime(long lastRetriedTime) {
        this.lastRetriedTime = lastRetriedTime;
    }
    public String getOwnerApp() {
        return ownerApp;
    }
    public void setOwnerApp(String ownerApp) {
        this.ownerApp = ownerApp;
    }
    public Long getCreateTime() {
        return createTime;
    }
    public void setCreateTime(Long createTime) {
        this.createTime = createTime;
    }
    public Long getUpdatedTime() {
        return updatedTime;
    }
    public void setUpdatedTime(Long updatedTime) {
        this.updatedTime = updatedTime;
    }
    public String getCreatedBy() {
        return createdBy;
    }
    public void setCreatedBy(String createdBy) {
        this.createdBy = createdBy;
    }
    public String getUpdatedBy() {
        return updatedBy;
    }
    public void setUpdatedBy(String updatedBy) {
        this.updatedBy = updatedBy;
    }
    public String getFailedTaskId() {
        return failedTaskId;
    }
    public void setFailedTaskId(String failedTaskId) {
        this.failedTaskId = failedTaskId;
    }
    /**
     * Convenience method for accessing the workflow definition name.
     *
     * @return the workflow definition name.
     * @throws RuntimeException (via Utils.checkNotNull) if the workflow definition is null
     */
    public String getWorkflowName() {
        Utils.checkNotNull(workflowDefinition, "Workflow definition is null");
        return workflowDefinition.getName();
    }
    /**
     * Convenience method for accessing the workflow definition version.
     *
     * @return the workflow definition version.
     * @throws RuntimeException (via Utils.checkNotNull) if the workflow definition is null
     */
    public int getWorkflowVersion() {
        Utils.checkNotNull(workflowDefinition, "Workflow definition is null");
        return workflowDefinition.getVersion();
    }
    /** Returns true when this workflow was spawned by a parent workflow. */
    public boolean hasParent() {
        return StringUtils.isNotEmpty(parentWorkflowId);
    }
    /**
     * A string representation of all relevant fields that identify this workflow. Intended for use
     * in log and other system generated messages.
     */
    public String toShortString() {
        String name = workflowDefinition != null ? workflowDefinition.getName() : null;
        Integer version = workflowDefinition != null ? workflowDefinition.getVersion() : null;
        return String.format("%s.%s/%s", name, version, workflowId);
    }
    /**
     * Returns the most recent task with the given reference name, or {@code null} if no task in
     * this workflow carries that reference name.
     *
     * @param refName the task reference name to look up
     * @throws RuntimeException if {@code refName} is null or any task lacks a reference name
     */
    public TaskModel getTaskByRefName(String refName) {
        if (refName == null) {
            throw new RuntimeException(
                    "refName passed is null.  Check the workflow execution.  For dynamic tasks, make sure referenceTaskName is set to a not null value");
        }
        LinkedList<TaskModel> found = new LinkedList<>();
        for (TaskModel task : tasks) {
            if (task.getReferenceTaskName() == null) {
                throw new RuntimeException(
                        "Task "
                                + task.getTaskDefName()
                                + ", seq="
                                + task.getSeq()
                                + " does not have reference name specified.");
            }
            if (task.getReferenceTaskName().equals(refName)) {
                found.add(task);
            }
        }
        if (found.isEmpty()) {
            return null;
        }
        return found.getLast();
    }
    /** Moves the in-memory input to the staging copy and records its external storage path. */
    public void externalizeInput(String path) {
        this.inputPayload = this.input;
        this.input = new HashMap<>();
        this.externalInputPayloadStoragePath = path;
    }
    /** Moves the in-memory output to the staging copy and records its external storage path. */
    public void externalizeOutput(String path) {
        this.outputPayload = this.output;
        this.output = new HashMap<>();
        this.externalOutputPayloadStoragePath = path;
    }
    /** Installs a payload read back from external storage; merged into input on first read. */
    public void internalizeInput(Map<String, Object> data) {
        this.input = new HashMap<>();
        this.inputPayload = data;
    }
    /** Installs a payload read back from external storage; merged into output on first read. */
    public void internalizeOutput(Map<String, Object> data) {
        this.output = new HashMap<>();
        this.outputPayload = data;
    }
    @Override
    public String toString() {
        String name = workflowDefinition != null ? workflowDefinition.getName() : null;
        Integer version = workflowDefinition != null ? workflowDefinition.getVersion() : null;
        return String.format("%s.%s/%s.%s", name, version, workflowId, status);
    }
    // NOTE(review): equals/hashCode invoke getInput(), which can merge staged external payloads
    // as a side effect; output is compared field-by-field instead.
    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        WorkflowModel that = (WorkflowModel) o;
        return getEndTime() == that.getEndTime()
                && getPriority() == that.getPriority()
                && getLastRetriedTime() == that.getLastRetriedTime()
                && getStatus() == that.getStatus()
                && Objects.equals(getWorkflowId(), that.getWorkflowId())
                && Objects.equals(getParentWorkflowId(), that.getParentWorkflowId())
                && Objects.equals(getParentWorkflowTaskId(), that.getParentWorkflowTaskId())
                && Objects.equals(getTasks(), that.getTasks())
                && Objects.equals(getInput(), that.getInput())
                && Objects.equals(output, that.output)
                && Objects.equals(outputPayload, that.outputPayload)
                && Objects.equals(getCorrelationId(), that.getCorrelationId())
                && Objects.equals(getReRunFromWorkflowId(), that.getReRunFromWorkflowId())
                && Objects.equals(getReasonForIncompletion(), that.getReasonForIncompletion())
                && Objects.equals(getEvent(), that.getEvent())
                && Objects.equals(getTaskToDomain(), that.getTaskToDomain())
                && Objects.equals(getFailedReferenceTaskNames(), that.getFailedReferenceTaskNames())
                && Objects.equals(getFailedTaskNames(), that.getFailedTaskNames())
                && Objects.equals(getWorkflowDefinition(), that.getWorkflowDefinition())
                && Objects.equals(
                        getExternalInputPayloadStoragePath(),
                        that.getExternalInputPayloadStoragePath())
                && Objects.equals(
                        getExternalOutputPayloadStoragePath(),
                        that.getExternalOutputPayloadStoragePath())
                && Objects.equals(getVariables(), that.getVariables())
                && Objects.equals(getOwnerApp(), that.getOwnerApp())
                && Objects.equals(getCreateTime(), that.getCreateTime())
                && Objects.equals(getUpdatedTime(), that.getUpdatedTime())
                && Objects.equals(getCreatedBy(), that.getCreatedBy())
                && Objects.equals(getUpdatedBy(), that.getUpdatedBy());
    }
    @Override
    public int hashCode() {
        return Objects.hash(
                getStatus(),
                getEndTime(),
                getWorkflowId(),
                getParentWorkflowId(),
                getParentWorkflowTaskId(),
                getTasks(),
                getInput(),
                output,
                outputPayload,
                getCorrelationId(),
                getReRunFromWorkflowId(),
                getReasonForIncompletion(),
                getEvent(),
                getTaskToDomain(),
                getFailedReferenceTaskNames(),
                getFailedTaskNames(),
                getWorkflowDefinition(),
                getExternalInputPayloadStoragePath(),
                getExternalOutputPayloadStoragePath(),
                getPriority(),
                getVariables(),
                getLastRetriedTime(),
                getOwnerApp(),
                getCreateTime(),
                getUpdatedTime(),
                getCreatedBy(),
                getUpdatedBy());
    }
    /**
     * Converts this model to the API-facing {@link Workflow}. Matching bean properties are
     * copied, tasks are converted via {@link TaskModel#toTask()}, and payloads stored externally
     * are represented as empty maps.
     */
    public Workflow toWorkflow() {
        Workflow workflow = new Workflow();
        BeanUtils.copyProperties(this, workflow);
        workflow.setStatus(Workflow.WorkflowStatus.valueOf(this.status.name()));
        workflow.setTasks(tasks.stream().map(TaskModel::toTask).collect(Collectors.toList()));
        workflow.setUpdateTime(this.updatedTime);
        // ensure that input/output is properly represented
        if (externalInputPayloadStoragePath != null) {
            workflow.setInput(new HashMap<>());
        }
        if (externalOutputPayloadStoragePath != null) {
            workflow.setOutput(new HashMap<>());
        }
        return workflow;
    }
    /** Adds a single entry to the in-memory input map. */
    public void addInput(String key, Object value) {
        this.input.put(key, value);
    }
    /** Merges all entries into the in-memory input map; null is a no-op. */
    public void addInput(Map<String, Object> inputData) {
        if (inputData != null) {
            this.input.putAll(inputData);
        }
    }
    /** Adds a single entry to the in-memory output map. */
    public void addOutput(String key, Object value) {
        this.output.put(key, value);
    }
    /** Merges all entries into the in-memory output map; null is a no-op. */
    public void addOutput(Map<String, Object> outputData) {
        if (outputData != null) {
            this.output.putAll(outputData);
        }
    }
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/main/java/com/netflix/conductor/model/TaskModel.java | core/src/main/java/com/netflix/conductor/model/TaskModel.java | /*
* Copyright 2022 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.model;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.BeanUtils;
import com.netflix.conductor.common.metadata.tasks.Task;
import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.protobuf.Any;
public class TaskModel {
public enum Status {
IN_PROGRESS(false, true, true),
CANCELED(true, false, false),
FAILED(true, false, true),
FAILED_WITH_TERMINAL_ERROR(true, false, false),
COMPLETED(true, true, true),
COMPLETED_WITH_ERRORS(true, true, true),
SCHEDULED(false, true, true),
TIMED_OUT(true, false, true),
SKIPPED(true, true, false);
private final boolean terminal;
private final boolean successful;
private final boolean retriable;
Status(boolean terminal, boolean successful, boolean retriable) {
this.terminal = terminal;
this.successful = successful;
this.retriable = retriable;
}
public boolean isTerminal() {
return terminal;
}
public boolean isSuccessful() {
return successful;
}
public boolean isRetriable() {
return retriable;
}
}
    private String taskType;
    private Status status;
    private String referenceTaskName;
    private int retryCount;
    private int seq;
    private String correlationId;
    private int pollCount;
    private String taskDefName;
    /** Time when the task was scheduled */
    private long scheduledTime;
    /** Time when the task was first polled */
    private long startTime;
    /** Time when the task completed executing */
    private long endTime;
    /** Time when the task was last updated */
    private long updateTime;
    /** Time when first task started */
    // NOTE(review): firstStartTime is not referenced by equals/hashCode/toString — confirm
    // whether that omission is intentional.
    private long firstStartTime;
    private int startDelayInSeconds;
    private String retriedTaskId;
    private boolean retried;
    private boolean executed;
    private boolean callbackFromWorker = true;
    private long responseTimeoutSeconds;
    private String workflowInstanceId;
    private String workflowType;
    private String taskId;
    private String reasonForIncompletion;
    private long callbackAfterSeconds;
    private String workerId;
    private WorkflowTask workflowTask;
    private String domain;
    private Any inputMessage;
    private Any outputMessage;
    private int rateLimitPerFrequency;
    private int rateLimitFrequencyInSeconds;
    private String externalInputPayloadStoragePath;
    private String externalOutputPayloadStoragePath;
    private int workflowPriority;
    private String executionNameSpace;
    private String isolationGroupId;
    private int iteration;
    private String subWorkflowId;
    // Timeout after which the wait task should be marked as completed
    private long waitTimeout;
    /**
     * Used to note that a sub workflow associated with SUB_WORKFLOW task has an action performed on
     * it directly.
     */
    private boolean subworkflowChanged;
    // Staging maps used when payloads are moved to/from external storage (see
    // externalizeInput/internalizeInput below); excluded from JSON serialization.
    @JsonIgnore private Map<String, Object> inputPayload = new HashMap<>();
    @JsonIgnore private Map<String, Object> outputPayload = new HashMap<>();
    // Live task input/output; serialized via the deprecated raw accessors instead.
    @JsonIgnore private Map<String, Object> inputData = new HashMap<>();
    @JsonIgnore private Map<String, Object> outputData = new HashMap<>();
    public String getTaskType() {
        return taskType;
    }
    public void setTaskType(String taskType) {
        this.taskType = taskType;
    }
    public Status getStatus() {
        return status;
    }
    public void setStatus(Status status) {
        this.status = status;
    }
    /**
     * Returns the task input, lazily folding any internalized payload back into {@code inputData}.
     * If both maps are populated, entries from {@code inputPayload} are merged in and the payload
     * map is cleared.
     */
    // NOTE(review): putAll lets inputPayload values OVERWRITE inputData values — the opposite
    // precedence of getOutputData() below, which uses putIfAbsent so the live data wins.
    // Confirm this asymmetry is intentional.
    @JsonIgnore
    public Map<String, Object> getInputData() {
        if (!inputPayload.isEmpty() && !inputData.isEmpty()) {
            inputData.putAll(inputPayload);
            inputPayload = new HashMap<>();
            return inputData;
        } else if (inputPayload.isEmpty()) {
            return inputData;
        } else {
            return inputPayload;
        }
    }
    /** Sets the task input; a null argument is normalized to an empty map. */
    @JsonIgnore
    public void setInputData(Map<String, Object> inputData) {
        if (inputData == null) {
            inputData = new HashMap<>();
        }
        this.inputData = inputData;
    }
    /**
     * @deprecated Used only for JSON serialization and deserialization.
     */
    @JsonProperty("inputData")
    @Deprecated
    public void setRawInputData(Map<String, Object> inputData) {
        setInputData(inputData);
    }
    /**
     * @deprecated Used only for JSON serialization and deserialization.
     */
    @JsonProperty("inputData")
    @Deprecated
    public Map<String, Object> getRawInputData() {
        return inputData;
    }
    // ---- Plain bean accessors (no side effects unless noted) ----
    public String getReferenceTaskName() {
        return referenceTaskName;
    }
    public void setReferenceTaskName(String referenceTaskName) {
        this.referenceTaskName = referenceTaskName;
    }
    public int getRetryCount() {
        return retryCount;
    }
    public void setRetryCount(int retryCount) {
        this.retryCount = retryCount;
    }
    public int getSeq() {
        return seq;
    }
    public void setSeq(int seq) {
        this.seq = seq;
    }
    public String getCorrelationId() {
        return correlationId;
    }
    public void setCorrelationId(String correlationId) {
        this.correlationId = correlationId;
    }
    public int getPollCount() {
        return pollCount;
    }
    public void setPollCount(int pollCount) {
        this.pollCount = pollCount;
    }
    // Side effect: falls back to (and caches) taskType when no task def name is set.
    public String getTaskDefName() {
        if (taskDefName == null || "".equals(taskDefName)) {
            taskDefName = taskType;
        }
        return taskDefName;
    }
    public void setTaskDefName(String taskDefName) {
        this.taskDefName = taskDefName;
    }
    public long getScheduledTime() {
        return scheduledTime;
    }
    public void setScheduledTime(long scheduledTime) {
        this.scheduledTime = scheduledTime;
    }
    public long getStartTime() {
        return startTime;
    }
    public void setStartTime(long startTime) {
        this.startTime = startTime;
    }
    public long getEndTime() {
        return endTime;
    }
    public void setEndTime(long endTime) {
        this.endTime = endTime;
    }
    public long getUpdateTime() {
        return updateTime;
    }
    public void setUpdateTime(long updateTime) {
        this.updateTime = updateTime;
    }
    public int getStartDelayInSeconds() {
        return startDelayInSeconds;
    }
    public void setStartDelayInSeconds(int startDelayInSeconds) {
        this.startDelayInSeconds = startDelayInSeconds;
    }
    public String getRetriedTaskId() {
        return retriedTaskId;
    }
    public void setRetriedTaskId(String retriedTaskId) {
        this.retriedTaskId = retriedTaskId;
    }
    public boolean isRetried() {
        return retried;
    }
    public void setRetried(boolean retried) {
        this.retried = retried;
    }
    public boolean isExecuted() {
        return executed;
    }
    public void setExecuted(boolean executed) {
        this.executed = executed;
    }
    public boolean isCallbackFromWorker() {
        return callbackFromWorker;
    }
    public void setCallbackFromWorker(boolean callbackFromWorker) {
        this.callbackFromWorker = callbackFromWorker;
    }
    public long getResponseTimeoutSeconds() {
        return responseTimeoutSeconds;
    }
    public void setResponseTimeoutSeconds(long responseTimeoutSeconds) {
        this.responseTimeoutSeconds = responseTimeoutSeconds;
    }
    public String getWorkflowInstanceId() {
        return workflowInstanceId;
    }
    public void setWorkflowInstanceId(String workflowInstanceId) {
        this.workflowInstanceId = workflowInstanceId;
    }
    public String getWorkflowType() {
        return workflowType;
    }
    public void setWorkflowType(String workflowType) {
        this.workflowType = workflowType;
    }
    public String getTaskId() {
        return taskId;
    }
    public void setTaskId(String taskId) {
        this.taskId = taskId;
    }
    public String getReasonForIncompletion() {
        return reasonForIncompletion;
    }
    public void setReasonForIncompletion(String reasonForIncompletion) {
        this.reasonForIncompletion = reasonForIncompletion;
    }
    public long getCallbackAfterSeconds() {
        return callbackAfterSeconds;
    }
    public void setCallbackAfterSeconds(long callbackAfterSeconds) {
        this.callbackAfterSeconds = callbackAfterSeconds;
    }
    public String getWorkerId() {
        return workerId;
    }
    public void setWorkerId(String workerId) {
        this.workerId = workerId;
    }
    /**
     * Returns the task output, lazily folding any internalized payload back into
     * {@code outputData}. When both maps hold entries, live {@code outputData} values take
     * precedence over the (potentially stale) externalized payload.
     */
    @JsonIgnore
    public Map<String, Object> getOutputData() {
        if (!outputPayload.isEmpty() && !outputData.isEmpty()) {
            // Combine payload + data
            // data has precedence over payload because:
            // with external storage enabled, payload contains the old values
            // while data contains the latest and if payload took precedence, it
            // would remove latest outputs
            outputPayload.forEach(outputData::putIfAbsent);
            outputPayload = new HashMap<>();
            return outputData;
        } else if (outputPayload.isEmpty()) {
            return outputData;
        } else {
            return outputPayload;
        }
    }
@JsonIgnore
public void setOutputData(Map<String, Object> outputData) {
if (outputData == null) {
outputData = new HashMap<>();
}
this.outputData = outputData;
}
/**
* @deprecated Used only for JSON serialization and deserialization.
*/
@JsonProperty("outputData")
@Deprecated
public void setRawOutputData(Map<String, Object> inputData) {
setOutputData(inputData);
}
/**
* @deprecated Used only for JSON serialization and deserialization.
*/
@JsonProperty("outputData")
@Deprecated
public Map<String, Object> getRawOutputData() {
return outputData;
}
    // ---- Plain bean accessors (continued) ----
    public WorkflowTask getWorkflowTask() {
        return workflowTask;
    }
    public void setWorkflowTask(WorkflowTask workflowTask) {
        this.workflowTask = workflowTask;
    }
    public String getDomain() {
        return domain;
    }
    public void setDomain(String domain) {
        this.domain = domain;
    }
    public Any getInputMessage() {
        return inputMessage;
    }
    public void setInputMessage(Any inputMessage) {
        this.inputMessage = inputMessage;
    }
    public Any getOutputMessage() {
        return outputMessage;
    }
    public void setOutputMessage(Any outputMessage) {
        this.outputMessage = outputMessage;
    }
    public int getRateLimitPerFrequency() {
        return rateLimitPerFrequency;
    }
    public void setRateLimitPerFrequency(int rateLimitPerFrequency) {
        this.rateLimitPerFrequency = rateLimitPerFrequency;
    }
    public int getRateLimitFrequencyInSeconds() {
        return rateLimitFrequencyInSeconds;
    }
    public void setRateLimitFrequencyInSeconds(int rateLimitFrequencyInSeconds) {
        this.rateLimitFrequencyInSeconds = rateLimitFrequencyInSeconds;
    }
    public String getExternalInputPayloadStoragePath() {
        return externalInputPayloadStoragePath;
    }
    public void setExternalInputPayloadStoragePath(String externalInputPayloadStoragePath) {
        this.externalInputPayloadStoragePath = externalInputPayloadStoragePath;
    }
    public String getExternalOutputPayloadStoragePath() {
        return externalOutputPayloadStoragePath;
    }
    public void setExternalOutputPayloadStoragePath(String externalOutputPayloadStoragePath) {
        this.externalOutputPayloadStoragePath = externalOutputPayloadStoragePath;
    }
    public int getWorkflowPriority() {
        return workflowPriority;
    }
    public void setWorkflowPriority(int workflowPriority) {
        this.workflowPriority = workflowPriority;
    }
    public String getExecutionNameSpace() {
        return executionNameSpace;
    }
    public void setExecutionNameSpace(String executionNameSpace) {
        this.executionNameSpace = executionNameSpace;
    }
    public String getIsolationGroupId() {
        return isolationGroupId;
    }
    public void setIsolationGroupId(String isolationGroupId) {
        this.isolationGroupId = isolationGroupId;
    }
    public int getIteration() {
        return iteration;
    }
    public void setIteration(int iteration) {
        this.iteration = iteration;
    }
public String getSubWorkflowId() {
// For backwards compatibility
if (StringUtils.isNotBlank(subWorkflowId)) {
return subWorkflowId;
} else {
return this.getOutputData() != null && this.getOutputData().get("subWorkflowId") != null
? (String) this.getOutputData().get("subWorkflowId")
: this.getInputData() != null
? (String) this.getInputData().get("subWorkflowId")
: null;
}
}
    /**
     * Sets the sub-workflow id. For backwards compatibility, if the legacy copy stored in the
     * output map is present it is kept in sync as well.
     */
    public void setSubWorkflowId(String subWorkflowId) {
        this.subWorkflowId = subWorkflowId;
        // For backwards compatibility
        if (this.outputData != null && this.outputData.containsKey("subWorkflowId")) {
            this.outputData.put("subWorkflowId", subWorkflowId);
        }
    }
    public boolean isSubworkflowChanged() {
        return subworkflowChanged;
    }
    public void setSubworkflowChanged(boolean subworkflowChanged) {
        this.subworkflowChanged = subworkflowChanged;
    }
    // Not thread-safe: plain pre-increment on an int field.
    public void incrementPollCount() {
        ++this.pollCount;
    }
    /**
     * @return {@link Optional} containing the task definition if available
     */
    @JsonIgnore
    public Optional<TaskDef> getTaskDefinition() {
        return Optional.ofNullable(this.getWorkflowTask()).map(WorkflowTask::getTaskDefinition);
    }
    /** @return true when this task is an iteration of a loop (DO_WHILE-style) task */
    public boolean isLoopOverTask() {
        return iteration > 0;
    }
    public long getWaitTimeout() {
        return waitTimeout;
    }
    public void setWaitTimeout(long waitTimeout) {
        this.waitTimeout = waitTimeout;
    }
    /**
     * Computes how long this task waited in the queue before being picked up.
     *
     * <p>When the task uses a worker callback (callbackAfterSeconds set and it has been updated),
     * the wait is measured from the moment the callback window expired until now; otherwise it is
     * simply the gap between scheduling and the first poll. Returns 0 if the task has not been
     * both scheduled and started.
     *
     * @return the queueWaitTime
     */
    public long getQueueWaitTime() {
        if (this.startTime > 0 && this.scheduledTime > 0) {
            if (this.updateTime > 0 && getCallbackAfterSeconds() > 0) {
                long waitTime =
                        System.currentTimeMillis()
                                - (this.updateTime + (getCallbackAfterSeconds() * 1000));
                return waitTime > 0 ? waitTime : 0;
            } else {
                return this.startTime - this.scheduledTime;
            }
        }
        return 0L;
    }
    /**
     * @return a copy of the task instance
     */
    // NOTE: BeanUtils copies bean properties only (shallow copy) — the copy shares the same
    // input/output map instances as the original.
    public TaskModel copy() {
        TaskModel copy = new TaskModel();
        BeanUtils.copyProperties(this, copy);
        return copy;
    }
    /** Moves the live input into the payload staging map and records its external storage path. */
    public void externalizeInput(String path) {
        this.inputPayload = this.inputData;
        this.inputData = new HashMap<>();
        this.externalInputPayloadStoragePath = path;
    }
    /** Moves the live output into the payload staging map and records its external storage path. */
    public void externalizeOutput(String path) {
        this.outputPayload = this.outputData;
        this.outputData = new HashMap<>();
        this.externalOutputPayloadStoragePath = path;
    }
    /** Stages input fetched from external storage; getInputData() later folds it back in. */
    public void internalizeInput(Map<String, Object> data) {
        this.inputData = new HashMap<>();
        this.inputPayload = data;
    }
    /** Stages output fetched from external storage; getOutputData() later folds it back in. */
    public void internalizeOutput(Map<String, Object> data) {
        this.outputData = new HashMap<>();
        this.outputPayload = data;
    }
    // Debug representation; includes most (not all) fields — e.g. firstStartTime and the
    // payload staging maps are omitted.
    @Override
    public String toString() {
        return "TaskModel{"
                + "taskType='"
                + taskType
                + '\''
                + ", status="
                + status
                + ", inputData="
                + inputData
                + ", referenceTaskName='"
                + referenceTaskName
                + '\''
                + ", retryCount="
                + retryCount
                + ", seq="
                + seq
                + ", correlationId='"
                + correlationId
                + '\''
                + ", pollCount="
                + pollCount
                + ", taskDefName='"
                + taskDefName
                + '\''
                + ", scheduledTime="
                + scheduledTime
                + ", startTime="
                + startTime
                + ", endTime="
                + endTime
                + ", updateTime="
                + updateTime
                + ", startDelayInSeconds="
                + startDelayInSeconds
                + ", retriedTaskId='"
                + retriedTaskId
                + '\''
                + ", retried="
                + retried
                + ", executed="
                + executed
                + ", callbackFromWorker="
                + callbackFromWorker
                + ", responseTimeoutSeconds="
                + responseTimeoutSeconds
                + ", workflowInstanceId='"
                + workflowInstanceId
                + '\''
                + ", workflowType='"
                + workflowType
                + '\''
                + ", taskId='"
                + taskId
                + '\''
                + ", reasonForIncompletion='"
                + reasonForIncompletion
                + '\''
                + ", callbackAfterSeconds="
                + callbackAfterSeconds
                + ", workerId='"
                + workerId
                + '\''
                + ", outputData="
                + outputData
                + ", workflowTask="
                + workflowTask
                + ", domain='"
                + domain
                + '\''
                + ", waitTimeout='"
                + waitTimeout
                + '\''
                + ", inputMessage="
                + inputMessage
                + ", outputMessage="
                + outputMessage
                + ", rateLimitPerFrequency="
                + rateLimitPerFrequency
                + ", rateLimitFrequencyInSeconds="
                + rateLimitFrequencyInSeconds
                + ", externalInputPayloadStoragePath='"
                + externalInputPayloadStoragePath
                + '\''
                + ", externalOutputPayloadStoragePath='"
                + externalOutputPayloadStoragePath
                + '\''
                + ", workflowPriority="
                + workflowPriority
                + ", executionNameSpace='"
                + executionNameSpace
                + '\''
                + ", isolationGroupId='"
                + isolationGroupId
                + '\''
                + ", iteration="
                + iteration
                + ", subWorkflowId='"
                + subWorkflowId
                + '\''
                + ", subworkflowChanged="
                + subworkflowChanged
                + '}';
    }
    // Field-by-field equality. Note: getWaitTimeout() is a long, so Objects.equals autoboxes
    // it — works, but '==' would avoid the boxing. firstStartTime is not compared (see field
    // declaration note).
    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        TaskModel taskModel = (TaskModel) o;
        return getRetryCount() == taskModel.getRetryCount()
                && getSeq() == taskModel.getSeq()
                && getPollCount() == taskModel.getPollCount()
                && getScheduledTime() == taskModel.getScheduledTime()
                && getStartTime() == taskModel.getStartTime()
                && getEndTime() == taskModel.getEndTime()
                && getUpdateTime() == taskModel.getUpdateTime()
                && getStartDelayInSeconds() == taskModel.getStartDelayInSeconds()
                && isRetried() == taskModel.isRetried()
                && isExecuted() == taskModel.isExecuted()
                && isCallbackFromWorker() == taskModel.isCallbackFromWorker()
                && getResponseTimeoutSeconds() == taskModel.getResponseTimeoutSeconds()
                && getCallbackAfterSeconds() == taskModel.getCallbackAfterSeconds()
                && getRateLimitPerFrequency() == taskModel.getRateLimitPerFrequency()
                && getRateLimitFrequencyInSeconds() == taskModel.getRateLimitFrequencyInSeconds()
                && getWorkflowPriority() == taskModel.getWorkflowPriority()
                && getIteration() == taskModel.getIteration()
                && isSubworkflowChanged() == taskModel.isSubworkflowChanged()
                && Objects.equals(getTaskType(), taskModel.getTaskType())
                && getStatus() == taskModel.getStatus()
                && Objects.equals(getInputData(), taskModel.getInputData())
                && Objects.equals(getReferenceTaskName(), taskModel.getReferenceTaskName())
                && Objects.equals(getCorrelationId(), taskModel.getCorrelationId())
                && Objects.equals(getTaskDefName(), taskModel.getTaskDefName())
                && Objects.equals(getRetriedTaskId(), taskModel.getRetriedTaskId())
                && Objects.equals(getWorkflowInstanceId(), taskModel.getWorkflowInstanceId())
                && Objects.equals(getWorkflowType(), taskModel.getWorkflowType())
                && Objects.equals(getTaskId(), taskModel.getTaskId())
                && Objects.equals(getReasonForIncompletion(), taskModel.getReasonForIncompletion())
                && Objects.equals(getWorkerId(), taskModel.getWorkerId())
                && Objects.equals(getWaitTimeout(), taskModel.getWaitTimeout())
                && Objects.equals(outputData, taskModel.outputData)
                && Objects.equals(outputPayload, taskModel.outputPayload)
                && Objects.equals(getWorkflowTask(), taskModel.getWorkflowTask())
                && Objects.equals(getDomain(), taskModel.getDomain())
                && Objects.equals(getInputMessage(), taskModel.getInputMessage())
                && Objects.equals(getOutputMessage(), taskModel.getOutputMessage())
                && Objects.equals(
                        getExternalInputPayloadStoragePath(),
                        taskModel.getExternalInputPayloadStoragePath())
                && Objects.equals(
                        getExternalOutputPayloadStoragePath(),
                        taskModel.getExternalOutputPayloadStoragePath())
                && Objects.equals(getExecutionNameSpace(), taskModel.getExecutionNameSpace())
                && Objects.equals(getIsolationGroupId(), taskModel.getIsolationGroupId())
                && Objects.equals(getSubWorkflowId(), taskModel.getSubWorkflowId());
    }
    // Hashes the same logical fields as equals() above, preserving the equals/hashCode
    // contract (raw outputData/outputPayload fields included to match the comparison).
    @Override
    public int hashCode() {
        return Objects.hash(
                getTaskType(),
                getStatus(),
                getInputData(),
                getReferenceTaskName(),
                getRetryCount(),
                getSeq(),
                getCorrelationId(),
                getPollCount(),
                getTaskDefName(),
                getScheduledTime(),
                getStartTime(),
                getEndTime(),
                getUpdateTime(),
                getStartDelayInSeconds(),
                getRetriedTaskId(),
                isRetried(),
                isExecuted(),
                isCallbackFromWorker(),
                getResponseTimeoutSeconds(),
                getWorkflowInstanceId(),
                getWorkflowType(),
                getTaskId(),
                getReasonForIncompletion(),
                getCallbackAfterSeconds(),
                getWorkerId(),
                getWaitTimeout(),
                outputData,
                outputPayload,
                getWorkflowTask(),
                getDomain(),
                getInputMessage(),
                getOutputMessage(),
                getRateLimitPerFrequency(),
                getRateLimitFrequencyInSeconds(),
                getExternalInputPayloadStoragePath(),
                getExternalOutputPayloadStoragePath(),
                getWorkflowPriority(),
                getExecutionNameSpace(),
                getIsolationGroupId(),
                getIteration(),
                getSubWorkflowId(),
                isSubworkflowChanged());
    }
    /**
     * Converts this internal model to its API-facing {@link Task} representation. When a payload
     * has been externalized, the corresponding map is replaced with an empty one so the large
     * payload is not serialized inline.
     */
    public Task toTask() {
        Task task = new Task();
        BeanUtils.copyProperties(this, task);
        task.setStatus(Task.Status.valueOf(status.name()));
        // ensure that input/output is properly represented
        if (externalInputPayloadStoragePath != null) {
            task.setInputData(new HashMap<>());
        }
        if (externalOutputPayloadStoragePath != null) {
            task.setOutputData(new HashMap<>());
        }
        return task;
    }
    /** Maps a model status to the API status of the same name (names are kept in sync). */
    public static Task.Status mapToTaskStatus(TaskModel.Status status) {
        return Task.Status.valueOf(status.name());
    }
public void addInput(String key, Object value) {
this.inputData.put(key, value);
}
public void addInput(Map<String, Object> inputData) {
if (inputData != null) {
this.inputData.putAll(inputData);
}
}
public void addOutput(String key, Object value) {
this.outputData.put(key, value);
}
public void removeOutput(String key) {
this.outputData.remove(key);
}
public void addOutput(Map<String, Object> outputData) {
if (outputData != null) {
this.outputData.putAll(outputData);
}
}
public void clearOutput() {
this.outputData.clear();
this.outputPayload.clear();
this.externalOutputPayloadStoragePath = null;
}
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/main/java/com/netflix/conductor/metrics/Monitors.java | core/src/main/java/com/netflix/conductor/metrics/Monitors.java | /*
* Copyright 2022 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.metrics;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;
import org.apache.commons.lang3.StringUtils;
import com.netflix.conductor.contribs.metrics.MetricsCollector;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import com.google.common.util.concurrent.AtomicDouble;
import io.micrometer.core.instrument.Counter;
import io.micrometer.core.instrument.DistributionSummary;
import io.micrometer.core.instrument.Gauge;
import io.micrometer.core.instrument.ImmutableTag;
import io.micrometer.core.instrument.MeterRegistry;
import io.micrometer.core.instrument.Tag;
import io.micrometer.core.instrument.Timer;
import lombok.extern.slf4j.Slf4j;
@Slf4j
public class Monitors {
    public static final String NO_DOMAIN = "NO_DOMAIN";
    private static final MeterRegistry registry = MetricsCollector.getMeterRegistry();
    private static final double[] percentiles = new double[] {0.5, 0.75, 0.90, 0.95, 0.99};
    // Caches keyed by "name + tags" so each distinct meter is built/registered once.
    private static final Map<String, AtomicDouble> gauges = new ConcurrentHashMap<>();
    private static final Map<String, Counter> counters = new ConcurrentHashMap<>();
    private static final Map<String, Timer> timers = new ConcurrentHashMap<>();
    private static final Map<String, DistributionSummary> distributionSummaries =
            new ConcurrentHashMap<>();
    // Utility class: prevent instantiation.
    private Monitors() {}
    /** Returns (creating and registering on first use) the counter for the given name + tags. */
    public static Counter getCounter(String name, String... tags) {
        String key = name + Arrays.toString(tags);
        return counters.computeIfAbsent(
                key, s -> Counter.builder(name).tags(toTags(tags)).register(registry));
    }
    /** Returns the timer for the given name + tags, publishing the class-level percentiles. */
    public static Timer getTimer(String name, String... tags) {
        String key = name + Arrays.toString(tags);
        return timers.computeIfAbsent(
                key,
                s ->
                        Timer.builder(name)
                                .tags(toTags(tags))
                                .publishPercentiles(percentiles)
                                .register(registry));
    }
    /** Returns the distribution summary for the given name + tags (percentile histogram on). */
    public static DistributionSummary distributionSummary(String name, String... tags) {
        String key = name + Arrays.toString(tags);
        return distributionSummaries.computeIfAbsent(
                key,
                s ->
                        DistributionSummary.builder(name)
                                .tags(toTags(tags))
                                .publishPercentileHistogram()
                                .register(registry));
    }
    /**
     * Returns the mutable value backing the gauge for the given name + tags; the gauge itself is
     * registered once and samples this value.
     */
    public static AtomicDouble gauge(String name, String... tags) {
        String key = name + Arrays.toString(tags);
        return gauges.computeIfAbsent(
                key,
                s -> {
                    AtomicDouble value = new AtomicDouble(0);
                    Gauge.builder(name, () -> value).tags(toTags(tags)).register(registry);
                    return value;
                });
    }
private static Iterable<Tag> toTags(String... kv) {
List<Tag> tags = new ArrayList<>();
for (int i = 0; i < kv.length - 1; i += 2) {
String key = kv[i];
String value = kv[i + 1];
if (key == null || value == null) {
continue;
}
Tag tag = new ImmutableTag(key, value);
tags.add(tag);
}
return tags;
}
    /**
     * Increment a counter that is used to measure the rate at which some event is occurring.
     * Consider a simple queue, counters would be used to measure things like the rate at which
     * items are being inserted and removed.
     *
     * @param name
     * @param additionalTags
     */
    private static void counter(String name, String... additionalTags) {
        getCounter(name, additionalTags).increment();
    }
    /**
     * Set a gauge is a handle to get the current value. Typical examples for gauges would be the
     * size of a queue or number of threads in the running state. Since gauges are sampled, there is
     * no information about what might have occurred between samples.
     *
     * @param name
     * @param measurement
     * @param additionalTags
     */
    private static void gauge(String name, long measurement, String... additionalTags) {
        gauge(name, additionalTags).set(measurement);
    }
    /**
     * Increments the generic "workflow_server_error" counter.
     *
     * @param className Name of the class
     * @param methodName Method name
     */
    public static void error(String className, String methodName) {
        getCounter("workflow_server_error", "class", className, "methodName", methodName)
                .increment();
    }
    /** Sets an untagged gauge to the given value. */
    public static void recordGauge(String name, long count) {
        gauge(name, count);
    }
    // ---- Task / workflow metric recorders. Each maps to a named meter with fixed tags. ----
    public static void recordQueueWaitTime(String taskType, long queueWaitTime) {
        getTimer("task_queue_wait", "taskType", taskType)
                .record(queueWaitTime, TimeUnit.MILLISECONDS);
    }
    public static void recordTaskExecutionTime(
            String taskType, long duration, boolean includesRetries, TaskModel.Status status) {
        getTimer(
                        "task_execution",
                        "taskType",
                        taskType,
                        "includeRetries",
                        "" + includesRetries,
                        "status",
                        status.name())
                .record(duration, TimeUnit.MILLISECONDS);
    }
    public static void recordWorkflowDecisionTime(long duration) {
        getTimer("workflow_decision").record(duration, TimeUnit.MILLISECONDS);
    }
    public static void recordTaskPollError(String taskType, String exception) {
        recordTaskPollError(taskType, NO_DOMAIN, exception);
    }
    public static void recordTaskPollError(String taskType, String domain, String exception) {
        counter("task_poll_error", "taskType", taskType, "domain", domain, "exception", exception);
    }
    public static void recordTaskPoll(String taskType) {
        counter("task_poll", "taskType", taskType);
    }
    public static void recordTaskPollCount(String taskType, int count) {
        recordTaskPollCount(taskType, NO_DOMAIN, count);
    }
    // NOTE(review): '"" + domain' turns a null domain into the literal "null" tag instead of
    // dropping it like toTags() would — confirm intentional.
    public static void recordTaskPollCount(String taskType, String domain, int count) {
        getCounter("task_poll_count", "taskType", taskType, "domain", "" + domain).increment(count);
    }
    public static void recordQueueDepth(String taskType, long size, String ownerApp) {
        gauge(
                "task_queue_depth",
                size,
                "taskType",
                taskType,
                "ownerApp",
                StringUtils.defaultIfBlank(ownerApp, "unknown"));
    }
    public static void recordEventQueueDepth(String queueType, long size) {
        gauge("event_queue_depth", size, "queueType", queueType);
    }
    public static void recordTaskInProgress(String taskType, long size, String ownerApp) {
        gauge(
                "task_in_progress",
                size,
                "taskType",
                taskType,
                "ownerApp",
                StringUtils.defaultIfBlank(ownerApp, "unknown"));
    }
    public static void recordRunningWorkflows(long count, String name, String ownerApp) {
        gauge(
                "workflow_running",
                count,
                "workflowName",
                name,
                "ownerApp",
                StringUtils.defaultIfBlank(ownerApp, "unknown"));
    }
    public static void recordNumTasksInWorkflow(long count, String name, String version) {
        distributionSummary("tasks_in_workflow", "workflowName", name, "version", version)
                .record(count);
    }
    public static void recordTaskTimeout(String taskType) {
        counter("task_timeout", "taskType", taskType);
    }
    public static void recordTaskResponseTimeout(String taskType) {
        counter("task_response_timeout", "taskType", taskType);
    }
    public static void recordTaskPendingTime(String taskType, String workflowType, long duration) {
        gauge("task_pending_time", duration, "workflowName", workflowType, "taskType", taskType);
    }
    // NOTE(review): method says "termination" but the underlying counter is named
    // "workflow_failure" — renaming the metric would break existing dashboards, so left as-is.
    public static void recordWorkflowTermination(
            String workflowType, WorkflowModel.Status status, String ownerApp) {
        counter(
                "workflow_failure",
                "workflowName",
                workflowType,
                "status",
                status.name(),
                "ownerApp",
                StringUtils.defaultIfBlank(ownerApp, "unknown"));
    }
    // ---- Workflow start / update / event metric recorders ----
    public static void recordWorkflowStartSuccess(
            String workflowType, String version, String ownerApp) {
        counter(
                "workflow_start_success",
                "workflowName",
                workflowType,
                "version",
                version,
                "ownerApp",
                StringUtils.defaultIfBlank(ownerApp, "unknown"));
    }
    public static void recordWorkflowStartError(String workflowType, String ownerApp) {
        counter(
                "workflow_start_error",
                "workflowName",
                workflowType,
                "ownerApp",
                StringUtils.defaultIfBlank(ownerApp, "unknown"));
    }
    // Overloads share the "task_update_conflict" counter but tag with workflowStatus vs
    // taskStatus depending on which status type raised the conflict.
    public static void recordUpdateConflict(
            String taskType, String workflowType, WorkflowModel.Status status) {
        counter(
                "task_update_conflict",
                "workflowName",
                workflowType,
                "taskType",
                taskType,
                "workflowStatus",
                status.name());
    }
    public static void recordUpdateConflict(
            String taskType, String workflowType, TaskModel.Status status) {
        counter(
                "task_update_conflict",
                "workflowName",
                workflowType,
                "taskType",
                taskType,
                "taskStatus",
                status.name());
    }
    public static void recordTaskUpdateError(String taskType, String workflowType) {
        counter("task_update_error", "workflowName", workflowType, "taskType", taskType);
    }
    public static void recordTaskExtendLeaseError(String taskType, String workflowType) {
        counter("task_extendLease_error", "workflowName", workflowType, "taskType", taskType);
    }
    public static void recordTaskQueueOpError(String taskType, String workflowType) {
        counter("task_queue_op_error", "workflowName", workflowType, "taskType", taskType);
    }
    public static void recordWorkflowCompletion(
            String workflowType, long duration, String ownerApp) {
        getTimer(
                        "workflow_execution",
                        "workflowName",
                        workflowType,
                        "ownerApp",
                        StringUtils.defaultIfBlank(ownerApp, "unknown"))
                .record(duration, TimeUnit.MILLISECONDS);
    }
    public static void recordUnackTime(String workflowType, long duration) {
        getTimer("workflow_unack", "workflowName", workflowType)
                .record(duration, TimeUnit.MILLISECONDS);
    }
    public static void recordTaskRateLimited(String taskDefName, int limit) {
        gauge("task_rate_limited", limit, "taskType", taskDefName);
    }
    public static void recordTaskConcurrentExecutionLimited(String taskDefName, int limit) {
        gauge("task_concurrent_execution_limited", limit, "taskType", taskDefName);
    }
    public static void recordEventQueueMessagesProcessed(
            String queueType, String queueName, int count) {
        getCounter("event_queue_messages_processed", "queueType", queueType, "queueName", queueName)
                .increment(count);
    }
    public static void recordObservableQMessageReceivedErrors(String queueType) {
        counter("observable_queue_error", "queueType", queueType);
    }
    public static void recordEventQueueMessagesHandled(String queueType, String queueName) {
        counter("event_queue_messages_handled", "queueType", queueType, "queueName", queueName);
    }
    public static void recordEventQueueMessagesError(String queueType, String queueName) {
        counter("event_queue_messages_error", "queueType", queueType, "queueName", queueName);
    }
    public static void recordEventExecutionSuccess(String event, String handler, String action) {
        counter("event_execution_success", "event", event, "handler", handler, "action", action);
    }
    public static void recordEventExecutionError(
            String event, String handler, String action, String exceptionClazz) {
        counter(
                "event_execution_error",
                "event",
                event,
                "handler",
                handler,
                "action",
                action,
                "exception",
                exceptionClazz);
    }
public static void recordEventActionError(String action, String entityName, String event) {
counter("event_action_error", "action", action, "entityName", entityName, "event", event);
}
    /**
     * Counts a DAO request; blank task/workflow types are recorded as "unknown" so the tag set
     * stays bounded and non-empty.
     */
    public static void recordDaoRequests(
            String dao, String action, String taskType, String workflowType) {
        counter(
                "dao_requests",
                "dao",
                dao,
                "action",
                action,
                "taskType",
                StringUtils.defaultIfBlank(taskType, "unknown"),
                "workflowType",
                StringUtils.defaultIfBlank(workflowType, "unknown"));
    }
    /** Counts a DAO request made on behalf of an event. */
    public static void recordDaoEventRequests(String dao, String action, String event) {
        counter("dao_event_requests", "dao", dao, "action", action, "event", event);
    }
    /**
     * Publishes the {@code dao_payload_size} gauge; blank task/workflow types fall back to
     * "unknown", mirroring {@code recordDaoRequests}.
     */
    public static void recordDaoPayloadSize(
            String dao, String action, String taskType, String workflowType, int size) {
        gauge(
                "dao_payload_size",
                size,
                "dao",
                dao,
                "action",
                action,
                "taskType",
                StringUtils.defaultIfBlank(taskType, "unknown"),
                "workflowType",
                StringUtils.defaultIfBlank(workflowType, "unknown"));
    }
    /** Counts a use of external payload storage (e.g. read/write of a task or workflow payload). */
    public static void recordExternalPayloadStorageUsage(
            String name, String operation, String payloadType) {
        counter(
                "external_payload_storage_usage",
                "name",
                name,
                "operation",
                operation,
                "payloadType",
                payloadType);
    }
    /** Counts a DAO-level error for the given DAO and action. */
    public static void recordDaoError(String dao, String action) {
        counter("dao_errors", "dao", dao, "action", action);
    }
    /** Counts a failure to acknowledge a polled task. */
    public static void recordAckTaskError(String taskType) {
        counter("task_ack_error", "taskType", taskType);
    }
    /**
     * Records an indexing-operation timer.
     *
     * <p>NOTE(review): unlike the other helpers here, the metric name is the caller-supplied
     * {@code action} rather than a fixed string — confirm this is intended.
     */
    public static void recordESIndexTime(String action, String docType, long val) {
        getTimer(action, "docType", docType).record(val, TimeUnit.MILLISECONDS);
    }
    /** Publishes the {@code indexing_worker_queue} gauge for the given queue type. */
    public static void recordWorkerQueueSize(String queueType, int val) {
        gauge("indexing_worker_queue", val, "queueType", queueType);
    }
    /** Counts an indexing request that was discarded (e.g. worker queue full). */
    public static void recordDiscardedIndexingCount(String queueType) {
        counter("discarded_index_count", "queueType", queueType);
    }
    /** Counts an unsuccessful (but non-exceptional) attempt to acquire a distributed lock. */
    public static void recordAcquireLockUnsuccessful() {
        counter("acquire_lock_unsuccessful");
    }
    /** Counts a lock-acquisition failure, tagged with the exception class name. */
    public static void recordAcquireLockFailure(String exceptionClassName) {
        counter("acquire_lock_failure", "exceptionType", exceptionClassName);
    }
    /** Counts an archived workflow, tagged with its type and terminal status. */
    public static void recordWorkflowArchived(String workflowType, WorkflowModel.Status status) {
        counter("workflow_archived", "workflowName", workflowType, "workflowStatus", status.name());
    }
    /** Publishes the current size of the workflow archival delay queue. */
    public static void recordArchivalDelayQueueSize(int val) {
        gauge("workflow_archival_delay_queue_size", val);
    }
    /** Counts a workflow-archival request that was discarded. */
    public static void recordDiscardedArchivalCount() {
        counter("discarded_archival_count");
    }
    /** Counts a system-task worker poll that was throttled for the given queue. */
    public static void recordSystemTaskWorkerPollingLimited(String queueName) {
        counter("system_task_worker_polling_limited", "queueName", queueName);
    }
    /** Publishes the number of messages fetched in one event-queue poll. */
    public static void recordEventQueuePollSize(String queueType, int val) {
        gauge("event_queue_poll", val, "queueType", queueType);
    }
    /** Counts a message re-pushed onto a queue by the queue repair service. */
    public static void recordQueueMessageRepushFromRepairService(String queueName) {
        counter("queue_message_repushed", "queueName", queueName);
    }
    /** Publishes the size of a task execution log batch. */
    public static void recordTaskExecLogSize(int val) {
        gauge("task_exec_log_size", val);
    }
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/main/java/com/netflix/conductor/metrics/WorkflowMonitor.java | core/src/main/java/com/netflix/conductor/metrics/WorkflowMonitor.java | /*
* Copyright 2022 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.metrics;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Set;
import java.util.stream.Collectors;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;
import com.netflix.conductor.annotations.VisibleForTesting;
import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.core.dal.ExecutionDAOFacade;
import com.netflix.conductor.core.execution.tasks.WorkflowSystemTask;
import com.netflix.conductor.dao.QueueDAO;
import com.netflix.conductor.service.MetadataService;
import static com.netflix.conductor.core.execution.tasks.SystemTaskRegistry.ASYNC_SYSTEM_TASKS_QUALIFIER;
@Component
@ConditionalOnProperty(
        name = "conductor.workflow-monitor.enabled",
        havingValue = "true",
        matchIfMissing = true)
public class WorkflowMonitor {

    private static final Logger LOGGER = LoggerFactory.getLogger(WorkflowMonitor.class);

    private final MetadataService metadataService;
    private final QueueDAO queueDAO;
    private final ExecutionDAOFacade executionDAOFacade;
    private final int metadataRefreshInterval;
    private final Set<WorkflowSystemTask> asyncSystemTasks;

    // Cached metadata, refreshed every metadataRefreshInterval reporting cycles.
    private List<TaskDef> taskDefs;
    private List<WorkflowDef> workflowDefs;
    private int refreshCounter = 0;

    public WorkflowMonitor(
            MetadataService metadataService,
            QueueDAO queueDAO,
            ExecutionDAOFacade executionDAOFacade,
            @Value("${conductor.workflow-monitor.metadata-refresh-interval:10}")
                    int metadataRefreshInterval,
            @Qualifier(ASYNC_SYSTEM_TASKS_QUALIFIER) Set<WorkflowSystemTask> asyncSystemTasks) {
        this.metadataService = metadataService;
        this.queueDAO = queueDAO;
        this.executionDAOFacade = executionDAOFacade;
        this.metadataRefreshInterval = metadataRefreshInterval;
        this.asyncSystemTasks = asyncSystemTasks;
        LOGGER.info("{} initialized.", WorkflowMonitor.class.getSimpleName());
    }

    /**
     * Periodically publishes pending-workflow counts, task queue depths and in-progress task
     * counts. Metadata is re-fetched only every {@code metadataRefreshInterval} runs to limit
     * load on the metadata store.
     */
    @Scheduled(
            initialDelayString = "${conductor.workflow-monitor.stats.initial-delay:120000}",
            fixedDelayString = "${conductor.workflow-monitor.stats.delay:60000}")
    public void reportMetrics() {
        try {
            if (refreshCounter <= 0) {
                workflowDefs = metadataService.getWorkflowDefs();
                taskDefs = new ArrayList<>(metadataService.getTaskDefs());
                refreshCounter = metadataRefreshInterval;
            }

            getPendingWorkflowToOwnerAppMap(workflowDefs)
                    .forEach(
                            (workflowName, ownerApp) ->
                                    Monitors.recordRunningWorkflows(
                                            executionDAOFacade.getPendingWorkflowCount(
                                                    workflowName),
                                            workflowName,
                                            ownerApp));

            for (TaskDef taskDef : taskDefs) {
                long queueSize = queueDAO.getSize(taskDef.getName());
                long inProgress = executionDAOFacade.getInProgressTaskCount(taskDef.getName());
                Monitors.recordQueueDepth(taskDef.getName(), queueSize, taskDef.getOwnerApp());
                // In-progress counts are only meaningful when a concurrency limit is configured.
                if (taskDef.concurrencyLimit() > 0) {
                    Monitors.recordTaskInProgress(
                            taskDef.getName(), inProgress, taskDef.getOwnerApp());
                }
            }

            for (WorkflowSystemTask systemTask : asyncSystemTasks) {
                long queueSize = queueDAO.getSize(systemTask.getTaskType());
                long inProgress =
                        executionDAOFacade.getInProgressTaskCount(systemTask.getTaskType());
                Monitors.recordQueueDepth(systemTask.getTaskType(), queueSize, "system");
                Monitors.recordTaskInProgress(systemTask.getTaskType(), inProgress, "system");
            }

            refreshCounter--;
        } catch (Exception e) {
            LOGGER.error("Error while publishing scheduled metrics", e);
        }
    }

    /**
     * Pending workflow data does not contain information about version. We only need the owner app
     * and workflow name, and we only need to query for the workflow once, so keep the definition
     * with the highest version per name.
     */
    @VisibleForTesting
    Map<String, String> getPendingWorkflowToOwnerAppMap(List<WorkflowDef> workflowDefs) {
        Map<String, WorkflowDef> latestByName =
                workflowDefs.stream()
                        .collect(
                                Collectors.toMap(
                                        WorkflowDef::getName,
                                        def -> def,
                                        (first, second) ->
                                                first.getVersion() >= second.getVersion()
                                                        ? first
                                                        : second));
        Map<String, String> workflowNameToOwnerMap = new HashMap<>();
        latestByName.forEach((name, def) -> workflowNameToOwnerMap.put(name, def.getOwnerApp()));
        return workflowNameToOwnerMap;
    }
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/main/java/com/netflix/conductor/validations/WorkflowTaskTypeConstraint.java | core/src/main/java/com/netflix/conductor/validations/WorkflowTaskTypeConstraint.java | /*
* Copyright 2022 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.validations;
import java.lang.annotation.Documented;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.text.ParseException;
import java.time.format.DateTimeParseException;
import java.util.Map;
import java.util.Optional;
import org.apache.commons.lang3.StringUtils;
import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.tasks.TaskType;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.core.events.ScriptEvaluator;
import com.netflix.conductor.core.utils.DateTimeUtils;
import jakarta.validation.Constraint;
import jakarta.validation.ConstraintValidator;
import jakarta.validation.ConstraintValidatorContext;
import jakarta.validation.Payload;
import static com.netflix.conductor.core.execution.tasks.Terminate.getTerminationStatusParameter;
import static com.netflix.conductor.core.execution.tasks.Terminate.validateInputStatus;
import static com.netflix.conductor.core.execution.tasks.Wait.DURATION_INPUT;
import static com.netflix.conductor.core.execution.tasks.Wait.UNTIL_INPUT;
import static java.lang.annotation.ElementType.ANNOTATION_TYPE;
import static java.lang.annotation.ElementType.TYPE;
/**
* This constraint class validates following things. 1. Correct parameters are set depending on task
* type.
*/
@Documented
@Constraint(validatedBy = WorkflowTaskTypeConstraint.WorkflowTaskValidator.class)
@Target({TYPE, ANNOTATION_TYPE})
@Retention(RetentionPolicy.RUNTIME)
public @interface WorkflowTaskTypeConstraint {
String message() default "";
Class<?>[] groups() default {};
Class<? extends Payload>[] payload() default {};
class WorkflowTaskValidator
implements ConstraintValidator<WorkflowTaskTypeConstraint, WorkflowTask> {
final String PARAM_REQUIRED_STRING_FORMAT =
"%s field is required for taskType: %s taskName: %s";
        /** No-op: this validator keeps no state from the annotation. */
        @Override
        public void initialize(WorkflowTaskTypeConstraint constraintAnnotation) {}
        /**
         * Dispatches to a type-specific validator based on {@link WorkflowTask#getType()}.
         * Task types without a dedicated case (e.g. SIMPLE) pass through as valid.
         *
         * @return true if the task satisfies the checks for its type; specific violations are
         *     reported through the context
         */
        @Override
        public boolean isValid(WorkflowTask workflowTask, ConstraintValidatorContext context) {
            // Suppress the default message so only the specific violations built below appear.
            context.disableDefaultConstraintViolation();
            boolean valid = true;

            // depending on task type check if required parameters are set or not
            switch (workflowTask.getType()) {
                case TaskType.TASK_TYPE_EVENT:
                    valid = isEventTaskValid(workflowTask, context);
                    break;
                case TaskType.TASK_TYPE_DECISION:
                    valid = isDecisionTaskValid(workflowTask, context);
                    break;
                case TaskType.TASK_TYPE_SWITCH:
                    valid = isSwitchTaskValid(workflowTask, context);
                    break;
                case TaskType.TASK_TYPE_DYNAMIC:
                    valid = isDynamicTaskValid(workflowTask, context);
                    break;
                case TaskType.TASK_TYPE_FORK_JOIN_DYNAMIC:
                    valid = isDynamicForkJoinValid(workflowTask, context);
                    break;
                case TaskType.TASK_TYPE_HTTP:
                    valid = isHttpTaskValid(workflowTask, context);
                    break;
                case TaskType.TASK_TYPE_FORK_JOIN:
                    valid = isForkJoinTaskValid(workflowTask, context);
                    break;
                case TaskType.TASK_TYPE_TERMINATE:
                    valid = isTerminateTaskValid(workflowTask, context);
                    break;
                case TaskType.TASK_TYPE_KAFKA_PUBLISH:
                    valid = isKafkaPublishTaskValid(workflowTask, context);
                    break;
                case TaskType.TASK_TYPE_DO_WHILE:
                    valid = isDoWhileTaskValid(workflowTask, context);
                    break;
                case TaskType.TASK_TYPE_SUB_WORKFLOW:
                    valid = isSubWorkflowTaskValid(workflowTask, context);
                    break;
                case TaskType.TASK_TYPE_JSON_JQ_TRANSFORM:
                    valid = isJSONJQTransformTaskValid(workflowTask, context);
                    break;
                case TaskType.TASK_TYPE_WAIT:
                    valid = isWaitTaskValid(workflowTask, context);
                    break;
            }

            return valid;
        }
private boolean isEventTaskValid(
WorkflowTask workflowTask, ConstraintValidatorContext context) {
boolean valid = true;
if (workflowTask.getSink() == null) {
String message =
String.format(
PARAM_REQUIRED_STRING_FORMAT,
"sink",
TaskType.TASK_TYPE_EVENT,
workflowTask.getName());
context.buildConstraintViolationWithTemplate(message).addConstraintViolation();
valid = false;
}
return valid;
}
private boolean isDecisionTaskValid(
WorkflowTask workflowTask, ConstraintValidatorContext context) {
boolean valid = true;
if (workflowTask.getCaseValueParam() == null
&& workflowTask.getCaseExpression() == null) {
String message =
String.format(
PARAM_REQUIRED_STRING_FORMAT,
"caseValueParam or caseExpression",
TaskType.DECISION,
workflowTask.getName());
context.buildConstraintViolationWithTemplate(message).addConstraintViolation();
valid = false;
}
if (workflowTask.getDecisionCases() == null) {
String message =
String.format(
PARAM_REQUIRED_STRING_FORMAT,
"decisionCases",
TaskType.DECISION,
workflowTask.getName());
context.buildConstraintViolationWithTemplate(message).addConstraintViolation();
valid = false;
} else if ((workflowTask.getDecisionCases() != null
|| workflowTask.getCaseExpression() != null)
&& (workflowTask.getDecisionCases().size() == 0)) {
String message =
String.format(
"decisionCases should have atleast one task for taskType: %s taskName: %s",
TaskType.DECISION, workflowTask.getName());
context.buildConstraintViolationWithTemplate(message).addConstraintViolation();
valid = false;
}
if (workflowTask.getCaseExpression() != null) {
try {
validateScriptExpression(
workflowTask.getCaseExpression(), workflowTask.getInputParameters());
} catch (Exception ee) {
String message =
String.format(
ee.getMessage() + ", taskType: DECISION taskName %s",
workflowTask.getName());
context.buildConstraintViolationWithTemplate(message).addConstraintViolation();
valid = false;
}
}
return valid;
}
private void validateScriptExpression(
String expression, Map<String, Object> inputParameters) {
try {
Object returnValue = ScriptEvaluator.eval(expression, inputParameters);
} catch (Exception e) {
throw new IllegalArgumentException(
String.format("Expression is not well formatted: %s", e.getMessage()));
}
}
private boolean isSwitchTaskValid(
WorkflowTask workflowTask, ConstraintValidatorContext context) {
boolean valid = true;
if (workflowTask.getEvaluatorType() == null) {
String message =
String.format(
PARAM_REQUIRED_STRING_FORMAT,
"evaluatorType",
TaskType.SWITCH,
workflowTask.getName());
context.buildConstraintViolationWithTemplate(message).addConstraintViolation();
valid = false;
} else if (workflowTask.getExpression() == null) {
String message =
String.format(
PARAM_REQUIRED_STRING_FORMAT,
"expression",
TaskType.SWITCH,
workflowTask.getName());
context.buildConstraintViolationWithTemplate(message).addConstraintViolation();
valid = false;
}
if (workflowTask.getDecisionCases() == null) {
String message =
String.format(
PARAM_REQUIRED_STRING_FORMAT,
"decisionCases",
TaskType.SWITCH,
workflowTask.getName());
context.buildConstraintViolationWithTemplate(message).addConstraintViolation();
valid = false;
} else if (workflowTask.getDecisionCases() != null
&& workflowTask.getDecisionCases().size() == 0) {
String message =
String.format(
"decisionCases should have atleast one task for taskType: %s taskName: %s",
TaskType.SWITCH, workflowTask.getName());
context.buildConstraintViolationWithTemplate(message).addConstraintViolation();
valid = false;
}
if ("javascript".equals(workflowTask.getEvaluatorType())
&& workflowTask.getExpression() != null) {
try {
validateScriptExpression(
workflowTask.getExpression(), workflowTask.getInputParameters());
} catch (Exception ee) {
String message =
String.format(
ee.getMessage() + ", taskType: SWITCH taskName %s",
workflowTask.getName());
context.buildConstraintViolationWithTemplate(message).addConstraintViolation();
valid = false;
}
}
return valid;
}
private boolean isDoWhileTaskValid(
WorkflowTask workflowTask, ConstraintValidatorContext context) {
boolean valid = true;
if (workflowTask.getLoopCondition() == null) {
String message =
String.format(
PARAM_REQUIRED_STRING_FORMAT,
"loopCondition",
TaskType.DO_WHILE,
workflowTask.getName());
context.buildConstraintViolationWithTemplate(message).addConstraintViolation();
valid = false;
}
if (workflowTask.getLoopOver() == null || workflowTask.getLoopOver().size() == 0) {
String message =
String.format(
PARAM_REQUIRED_STRING_FORMAT,
"loopOver",
TaskType.DO_WHILE,
workflowTask.getName());
context.buildConstraintViolationWithTemplate(message).addConstraintViolation();
valid = false;
}
return valid;
}
private boolean isDynamicTaskValid(
WorkflowTask workflowTask, ConstraintValidatorContext context) {
boolean valid = true;
if (workflowTask.getDynamicTaskNameParam() == null) {
String message =
String.format(
PARAM_REQUIRED_STRING_FORMAT,
"dynamicTaskNameParam",
TaskType.DYNAMIC,
workflowTask.getName());
context.buildConstraintViolationWithTemplate(message).addConstraintViolation();
valid = false;
}
return valid;
}
private boolean isWaitTaskValid(
WorkflowTask workflowTask, ConstraintValidatorContext context) {
boolean valid = true;
String duration =
Optional.ofNullable(workflowTask.getInputParameters().get(DURATION_INPUT))
.orElse("")
.toString();
String until =
Optional.ofNullable(workflowTask.getInputParameters().get(UNTIL_INPUT))
.orElse("")
.toString();
if (StringUtils.isNotBlank(duration) && StringUtils.isNotBlank(until)) {
String message =
"Both 'duration' and 'until' specified. Please provide only one input";
context.buildConstraintViolationWithTemplate(message).addConstraintViolation();
valid = false;
}
try {
if (StringUtils.isNotBlank(duration) && !(duration.startsWith("${"))) {
DateTimeUtils.parseDuration(duration);
} else if (StringUtils.isNotBlank(until) && !(until.startsWith("${"))) {
DateTimeUtils.parseDate(until);
}
} catch (DateTimeParseException e) {
String message = "Unable to parse date ";
context.buildConstraintViolationWithTemplate(message).addConstraintViolation();
valid = false;
} catch (IllegalArgumentException e) {
String message = "Either date or duration is passed as null ";
context.buildConstraintViolationWithTemplate(message).addConstraintViolation();
valid = false;
} catch (ParseException e) {
String message = "Unable to parse date ";
context.buildConstraintViolationWithTemplate(message).addConstraintViolation();
valid = false;
} catch (Exception e) {
String message = "Wait time specified is invalid. The duration must be in ";
context.buildConstraintViolationWithTemplate(message).addConstraintViolation();
valid = false;
}
return valid;
}
private boolean isDynamicForkJoinValid(
WorkflowTask workflowTask, ConstraintValidatorContext context) {
boolean valid = true;
// For DYNAMIC_FORK_JOIN_TASK support dynamicForkJoinTasksParam or combination of
// dynamicForkTasksParam and dynamicForkTasksInputParamName.
// Both are not allowed.
if (workflowTask.getDynamicForkJoinTasksParam() != null
&& (workflowTask.getDynamicForkTasksParam() != null
|| workflowTask.getDynamicForkTasksInputParamName() != null)) {
String message =
String.format(
"dynamicForkJoinTasksParam or combination of dynamicForkTasksInputParamName and dynamicForkTasksParam cam be used for taskType: %s taskName: %s",
TaskType.FORK_JOIN_DYNAMIC, workflowTask.getName());
context.buildConstraintViolationWithTemplate(message).addConstraintViolation();
return false;
}
if (workflowTask.getDynamicForkJoinTasksParam() != null) {
return valid;
} else {
if (workflowTask.getDynamicForkTasksParam() == null) {
String message =
String.format(
PARAM_REQUIRED_STRING_FORMAT,
"dynamicForkTasksParam",
TaskType.FORK_JOIN_DYNAMIC,
workflowTask.getName());
context.buildConstraintViolationWithTemplate(message).addConstraintViolation();
valid = false;
}
if (workflowTask.getDynamicForkTasksInputParamName() == null) {
String message =
String.format(
PARAM_REQUIRED_STRING_FORMAT,
"dynamicForkTasksInputParamName",
TaskType.FORK_JOIN_DYNAMIC,
workflowTask.getName());
context.buildConstraintViolationWithTemplate(message).addConstraintViolation();
valid = false;
}
}
return valid;
}
private boolean isHttpTaskValid(
WorkflowTask workflowTask, ConstraintValidatorContext context) {
boolean valid = true;
boolean isInputParameterSet = false;
boolean isInputTemplateSet = false;
// Either http_request in WorkflowTask inputParam should be set or in inputTemplate
// Taskdef should be set
if (workflowTask.getInputParameters() != null
&& workflowTask.getInputParameters().containsKey("http_request")) {
isInputParameterSet = true;
}
TaskDef taskDef =
Optional.ofNullable(workflowTask.getTaskDefinition())
.orElse(
ValidationContext.getMetadataDAO()
.getTaskDef(workflowTask.getName()));
if (taskDef != null
&& taskDef.getInputTemplate() != null
&& taskDef.getInputTemplate().containsKey("http_request")) {
isInputTemplateSet = true;
}
if (!(isInputParameterSet || isInputTemplateSet)) {
String message =
String.format(
PARAM_REQUIRED_STRING_FORMAT,
"inputParameters.http_request",
TaskType.HTTP,
workflowTask.getName());
context.buildConstraintViolationWithTemplate(message).addConstraintViolation();
valid = false;
}
return valid;
}
private boolean isForkJoinTaskValid(
WorkflowTask workflowTask, ConstraintValidatorContext context) {
boolean valid = true;
if (workflowTask.getForkTasks() != null && (workflowTask.getForkTasks().size() == 0)) {
String message =
String.format(
"forkTasks should have atleast one task for taskType: %s taskName: %s",
TaskType.FORK_JOIN, workflowTask.getName());
context.buildConstraintViolationWithTemplate(message).addConstraintViolation();
valid = false;
}
return valid;
}
private boolean isTerminateTaskValid(
WorkflowTask workflowTask, ConstraintValidatorContext context) {
boolean valid = true;
Object inputStatusParam =
workflowTask.getInputParameters().get(getTerminationStatusParameter());
if (workflowTask.isOptional()) {
String message =
String.format(
"terminate task cannot be optional, taskName: %s",
workflowTask.getName());
context.buildConstraintViolationWithTemplate(message).addConstraintViolation();
valid = false;
}
if (inputStatusParam == null || !validateInputStatus(inputStatusParam.toString())) {
String message =
String.format(
"terminate task must have an %s parameter and must be set to COMPLETED or FAILED, taskName: %s",
getTerminationStatusParameter(), workflowTask.getName());
context.buildConstraintViolationWithTemplate(message).addConstraintViolation();
valid = false;
}
return valid;
}
private boolean isKafkaPublishTaskValid(
WorkflowTask workflowTask, ConstraintValidatorContext context) {
boolean valid = true;
boolean isInputParameterSet = false;
boolean isInputTemplateSet = false;
// Either kafka_request in WorkflowTask inputParam should be set or in inputTemplate
// Taskdef should be set
if (workflowTask.getInputParameters() != null
&& workflowTask.getInputParameters().containsKey("kafka_request")) {
isInputParameterSet = true;
}
TaskDef taskDef =
Optional.ofNullable(workflowTask.getTaskDefinition())
.orElse(
ValidationContext.getMetadataDAO()
.getTaskDef(workflowTask.getName()));
if (taskDef != null
&& taskDef.getInputTemplate() != null
&& taskDef.getInputTemplate().containsKey("kafka_request")) {
isInputTemplateSet = true;
}
if (!(isInputParameterSet || isInputTemplateSet)) {
String message =
String.format(
PARAM_REQUIRED_STRING_FORMAT,
"inputParameters.kafka_request",
TaskType.KAFKA_PUBLISH,
workflowTask.getName());
context.buildConstraintViolationWithTemplate(message).addConstraintViolation();
valid = false;
}
return valid;
}
private boolean isSubWorkflowTaskValid(
WorkflowTask workflowTask, ConstraintValidatorContext context) {
boolean valid = true;
if (workflowTask.getSubWorkflowParam() == null) {
String message =
String.format(
PARAM_REQUIRED_STRING_FORMAT,
"subWorkflowParam",
TaskType.SUB_WORKFLOW,
workflowTask.getName());
context.buildConstraintViolationWithTemplate(message).addConstraintViolation();
valid = false;
}
return valid;
}
private boolean isJSONJQTransformTaskValid(
WorkflowTask workflowTask, ConstraintValidatorContext context) {
boolean valid = true;
boolean isInputParameterSet = false;
boolean isInputTemplateSet = false;
// Either queryExpression in WorkflowTask inputParam should be set or in inputTemplate
// Taskdef should be set
if (workflowTask.getInputParameters() != null
&& workflowTask.getInputParameters().containsKey("queryExpression")) {
isInputParameterSet = true;
}
TaskDef taskDef =
Optional.ofNullable(workflowTask.getTaskDefinition())
.orElse(
ValidationContext.getMetadataDAO()
.getTaskDef(workflowTask.getName()));
if (taskDef != null
&& taskDef.getInputTemplate() != null
&& taskDef.getInputTemplate().containsKey("queryExpression")) {
isInputTemplateSet = true;
}
if (!(isInputParameterSet || isInputTemplateSet)) {
String message =
String.format(
PARAM_REQUIRED_STRING_FORMAT,
"inputParameters.queryExpression",
TaskType.JSON_JQ_TRANSFORM,
workflowTask.getName());
context.buildConstraintViolationWithTemplate(message).addConstraintViolation();
valid = false;
}
return valid;
}
}
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/main/java/com/netflix/conductor/validations/ValidationContext.java | core/src/main/java/com/netflix/conductor/validations/ValidationContext.java | /*
* Copyright 2022 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.validations;
import com.netflix.conductor.dao.MetadataDAO;
/**
* This context is defined to get access to {@link MetadataDAO} inside {@link
* WorkflowTaskTypeConstraint} constraint validator to validate {@link
* com.netflix.conductor.common.metadata.workflow.WorkflowTask}.
*/
public class ValidationContext {

    /**
     * Written once during application startup via {@link #initialize(MetadataDAO)} and read by
     * validator instances, potentially on other threads. Declared volatile so that the startup
     * write is guaranteed visible to those readers (the original plain static field had no such
     * guarantee).
     */
    private static volatile MetadataDAO metadataDAO;

    /** Installs the {@link MetadataDAO} used by constraint validators. Call once at startup. */
    public static void initialize(MetadataDAO metadataDAO) {
        ValidationContext.metadataDAO = metadataDAO;
    }

    /** Returns the installed DAO; null if {@link #initialize(MetadataDAO)} was never called. */
    public static MetadataDAO getMetadataDAO() {
        return metadataDAO;
    }
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/main/java/com/netflix/conductor/core/WorkflowContext.java | core/src/main/java/com/netflix/conductor/core/WorkflowContext.java | /*
* Copyright 2022 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core;
/** Store the authentication context, app or username or both */
/** Store the authentication context, app or username or both */
public class WorkflowContext {

    // Inheritable so child threads spawned during a request observe the same context; the
    // initial value is an empty-app/empty-user context.
    public static final ThreadLocal<WorkflowContext> THREAD_LOCAL =
            InheritableThreadLocal.withInitial(() -> new WorkflowContext("", ""));

    private final String clientApp;
    private final String userName;

    /** Creates a context for the given client app with no user name. */
    public WorkflowContext(String clientApp) {
        this(clientApp, null);
    }

    /** Creates a context carrying both the client app and the user name. */
    public WorkflowContext(String clientApp, String userName) {
        this.clientApp = clientApp;
        this.userName = userName;
    }

    /** Returns the context bound to the current thread. */
    public static WorkflowContext get() {
        return THREAD_LOCAL.get();
    }

    /** Binds {@code ctx} to the current thread. */
    public static void set(WorkflowContext ctx) {
        THREAD_LOCAL.set(ctx);
    }

    /** Clears the current thread's context; the next {@link #get()} yields a fresh default. */
    public static void unset() {
        THREAD_LOCAL.remove();
    }

    /**
     * @return the clientApp
     */
    public String getClientApp() {
        return clientApp;
    }

    /**
     * @return the username
     */
    public String getUserName() {
        return userName;
    }
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/main/java/com/netflix/conductor/core/LifecycleAwareComponent.java | core/src/main/java/com/netflix/conductor/core/LifecycleAwareComponent.java | /*
* Copyright 2022 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.SmartLifecycle;
/**
 * Base class for components that participate in the Spring lifecycle: tracks a running flag,
 * logs transitions, and offers {@link #doStart()}/{@link #doStop()} hooks for subclasses.
 *
 * <p>start()/stop() are final so the flag/logging contract cannot be bypassed; subclasses
 * override the do* hooks instead.
 */
public abstract class LifecycleAwareComponent implements SmartLifecycle {

    // volatile: the flag may be flipped by the container thread and read by worker threads.
    private volatile boolean running = false;

    private static final Logger LOGGER = LoggerFactory.getLogger(LifecycleAwareComponent.class);

    @Override
    public final void start() {
        // Flag is set before doStart() so work kicked off by the hook already sees isRunning().
        running = true;
        LOGGER.info("{} started.", getClass().getSimpleName());
        doStart();
    }

    @Override
    public final void stop() {
        // Flag is cleared before doStop() so in-flight work can observe the shutdown.
        running = false;
        LOGGER.info("{} stopped.", getClass().getSimpleName());
        doStop();
    }

    @Override
    public final boolean isRunning() {
        return running;
    }

    /** Hook invoked after the component is marked running; default is a no-op. */
    public void doStart() {}

    /** Hook invoked after the component is marked stopped; default is a no-op. */
    public void doStop() {}
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/main/java/com/netflix/conductor/core/storage/DummyPayloadStorage.java | core/src/main/java/com/netflix/conductor/core/storage/DummyPayloadStorage.java | /*
* Copyright 2022 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.storage;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.util.UUID;
import org.apache.commons.io.IOUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.netflix.conductor.common.run.ExternalStorageLocation;
import com.netflix.conductor.common.utils.ExternalPayloadStorage;
import com.fasterxml.jackson.databind.ObjectMapper;
/**
* A dummy implementation of {@link ExternalPayloadStorage} used when no external payload is
* configured
*/
public class DummyPayloadStorage implements ExternalPayloadStorage {

    private static final Logger LOGGER = LoggerFactory.getLogger(DummyPayloadStorage.class);

    private ObjectMapper objectMapper;
    private File payloadDir;

    /**
     * Creates the storage backed by a fresh temporary directory. If the directory cannot be
     * created, {@code payloadDir} stays null and subsequent upload/download calls will fail.
     */
    public DummyPayloadStorage() {
        try {
            this.objectMapper = new ObjectMapper();
            this.payloadDir = Files.createTempDirectory("payloads").toFile();
            LOGGER.info(
                    "{} initialized in directory: {}",
                    this.getClass().getSimpleName(),
                    payloadDir.getAbsolutePath());
        } catch (IOException ioException) {
            // FIX: include the exception so the failure's stack trace is not lost.
            LOGGER.error(
                    "Exception encountered while creating payloads directory : {}",
                    ioException.getMessage(),
                    ioException);
        }
    }

    /**
     * Returns a location whose path is the given prefix plus a random UUID-based file name.
     * The {@code operation} and {@code payloadType} arguments are intentionally ignored.
     */
    @Override
    public ExternalStorageLocation getLocation(
            Operation operation, PayloadType payloadType, String path) {
        ExternalStorageLocation location = new ExternalStorageLocation();
        location.setPath(path + UUID.randomUUID() + ".json");
        return location;
    }

    /**
     * Writes the payload stream to a file under the payload directory. The input stream is
     * always closed; the output stream is closed via try-with-resources (the original
     * implementation leaked it).
     */
    @Override
    public void upload(String path, InputStream payload, long payloadSize) {
        File file = new File(payloadDir, path);
        String filePath = file.getAbsolutePath();
        try {
            if (!file.exists() && file.createNewFile()) {
                LOGGER.debug("Created file: {}", filePath);
            }
            // FIX: close the FileOutputStream deterministically (was previously never closed).
            try (FileOutputStream out = new FileOutputStream(file)) {
                IOUtils.copy(payload, out);
            }
            LOGGER.debug("Written to {}", filePath);
        } catch (IOException e) {
            // just handle this exception here and return empty map so that test will fail in case
            // this exception is thrown
            LOGGER.error("Error writing to {}", filePath, e);
        } finally {
            try {
                if (payload != null) {
                    payload.close();
                }
            } catch (IOException e) {
                LOGGER.warn("Unable to close input stream when writing to file");
            }
        }
    }

    /**
     * Opens a stream over the stored payload file, or returns null if it cannot be read.
     * The caller is responsible for closing the returned stream.
     */
    @Override
    public InputStream download(String path) {
        try {
            LOGGER.debug("Reading from {}", path);
            return new FileInputStream(new File(payloadDir, path));
        } catch (IOException e) {
            LOGGER.error("Error reading {}", path, e);
            return null;
        }
    }
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/main/java/com/netflix/conductor/core/dal/ExecutionDAOFacade.java | core/src/main/java/com/netflix/conductor/core/dal/ExecutionDAOFacade.java | /*
* Copyright 2022 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.dal;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
import com.netflix.conductor.common.metadata.events.EventExecution;
import com.netflix.conductor.common.metadata.tasks.PollData;
import com.netflix.conductor.common.metadata.tasks.Task;
import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.tasks.TaskExecLog;
import com.netflix.conductor.common.run.SearchResult;
import com.netflix.conductor.common.run.TaskSummary;
import com.netflix.conductor.common.run.Workflow;
import com.netflix.conductor.common.run.WorkflowSummary;
import com.netflix.conductor.common.utils.ExternalPayloadStorage;
import com.netflix.conductor.core.config.ConductorProperties;
import com.netflix.conductor.core.events.queue.Message;
import com.netflix.conductor.core.exception.NotFoundException;
import com.netflix.conductor.core.exception.TerminateWorkflowException;
import com.netflix.conductor.core.exception.TransientException;
import com.netflix.conductor.core.utils.ExternalPayloadStorageUtils;
import com.netflix.conductor.core.utils.QueueUtils;
import com.netflix.conductor.dao.*;
import com.netflix.conductor.metrics.Monitors;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import jakarta.annotation.PreDestroy;
import static com.netflix.conductor.core.utils.Utils.DECIDER_QUEUE;
/**
* Service that acts as a facade for accessing execution data from the {@link ExecutionDAO}, {@link
* RateLimitingDAO} and {@link IndexDAO} storage layers
*/
@SuppressWarnings("SpringJavaInjectionPointsAutowiringInspection")
@Component
public class ExecutionDAOFacade {
private static final Logger LOGGER = LoggerFactory.getLogger(ExecutionDAOFacade.class);
// Index-document field flagging that a workflow/task has been archived in the IndexDAO.
private static final String ARCHIVED_FIELD = "archived";
// Index-document field holding the full serialized workflow JSON (read back on archival lookups).
private static final String RAW_JSON_FIELD = "rawJSON";
// Primary execution store for workflow and task state.
private final ExecutionDAO executionDAO;
private final QueueDAO queueDAO;
private final IndexDAO indexDAO;
private final RateLimitingDAO rateLimitingDao;
private final ConcurrentExecutionLimitDAO concurrentExecutionLimitDAO;
private final PollDataDAO pollDataDAO;
private final ObjectMapper objectMapper;
private final ConductorProperties properties;
private final ExternalPayloadStorageUtils externalPayloadStorageUtils;
// Scheduler used to defer index updates for short-running workflows (see updateWorkflow).
private final ScheduledThreadPoolExecutor scheduledThreadPoolExecutor;
/**
 * Wires the facade to the underlying storage, queue, indexing, rate-limiting and poll-data DAOs,
 * and creates the scheduler used to delay index updates for short-running workflows.
 */
public ExecutionDAOFacade(
        ExecutionDAO executionDAO,
        QueueDAO queueDAO,
        IndexDAO indexDAO,
        RateLimitingDAO rateLimitingDao,
        ConcurrentExecutionLimitDAO concurrentExecutionLimitDAO,
        PollDataDAO pollDataDAO,
        ObjectMapper objectMapper,
        ConductorProperties properties,
        ExternalPayloadStorageUtils externalPayloadStorageUtils) {
    this.executionDAO = executionDAO;
    this.queueDAO = queueDAO;
    this.indexDAO = indexDAO;
    this.rateLimitingDao = rateLimitingDao;
    this.concurrentExecutionLimitDAO = concurrentExecutionLimitDAO;
    this.pollDataDAO = pollDataDAO;
    this.objectMapper = objectMapper;
    this.properties = properties;
    this.externalPayloadStorageUtils = externalPayloadStorageUtils;
    // Fixed pool of 4 threads. The rejection handler drops the delayed index update (recording
    // a metric) rather than blocking the caller.
    this.scheduledThreadPoolExecutor =
            new ScheduledThreadPoolExecutor(
                    4,
                    (runnable, executor) -> {
                        LOGGER.warn(
                                "Request {} to delay updating index dropped in executor {}",
                                runnable,
                                executor);
                        Monitors.recordDiscardedIndexingCount("delayQueue");
                    });
    // Remove cancelled delay tasks from the work queue immediately instead of at expiry.
    this.scheduledThreadPoolExecutor.setRemoveOnCancelPolicy(true);
}
/** Gracefully drains the delayed-indexing scheduler during application shutdown. */
@PreDestroy
public void shutdownExecutorService() {
    try {
        LOGGER.info("Gracefully shutdown executor service");
        scheduledThreadPoolExecutor.shutdown();
        // Wait up to the configured async-update delay for pending index updates to run.
        if (scheduledThreadPoolExecutor.awaitTermination(
                properties.getAsyncUpdateDelay().getSeconds(), TimeUnit.SECONDS)) {
            LOGGER.debug("tasks completed, shutting down");
        } else {
            LOGGER.warn(
                    "Forcing shutdown after waiting for {} seconds",
                    properties.getAsyncUpdateDelay());
            scheduledThreadPoolExecutor.shutdownNow();
        }
    } catch (InterruptedException ie) {
        LOGGER.warn(
                "Shutdown interrupted, invoking shutdownNow on scheduledThreadPoolExecutor for delay queue");
        scheduledThreadPoolExecutor.shutdownNow();
        // Restore the interrupt flag for callers higher up the stack.
        Thread.currentThread().interrupt();
    }
}
/**
 * Fetches a workflow by id and inlines any externally stored payload data (workflow and task
 * inputs/outputs) from external payload storage before returning.
 *
 * @param workflowId the id of the workflow to fetch
 * @param includeTasks if true, task data is included in the returned workflow
 * @return the fully populated {@link WorkflowModel}
 */
public WorkflowModel getWorkflowModel(String workflowId, boolean includeTasks) {
    WorkflowModel workflowModel = getWorkflowModelFromDataStore(workflowId, includeTasks);
    populateWorkflowAndTaskPayloadData(workflowModel);
    return workflowModel;
}
/**
* Fetches the {@link Workflow} object from the data store given the id. Attempts to fetch from
* {@link ExecutionDAO} first, if not found, attempts to fetch from {@link IndexDAO}.
*
* @param workflowId the id of the workflow to be fetched
* @param includeTasks if true, fetches the {@link Task} data in the workflow.
* @return the {@link Workflow} object
* @throws NotFoundException no such {@link Workflow} is found.
* @throws TransientException parsing the {@link Workflow} object fails.
*/
public Workflow getWorkflow(String workflowId, boolean includeTasks) {
    // Look the model up in the data store (execution store, then index), then convert it
    // to the externally-facing DTO form.
    WorkflowModel model = getWorkflowModelFromDataStore(workflowId, includeTasks);
    return model.toWorkflow();
}
/**
 * Loads a {@link WorkflowModel} by id: from the {@link ExecutionDAO} first and, if absent
 * there, from the archived raw JSON stored in the {@link IndexDAO}.
 *
 * @param workflowId the id of the workflow to fetch
 * @param includeTasks if true, task data is retained in the returned workflow
 * @return the workflow, never null
 * @throws NotFoundException if the workflow exists in neither store
 * @throws TransientException if the archived JSON cannot be deserialized
 */
private WorkflowModel getWorkflowModelFromDataStore(String workflowId, boolean includeTasks) {
    WorkflowModel workflow = executionDAO.getWorkflow(workflowId, includeTasks);
    if (workflow == null) {
        LOGGER.debug("Workflow {} not found in executionDAO, checking indexDAO", workflowId);
        String json = indexDAO.get(workflowId, RAW_JSON_FIELD);
        if (json == null) {
            String errorMsg = String.format("No such workflow found by id: %s", workflowId);
            LOGGER.error(errorMsg);
            throw new NotFoundException(errorMsg);
        }
        try {
            workflow = objectMapper.readValue(json, WorkflowModel.class);
            // Archived JSON always carries tasks; drop them if the caller did not ask for them.
            if (!includeTasks) {
                workflow.getTasks().clear();
            }
        } catch (IOException e) {
            String errorMsg = String.format("Error reading workflow: %s", workflowId);
            // FIX: log the cause too, so the deserialization failure's stack trace is kept.
            LOGGER.error(errorMsg, e);
            throw new TransientException(errorMsg, e);
        }
    }
    return workflow;
}
/**
* Retrieve all workflow executions with the given correlationId and workflow type Uses the
* {@link IndexDAO} to search across workflows if the {@link ExecutionDAO} cannot perform
* searches across workflows.
*
* @param workflowName, workflow type to be queried
* @param correlationId the correlation id to be queried
* @param includeTasks if true, fetches the {@link Task} data within the workflows
* @return the list of {@link Workflow} executions matching the correlationId
*/
public List<Workflow> getWorkflowsByCorrelationId(
        String workflowName, String correlationId, boolean includeTasks) {
    // If the execution store cannot query by correlation id, search the index instead and
    // hydrate each hit individually (in parallel).
    if (!executionDAO.canSearchAcrossWorkflows()) {
        String query =
                "correlationId='" + correlationId + "' AND workflowType='" + workflowName + "'";
        // Capped at 1000 index hits per lookup.
        SearchResult<String> result = indexDAO.searchWorkflows(query, "*", 0, 1000, null);
        return result.getResults().stream()
                .parallel()
                .map(
                        workflowId -> {
                            try {
                                return getWorkflow(workflowId, includeTasks);
                            } catch (NotFoundException e) {
                                // This might happen when the workflow archival failed and the
                                // workflow was removed from primary datastore
                                LOGGER.error(
                                        "Error getting the workflow: {} for correlationId: {} from datastore/index",
                                        workflowId,
                                        correlationId,
                                        e);
                                return null;
                            }
                        })
                // Drop hits that could not be hydrated from either store.
                .filter(Objects::nonNull)
                .collect(Collectors.toList());
    }
    return executionDAO
            .getWorkflowsByCorrelationId(workflowName, correlationId, includeTasks)
            .stream()
            .map(WorkflowModel::toWorkflow)
            .collect(Collectors.toList());
}
/** Returns workflows of the given type created within [startTime, endTime], as DTOs. */
public List<Workflow> getWorkflowsByName(String workflowName, Long startTime, Long endTime) {
    return executionDAO.getWorkflowsByType(workflowName, startTime, endTime).stream()
            .map(WorkflowModel::toWorkflow)
            .collect(Collectors.toList());
}
/** Returns the pending (non-terminal) workflows for the given name and version, as DTOs. */
public List<Workflow> getPendingWorkflowsByName(String workflowName, int version) {
    return executionDAO.getPendingWorkflowsByType(workflowName, version).stream()
            .map(WorkflowModel::toWorkflow)
            .collect(Collectors.toList());
}
/** Returns the ids of currently running workflows for the given name and version. */
public List<String> getRunningWorkflowIds(String workflowName, int version) {
    return executionDAO.getRunningWorkflowIds(workflowName, version);
}
/** Returns the number of pending workflow executions for the given workflow name. */
public long getPendingWorkflowCount(String workflowName) {
    return executionDAO.getPendingWorkflowCount(workflowName);
}
/**
* Creates a new workflow in the data store
*
* @param workflowModel the workflow to be created
* @return the id of the created workflow
*/
public String createWorkflow(WorkflowModel workflowModel) {
    // Upload oversized input/output payloads to external storage before persisting.
    externalizeWorkflowData(workflowModel);
    executionDAO.createWorkflow(workflowModel);
    // Add to decider queue
    queueDAO.push(
            DECIDER_QUEUE,
            workflowModel.getWorkflowId(),
            workflowModel.getPriority(),
            properties.getWorkflowOffsetTimeout().getSeconds());
    // Index synchronously or asynchronously per configuration.
    if (properties.isAsyncIndexingEnabled()) {
        indexDAO.asyncIndexWorkflow(new WorkflowSummary(workflowModel.toWorkflow()));
    } else {
        indexDAO.indexWorkflow(new WorkflowSummary(workflowModel.toWorkflow()));
    }
    return workflowModel.getWorkflowId();
}
/** Offloads oversized task input and output payloads to external payload storage. */
private void externalizeTaskData(TaskModel taskModel) {
    externalPayloadStorageUtils.verifyAndUpload(
            taskModel, ExternalPayloadStorage.PayloadType.TASK_INPUT);
    externalPayloadStorageUtils.verifyAndUpload(
            taskModel, ExternalPayloadStorage.PayloadType.TASK_OUTPUT);
}
/** Offloads oversized workflow input and output payloads to external payload storage. */
private void externalizeWorkflowData(WorkflowModel workflowModel) {
    externalPayloadStorageUtils.verifyAndUpload(
            workflowModel, ExternalPayloadStorage.PayloadType.WORKFLOW_INPUT);
    externalPayloadStorageUtils.verifyAndUpload(
            workflowModel, ExternalPayloadStorage.PayloadType.WORKFLOW_OUTPUT);
}
/**
* Updates the given workflow in the data store
*
* @param workflowModel the workflow tp be updated
* @return the id of the updated workflow
*/
public String updateWorkflow(WorkflowModel workflowModel) {
    workflowModel.setUpdatedTime(System.currentTimeMillis());
    // Stamp the end time once the workflow reaches a terminal state.
    if (workflowModel.getStatus().isTerminal()) {
        workflowModel.setEndTime(System.currentTimeMillis());
    }
    externalizeWorkflowData(workflowModel);
    executionDAO.updateWorkflow(workflowModel);
    if (properties.isAsyncIndexingEnabled()) {
        // Short-running terminal workflows are indexed after a delay, to coalesce the many
        // near-simultaneous updates they generate into a single index write.
        if (workflowModel.getStatus().isTerminal()
                && workflowModel.getEndTime() - workflowModel.getCreateTime()
                        < properties.getAsyncUpdateShortRunningWorkflowDuration().toMillis()) {
            final String workflowId = workflowModel.getWorkflowId();
            DelayWorkflowUpdate delayWorkflowUpdate = new DelayWorkflowUpdate(workflowId);
            LOGGER.debug(
                    "Delayed updating workflow: {} in the index by {} seconds",
                    workflowId,
                    properties.getAsyncUpdateDelay());
            scheduledThreadPoolExecutor.schedule(
                    delayWorkflowUpdate,
                    properties.getAsyncUpdateDelay().getSeconds(),
                    TimeUnit.SECONDS);
            Monitors.recordWorkerQueueSize(
                    "delayQueue", scheduledThreadPoolExecutor.getQueue().size());
        } else {
            indexDAO.asyncIndexWorkflow(new WorkflowSummary(workflowModel.toWorkflow()));
        }
        // With async indexing, tasks are only indexed once the workflow is terminal.
        if (workflowModel.getStatus().isTerminal()) {
            workflowModel
                    .getTasks()
                    .forEach(
                            taskModel ->
                                    indexDAO.asyncIndexTask(
                                            new TaskSummary(taskModel.toTask())));
        }
    } else {
        indexDAO.indexWorkflow(new WorkflowSummary(workflowModel.toWorkflow()));
    }
    return workflowModel.getWorkflowId();
}
/** Removes the given workflow id from the pending list of the given workflow type. */
public void removeFromPendingWorkflow(String workflowType, String workflowId) {
    executionDAO.removeFromPendingWorkflow(workflowType, workflowId);
}
/**
* Removes the workflow from the data store.
*
* @param workflowId the id of the workflow to be removed
* @param archiveWorkflow if true, the workflow and associated tasks will be archived in the
* {@link IndexDAO} after removal from {@link ExecutionDAO}.
*/
public void removeWorkflow(String workflowId, boolean archiveWorkflow) {
    // Fetch with tasks BEFORE deleting, so index/queue cleanup below has the full state.
    WorkflowModel workflow = getWorkflowModelFromDataStore(workflowId, true);
    executionDAO.removeWorkflow(workflowId);
    try {
        removeWorkflowIndex(workflow, archiveWorkflow);
    } catch (JsonProcessingException e) {
        throw new TransientException("Workflow can not be serialized to json", e);
    }
    workflow.getTasks()
            .forEach(
                    task -> {
                        try {
                            removeTaskIndex(workflow, task, archiveWorkflow);
                        } catch (JsonProcessingException e) {
                            throw new TransientException(
                                    String.format(
                                            "Task %s of workflow %s can not be serialized to json",
                                            task.getTaskId(), workflow.getWorkflowId()),
                                    e);
                        }
                        // Queue removal failures are logged and tolerated; the workflow data
                        // is already gone from the primary store.
                        try {
                            queueDAO.remove(QueueUtils.getQueueName(task), task.getTaskId());
                        } catch (Exception e) {
                            // FIX: the arguments were previously passed as (workflowId, taskId,
                            // queue); order now matches the placeholders in the message.
                            LOGGER.info(
                                    "Error removing task: {} of workflow: {} from {} queue",
                                    task.getTaskId(),
                                    workflowId,
                                    QueueUtils.getQueueName(task),
                                    e);
                        }
                    });
    try {
        queueDAO.remove(DECIDER_QUEUE, workflowId);
    } catch (Exception e) {
        LOGGER.info("Error removing workflow: {} from decider queue", workflowId, e);
    }
}
/**
 * Archives the workflow's raw JSON in the index (terminal workflows only) or, when not
 * archiving, removes the workflow document from the index entirely.
 *
 * @throws JsonProcessingException if the workflow cannot be serialized for archival
 * @throws IllegalArgumentException if archival is requested for a non-terminal workflow
 */
private void removeWorkflowIndex(WorkflowModel workflow, boolean archiveWorkflow)
        throws JsonProcessingException {
    if (archiveWorkflow) {
        if (workflow.getStatus().isTerminal()) {
            // Only allow archival if workflow is in terminal state
            // DO NOT archive async, since if archival errors out, workflow data will be lost
            indexDAO.updateWorkflow(
                    workflow.getWorkflowId(),
                    new String[] {RAW_JSON_FIELD, ARCHIVED_FIELD},
                    new Object[] {objectMapper.writeValueAsString(workflow), true});
        } else {
            throw new IllegalArgumentException(
                    String.format(
                            "Cannot archive workflow: %s with status: %s",
                            workflow.getWorkflowId(), workflow.getStatus()));
        }
    } else {
        // Not archiving, also remove workflow from index
        indexDAO.asyncRemoveWorkflow(workflow.getWorkflowId());
    }
}
/**
 * Archives/removes the workflow's index documents and removes the workflow from the primary
 * store with a TTL, so the data expires after {@code ttlSeconds}.
 *
 * @throws TransientException if any step fails (errors are also recorded as DAO metrics)
 */
public void removeWorkflowWithExpiry(
        String workflowId, boolean archiveWorkflow, int ttlSeconds) {
    try {
        WorkflowModel workflow = getWorkflowModelFromDataStore(workflowId, true);
        removeWorkflowIndex(workflow, archiveWorkflow);
        // remove workflow from DAO with TTL
        executionDAO.removeWorkflowWithExpiry(workflowId, ttlSeconds);
    } catch (Exception e) {
        Monitors.recordDaoError("executionDao", "removeWorkflow");
        throw new TransientException("Error removing workflow: " + workflowId, e);
    }
}
/**
* Reset the workflow state by removing from the {@link ExecutionDAO} and removing this workflow
* from the {@link IndexDAO}.
*
* @param workflowId the workflow id to be reset
*/
public void resetWorkflow(String workflowId) {
    // Verifies the workflow exists (throws NotFoundException otherwise) before removal.
    getWorkflowModelFromDataStore(workflowId, true);
    executionDAO.removeWorkflow(workflowId);
    try {
        if (properties.isAsyncIndexingEnabled()) {
            indexDAO.asyncRemoveWorkflow(workflowId);
        } else {
            indexDAO.removeWorkflow(workflowId);
        }
    } catch (Exception e) {
        throw new TransientException("Error resetting workflow state: " + workflowId, e);
    }
}
/** Externalizes oversized payloads, then persists the given tasks in the execution store. */
public List<TaskModel> createTasks(List<TaskModel> tasks) {
    tasks.forEach(this::externalizeTaskData);
    return executionDAO.createTasks(tasks);
}
/** Returns the workflow's tasks as DTOs. */
public List<Task> getTasksForWorkflow(String workflowId) {
    return getTaskModelsForWorkflow(workflowId).stream()
            .map(TaskModel::toTask)
            .collect(Collectors.toList());
}
/** Returns the workflow's tasks as internal models. */
public List<TaskModel> getTaskModelsForWorkflow(String workflowId) {
    return executionDAO.getTasksForWorkflow(workflowId);
}
/** Fetches a task by id and inlines any externally stored payload data; null if absent. */
public TaskModel getTaskModel(String taskId) {
    TaskModel taskModel = getTaskFromDatastore(taskId);
    if (taskModel != null) {
        populateTaskData(taskModel);
    }
    return taskModel;
}
/** Fetches a task by id as a DTO (without payload hydration); null if absent. */
public Task getTask(String taskId) {
    TaskModel taskModel = getTaskFromDatastore(taskId);
    if (taskModel != null) {
        return taskModel.toTask();
    }
    return null;
}
private TaskModel getTaskFromDatastore(String taskId) {
    return executionDAO.getTask(taskId);
}
/** Returns up to {@code count} tasks of the given type starting from {@code startKey}. */
public List<Task> getTasksByName(String taskName, String startKey, int count) {
    return executionDAO.getTasks(taskName, startKey, count).stream()
            .map(TaskModel::toTask)
            .collect(Collectors.toList());
}
/** Returns the pending tasks for the given task type, as DTOs. */
public List<Task> getPendingTasksForTaskType(String taskType) {
    return executionDAO.getPendingTasksForTaskType(taskType).stream()
            .map(TaskModel::toTask)
            .collect(Collectors.toList());
}
/** Returns the number of in-progress tasks for the given task definition name. */
public long getInProgressTaskCount(String taskDefName) {
    return executionDAO.getInProgressTaskCount(taskDefName);
}
/**
* Sets the update time for the task. Sets the end time for the task (if task is in terminal
* state and end time is not set). Updates the task in the {@link ExecutionDAO} first, then
* stores it in the {@link IndexDAO}.
*
* @param taskModel the task to be updated in the data store
* @throws TransientException if the {@link IndexDAO} or {@link ExecutionDAO} operations fail.
* @throws com.netflix.conductor.core.exception.NonTransientException if the externalization of
* payload fails.
*/
public void updateTask(TaskModel taskModel) {
    if (taskModel.getStatus() != null) {
        // Refresh the update time unless the task is terminal and already stamped
        // (i.e. the first terminal update sets it; later terminal updates keep it).
        if (!taskModel.getStatus().isTerminal()
                || (taskModel.getStatus().isTerminal() && taskModel.getUpdateTime() == 0)) {
            taskModel.setUpdateTime(System.currentTimeMillis());
        }
        // Stamp the end time once, on the first terminal update.
        if (taskModel.getStatus().isTerminal() && taskModel.getEndTime() == 0) {
            taskModel.setEndTime(System.currentTimeMillis());
        }
    }
    externalizeTaskData(taskModel);
    executionDAO.updateTask(taskModel);
    try {
        /*
         * Indexing a task for every update adds a lot of volume. That is ok but if async indexing
         * is enabled and tasks are stored in memory until a block has completed, we would lose a lot
         * of tasks on a system failure. So only index for each update if async indexing is not enabled.
         * If it *is* enabled, tasks will be indexed only when a workflow is in terminal state.
         */
        if (!properties.isAsyncIndexingEnabled() && properties.isTaskIndexingEnabled()) {
            indexDAO.indexTask(new TaskSummary(taskModel.toTask()));
        }
    } catch (TerminateWorkflowException e) {
        // re-throw it so we can terminate the workflow
        throw e;
    } catch (Exception e) {
        String errorMsg =
                String.format(
                        "Error updating task: %s in workflow: %s",
                        taskModel.getTaskId(), taskModel.getWorkflowInstanceId());
        LOGGER.error(errorMsg, e);
        throw new TransientException(errorMsg, e);
    }
}
/** Updates each task in turn via {@link #updateTask(TaskModel)}. */
public void updateTasks(List<TaskModel> tasks) {
    tasks.forEach(this::updateTask);
}
/** Removes the task from the execution store (the index entry is untouched). */
public void removeTask(String taskId) {
    executionDAO.removeTask(taskId);
}
/**
 * Marks the task's index document as archived (terminal tasks only), or removes the document
 * from the index entirely when archival was not requested.
 */
private void removeTaskIndex(WorkflowModel workflow, TaskModel task, boolean archiveTask)
        throws JsonProcessingException {
    if (!archiveTask) {
        // Not archiving, remove task from index
        indexDAO.asyncRemoveTask(workflow.getWorkflowId(), task.getTaskId());
        return;
    }
    if (!task.getStatus().isTerminal()) {
        throw new IllegalArgumentException(
                String.format(
                        "Cannot archive task: %s of workflow: %s with status: %s",
                        task.getTaskId(), workflow.getWorkflowId(), task.getStatus()));
    }
    // Only allow archival if task is in terminal state
    // DO NOT archive async, since if archival errors out, task data will be lost
    indexDAO.updateTask(
            workflow.getWorkflowId(),
            task.getTaskId(),
            new String[] {ARCHIVED_FIELD},
            new Object[] {true});
}
/** Refreshes the task's update time and persists it, extending its lease. */
public void extendLease(TaskModel taskModel) {
    taskModel.setUpdateTime(System.currentTimeMillis());
    executionDAO.updateTask(taskModel);
}
/** Returns poll data for all domains of the given task name. */
public List<PollData> getTaskPollData(String taskName) {
    return pollDataDAO.getPollData(taskName);
}
/** Returns poll data for all task names and domains. */
public List<PollData> getAllPollData() {
    return pollDataDAO.getAllPollData();
}
/** Returns poll data for the given task/domain pair, or null if the lookup fails. */
public PollData getTaskPollDataByDomain(String taskName, String domain) {
    try {
        return pollDataDAO.getPollData(taskName, domain);
    } catch (Exception e) {
        LOGGER.error(
                "Error fetching pollData for task: '{}', domain: '{}'", taskName, domain, e);
        return null;
    }
}
/** Records the last poll by a worker; failures are logged and counted, never thrown. */
public void updateTaskLastPoll(String taskName, String domain, String workerId) {
    try {
        pollDataDAO.updateLastPollData(taskName, domain, workerId);
    } catch (Exception e) {
        LOGGER.error(
                "Error updating PollData for task: {} in domain: {} from worker: {}",
                taskName,
                domain,
                workerId,
                e);
        Monitors.error(this.getClass().getCanonicalName(), "updateTaskLastPoll");
    }
}
/**
* Save the {@link EventExecution} to the data store Saves to {@link ExecutionDAO} first, if
* this succeeds then saves to the {@link IndexDAO}.
*
* @param eventExecution the {@link EventExecution} to be saved
* @return true if save succeeds, false otherwise.
*/
public boolean addEventExecution(EventExecution eventExecution) {
    boolean added = executionDAO.addEventExecution(eventExecution);
    // Index only after the primary-store write succeeded.
    if (added) {
        indexEventExecution(eventExecution);
    }
    return added;
}
/** Updates the event execution in the execution store, then re-indexes it. */
public void updateEventExecution(EventExecution eventExecution) {
    executionDAO.updateEventExecution(eventExecution);
    indexEventExecution(eventExecution);
}
/** Indexes the event execution if event-execution indexing is enabled (sync or async). */
private void indexEventExecution(EventExecution eventExecution) {
    if (properties.isEventExecutionIndexingEnabled()) {
        if (properties.isAsyncIndexingEnabled()) {
            indexDAO.asyncAddEventExecution(eventExecution);
        } else {
            indexDAO.addEventExecution(eventExecution);
        }
    }
}
/** Removes the event execution from the execution store (the index entry is untouched). */
public void removeEventExecution(EventExecution eventExecution) {
    executionDAO.removeEventExecution(eventExecution);
}
/** True if starting the task would exceed its configured concurrent-execution limit. */
public boolean exceedsInProgressLimit(TaskModel task) {
    return concurrentExecutionLimitDAO.exceedsLimit(task);
}
/** True if executing the task now would exceed its configured rate limit. */
public boolean exceedsRateLimitPerFrequency(TaskModel task, TaskDef taskDef) {
    return rateLimitingDao.exceedsRateLimitPerFrequency(task, taskDef);
}
/**
 * Indexes task execution logs (when log indexing is enabled), truncating the batch to the
 * configured size limit with a warning if it is exceeded.
 */
public void addTaskExecLog(List<TaskExecLog> logs) {
    if (!properties.isTaskExecLogIndexingEnabled() || logs.isEmpty()) {
        return;
    }
    Monitors.recordTaskExecLogSize(logs.size());
    int limit = properties.getTaskExecLogSizeLimit();
    if (logs.size() > limit) {
        LOGGER.warn(
                "Task Execution log size: {} for taskId: {} exceeds the limit: {}",
                logs.size(),
                logs.get(0).getTaskId(),
                limit);
        // Keep only the first `limit` entries.
        logs = logs.stream().limit(limit).collect(Collectors.toList());
    }
    if (properties.isAsyncIndexingEnabled()) {
        indexDAO.asyncAddTaskExecutionLogs(logs);
    } else {
        indexDAO.addTaskExecutionLogs(logs);
    }
}
/** Records a message against a queue in the index (sync or async per configuration). */
public void addMessage(String queue, Message message) {
    if (properties.isAsyncIndexingEnabled()) {
        indexDAO.asyncAddMessage(queue, message);
    } else {
        indexDAO.addMessage(queue, message);
    }
}
/** Searches workflow ids in the index; delegates directly to {@link IndexDAO}. */
public SearchResult<String> searchWorkflows(
        String query, String freeText, int start, int count, List<String> sort) {
    return indexDAO.searchWorkflows(query, freeText, start, count, sort);
}
/** Searches workflow summaries in the index; delegates directly to {@link IndexDAO}. */
public SearchResult<WorkflowSummary> searchWorkflowSummary(
        String query, String freeText, int start, int count, List<String> sort) {
    return indexDAO.searchWorkflowSummary(query, freeText, start, count, sort);
}
/** Searches task ids in the index; delegates directly to {@link IndexDAO}. */
public SearchResult<String> searchTasks(
        String query, String freeText, int start, int count, List<String> sort) {
    return indexDAO.searchTasks(query, freeText, start, count, sort);
}
/** Searches task summaries in the index; delegates directly to {@link IndexDAO}. */
public SearchResult<TaskSummary> searchTaskSummary(
        String query, String freeText, int start, int count, List<String> sort) {
    return indexDAO.searchTaskSummary(query, freeText, start, count, sort);
}
/** Returns the task's execution logs, or an empty list when log indexing is disabled. */
public List<TaskExecLog> getTaskExecutionLogs(String taskId) {
    return properties.isTaskExecLogIndexingEnabled()
            ? indexDAO.getTaskExecutionLogs(taskId)
            : Collections.emptyList();
}
/**
* Populates the workflow input data and the tasks input/output data if stored in external
* payload storage.
*
* @param workflowModel the workflowModel for which the payload data needs to be populated from
* external storage (if applicable)
*/
public void populateWorkflowAndTaskPayloadData(WorkflowModel workflowModel) {
    // Inline the workflow input if it was offloaded to external payload storage.
    if (StringUtils.isNotBlank(workflowModel.getExternalInputPayloadStoragePath())) {
        Map<String, Object> workflowInputParams =
                externalPayloadStorageUtils.downloadPayload(
                        workflowModel.getExternalInputPayloadStoragePath());
        Monitors.recordExternalPayloadStorageUsage(
                workflowModel.getWorkflowName(),
                ExternalPayloadStorage.Operation.READ.toString(),
                ExternalPayloadStorage.PayloadType.WORKFLOW_INPUT.toString());
        workflowModel.internalizeInput(workflowInputParams);
    }
    // Inline the workflow output if it was offloaded to external payload storage.
    if (StringUtils.isNotBlank(workflowModel.getExternalOutputPayloadStoragePath())) {
        Map<String, Object> workflowOutputParams =
                externalPayloadStorageUtils.downloadPayload(
                        workflowModel.getExternalOutputPayloadStoragePath());
        Monitors.recordExternalPayloadStorageUsage(
                workflowModel.getWorkflowName(),
                ExternalPayloadStorage.Operation.READ.toString(),
                ExternalPayloadStorage.PayloadType.WORKFLOW_OUTPUT.toString());
        workflowModel.internalizeOutput(workflowOutputParams);
    }
    // Do the same for every task in the workflow.
    workflowModel.getTasks().forEach(this::populateTaskData);
}
public void populateTaskData(TaskModel taskModel) {
if (StringUtils.isNotBlank(taskModel.getExternalOutputPayloadStoragePath())) {
Map<String, Object> outputData =
externalPayloadStorageUtils.downloadPayload(
taskModel.getExternalOutputPayloadStoragePath());
taskModel.internalizeOutput(outputData);
Monitors.recordExternalPayloadStorageUsage(
taskModel.getTaskDefName(),
ExternalPayloadStorage.Operation.READ.toString(),
ExternalPayloadStorage.PayloadType.TASK_OUTPUT.toString());
}
if (StringUtils.isNotBlank(taskModel.getExternalInputPayloadStoragePath())) {
Map<String, Object> inputData =
externalPayloadStorageUtils.downloadPayload(
taskModel.getExternalInputPayloadStoragePath());
taskModel.internalizeInput(inputData);
Monitors.recordExternalPayloadStorageUsage(
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | true |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/main/java/com/netflix/conductor/core/index/NoopIndexDAOConfiguration.java | core/src/main/java/com/netflix/conductor/core/index/NoopIndexDAOConfiguration.java | /*
* Copyright 2022 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.index;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import com.netflix.conductor.dao.IndexDAO;
/**
 * Registers the no-op {@link IndexDAO} when indexing is explicitly disabled
 * ({@code conductor.indexing.enabled=false}), turning all indexing calls into harmless no-ops.
 */
@Configuration(proxyBeanMethods = false)
@ConditionalOnProperty(name = "conductor.indexing.enabled", havingValue = "false")
public class NoopIndexDAOConfiguration {
@Bean
public IndexDAO noopIndexDAO() {
return new NoopIndexDAO();
}
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/main/java/com/netflix/conductor/core/index/NoopIndexDAO.java | core/src/main/java/com/netflix/conductor/core/index/NoopIndexDAO.java | /*
* Copyright 2022 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.index;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import com.netflix.conductor.common.metadata.events.EventExecution;
import com.netflix.conductor.common.metadata.tasks.TaskExecLog;
import com.netflix.conductor.common.run.SearchResult;
import com.netflix.conductor.common.run.TaskSummary;
import com.netflix.conductor.common.run.WorkflowSummary;
import com.netflix.conductor.core.events.queue.Message;
import com.netflix.conductor.dao.IndexDAO;
/**
* Dummy implementation of {@link IndexDAO} which does nothing. Nothing is ever indexed, and no
* results are ever returned.
*/
public class NoopIndexDAO implements IndexDAO {
// All methods below are intentional no-ops: writes discard their input, async variants return
// already-completed futures, and reads return empty results (or null for single lookups).
@Override
public void setup() {}
@Override
public void indexWorkflow(WorkflowSummary workflowSummary) {}
@Override
public CompletableFuture<Void> asyncIndexWorkflow(WorkflowSummary workflowSummary) {
return CompletableFuture.completedFuture(null);
}
@Override
public void indexTask(TaskSummary taskSummary) {}
@Override
public CompletableFuture<Void> asyncIndexTask(TaskSummary taskSummary) {
return CompletableFuture.completedFuture(null);
}
// Searches always report zero hits.
@Override
public SearchResult<String> searchWorkflows(
String query, String freeText, int start, int count, List<String> sort) {
return new SearchResult<>(0, Collections.emptyList());
}
@Override
public SearchResult<WorkflowSummary> searchWorkflowSummary(
String query, String freeText, int start, int count, List<String> sort) {
return new SearchResult<>(0, Collections.emptyList());
}
@Override
public SearchResult<String> searchTasks(
String query, String freeText, int start, int count, List<String> sort) {
return new SearchResult<>(0, Collections.emptyList());
}
@Override
public SearchResult<TaskSummary> searchTaskSummary(
String query, String freeText, int start, int count, List<String> sort) {
return new SearchResult<>(0, Collections.emptyList());
}
@Override
public void removeWorkflow(String workflowId) {}
@Override
public CompletableFuture<Void> asyncRemoveWorkflow(String workflowId) {
return CompletableFuture.completedFuture(null);
}
@Override
public void updateWorkflow(String workflowInstanceId, String[] keys, Object[] values) {}
@Override
public CompletableFuture<Void> asyncUpdateWorkflow(
String workflowInstanceId, String[] keys, Object[] values) {
return CompletableFuture.completedFuture(null);
}
@Override
public void removeTask(String workflowId, String taskId) {}
@Override
public CompletableFuture<Void> asyncRemoveTask(String workflowId, String taskId) {
return CompletableFuture.completedFuture(null);
}
@Override
public void updateTask(String workflowId, String taskId, String[] keys, Object[] values) {}
@Override
public CompletableFuture<Void> asyncUpdateTask(
String workflowId, String taskId, String[] keys, Object[] values) {
return CompletableFuture.completedFuture(null);
}
// Single-document lookups find nothing.
@Override
public String get(String workflowInstanceId, String key) {
return null;
}
@Override
public void addTaskExecutionLogs(List<TaskExecLog> logs) {}
@Override
public CompletableFuture<Void> asyncAddTaskExecutionLogs(List<TaskExecLog> logs) {
return CompletableFuture.completedFuture(null);
}
@Override
public List<TaskExecLog> getTaskExecutionLogs(String taskId) {
return Collections.emptyList();
}
@Override
public void addEventExecution(EventExecution eventExecution) {}
@Override
public List<EventExecution> getEventExecutions(String event) {
return Collections.emptyList();
}
@Override
public CompletableFuture<Void> asyncAddEventExecution(EventExecution eventExecution) {
return null;
}
@Override
public void addMessage(String queue, Message msg) {}
@Override
public CompletableFuture<Void> asyncAddMessage(String queue, Message message) {
return CompletableFuture.completedFuture(null);
}
@Override
public List<Message> getMessages(String queue) {
return Collections.emptyList();
}
@Override
public List<String> searchArchivableWorkflows(String indexName, long archiveTtlDays) {
return Collections.emptyList();
}
@Override
public long getWorkflowCount(String query, String freeText) {
return 0;
}
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/main/java/com/netflix/conductor/core/metadata/MetadataMapperService.java | core/src/main/java/com/netflix/conductor/core/metadata/MetadataMapperService.java | /*
* Copyright 2022 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.metadata;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
import com.netflix.conductor.annotations.VisibleForTesting;
import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.tasks.TaskType;
import com.netflix.conductor.common.metadata.workflow.SubWorkflowParams;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.core.WorkflowContext;
import com.netflix.conductor.core.exception.NotFoundException;
import com.netflix.conductor.core.exception.TerminateWorkflowException;
import com.netflix.conductor.core.utils.Utils;
import com.netflix.conductor.dao.MetadataDAO;
import com.netflix.conductor.metrics.Monitors;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
/**
 * Populates metadata definitions within workflow objects. Benefits of loading and populating
 * metadata definitions upfront could be:
 *
 * <ul>
 *   <li>Immutable definitions within a workflow execution with the added benefit of guaranteeing
 *       consistency at runtime.
 *   <li>Stress is reduced on the storage layer
 * </ul>
 */
@Component
public class MetadataMapperService {

    public static final Logger LOGGER = LoggerFactory.getLogger(MetadataMapperService.class);

    private final MetadataDAO metadataDAO;

    public MetadataMapperService(MetadataDAO metadataDAO) {
        this.metadataDAO = metadataDAO;
    }

    /**
     * Resolves a workflow definition by name, falling back to the latest version when no explicit
     * version is supplied.
     *
     * @param name the workflow name; must not be blank
     * @param version the desired version, or {@code null} to use the latest
     * @return the resolved workflow definition
     * @throws NotFoundException when no matching definition exists
     */
    public WorkflowDef lookupForWorkflowDefinition(String name, Integer version) {
        Optional<WorkflowDef> found =
                (version == null)
                        ? lookupLatestWorkflowDefinition(name)
                        : lookupWorkflowDefinition(name, version);
        // Check if the workflow definition is valid
        if (found.isPresent()) {
            return found.get();
        }
        LOGGER.error("There is no workflow defined with name {} and version {}", name, version);
        throw new NotFoundException("No such workflow defined. name=%s, version=%s", name, version);
    }

    @VisibleForTesting
    Optional<WorkflowDef> lookupWorkflowDefinition(String workflowName, int workflowVersion) {
        Utils.checkArgument(
                StringUtils.isNotBlank(workflowName),
                "Workflow name must be specified when searching for a definition");
        return metadataDAO.getWorkflowDef(workflowName, workflowVersion);
    }

    @VisibleForTesting
    Optional<WorkflowDef> lookupLatestWorkflowDefinition(String workflowName) {
        Utils.checkArgument(
                StringUtils.isNotBlank(workflowName),
                "Workflow name must be specified when searching for a definition");
        return metadataDAO.getLatestWorkflowDef(workflowName);
    }

    /**
     * Ensures the workflow carries its workflow definition (loading and attaching it when absent)
     * and populates task definitions for every task it contains.
     *
     * @param workflow the workflow to enrich; must not be null
     * @return the same workflow instance, enriched in place
     */
    public WorkflowModel populateWorkflowWithDefinitions(WorkflowModel workflow) {
        Utils.checkNotNull(workflow, "workflow cannot be null");
        WorkflowDef definition = workflow.getWorkflowDefinition();
        if (definition == null) {
            definition =
                    lookupForWorkflowDefinition(
                            workflow.getWorkflowName(), workflow.getWorkflowVersion());
            workflow.setWorkflowDefinition(definition);
        }
        for (WorkflowTask workflowTask : definition.collectTasks()) {
            populateWorkflowTaskWithDefinition(workflowTask);
        }
        checkNotEmptyDefinitions(definition);
        return workflow;
    }

    /**
     * Populates task definitions for every task of the given workflow definition.
     *
     * @param workflowDefinition the definition to enrich; must not be null
     * @return the same definition instance, enriched in place
     */
    public WorkflowDef populateTaskDefinitions(WorkflowDef workflowDefinition) {
        Utils.checkNotNull(workflowDefinition, "workflowDefinition cannot be null");
        for (WorkflowTask workflowTask : workflowDefinition.collectTasks()) {
            populateWorkflowTaskWithDefinition(workflowTask);
        }
        checkNotEmptyDefinitions(workflowDefinition);
        return workflowDefinition;
    }

    /** Attaches the stored task definition (or an ad-hoc one for SIMPLE tasks) to the task. */
    private void populateWorkflowTaskWithDefinition(WorkflowTask workflowTask) {
        Utils.checkNotNull(workflowTask, "WorkflowTask cannot be null");
        if (shouldPopulateTaskDefinition(workflowTask)) {
            workflowTask.setTaskDefinition(metadataDAO.getTaskDef(workflowTask.getName()));
            boolean isSimpleTask = TaskType.SIMPLE.name().equals(workflowTask.getType());
            if (isSimpleTask && workflowTask.getTaskDefinition() == null) {
                // ad-hoc task def
                workflowTask.setTaskDefinition(new TaskDef(workflowTask.getName()));
            }
        }
        if (TaskType.SUB_WORKFLOW.name().equals(workflowTask.getType())) {
            populateVersionForSubWorkflow(workflowTask);
        }
    }

    /** Pins a SUB_WORKFLOW task to the latest definition version when none is specified. */
    private void populateVersionForSubWorkflow(WorkflowTask workflowTask) {
        Utils.checkNotNull(workflowTask, "WorkflowTask cannot be null");
        SubWorkflowParams subWorkflowParams = workflowTask.getSubWorkflowParam();
        if (subWorkflowParams.getVersion() != null) {
            return;
        }
        String subWorkflowName = subWorkflowParams.getName();
        Optional<Integer> latestVersion =
                metadataDAO.getLatestWorkflowDef(subWorkflowName).map(WorkflowDef::getVersion);
        if (!latestVersion.isPresent()) {
            String reason =
                    String.format(
                            "The Task %s defined as a sub-workflow has no workflow definition available ",
                            subWorkflowName);
            LOGGER.error(reason);
            throw new TerminateWorkflowException(reason);
        }
        subWorkflowParams.setVersion(latestVersion.get());
    }

    /** Fails fast when any SIMPLE task still has no resolvable task definition. */
    private void checkNotEmptyDefinitions(WorkflowDef workflowDefinition) {
        Utils.checkNotNull(workflowDefinition, "WorkflowDefinition cannot be null");
        // Obtain the names of the tasks with missing definitions
        Set<String> missingTaskDefinitionNames =
                workflowDefinition.collectTasks().stream()
                        .filter(
                                workflowTask ->
                                        TaskType.SIMPLE.name().equals(workflowTask.getType())
                                                && shouldPopulateTaskDefinition(workflowTask))
                        .map(WorkflowTask::getName)
                        .collect(Collectors.toSet());
        if (!missingTaskDefinitionNames.isEmpty()) {
            LOGGER.error(
                    "Cannot find the task definitions for the following tasks used in workflow: {}",
                    missingTaskDefinitionNames);
            Monitors.recordWorkflowStartError(
                    workflowDefinition.getName(), WorkflowContext.get().getClientApp());
            throw new IllegalArgumentException(
                    "Cannot find the task definitions for the following tasks used in workflow: "
                            + missingTaskDefinitionNames);
        }
    }

    /** Enriches the task's embedded {@code WorkflowTask} with its definition. */
    public TaskModel populateTaskWithDefinition(TaskModel task) {
        Utils.checkNotNull(task, "Task cannot be null");
        populateWorkflowTaskWithDefinition(task.getWorkflowTask());
        return task;
    }

    @VisibleForTesting
    boolean shouldPopulateTaskDefinition(WorkflowTask workflowTask) {
        Utils.checkNotNull(workflowTask, "WorkflowTask cannot be null");
        Utils.checkNotNull(workflowTask.getType(), "WorkflowTask type cannot be null");
        boolean missingDefinition = workflowTask.getTaskDefinition() == null;
        return missingDefinition && StringUtils.isNotBlank(workflowTask.getName());
    }
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/main/java/com/netflix/conductor/core/exception/ConflictException.java | core/src/main/java/com/netflix/conductor/core/exception/ConflictException.java | /*
* Copyright 2022 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.exception;
/**
 * Thrown when a request conflicts with the current state of a resource — for example, restarting a
 * workflow that is not yet in a terminal state, or retrying a workflow that has not started.
 */
public class ConflictException extends RuntimeException {

    /** @param message the error message */
    public ConflictException(String message) {
        super(message);
    }

    /**
     * Builds the message via {@link String#format(String, Object...)}.
     *
     * @param message a {@code String.format}-style format string
     * @param args arguments referenced by the format specifiers in {@code message}
     */
    public ConflictException(String message, Object... args) {
        super(String.format(message, args));
    }

    /**
     * @param message the error message
     * @param cause the underlying cause, preserved for diagnostics
     */
    public ConflictException(String message, Throwable cause) {
        super(message, cause);
    }
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/main/java/com/netflix/conductor/core/exception/NonTransientException.java | core/src/main/java/com/netflix/conductor/core/exception/NonTransientException.java | /*
* Copyright 2022 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.exception;
/**
 * Indicates a permanent failure that is not expected to succeed if retried; callers should treat
 * the operation as failed rather than re-attempting it. Counterpart of {@link TransientException}.
 */
public class NonTransientException extends RuntimeException {

    /** @param message the error message */
    public NonTransientException(String message) {
        super(message);
    }

    /**
     * @param message the error message
     * @param cause the underlying cause, preserved for diagnostics
     */
    public NonTransientException(String message, Throwable cause) {
        super(message, cause);
    }
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/main/java/com/netflix/conductor/core/exception/NotFoundException.java | core/src/main/java/com/netflix/conductor/core/exception/NotFoundException.java | /*
* Copyright 2022 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.exception;
/**
 * Thrown when a requested entity (workflow, workflow definition, task, ...) cannot be found in the
 * metadata or execution stores.
 */
public class NotFoundException extends RuntimeException {

    /** @param message the error message */
    public NotFoundException(String message) {
        super(message);
    }

    /**
     * Builds the message via {@link String#format(String, Object...)}.
     *
     * @param message a {@code String.format}-style format string
     * @param args arguments referenced by the format specifiers in {@code message}
     */
    public NotFoundException(String message, Object... args) {
        super(String.format(message, args));
    }

    /**
     * @param message the error message
     * @param cause the underlying cause, preserved for diagnostics
     */
    public NotFoundException(String message, Throwable cause) {
        super(message, cause);
    }
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/main/java/com/netflix/conductor/core/exception/TerminateWorkflowException.java | core/src/main/java/com/netflix/conductor/core/exception/TerminateWorkflowException.java | /*
* Copyright 2022 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.exception;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import static com.netflix.conductor.model.WorkflowModel.Status.FAILED;
/**
 * Thrown from within workflow execution to force the owning workflow into a terminal state. The
 * target status defaults to {@code FAILED}; the exception may optionally carry the task that
 * triggered the termination.
 */
public class TerminateWorkflowException extends RuntimeException {

    // Terminal status the workflow should transition to; defaults to FAILED.
    private final WorkflowModel.Status workflowStatus;
    // Task that caused the termination; may be null when termination is not task-specific.
    private final TaskModel task;

    /** Terminates the workflow as FAILED with the given reason. */
    public TerminateWorkflowException(String reason) {
        this(reason, FAILED);
    }

    /** Terminates the workflow with the given reason and explicit terminal status. */
    public TerminateWorkflowException(String reason, WorkflowModel.Status workflowStatus) {
        this(reason, workflowStatus, null);
    }

    /**
     * @param reason human-readable reason for the termination
     * @param workflowStatus terminal status to apply to the workflow
     * @param task the task that triggered the termination, or {@code null}
     */
    public TerminateWorkflowException(
            String reason, WorkflowModel.Status workflowStatus, TaskModel task) {
        super(reason);
        this.workflowStatus = workflowStatus;
        this.task = task;
    }

    /** @return the terminal status the workflow should be moved to */
    public WorkflowModel.Status getWorkflowStatus() {
        return workflowStatus;
    }

    /** @return the task that triggered termination, or {@code null} */
    public TaskModel getTask() {
        return task;
    }
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/main/java/com/netflix/conductor/core/exception/TransientException.java | core/src/main/java/com/netflix/conductor/core/exception/TransientException.java | /*
* Copyright 2022 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.exception;
/**
 * Indicates a temporary failure (e.g. a momentary backend outage) where retrying the operation may
 * succeed. Counterpart of {@link NonTransientException}.
 */
public class TransientException extends RuntimeException {

    /** @param message the error message */
    public TransientException(String message) {
        super(message);
    }

    /**
     * @param message the error message
     * @param cause the underlying cause, preserved for diagnostics
     */
    public TransientException(String message, Throwable cause) {
        super(message, cause);
    }
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutorOps.java | core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutorOps.java | /*
* Copyright 2022 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution;
import java.util.*;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.StopWatch;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
import com.netflix.conductor.annotations.Trace;
import com.netflix.conductor.annotations.VisibleForTesting;
import com.netflix.conductor.common.metadata.tasks.*;
import com.netflix.conductor.common.metadata.workflow.RerunWorkflowRequest;
import com.netflix.conductor.common.metadata.workflow.SkipTaskRequest;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.common.run.Workflow;
import com.netflix.conductor.common.utils.TaskUtils;
import com.netflix.conductor.core.WorkflowContext;
import com.netflix.conductor.core.config.ConductorProperties;
import com.netflix.conductor.core.dal.ExecutionDAOFacade;
import com.netflix.conductor.core.exception.*;
import com.netflix.conductor.core.execution.tasks.SystemTaskRegistry;
import com.netflix.conductor.core.execution.tasks.Terminate;
import com.netflix.conductor.core.execution.tasks.WorkflowSystemTask;
import com.netflix.conductor.core.listener.TaskStatusListener;
import com.netflix.conductor.core.listener.WorkflowStatusListener;
import com.netflix.conductor.core.listener.WorkflowStatusListener.WorkflowEventType;
import com.netflix.conductor.core.metadata.MetadataMapperService;
import com.netflix.conductor.core.utils.IDGenerator;
import com.netflix.conductor.core.utils.ParametersUtils;
import com.netflix.conductor.core.utils.QueueUtils;
import com.netflix.conductor.core.utils.Utils;
import com.netflix.conductor.dao.MetadataDAO;
import com.netflix.conductor.dao.QueueDAO;
import com.netflix.conductor.metrics.Monitors;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import com.netflix.conductor.service.ExecutionLockService;
import static com.netflix.conductor.core.utils.Utils.DECIDER_QUEUE;
import static com.netflix.conductor.model.TaskModel.Status.*;
/** Workflow services provider interface */
@Trace
@Component
public class WorkflowExecutorOps implements WorkflowExecutor {
    // NOTE(review): logger is keyed to the WorkflowExecutor interface rather than this class —
    // presumably intentional so the log category stays stable across implementations; confirm.
    private static final Logger LOGGER = LoggerFactory.getLogger(WorkflowExecutor.class);
    // Priority used when re-pushing a workflow onto the decider queue for expedited evaluation.
    private static final int EXPEDITED_PRIORITY = 10;
    private static final String CLASS_NAME = WorkflowExecutor.class.getSimpleName();
    // Task reached a terminal state without succeeding (FAILED, TIMED_OUT, CANCELED, ...).
    private static final Predicate<TaskModel> UNSUCCESSFUL_TERMINAL_TASK =
            task -> !task.getStatus().isSuccessful() && task.getStatus().isTerminal();
    // Unsuccessful terminal task that is specifically a JOIN task.
    private static final Predicate<TaskModel> UNSUCCESSFUL_JOIN_TASK =
            UNSUCCESSFUL_TERMINAL_TASK.and(t -> TaskType.TASK_TYPE_JOIN.equals(t.getTaskType()));
    // Task that has not yet reached a terminal state.
    private static final Predicate<TaskModel> NON_TERMINAL_TASK =
            task -> !task.getStatus().isTerminal();
    // Collaborators supplied via constructor injection.
    private final MetadataDAO metadataDAO;
    private final QueueDAO queueDAO;
    private final DeciderService deciderService;
    private final ConductorProperties properties;
    private final MetadataMapperService metadataMapperService;
    private final ExecutionDAOFacade executionDAOFacade;
    private final ParametersUtils parametersUtils;
    private final IDGenerator idGenerator;
    private final WorkflowStatusListener workflowStatusListener;
    private final TaskStatusListener taskStatusListener;
    private final SystemTaskRegistry systemTaskRegistry;
    // Window (ms) within which a worker poll counts as "active"; initialized in the constructor
    // from ConductorProperties.getActiveWorkerLastPollTimeout().
    private long activeWorkerLastPollMs;
    private final ExecutionLockService executionLockService;
    // True when the poll is recent enough to fall inside the active-worker window.
    private final Predicate<PollData> validateLastPolledTime =
            pollData ->
                    pollData.getLastPollTime()
                            > System.currentTimeMillis() - activeWorkerLastPollMs;
    /**
     * Creates the executor with all collaborators injected. No work is performed beyond field
     * assignment and caching the active-worker poll timeout in milliseconds.
     */
    public WorkflowExecutorOps(
            DeciderService deciderService,
            MetadataDAO metadataDAO,
            QueueDAO queueDAO,
            MetadataMapperService metadataMapperService,
            WorkflowStatusListener workflowStatusListener,
            TaskStatusListener taskStatusListener,
            ExecutionDAOFacade executionDAOFacade,
            ConductorProperties properties,
            ExecutionLockService executionLockService,
            SystemTaskRegistry systemTaskRegistry,
            ParametersUtils parametersUtils,
            IDGenerator idGenerator) {
        this.deciderService = deciderService;
        this.metadataDAO = metadataDAO;
        this.queueDAO = queueDAO;
        this.properties = properties;
        this.metadataMapperService = metadataMapperService;
        this.executionDAOFacade = executionDAOFacade;
        // Cache the timeout as millis so the hot-path predicate avoids Duration conversions.
        this.activeWorkerLastPollMs = properties.getActiveWorkerLastPollTimeout().toMillis();
        this.workflowStatusListener = workflowStatusListener;
        this.taskStatusListener = taskStatusListener;
        this.executionLockService = executionLockService;
        this.parametersUtils = parametersUtils;
        this.idGenerator = idGenerator;
        this.systemTaskRegistry = systemTaskRegistry;
    }
/**
* @param workflowId the id of the workflow for which task callbacks are to be reset
* @throws ConflictException if the workflow is in terminal state
*/
@Override
public void resetCallbacksForWorkflow(String workflowId) {
WorkflowModel workflow = executionDAOFacade.getWorkflowModel(workflowId, true);
if (workflow.getStatus().isTerminal()) {
throw new ConflictException(
"Workflow is in terminal state. Status = %s", workflow.getStatus());
}
// Get SIMPLE tasks in SCHEDULED state that have callbackAfterSeconds > 0 and
// set the
// callbackAfterSeconds to 0
workflow.getTasks().stream()
.filter(
task ->
!systemTaskRegistry.isSystemTask(task.getTaskType())
&& SCHEDULED == task.getStatus()
&& task.getCallbackAfterSeconds() > 0)
.forEach(
task -> {
if (queueDAO.resetOffsetTime(
QueueUtils.getQueueName(task), task.getTaskId())) {
task.setCallbackAfterSeconds(0);
executionDAOFacade.updateTask(task);
}
});
}
@Override
public String rerun(RerunWorkflowRequest request) {
Utils.checkNotNull(request.getReRunFromWorkflowId(), "reRunFromWorkflowId is missing");
if (!rerunWF(
request.getReRunFromWorkflowId(),
request.getReRunFromTaskId(),
request.getTaskInput(),
request.getWorkflowInput(),
request.getCorrelationId())) {
throw new IllegalArgumentException(
"Task " + request.getReRunFromTaskId() + " not found");
}
return request.getReRunFromWorkflowId();
}
    /**
     * Wipes all execution state of a terminal workflow and starts it again from the beginning.
     *
     * @param workflowId the id of the workflow to be restarted
     * @param useLatestDefinitions if true, use the latest workflow and task definitions upon
     *     restart
     * @throws ConflictException Workflow is not in a terminal state.
     * @throws NotFoundException Workflow definition is not found or Workflow is deemed
     *     non-restartable as per workflow definition.
     */
    @Override
    public void restart(String workflowId, boolean useLatestDefinitions) {
        final WorkflowModel workflow = executionDAOFacade.getWorkflowModel(workflowId, true);
        // Only finished (terminal) workflows may be restarted.
        if (!workflow.getStatus().isTerminal()) {
            String errorMsg =
                    String.format(
                            "Workflow: %s is not in terminal state, unable to restart.", workflow);
            LOGGER.error(errorMsg);
            throw new ConflictException(errorMsg);
        }
        // Resolve the definition to restart with: either the latest from the metadata store,
        // or the one already embedded in / recorded for this execution.
        WorkflowDef workflowDef;
        if (useLatestDefinitions) {
            workflowDef =
                    metadataDAO
                            .getLatestWorkflowDef(workflow.getWorkflowName())
                            .orElseThrow(
                                    () ->
                                            new NotFoundException(
                                                    "Unable to find latest definition for %s",
                                                    workflowId));
            workflow.setWorkflowDefinition(workflowDef);
            workflowDef = metadataMapperService.populateTaskDefinitions(workflowDef);
        } else {
            workflowDef =
                    Optional.ofNullable(workflow.getWorkflowDefinition())
                            .orElseGet(
                                    () ->
                                            metadataDAO
                                                    .getWorkflowDef(
                                                            workflow.getWorkflowName(),
                                                            workflow.getWorkflowVersion())
                                                    .orElseThrow(
                                                            () ->
                                                                    new NotFoundException(
                                                                            "Unable to find definition for %s",
                                                                            workflowId)));
        }
        if (!workflowDef.isRestartable()
                && workflow.getStatus()
                        .equals(
                                WorkflowModel.Status
                                        .COMPLETED)) { // Can only restart non-completed workflows
            // when the configuration is set to false
            throw new NotFoundException("Workflow: %s is non-restartable", workflow);
        }
        // Reset the workflow in the primary datastore and remove from indexer; then
        // re-create it
        executionDAOFacade.resetWorkflow(workflowId);
        workflow.getTasks().clear();
        workflow.setReasonForIncompletion(null);
        workflow.setFailedTaskId(null);
        workflow.setCreateTime(System.currentTimeMillis());
        workflow.setEndTime(0);
        workflow.setLastRetriedTime(0);
        // Change the status to running
        workflow.setStatus(WorkflowModel.Status.RUNNING);
        workflow.setOutput(null);
        workflow.setExternalOutputPayloadStoragePath(null);
        try {
            executionDAOFacade.createWorkflow(workflow);
            // Notify on workflow started.
            notifyWorkflowStatusListener(workflow, WorkflowEventType.RESTARTED);
        } catch (Exception e) {
            // Creation failed after the reset: record the error, terminate, and rethrow so the
            // caller sees the failure.
            Monitors.recordWorkflowStartError(
                    workflowDef.getName(), WorkflowContext.get().getClientApp());
            LOGGER.error("Unable to restart workflow: {}", workflowDef.getName(), e);
            terminateWorkflow(workflowId, "Error when restarting the workflow");
            throw e;
        }
        metadataMapperService.populateWorkflowWithDefinitions(workflow);
        decide(workflowId);
        // Propagate the restart up through any parent workflows.
        updateAndPushParents(workflow, "restarted");
    }
/**
* Gets the last instance of each failed task and reschedule each Gets all cancelled tasks and
* schedule all of them except JOIN (join should change status to INPROGRESS) Switch workflow
* back to RUNNING status and call decider.
*
* @param workflowId the id of the workflow to be retried
*/
@Override
public void retry(String workflowId, boolean resumeSubworkflowTasks) {
WorkflowModel workflow = executionDAOFacade.getWorkflowModel(workflowId, true);
if (!workflow.getStatus().isTerminal()) {
throw new NotFoundException(
"Workflow is still running. status=%s", workflow.getStatus());
}
if (workflow.getTasks().isEmpty()) {
throw new ConflictException("Workflow has not started yet");
}
if (resumeSubworkflowTasks) {
Optional<TaskModel> taskToRetry =
workflow.getTasks().stream().filter(UNSUCCESSFUL_TERMINAL_TASK).findFirst();
if (taskToRetry.isPresent()) {
workflow = findLastFailedSubWorkflowIfAny(taskToRetry.get(), workflow);
retry(workflow);
updateAndPushParents(workflow, "retried");
}
} else {
retry(workflow);
updateAndPushParents(workflow, "retried");
}
}
private void updateAndPushParents(WorkflowModel workflow, String operation) {
String workflowIdentifier = "";
while (workflow.hasParent()) {
// update parent's sub workflow task
TaskModel subWorkflowTask =
executionDAOFacade.getTaskModel(workflow.getParentWorkflowTaskId());
if (subWorkflowTask.getWorkflowTask().isOptional()) {
// break out
LOGGER.info(
"Sub workflow task {} is optional, skip updating parents", subWorkflowTask);
break;
}
subWorkflowTask.setSubworkflowChanged(true);
subWorkflowTask.setStatus(IN_PROGRESS);
executionDAOFacade.updateTask(subWorkflowTask);
// add an execution log
String currentWorkflowIdentifier = workflow.toShortString();
workflowIdentifier =
!workflowIdentifier.equals("")
? String.format(
"%s -> %s", currentWorkflowIdentifier, workflowIdentifier)
: currentWorkflowIdentifier;
TaskExecLog log =
new TaskExecLog(
String.format("Sub workflow %s %s.", workflowIdentifier, operation));
log.setTaskId(subWorkflowTask.getTaskId());
executionDAOFacade.addTaskExecLog(Collections.singletonList(log));
LOGGER.info("Task {} updated. {}", log.getTaskId(), log.getLog());
// push the parent workflow to decider queue for asynchronous 'decide'
String parentWorkflowId = workflow.getParentWorkflowId();
WorkflowModel parentWorkflow =
executionDAOFacade.getWorkflowModel(parentWorkflowId, true);
parentWorkflow.setStatus(WorkflowModel.Status.RUNNING);
parentWorkflow.setLastRetriedTime(System.currentTimeMillis());
executionDAOFacade.updateWorkflow(parentWorkflow);
try {
WorkflowStatusListener.WorkflowEventType event =
WorkflowStatusListener.WorkflowEventType.valueOf(operation.toUpperCase());
notifyWorkflowStatusListener(parentWorkflow, event);
} catch (IllegalArgumentException e) {
LOGGER.warn("Unknown workflow operation: {}", operation);
}
expediteLazyWorkflowEvaluation(parentWorkflowId);
workflow = parentWorkflow;
}
}
private void retry(WorkflowModel workflow) {
// Get all FAILED or CANCELED tasks that are not COMPLETED (or reach other
// terminal states)
// on further executions.
// // Eg: for Seq of tasks task1.CANCELED, task1.COMPLETED, task1 shouldn't be
// retried.
// Throw an exception if there are no FAILED tasks.
// Handle JOIN task CANCELED status as special case.
Map<String, TaskModel> retriableMap = new HashMap<>();
for (TaskModel task : workflow.getTasks()) {
switch (task.getStatus()) {
case FAILED:
if (task.getTaskType().equalsIgnoreCase(TaskType.JOIN.toString())
|| task.getTaskType()
.equalsIgnoreCase(TaskType.EXCLUSIVE_JOIN.toString())) {
@SuppressWarnings("unchecked")
List<String> joinOn = (List<String>) task.getInputData().get("joinOn");
boolean joinOnFailedPermissive = isJoinOnFailedPermissive(joinOn, workflow);
if (joinOnFailedPermissive) {
task.setStatus(IN_PROGRESS);
addTaskToQueue(task);
break;
}
}
case FAILED_WITH_TERMINAL_ERROR:
case TIMED_OUT:
retriableMap.put(task.getReferenceTaskName(), task);
break;
case CANCELED:
if (task.getTaskType().equalsIgnoreCase(TaskType.JOIN.toString())
|| task.getTaskType().equalsIgnoreCase(TaskType.DO_WHILE.toString())) {
task.setStatus(IN_PROGRESS);
addTaskToQueue(task);
// Task doesn't have to be updated yet. Will be updated along with other
// Workflow tasks downstream.
} else {
retriableMap.put(task.getReferenceTaskName(), task);
}
break;
default:
retriableMap.remove(task.getReferenceTaskName());
break;
}
}
// if workflow TIMED_OUT due to timeoutSeconds configured in the workflow
// definition,
// it may not have any unsuccessful tasks that can be retried
if (retriableMap.values().size() == 0
&& workflow.getStatus() != WorkflowModel.Status.TIMED_OUT) {
throw new ConflictException(
"There are no retryable tasks! Use restart if you want to attempt entire workflow execution again.");
}
// Update Workflow with new status.
// This should load Workflow from archive, if archived.
workflow.setStatus(WorkflowModel.Status.RUNNING);
workflow.setLastRetriedTime(System.currentTimeMillis());
String lastReasonForIncompletion = workflow.getReasonForIncompletion();
workflow.setReasonForIncompletion(null);
// Add to decider queue
queueDAO.push(
DECIDER_QUEUE,
workflow.getWorkflowId(),
workflow.getPriority(),
properties.getWorkflowOffsetTimeout().getSeconds());
executionDAOFacade.updateWorkflow(workflow);
notifyWorkflowStatusListener(workflow, WorkflowEventType.RETRIED);
LOGGER.info(
"Workflow {} that failed due to '{}' was retried",
workflow.toShortString(),
lastReasonForIncompletion);
// taskToBeRescheduled would set task `retried` to true, and hence it's
// important to
// updateTasks after obtaining task copy from taskToBeRescheduled.
final WorkflowModel finalWorkflow = workflow;
List<TaskModel> retriableTasks =
retriableMap.values().stream()
.sorted(Comparator.comparingInt(TaskModel::getSeq))
.map(task -> taskToBeRescheduled(finalWorkflow, task))
.collect(Collectors.toList());
dedupAndAddTasks(workflow, retriableTasks);
// Note: updateTasks before updateWorkflow might fail when Workflow is archived
// and doesn't
// exist in primary store.
executionDAOFacade.updateTasks(workflow.getTasks());
scheduleTask(workflow, retriableTasks);
}
/**
 * Walks the sub-workflow chain rooted at {@code task} and returns the deepest workflow
 * containing an unsuccessful terminal task.
 *
 * <p>Only SUB_WORKFLOW tasks that are themselves in an unsuccessful terminal state trigger a
 * descent; otherwise the supplied parent workflow is returned unchanged.
 *
 * @param task the candidate task to descend through
 * @param parentWorkflow the workflow that owns {@code task}
 * @return the deepest failed sub-workflow found, or {@code parentWorkflow} when there is none
 */
private WorkflowModel findLastFailedSubWorkflowIfAny(
        TaskModel task, WorkflowModel parentWorkflow) {
    boolean descendIntoSubWorkflow =
            TaskType.TASK_TYPE_SUB_WORKFLOW.equals(task.getTaskType())
                    && UNSUCCESSFUL_TERMINAL_TASK.test(task);
    if (!descendIntoSubWorkflow) {
        return parentWorkflow;
    }
    WorkflowModel childWorkflow =
            executionDAOFacade.getWorkflowModel(task.getSubWorkflowId(), true);
    // Recurse into the first unsuccessful terminal task of the child, if one exists;
    // otherwise the child itself has nothing deeper to offer and the parent stands.
    return childWorkflow.getTasks().stream()
            .filter(UNSUCCESSFUL_TERMINAL_TASK)
            .findFirst()
            .map(failedChildTask -> findLastFailedSubWorkflowIfAny(failedChildTask, childWorkflow))
            .orElse(parentWorkflow);
}
/**
 * Builds a brand-new SCHEDULED copy of a failed or cancelled task so it can be executed
 * again, and marks the original attempt as retried/executed.
 *
 * @param workflow the workflow the task belongs to; used to re-resolve input parameters
 * @param task the failed or cancelled task being retried (mutated: retried/executed flags set)
 * @return a fresh task instance in "SCHEDULED" status with an incremented retry count
 */
private TaskModel taskToBeRescheduled(WorkflowModel workflow, TaskModel task) {
    TaskModel rescheduled = task.copy();
    // Give the new attempt its own identity, linked back to the attempt it retries.
    rescheduled.setTaskId(idGenerator.generate());
    rescheduled.setRetriedTaskId(task.getTaskId());
    rescheduled.setStatus(SCHEDULED);
    rescheduled.setRetryCount(task.getRetryCount() + 1);
    rescheduled.setRetried(false);
    // Clear all execution-progress bookkeeping carried over by copy().
    rescheduled.setPollCount(0);
    rescheduled.setCallbackAfterSeconds(0);
    rescheduled.setSubWorkflowId(null);
    rescheduled.setScheduledTime(0);
    rescheduled.setStartTime(0);
    rescheduled.setEndTime(0);
    rescheduled.setWorkerId(null);
    rescheduled.setReasonForIncompletion(null);
    rescheduled.setSeq(0);
    // Re-evaluate the task's input parameters against the current workflow state so the
    // retry sees up-to-date values rather than the inputs of the failed attempt.
    Map<String, Object> freshInput =
            parametersUtils.getTaskInput(
                    rescheduled.getWorkflowTask().getInputParameters(),
                    workflow,
                    rescheduled.getWorkflowTask().getTaskDefinition(),
                    rescheduled.getTaskId());
    rescheduled.getInputData().putAll(freshInput);
    // The original task's lifecycle is now complete: a retry has been computed for it.
    task.setRetried(true);
    task.setExecuted(true);
    return rescheduled;
}
/**
 * Finishes a workflow execution, either normally or via a successful TERMINATE task.
 *
 * <p>When a TERMINATE task is supplied, its "terminationStatus" input decides whether the
 * workflow is failed or completed; a default reason is synthesized when none is given.
 * Non-terminal tasks are cancelled afterwards in every path.
 *
 * @param workflow the workflow being ended
 * @param terminateTask the successful TERMINATE task driving the termination, or {@code null}
 *     for a normal completion
 */
private void endExecution(WorkflowModel workflow, TaskModel terminateTask) {
    boolean notifyFinalized;
    if (terminateTask == null) {
        // Normal path: no TERMINATE task involved.
        workflow = completeWorkflow(workflow);
        notifyFinalized = true;
    } else {
        String terminationStatus =
                (String)
                        terminateTask
                                .getInputData()
                                .get(Terminate.getTerminationStatusParameter());
        String reason =
                (String)
                        terminateTask
                                .getInputData()
                                .get(Terminate.getTerminationReasonParameter());
        if (StringUtils.isBlank(reason)) {
            // Fall back to a generated reason identifying the TERMINATE task.
            reason =
                    String.format(
                            "Workflow is %s by TERMINATE task: %s",
                            terminationStatus, terminateTask.getTaskId());
        }
        if (WorkflowModel.Status.FAILED.name().equals(terminationStatus)) {
            workflow.setStatus(WorkflowModel.Status.FAILED);
            workflow =
                    terminate(
                            workflow,
                            new TerminateWorkflowException(
                                    reason, workflow.getStatus(), terminateTask));
            notifyFinalized = false;
        } else {
            workflow.setReasonForIncompletion(reason);
            workflow = completeWorkflow(workflow);
            notifyFinalized = true;
        }
    }
    cancelNonTerminalTasks(workflow, notifyFinalized);
}
/**
 * Marks the workflow as COMPLETED, persists it, and performs completion side effects:
 * recording failed task names, emitting metrics/notifications, updating the parent
 * workflow (if any) and releasing the execution lock.
 *
 * <p>Idempotent for already-COMPLETED workflows: in that case only queue/pending cleanup is
 * performed and the workflow is returned as-is.
 *
 * @param workflow the workflow to be completed
 * @return the (possibly updated) workflow instance
 * @throws ConflictException if workflow is already in a terminal state other than COMPLETED.
 */
@VisibleForTesting
WorkflowModel completeWorkflow(WorkflowModel workflow) {
    LOGGER.debug("Completing workflow execution for {}", workflow.getWorkflowId());
    if (workflow.getStatus().equals(WorkflowModel.Status.COMPLETED)) {
        // Already completed: just make sure sweep-queue and pending-workflow bookkeeping
        // are cleaned up, then return without re-running completion side effects.
        queueDAO.remove(DECIDER_QUEUE, workflow.getWorkflowId()); // remove from the sweep queue
        executionDAOFacade.removeFromPendingWorkflow(
                workflow.getWorkflowName(), workflow.getWorkflowId());
        LOGGER.debug("Workflow: {} has already been completed.", workflow.getWorkflowId());
        return workflow;
    }
    // Any other terminal state (FAILED, TERMINATED, ...) cannot be "completed".
    if (workflow.getStatus().isTerminal()) {
        String msg =
                "Workflow is already in terminal state. Current status: "
                        + workflow.getStatus();
        throw new ConflictException(msg);
    }
    // Compute the workflow output before flipping the status to COMPLETED.
    deciderService.updateWorkflowOutput(workflow, null);
    workflow.setStatus(WorkflowModel.Status.COMPLETED);
    // update the failed reference task names
    List<TaskModel> failedTasks =
            workflow.getTasks().stream()
                    .filter(
                            t ->
                                    FAILED.equals(t.getStatus())
                                            || FAILED_WITH_TERMINAL_ERROR.equals(t.getStatus()))
                    .collect(Collectors.toList());
    workflow.getFailedReferenceTaskNames()
            .addAll(
                    failedTasks.stream()
                            .map(TaskModel::getReferenceTaskName)
                            .collect(Collectors.toSet()));
    workflow.getFailedTaskNames()
            .addAll(
                    failedTasks.stream()
                            .map(TaskModel::getTaskDefName)
                            .collect(Collectors.toSet()));
    executionDAOFacade.updateWorkflow(workflow);
    LOGGER.debug("Completed workflow execution for {}", workflow.getWorkflowId());
    notifyWorkflowStatusListener(workflow, WorkflowEventType.COMPLETED);
    Monitors.recordWorkflowCompletion(
            workflow.getWorkflowName(),
            workflow.getEndTime() - workflow.getCreateTime(),
            workflow.getOwnerApp());
    if (workflow.hasParent()) {
        // Propagate completion to the parent's SUB_WORKFLOW task and nudge the parent's
        // evaluation so it does not wait for the next sweep.
        updateParentWorkflowTask(workflow);
        LOGGER.info(
                "{} updated parent {} task {}",
                workflow.toShortString(),
                workflow.getParentWorkflowId(),
                workflow.getParentWorkflowTaskId());
        expediteLazyWorkflowEvaluation(workflow.getParentWorkflowId());
    }
    executionLockService.releaseLock(workflow.getWorkflowId());
    executionLockService.deleteLock(workflow.getWorkflowId());
    return workflow;
}
/**
 * Terminates the workflow identified by {@code workflowId} with the given reason.
 *
 * @param workflowId id of the workflow to terminate
 * @param reason reason recorded as the workflow's reason-for-incompletion
 * @throws ConflictException if the workflow has already COMPLETED
 */
@Override
public void terminateWorkflow(String workflowId, String reason) {
    WorkflowModel workflow = executionDAOFacade.getWorkflowModel(workflowId, true);
    WorkflowModel.Status currentStatus = workflow.getStatus();
    // A COMPLETED workflow is final and may not be terminated.
    if (currentStatus == WorkflowModel.Status.COMPLETED) {
        throw new ConflictException("Cannot terminate a COMPLETED workflow.");
    }
    workflow.setStatus(WorkflowModel.Status.TERMINATED);
    // Delegate to the full termination flow; no failure workflow for this entry point.
    terminateWorkflow(workflow, reason, null);
}
/**
* @param workflow the workflow to be terminated
* @param reason the reason for termination
* @param failureWorkflow the failure workflow (if any) to be triggered as a result of this
* termination
*/
@Override
public WorkflowModel terminateWorkflow(
WorkflowModel workflow, String reason, String failureWorkflow) {
try {
executionLockService.acquireLock(workflow.getWorkflowId(), 60000);
if (!workflow.getStatus().isTerminal()) {
workflow.setStatus(WorkflowModel.Status.TERMINATED);
}
try {
deciderService.updateWorkflowOutput(workflow, null);
} catch (Exception e) {
// catch any failure in this step and continue the execution of terminating
// workflow
LOGGER.error(
"Failed to update output data for workflow: {}",
workflow.getWorkflowId(),
e);
Monitors.error(CLASS_NAME, "terminateWorkflow");
}
// update the failed reference task names
List<TaskModel> failedTasks =
workflow.getTasks().stream()
.filter(
t ->
FAILED.equals(t.getStatus())
|| FAILED_WITH_TERMINAL_ERROR.equals(
t.getStatus()))
.collect(Collectors.toList());
workflow.getFailedReferenceTaskNames()
.addAll(
failedTasks.stream()
.map(TaskModel::getReferenceTaskName)
.collect(Collectors.toSet()));
workflow.getFailedTaskNames()
.addAll(
failedTasks.stream()
.map(TaskModel::getTaskDefName)
.collect(Collectors.toSet()));
String workflowId = workflow.getWorkflowId();
workflow.setReasonForIncompletion(reason);
executionDAOFacade.updateWorkflow(workflow);
notifyWorkflowStatusListener(workflow, WorkflowEventType.TERMINATED);
Monitors.recordWorkflowTermination(
workflow.getWorkflowName(), workflow.getStatus(), workflow.getOwnerApp());
LOGGER.info("Workflow {} is terminated because of {}", workflowId, reason);
List<TaskModel> tasks = workflow.getTasks();
try {
// Remove from the task queue if they were there
tasks.forEach(
task -> queueDAO.remove(QueueUtils.getQueueName(task), task.getTaskId()));
} catch (Exception e) {
LOGGER.warn(
"Error removing task(s) from queue during workflow termination : {}",
workflowId,
e);
}
if (workflow.hasParent()) {
updateParentWorkflowTask(workflow);
LOGGER.info(
"{} updated parent {} task {}",
workflow.toShortString(),
workflow.getParentWorkflowId(),
workflow.getParentWorkflowTaskId());
expediteLazyWorkflowEvaluation(workflow.getParentWorkflowId());
}
if (!StringUtils.isBlank(failureWorkflow)) {
Map<String, Object> input = new HashMap<>(workflow.getInput());
input.put("workflowId", workflowId);
input.put("reason", reason);
input.put("failureStatus", workflow.getStatus().toString());
if (workflow.getFailedTaskId() != null) {
input.put("failureTaskId", workflow.getFailedTaskId());
}
input.put("failedWorkflow", workflow);
try {
String failureWFId = idGenerator.generate();
StartWorkflowInput startWorkflowInput = new StartWorkflowInput();
startWorkflowInput.setName(failureWorkflow);
startWorkflowInput.setWorkflowInput(input);
startWorkflowInput.setCorrelationId(workflow.getCorrelationId());
startWorkflowInput.setTaskToDomain(workflow.getTaskToDomain());
startWorkflowInput.setWorkflowId(failureWFId);
startWorkflowInput.setTriggeringWorkflowId(workflowId);
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | true |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/main/java/com/netflix/conductor/core/execution/AsyncSystemTaskExecutor.java | core/src/main/java/com/netflix/conductor/core/execution/AsyncSystemTaskExecutor.java | /*
* Copyright 2022 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
import com.netflix.conductor.core.config.ConductorProperties;
import com.netflix.conductor.core.dal.ExecutionDAOFacade;
import com.netflix.conductor.core.execution.tasks.WorkflowSystemTask;
import com.netflix.conductor.core.utils.QueueUtils;
import com.netflix.conductor.dao.MetadataDAO;
import com.netflix.conductor.dao.QueueDAO;
import com.netflix.conductor.metrics.Monitors;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
/**
 * Executes asynchronous {@link WorkflowSystemTask}s that were placed on a task queue,
 * persists their results, and keeps the corresponding queue message in sync with the
 * task's state (remove on terminal, postpone otherwise).
 */
@Component
public class AsyncSystemTaskExecutor {

    private final ExecutionDAOFacade executionDAOFacade;
    private final QueueDAO queueDAO;
    private final MetadataDAO metadataDAO;
    // Seconds to postpone the queue message when execution is rate/concurrency limited.
    private final long queueTaskMessagePostponeSecs;
    // Default callback interval (seconds) for system tasks left in a non-terminal state.
    private final long systemTaskCallbackTime;
    private final WorkflowExecutor workflowExecutor;

    private static final Logger LOGGER = LoggerFactory.getLogger(AsyncSystemTaskExecutor.class);

    public AsyncSystemTaskExecutor(
            ExecutionDAOFacade executionDAOFacade,
            QueueDAO queueDAO,
            MetadataDAO metadataDAO,
            ConductorProperties conductorProperties,
            WorkflowExecutor workflowExecutor) {
        this.executionDAOFacade = executionDAOFacade;
        this.queueDAO = queueDAO;
        this.metadataDAO = metadataDAO;
        this.workflowExecutor = workflowExecutor;
        this.systemTaskCallbackTime =
                conductorProperties.getSystemTaskWorkerCallbackDuration().getSeconds();
        this.queueTaskMessagePostponeSecs =
                conductorProperties.getTaskExecutionPostponeDuration().getSeconds();
    }

    /**
     * Executes and persists the results of an async {@link WorkflowSystemTask}.
     *
     * <p>The task is always persisted in the {@code finally} block, regardless of whether
     * execution threw; the queue message is removed or postponed according to the task's
     * resulting state, and the workflow is re-evaluated when the execution completed.
     *
     * @param systemTask The {@link WorkflowSystemTask} to be executed.
     * @param taskId The id of the {@link TaskModel} object.
     */
    public void execute(WorkflowSystemTask systemTask, String taskId) {
        TaskModel task = loadTaskQuietly(taskId);
        if (task == null) {
            LOGGER.error("TaskId: {} could not be found while executing {}", taskId, systemTask);
            try {
                LOGGER.debug(
                        "Cleaning up dead task from queue message: taskQueue={}, taskId={}",
                        systemTask.getTaskType(),
                        taskId);
                queueDAO.remove(systemTask.getTaskType(), taskId);
            } catch (Exception e) {
                // BUGFIX: the caught exception was previously dropped from this log call,
                // hiding the reason the dead-message cleanup failed.
                LOGGER.error(
                        "Failed to remove dead task from queue message: taskQueue={}, taskId={}",
                        systemTask.getTaskType(),
                        taskId,
                        e);
            }
            return;
        }

        LOGGER.debug("Task: {} fetched from execution DAO for taskId: {}", task, taskId);
        String queueName = QueueUtils.getQueueName(task);
        if (task.getStatus().isTerminal()) {
            // Tune the SystemTaskWorkerCoordinator's queues - if the queue size is very big this
            // can happen!
            LOGGER.info("Task {}/{} was already completed.", task.getTaskType(), task.getTaskId());
            queueDAO.remove(queueName, task.getTaskId());
            return;
        }

        if (task.getStatus().equals(TaskModel.Status.SCHEDULED)) {
            // Enforce concurrency and rate limits before starting the task; limited tasks
            // are pushed back on the queue for a later attempt.
            if (executionDAOFacade.exceedsInProgressLimit(task)) {
                LOGGER.warn(
                        "Concurrent Execution limited for {}:{}", taskId, task.getTaskDefName());
                postponeQuietly(queueName, task);
                return;
            }
            if (task.getRateLimitPerFrequency() > 0
                    && executionDAOFacade.exceedsRateLimitPerFrequency(
                            task, metadataDAO.getTaskDef(task.getTaskDefName()))) {
                LOGGER.warn(
                        "RateLimit Execution limited for {}:{}, limit:{}",
                        taskId,
                        task.getTaskDefName(),
                        task.getRateLimitPerFrequency());
                postponeQuietly(queueName, task);
                return;
            }
        }

        boolean hasTaskExecutionCompleted = false;
        boolean shouldRemoveTaskFromQueue = false;
        String workflowId = task.getWorkflowInstanceId();

        // if we are here the Task object is updated and needs to be persisted regardless of an
        // exception
        try {
            WorkflowModel workflow =
                    executionDAOFacade.getWorkflowModel(
                            workflowId, systemTask.isTaskRetrievalRequired());

            if (workflow.getStatus().isTerminal()) {
                // The workflow finished while this task was queued: cancel the task instead
                // of executing it.
                LOGGER.info(
                        "Workflow {} has been completed for {}/{}",
                        workflow.toShortString(),
                        systemTask,
                        task.getTaskId());
                if (!task.getStatus().isTerminal()) {
                    task.setStatus(TaskModel.Status.CANCELED);
                    task.setReasonForIncompletion(
                            String.format(
                                    "Workflow is in %s state", workflow.getStatus().toString()));
                }
                shouldRemoveTaskFromQueue = true;
                return;
            }

            LOGGER.debug(
                    "Executing {}/{} in {} state",
                    task.getTaskType(),
                    task.getTaskId(),
                    task.getStatus());

            boolean isTaskAsyncComplete = systemTask.isAsyncComplete(task);
            if (task.getStatus() == TaskModel.Status.SCHEDULED || !isTaskAsyncComplete) {
                task.incrementPollCount();
            }

            if (task.getStatus() == TaskModel.Status.SCHEDULED) {
                task.setStartTime(System.currentTimeMillis());
                Monitors.recordQueueWaitTime(task.getTaskType(), task.getQueueWaitTime());
                systemTask.start(workflow, task, workflowExecutor);
            } else if (task.getStatus() == TaskModel.Status.IN_PROGRESS) {
                systemTask.execute(workflow, task, workflowExecutor);
            }

            // Update message in Task queue based on Task status
            // Remove asyncComplete system tasks from the queue that are not in SCHEDULED state
            if (isTaskAsyncComplete && task.getStatus() != TaskModel.Status.SCHEDULED) {
                shouldRemoveTaskFromQueue = true;
                hasTaskExecutionCompleted = true;
            } else if (task.getStatus().isTerminal()) {
                task.setEndTime(System.currentTimeMillis());
                shouldRemoveTaskFromQueue = true;
                hasTaskExecutionCompleted = true;
            } else {
                // Non-terminal: keep the message on the queue, postponed by the task's
                // evaluation offset (or the default callback time when none is provided).
                task.setCallbackAfterSeconds(systemTaskCallbackTime);
                systemTask
                        .getEvaluationOffset(task, systemTaskCallbackTime)
                        .ifPresentOrElse(
                                task::setCallbackAfterSeconds,
                                () -> task.setCallbackAfterSeconds(systemTaskCallbackTime));
                queueDAO.postpone(
                        queueName,
                        task.getTaskId(),
                        task.getWorkflowPriority(),
                        task.getCallbackAfterSeconds());
                LOGGER.debug("{} postponed in queue: {}", task, queueName);
            }

            LOGGER.debug(
                    "Finished execution of {}/{}-{}",
                    systemTask,
                    task.getTaskId(),
                    task.getStatus());
        } catch (Exception e) {
            Monitors.error(AsyncSystemTaskExecutor.class.getSimpleName(), "executeSystemTask");
            LOGGER.error("Error executing system task - {}, with id: {}", systemTask, taskId, e);
        } finally {
            executionDAOFacade.updateTask(task);
            if (shouldRemoveTaskFromQueue) {
                queueDAO.remove(queueName, task.getTaskId());
                LOGGER.debug("{} removed from queue: {}", task, queueName);
            }
            // if the current task execution has completed, then the workflow needs to be evaluated
            if (hasTaskExecutionCompleted) {
                workflowExecutor.decide(workflowId);
            }
        }
    }

    /** Postpones the task's queue message, logging (but not propagating) any failure. */
    private void postponeQuietly(String queueName, TaskModel task) {
        try {
            queueDAO.postpone(
                    queueName,
                    task.getTaskId(),
                    task.getWorkflowPriority(),
                    queueTaskMessagePostponeSecs);
        } catch (Exception e) {
            // BUGFIX: the caught exception was previously dropped from this log call.
            LOGGER.error("Error postponing task: {} in queue: {}", task.getTaskId(), queueName, e);
        }
    }

    /** Loads the task model by id, returning {@code null} on any lookup failure. */
    private TaskModel loadTaskQuietly(String taskId) {
        try {
            return executionDAOFacade.getTaskModel(taskId);
        } catch (Exception e) {
            return null;
        }
    }
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/main/java/com/netflix/conductor/core/execution/DeciderService.java | core/src/main/java/com/netflix/conductor/core/execution/DeciderService.java | /*
* Copyright 2022 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution;
import java.time.Duration;
import java.util.*;
import java.util.stream.Collectors;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import com.netflix.conductor.annotations.Trace;
import com.netflix.conductor.annotations.VisibleForTesting;
import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.tasks.TaskType;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.common.utils.ExternalPayloadStorage.Operation;
import com.netflix.conductor.common.utils.ExternalPayloadStorage.PayloadType;
import com.netflix.conductor.common.utils.TaskUtils;
import com.netflix.conductor.core.exception.TerminateWorkflowException;
import com.netflix.conductor.core.execution.mapper.TaskMapper;
import com.netflix.conductor.core.execution.mapper.TaskMapperContext;
import com.netflix.conductor.core.execution.tasks.SystemTaskRegistry;
import com.netflix.conductor.core.utils.ExternalPayloadStorageUtils;
import com.netflix.conductor.core.utils.IDGenerator;
import com.netflix.conductor.core.utils.ParametersUtils;
import com.netflix.conductor.dao.MetadataDAO;
import com.netflix.conductor.metrics.Monitors;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import static com.netflix.conductor.common.metadata.tasks.TaskType.TERMINATE;
import static com.netflix.conductor.common.metadata.tasks.TaskType.USER_DEFINED;
import static com.netflix.conductor.model.TaskModel.Status.*;
/**
* Decider evaluates the state of the workflow by inspecting the current state along with the
* blueprint. The result of the evaluation is either to schedule further tasks, complete/fail the
* workflow or do nothing.
*/
@Service
@Trace
public class DeciderService {
private static final Logger LOGGER = LoggerFactory.getLogger(DeciderService.class);

// Generates unique ids for tasks created during retry/rescheduling.
private final IDGenerator idGenerator;
// Resolves task input/output parameter expressions against workflow state.
private final ParametersUtils parametersUtils;
// Uploads/downloads oversized payloads from external storage.
private final ExternalPayloadStorageUtils externalPayloadStorageUtils;
private final MetadataDAO metadataDAO;
// Identifies which task types are system tasks (DECISION, FORK, JOIN, ...).
private final SystemTaskRegistry systemTaskRegistry;
// Threshold (minutes) after which a still-pending task is considered stuck;
// derived from the conductor.app.taskPendingTimeThreshold property.
private final long taskPendingTimeThresholdMins;
// Task-type -> mapper used to turn workflow-definition tasks into TaskModels.
private final Map<String, TaskMapper> taskMappers;
/**
 * Wires the decider with its collaborators.
 *
 * @param idGenerator id source for newly created tasks
 * @param parametersUtils parameter-expression resolver
 * @param metadataDAO metadata store for task/workflow definitions
 * @param externalPayloadStorageUtils external payload up/download helper
 * @param systemTaskRegistry registry of system task types
 * @param taskMappers mappers keyed by task type
 * @param taskPendingTimeThreshold duration after which pending tasks are flagged
 */
public DeciderService(
        IDGenerator idGenerator,
        ParametersUtils parametersUtils,
        MetadataDAO metadataDAO,
        ExternalPayloadStorageUtils externalPayloadStorageUtils,
        SystemTaskRegistry systemTaskRegistry,
        @Qualifier("taskMappersByTaskType") Map<String, TaskMapper> taskMappers,
        @Value("${conductor.app.taskPendingTimeThreshold:60m}")
                Duration taskPendingTimeThreshold) {
    // Assignments grouped to mirror the field declaration order.
    this.idGenerator = idGenerator;
    this.parametersUtils = parametersUtils;
    this.externalPayloadStorageUtils = externalPayloadStorageUtils;
    this.metadataDAO = metadataDAO;
    this.systemTaskRegistry = systemTaskRegistry;
    this.taskPendingTimeThresholdMins = taskPendingTimeThreshold.toMinutes();
    this.taskMappers = taskMappers;
}
/**
 * Evaluates the workflow's current state against its blueprint.
 *
 * <p>A brand-new workflow (no unprocessed tasks) first has its starting task(s) computed via
 * {@code startWorkflow}; those are handed to the two-argument decide as pre-scheduled tasks.
 *
 * @param workflow the workflow to evaluate
 * @return the decider outcome (tasks to schedule/update, completion flag)
 * @throws TerminateWorkflowException when evaluation determines the workflow must terminate
 */
public DeciderOutcome decide(WorkflowModel workflow) throws TerminateWorkflowException {
    // A task still needs processing unless it was skipped or finished its lifecycle.
    boolean hasUnprocessedTasks =
            workflow.getTasks().stream()
                    .anyMatch(t -> !t.getStatus().equals(SKIPPED) && !t.isExecuted());

    List<TaskModel> preScheduledTasks = new LinkedList<>();
    if (!hasUnprocessedTasks) {
        // Fresh workflow: determine the first task(s) to run.
        List<TaskModel> startTasks = startWorkflow(workflow);
        if (startTasks != null) {
            preScheduledTasks = startTasks;
        }
    }
    return decide(workflow, preScheduledTasks);
}
/**
 * Core decider loop: inspects every task of the workflow, schedules retries and successor
 * tasks, and decides whether the workflow is complete.
 *
 * @param workflow the workflow being evaluated (tasks may be mutated: statuses, executed flag)
 * @param preScheduledTasks tasks already chosen for scheduling (e.g. a new workflow's first
 *     task); merged into the scheduling map before pending tasks are processed
 * @return the outcome holding tasks to schedule, tasks to update, terminate-task (if any)
 *     and the completion flag
 * @throws TerminateWorkflowException on workflow timeout or when permissive tasks ended
 *     unsuccessfully at completion time
 */
private DeciderOutcome decide(final WorkflowModel workflow, List<TaskModel> preScheduledTasks)
        throws TerminateWorkflowException {
    DeciderOutcome outcome = new DeciderOutcome();
    if (workflow.getStatus().isTerminal()) {
        // you cannot evaluate a terminal workflow
        LOGGER.debug(
                "Workflow {} is already finished. Reason: {}",
                workflow,
                workflow.getReasonForIncompletion());
        return outcome;
    }

    // May throw TerminateWorkflowException when the workflow-level timeout has elapsed.
    checkWorkflowTimeout(workflow);

    if (workflow.getStatus().equals(WorkflowModel.Status.PAUSED)) {
        LOGGER.debug("Workflow " + workflow.getWorkflowId() + " is paused");
        return outcome;
    }

    List<TaskModel> pendingTasks = new ArrayList<>();
    Set<String> executedTaskRefNames = new HashSet<>();
    boolean hasSuccessfulTerminateTask = false;
    for (TaskModel task : workflow.getTasks()) {
        // Filter the list of tasks and include only tasks that are not retried, not executed
        // marked to be skipped and not part of System tasks that is DECISION, FORK, JOIN
        // This list will be empty for a new workflow being started
        if (!task.isRetried() && !task.getStatus().equals(SKIPPED) && !task.isExecuted()) {
            pendingTasks.add(task);
        }
        // Get all the tasks that have not completed their lifecycle yet
        // This list will be empty for a new workflow
        if (task.isExecuted()) {
            executedTaskRefNames.add(task.getReferenceTaskName());
        }
        // A successful terminal TERMINATE task forces the workflow toward completion below.
        if (TERMINATE.name().equals(task.getTaskType())
                && task.getStatus().isTerminal()
                && task.getStatus().isSuccessful()) {
            hasSuccessfulTerminateTask = true;
            outcome.terminateTask = task;
        }
    }

    // LinkedHashMap preserves the order tasks were chosen for scheduling.
    Map<String, TaskModel> tasksToBeScheduled = new LinkedHashMap<>();
    preScheduledTasks.forEach(
            preScheduledTask -> {
                tasksToBeScheduled.put(
                        preScheduledTask.getReferenceTaskName(), preScheduledTask);
            });

    // A new workflow does not enter this code branch
    for (TaskModel pendingTask : pendingTasks) {
        // Non-terminal system tasks are re-scheduled so the workflow keeps evaluating them.
        if (systemTaskRegistry.isSystemTask(pendingTask.getTaskType())
                && !pendingTask.getStatus().isTerminal()) {
            tasksToBeScheduled.putIfAbsent(pendingTask.getReferenceTaskName(), pendingTask);
            executedTaskRefNames.remove(pendingTask.getReferenceTaskName());
        }

        // Prefer the task's own definition; fall back to the definition embedded in the
        // workflow blueprint for this reference name.
        Optional<TaskDef> taskDefinition = pendingTask.getTaskDefinition();
        if (taskDefinition.isEmpty()) {
            taskDefinition =
                    Optional.ofNullable(
                                    workflow.getWorkflowDefinition()
                                            .getTaskByRefName(
                                                    pendingTask.getReferenceTaskName()))
                            .map(WorkflowTask::getTaskDefinition);
        }

        if (taskDefinition.isPresent()) {
            checkTaskTimeout(taskDefinition.get(), pendingTask);
            checkTaskPollTimeout(taskDefinition.get(), pendingTask);
            // If the task has not been updated for "responseTimeoutSeconds" then mark task as
            // TIMED_OUT
            if (isResponseTimedOut(taskDefinition.get(), pendingTask)) {
                timeoutTask(taskDefinition.get(), pendingTask);
            }
        }

        if (!pendingTask.getStatus().isSuccessful()) {
            WorkflowTask workflowTask = pendingTask.getWorkflowTask();
            if (workflowTask == null) {
                workflowTask =
                        workflow.getWorkflowDefinition()
                                .getTaskByRefName(pendingTask.getReferenceTaskName());
            }

            // Attempt a retry; when none is possible, non-permissive optional-style tasks
            // are downgraded to COMPLETED_WITH_ERRORS so the workflow can proceed.
            Optional<TaskModel> retryTask =
                    retry(taskDefinition.orElse(null), workflowTask, pendingTask, workflow);
            if (retryTask.isPresent()) {
                tasksToBeScheduled.put(retryTask.get().getReferenceTaskName(), retryTask.get());
                executedTaskRefNames.remove(retryTask.get().getReferenceTaskName());
                outcome.tasksToBeUpdated.add(pendingTask);
            } else if (!(pendingTask.getWorkflowTask() != null
                    && pendingTask.getWorkflowTask().isPermissive()
                    && !pendingTask.getWorkflowTask().isOptional())) {
                pendingTask.setStatus(COMPLETED_WITH_ERRORS);
            }
        }

        if (!pendingTask.isExecuted()
                && !pendingTask.isRetried()
                && pendingTask.getStatus().isTerminal()) {
            // Task just reached a terminal state: mark its lifecycle complete and schedule
            // whatever comes next in the blueprint.
            pendingTask.setExecuted(true);
            List<TaskModel> nextTasks = getNextTask(workflow, pendingTask);
            if (pendingTask.isLoopOverTask()
                    && !TaskType.DO_WHILE.name().equals(pendingTask.getTaskType())
                    && !nextTasks.isEmpty()) {
                nextTasks = filterNextLoopOverTasks(nextTasks, pendingTask, workflow);
            }
            nextTasks.forEach(
                    nextTask ->
                            tasksToBeScheduled.putIfAbsent(
                                    nextTask.getReferenceTaskName(), nextTask));
            outcome.tasksToBeUpdated.add(pendingTask);
            LOGGER.debug(
                    "Scheduling Tasks from {}, next = {} for workflowId: {}",
                    pendingTask.getTaskDefName(),
                    nextTasks.stream()
                            .map(TaskModel::getTaskDefName)
                            .collect(Collectors.toList()),
                    workflow.getWorkflowId());
        }
    }

    // All the tasks that need to scheduled are added to the outcome, in case of
    List<TaskModel> unScheduledTasks =
            tasksToBeScheduled.values().stream()
                    .filter(task -> !executedTaskRefNames.contains(task.getReferenceTaskName()))
                    .collect(Collectors.toList());
    if (!unScheduledTasks.isEmpty()) {
        LOGGER.debug(
                "Scheduling Tasks: {} for workflow: {}",
                unScheduledTasks.stream()
                        .map(TaskModel::getTaskDefName)
                        .collect(Collectors.toList()),
                workflow.getWorkflowId());
        outcome.tasksToBeScheduled.addAll(unScheduledTasks);
    }

    if (hasSuccessfulTerminateTask
            || (outcome.tasksToBeScheduled.isEmpty() && checkForWorkflowCompletion(workflow))) {
        LOGGER.debug("Marking workflow: {} as complete.", workflow);
        // Keep only the highest-retry-count instance per reference name, then check whether
        // any permissive (non-optional) task ended unsuccessfully; if so, the workflow must
        // terminate instead of completing.
        List<TaskModel> permissiveTasksTerminalNonSuccessful =
                workflow.getTasks().stream()
                        .filter(t -> t.getWorkflowTask() != null)
                        .filter(t -> t.getWorkflowTask().isPermissive())
                        .filter(t -> !t.getWorkflowTask().isOptional())
                        .collect(
                                Collectors.toMap(
                                        TaskModel::getReferenceTaskName,
                                        t -> t,
                                        (t1, t2) ->
                                                t1.getRetryCount() > t2.getRetryCount()
                                                        ? t1
                                                        : t2))
                        .values()
                        .stream()
                        .filter(
                                t ->
                                        t.getStatus().isTerminal()
                                                && !t.getStatus().isSuccessful())
                        .toList();
        if (!permissiveTasksTerminalNonSuccessful.isEmpty()) {
            final String errMsg =
                    permissiveTasksTerminalNonSuccessful.stream()
                            .map(
                                    t ->
                                            String.format(
                                                    "Task %s failed with status: %s and reason: '%s'",
                                                    t.getTaskId(),
                                                    t.getStatus(),
                                                    t.getReasonForIncompletion()))
                            .collect(Collectors.joining(". "));
            throw new TerminateWorkflowException(errMsg);
        }
        outcome.isComplete = true;
    }

    return outcome;
}
/**
 * Prepares loop-over successor tasks for the current iteration and drops those that are
 * already running or finished in the workflow.
 *
 * <p>Each candidate is mutated in place: its reference name gets the iteration suffix
 * appended and its iteration field is set to the pending task's iteration.
 *
 * @param tasks candidate successor tasks (mutated)
 * @param pendingTask the loop task whose iteration is being scheduled
 * @param workflow the workflow providing the set of already-active tasks
 * @return the candidates whose iteration-qualified reference name is not yet present
 */
@VisibleForTesting
List<TaskModel> filterNextLoopOverTasks(
        List<TaskModel> tasks, TaskModel pendingTask, WorkflowModel workflow) {
    int iteration = pendingTask.getIteration();
    // Stamp every candidate with the current loop iteration so each pass of the loop
    // schedules distinct task instances.
    for (TaskModel nextTask : tasks) {
        nextTask.setReferenceTaskName(
                TaskUtils.appendIteration(nextTask.getReferenceTaskName(), iteration));
        nextTask.setIteration(iteration);
    }
    // Reference names that are IN_PROGRESS or already terminal must not be re-scheduled.
    Set<String> activeOrDoneRefNames =
            workflow.getTasks().stream()
                    .filter(
                            runningTask ->
                                    runningTask.getStatus().equals(TaskModel.Status.IN_PROGRESS)
                                            || runningTask.getStatus().isTerminal())
                    .map(TaskModel::getReferenceTaskName)
                    .collect(Collectors.toSet());
    return tasks.stream()
            .filter(candidate -> !activeOrDoneRefNames.contains(candidate.getReferenceTaskName()))
            .collect(Collectors.toList());
}
/**
 * Computes the first task(s) to schedule for a workflow that has no unprocessed tasks.
 *
 * <p>For a new execution, the first non-skipped task of the blueprint is scheduled. For a
 * re-run (reRunFromWorkflowId set and tasks present) the first existing task is reset to
 * SCHEDULED and returned.
 *
 * @param workflow the workflow to start (re-run path mutates its first task)
 * @return the tasks to schedule first
 * @throws TerminateWorkflowException when the blueprint has no tasks, or the re-run starting
 *     task cannot be found
 */
private List<TaskModel> startWorkflow(WorkflowModel workflow)
        throws TerminateWorkflowException {
    final WorkflowDef workflowDef = workflow.getWorkflowDefinition();
    LOGGER.debug("Starting workflow: {}", workflow);

    // The tasks will be empty in case of new workflow
    List<TaskModel> tasks = workflow.getTasks();
    // Check if the workflow is a re-run case or if it is a new workflow execution
    if (workflow.getReRunFromWorkflowId() == null || tasks.isEmpty()) {
        if (workflowDef.getTasks().isEmpty()) {
            // An empty blueprint completes immediately.
            throw new TerminateWorkflowException(
                    "No tasks found to be executed", WorkflowModel.Status.COMPLETED);
        }

        WorkflowTask taskToSchedule =
                workflowDef
                        .getTasks()
                        .get(0); // Nothing is running yet - so schedule the first task
        // Loop until a non-skipped task is found
        while (isTaskSkipped(taskToSchedule, workflow)) {
            taskToSchedule = workflowDef.getNextTask(taskToSchedule.getTaskReferenceName());
        }

        // In case of a new workflow, the first non-skippable task will be scheduled
        return getTasksToBeScheduled(workflow, taskToSchedule, 0);
    }

    // Re-run path: get the first task to schedule and reset it for a fresh attempt.
    TaskModel rerunFromTask =
            tasks.stream()
                    .findFirst()
                    .map(
                            task -> {
                                // Reset the task state so it is picked up as a new attempt.
                                task.setStatus(SCHEDULED);
                                task.setRetried(true);
                                task.setRetryCount(0);
                                return task;
                            })
                    .orElseThrow(
                            () -> {
                                String reason =
                                        String.format(
                                                "The workflow %s is marked for re-run from %s but could not find the starting task",
                                                workflow.getWorkflowId(),
                                                workflow.getReRunFromWorkflowId());
                                return new TerminateWorkflowException(reason);
                            });

    return Collections.singletonList(rerunFromTask);
}
/**
 * Updates the workflow output.
 *
 * <p>Precedence: a successful TERMINATE task's output wins; otherwise the workflow
 * definition's output parameters are evaluated; otherwise the output of {@code task} (or the
 * last task when {@code task} is null) is used. Outputs stored externally are downloaded.
 *
 * @param workflow the workflow instance
 * @param task if not null, the output of this task will be copied to workflow output if no
 *     output parameters are specified in the workflow definition if null, the output of the
 *     last task in the workflow will be copied to workflow output of no output parameters are
 *     specified in the workflow definition
 */
void updateWorkflowOutput(final WorkflowModel workflow, TaskModel task) {
    List<TaskModel> allTasks = workflow.getTasks();
    if (allTasks.isEmpty()) {
        return;
    }

    Map<String, Object> output = new HashMap<>();
    // A successful terminal TERMINATE task dictates the workflow output.
    Optional<TaskModel> optionalTask =
            allTasks.stream()
                    .filter(
                            t ->
                                    TaskType.TERMINATE.name().equals(t.getTaskType())
                                            && t.getStatus().isTerminal()
                                            && t.getStatus().isSuccessful())
                    .findFirst();
    if (optionalTask.isPresent()) {
        TaskModel terminateTask = optionalTask.get();
        if (StringUtils.isNotBlank(terminateTask.getExternalOutputPayloadStoragePath())) {
            // Output was offloaded to external storage; pull it back in.
            output =
                    externalPayloadStorageUtils.downloadPayload(
                            terminateTask.getExternalOutputPayloadStoragePath());
            Monitors.recordExternalPayloadStorageUsage(
                    terminateTask.getTaskDefName(),
                    Operation.READ.toString(),
                    PayloadType.TASK_OUTPUT.toString());
        } else if (!terminateTask.getOutputData().isEmpty()) {
            output = terminateTask.getOutputData();
        }
    } else {
        // Default to the supplied task, falling back to the last task of the workflow.
        TaskModel last = Optional.ofNullable(task).orElse(allTasks.get(allTasks.size() - 1));
        WorkflowDef workflowDef = workflow.getWorkflowDefinition();
        if (workflowDef.getOutputParameters() != null
                && !workflowDef.getOutputParameters().isEmpty()) {
            // Workflow definition declares explicit output parameters: evaluate them.
            output =
                    parametersUtils.getTaskInput(
                            workflowDef.getOutputParameters(), workflow, null, null);
        } else if (StringUtils.isNotBlank(last.getExternalOutputPayloadStoragePath())) {
            output =
                    externalPayloadStorageUtils.downloadPayload(
                            last.getExternalOutputPayloadStoragePath());
            Monitors.recordExternalPayloadStorageUsage(
                    last.getTaskDefName(),
                    Operation.READ.toString(),
                    PayloadType.TASK_OUTPUT.toString());
        } else {
            output = last.getOutputData();
        }
    }

    workflow.setOutput(output);
}
public boolean checkForWorkflowCompletion(final WorkflowModel workflow)
throws TerminateWorkflowException {
Map<String, TaskModel.Status> taskStatusMap = new HashMap<>();
List<TaskModel> nonExecutedTasks = new ArrayList<>();
for (TaskModel task : workflow.getTasks()) {
taskStatusMap.put(task.getReferenceTaskName(), task.getStatus());
if (!task.getStatus().isTerminal()) {
return false;
}
// If there is a TERMINATE task that has been executed successfuly then the workflow
// should be marked as completed.
if (TERMINATE.name().equals(task.getTaskType())
&& task.getStatus().isTerminal()
&& task.getStatus().isSuccessful()) {
return true;
}
if (!task.isRetried() || !task.isExecuted()) {
nonExecutedTasks.add(task);
}
}
// If there are no tasks executed, then we are not done yet
if (taskStatusMap.isEmpty()) {
return false;
}
List<WorkflowTask> workflowTasks = workflow.getWorkflowDefinition().getTasks();
for (WorkflowTask wftask : workflowTasks) {
TaskModel.Status status = taskStatusMap.get(wftask.getTaskReferenceName());
if (status == null || !status.isTerminal()) {
return false;
}
}
boolean noPendingSchedule =
nonExecutedTasks.stream()
.parallel()
.noneMatch(
wftask -> {
String next = getNextTasksToBeScheduled(workflow, wftask);
return next != null && !taskStatusMap.containsKey(next);
});
return noPendingSchedule;
}
List<TaskModel> getNextTask(WorkflowModel workflow, TaskModel task) {
final WorkflowDef workflowDef = workflow.getWorkflowDefinition();
// Get the following task after the last completed task
if (systemTaskRegistry.isSystemTask(task.getTaskType())
&& (TaskType.TASK_TYPE_DECISION.equals(task.getTaskType())
|| TaskType.TASK_TYPE_SWITCH.equals(task.getTaskType()))) {
if (task.getInputData().get("hasChildren") != null) {
return Collections.emptyList();
}
}
String taskReferenceName =
task.isLoopOverTask()
? TaskUtils.removeIterationFromTaskRefName(task.getReferenceTaskName())
: task.getReferenceTaskName();
WorkflowTask taskToSchedule = workflowDef.getNextTask(taskReferenceName);
while (isTaskSkipped(taskToSchedule, workflow)) {
taskToSchedule = workflowDef.getNextTask(taskToSchedule.getTaskReferenceName());
}
if (taskToSchedule != null && TaskType.DO_WHILE.name().equals(taskToSchedule.getType())) {
// check if already has this DO_WHILE task, ignore it if it already exists
String nextTaskReferenceName = taskToSchedule.getTaskReferenceName();
if (workflow.getTasks().stream()
.anyMatch(
runningTask ->
runningTask
.getReferenceTaskName()
.equals(nextTaskReferenceName))) {
return Collections.emptyList();
}
}
if (taskToSchedule != null) {
return getTasksToBeScheduled(workflow, taskToSchedule, 0);
}
return Collections.emptyList();
}
private String getNextTasksToBeScheduled(WorkflowModel workflow, TaskModel task) {
final WorkflowDef def = workflow.getWorkflowDefinition();
String taskReferenceName = task.getReferenceTaskName();
WorkflowTask taskToSchedule = def.getNextTask(taskReferenceName);
while (isTaskSkipped(taskToSchedule, workflow)) {
taskToSchedule = def.getNextTask(taskToSchedule.getTaskReferenceName());
}
return taskToSchedule == null ? null : taskToSchedule.getTaskReferenceName();
}
    /**
     * Decides whether a task in a terminal state should be retried and, if so, builds the
     * rescheduled copy of the task.
     *
     * @param taskDefinition the task definition; looked up from the metadata store when null
     * @param workflowTask the task as declared in the workflow definition; may be null
     * @param task the task instance that reached a terminal state
     * @param workflow the workflow the task belongs to
     * @return the rescheduled task copy, or empty when the task is optional/permissive and the
     *     workflow should simply continue without it
     * @throws TerminateWorkflowException when the task cannot be retried and is not
     *     optional/permissive — carries the workflow status the workflow must be moved to
     */
    @VisibleForTesting
    Optional<TaskModel> retry(
            TaskDef taskDefinition,
            WorkflowTask workflowTask,
            TaskModel task,
            WorkflowModel workflow)
            throws TerminateWorkflowException {
        int retryCount = task.getRetryCount();
        if (taskDefinition == null) {
            taskDefinition = metadataDAO.getTaskDef(task.getTaskDefName());
        }
        // The retry count declared on the workflow task overrides the task definition's;
        // with no definition at all, no retries are allowed.
        final int expectedRetryCount =
                taskDefinition == null
                        ? 0
                        : Optional.ofNullable(workflowTask)
                                .map(WorkflowTask::getRetryCount)
                                .orElse(taskDefinition.getRetryCount());
        // Not retriable, a built-in task, or retry budget exhausted -> no further retry.
        if (!task.getStatus().isRetriable()
                || TaskType.isBuiltIn(task.getTaskType())
                || expectedRetryCount <= retryCount) {
            // Optional/permissive tasks never fail the workflow; just stop retrying.
            if (workflowTask != null
                    && (workflowTask.isOptional() || workflowTask.isPermissive())) {
                return Optional.empty();
            }
            // Map the task's terminal status onto the workflow status used to terminate it.
            WorkflowModel.Status status;
            switch (task.getStatus()) {
                case CANCELED:
                    status = WorkflowModel.Status.TERMINATED;
                    break;
                case TIMED_OUT:
                    status = WorkflowModel.Status.TIMED_OUT;
                    break;
                default:
                    status = WorkflowModel.Status.FAILED;
                    break;
            }
            // Capture the workflow output from this task before terminating.
            updateWorkflowOutput(workflow, task);
            final String errMsg =
                    String.format(
                            "Task %s failed with status: %s and reason: '%s'",
                            task.getTaskId(), status, task.getReasonForIncompletion());
            throw new TerminateWorkflowException(errMsg, status, task);
        }
        // retry... - but not immediately - put a delay...
        int startDelay = taskDefinition.getRetryDelaySeconds();
        switch (taskDefinition.getRetryLogic()) {
            case FIXED:
                startDelay = taskDefinition.getRetryDelaySeconds();
                break;
            case LINEAR_BACKOFF:
                // delay * scaleFactor * (attempt + 1); clamp on int overflow.
                int linearRetryDelaySeconds =
                        taskDefinition.getRetryDelaySeconds()
                                * taskDefinition.getBackoffScaleFactor()
                                * (task.getRetryCount() + 1);
                // Reset integer overflow to max value
                startDelay =
                        linearRetryDelaySeconds < 0 ? Integer.MAX_VALUE : linearRetryDelaySeconds;
                break;
            case EXPONENTIAL_BACKOFF:
                // delay * 2^attempt; clamp on int overflow.
                int exponentialRetryDelaySeconds =
                        taskDefinition.getRetryDelaySeconds()
                                * (int) Math.pow(2, task.getRetryCount());
                // Reset integer overflow to max value
                startDelay =
                        exponentialRetryDelaySeconds < 0
                                ? Integer.MAX_VALUE
                                : exponentialRetryDelaySeconds;
                break;
        }
        // Mark the original as retried and build a fresh SCHEDULED copy with reset
        // execution state (timings, worker, sub-workflow linkage, reason, poll count).
        task.setRetried(true);
        TaskModel rescheduled = task.copy();
        rescheduled.setStartDelayInSeconds(startDelay);
        rescheduled.setCallbackAfterSeconds(startDelay);
        rescheduled.setRetryCount(task.getRetryCount() + 1);
        rescheduled.setRetried(false);
        rescheduled.setTaskId(idGenerator.generate());
        rescheduled.setRetriedTaskId(task.getTaskId());
        rescheduled.setStatus(SCHEDULED);
        rescheduled.setPollCount(0);
        rescheduled.setInputData(new HashMap<>(task.getInputData()));
        rescheduled.setReasonForIncompletion(null);
        rescheduled.setSubWorkflowId(null);
        rescheduled.setSeq(0);
        rescheduled.setScheduledTime(0);
        rescheduled.setStartTime(0);
        rescheduled.setEndTime(0);
        rescheduled.setWorkerId(null);
        // Keep the external-storage pointer when the input was offloaded; otherwise carry
        // the raw input forward.
        if (StringUtils.isNotBlank(task.getExternalInputPayloadStoragePath())) {
            rescheduled.setExternalInputPayloadStoragePath(
                    task.getExternalInputPayloadStoragePath());
        } else {
            rescheduled.addInput(task.getInputData());
        }
        // Schema v2+ re-evaluates the input parameters against the current workflow state.
        if (workflowTask != null && workflow.getWorkflowDefinition().getSchemaVersion() > 1) {
            Map<String, Object> taskInput =
                    parametersUtils.getTaskInputV2(
                            workflowTask.getInputParameters(),
                            workflow,
                            rescheduled.getTaskId(),
                            taskDefinition);
            rescheduled.addInput(taskInput);
        }
        // for the schema version 1, we do not have to recompute the inputs
        return Optional.of(rescheduled);
    }
@VisibleForTesting
void checkWorkflowTimeout(WorkflowModel workflow) {
WorkflowDef workflowDef = workflow.getWorkflowDefinition();
if (workflowDef == null) {
LOGGER.warn("Missing workflow definition : {}", workflow.getWorkflowId());
return;
}
if (workflow.getStatus().isTerminal() || workflowDef.getTimeoutSeconds() <= 0) {
return;
}
long timeout = 1000L * workflowDef.getTimeoutSeconds();
long now = System.currentTimeMillis();
long elapsedTime =
workflow.getLastRetriedTime() > 0
? now - workflow.getLastRetriedTime()
: now - workflow.getCreateTime();
if (elapsedTime < timeout) {
return;
}
String reason =
String.format(
"Workflow timed out after %d seconds. Timeout configured as %d seconds. "
+ "Timeout policy configured to %s",
elapsedTime / 1000L,
workflowDef.getTimeoutSeconds(),
workflowDef.getTimeoutPolicy().name());
switch (workflowDef.getTimeoutPolicy()) {
case ALERT_ONLY:
LOGGER.info("{} {}", workflow.getWorkflowId(), reason);
Monitors.recordWorkflowTermination(
workflow.getWorkflowName(),
WorkflowModel.Status.TIMED_OUT,
workflow.getOwnerApp());
return;
case TIME_OUT_WF:
throw new TerminateWorkflowException(reason, WorkflowModel.Status.TIMED_OUT);
}
}
@VisibleForTesting
void checkTaskTimeout(TaskDef taskDef, TaskModel task) {
if (taskDef == null) {
LOGGER.warn(
"Missing task definition for task:{}/{} in workflow:{}",
task.getTaskId(),
task.getTaskDefName(),
task.getWorkflowInstanceId());
return;
}
if (task.getStatus().isTerminal()
|| taskDef.getTimeoutSeconds() <= 0
|| task.getStartTime() <= 0) {
return;
}
long timeout = 1000L * taskDef.getTimeoutSeconds();
long now = System.currentTimeMillis();
long elapsedTime =
now - (task.getStartTime() + ((long) task.getStartDelayInSeconds() * 1000L));
if (elapsedTime < timeout) {
return;
}
String reason =
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | true |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/main/java/com/netflix/conductor/core/execution/StartWorkflowInput.java | core/src/main/java/com/netflix/conductor/core/execution/StartWorkflowInput.java | /*
* Copyright 2022 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution;
import java.util.Map;
import java.util.Objects;
import com.netflix.conductor.common.metadata.workflow.StartWorkflowRequest;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
/**
 * Internal value object carrying all the parameters needed to start a workflow execution.
 *
 * <p>Mirrors the API-level {@link StartWorkflowRequest} but adds executor-internal fields —
 * parent workflow/task ids, the pre-assigned workflow id and the triggering workflow id —
 * which are populated by the executor rather than by callers of the public API.
 */
public class StartWorkflowInput {
    private String name;
    private Integer version;
    private WorkflowDef workflowDefinition;
    private Map<String, Object> workflowInput;
    private String externalInputPayloadStoragePath;
    private String correlationId;
    private Integer priority;
    private String parentWorkflowId;
    private String parentWorkflowTaskId;
    private String event;
    private Map<String, String> taskToDomain;
    private String workflowId;
    private String triggeringWorkflowId;
    public StartWorkflowInput() {}
    /**
     * Copies the caller-supplied fields from an API-level request. Executor-internal fields
     * (parent ids, event, workflow id, triggering workflow id) are intentionally left unset
     * here and are populated later by the executor.
     *
     * @param startWorkflowRequest the API request to copy from
     */
    public StartWorkflowInput(StartWorkflowRequest startWorkflowRequest) {
        this.name = startWorkflowRequest.getName();
        this.version = startWorkflowRequest.getVersion();
        this.workflowDefinition = startWorkflowRequest.getWorkflowDef();
        this.correlationId = startWorkflowRequest.getCorrelationId();
        this.priority = startWorkflowRequest.getPriority();
        this.workflowInput = startWorkflowRequest.getInput();
        this.externalInputPayloadStoragePath =
                startWorkflowRequest.getExternalInputPayloadStoragePath();
        this.taskToDomain = startWorkflowRequest.getTaskToDomain();
    }
    public String getName() {
        return name;
    }
    public void setName(String name) {
        this.name = name;
    }
    public Integer getVersion() {
        return version;
    }
    public void setVersion(Integer version) {
        this.version = version;
    }
    public WorkflowDef getWorkflowDefinition() {
        return workflowDefinition;
    }
    public void setWorkflowDefinition(WorkflowDef workflowDefinition) {
        this.workflowDefinition = workflowDefinition;
    }
    public Map<String, Object> getWorkflowInput() {
        return workflowInput;
    }
    public void setWorkflowInput(Map<String, Object> workflowInput) {
        this.workflowInput = workflowInput;
    }
    public String getExternalInputPayloadStoragePath() {
        return externalInputPayloadStoragePath;
    }
    public void setExternalInputPayloadStoragePath(String externalInputPayloadStoragePath) {
        this.externalInputPayloadStoragePath = externalInputPayloadStoragePath;
    }
    public String getCorrelationId() {
        return correlationId;
    }
    public void setCorrelationId(String correlationId) {
        this.correlationId = correlationId;
    }
    public Integer getPriority() {
        return priority;
    }
    public void setPriority(Integer priority) {
        this.priority = priority;
    }
    public String getParentWorkflowId() {
        return parentWorkflowId;
    }
    public void setParentWorkflowId(String parentWorkflowId) {
        this.parentWorkflowId = parentWorkflowId;
    }
    public String getParentWorkflowTaskId() {
        return parentWorkflowTaskId;
    }
    public void setParentWorkflowTaskId(String parentWorkflowTaskId) {
        this.parentWorkflowTaskId = parentWorkflowTaskId;
    }
    public String getEvent() {
        return event;
    }
    public void setEvent(String event) {
        this.event = event;
    }
    public Map<String, String> getTaskToDomain() {
        return taskToDomain;
    }
    public void setTaskToDomain(Map<String, String> taskToDomain) {
        this.taskToDomain = taskToDomain;
    }
    public String getWorkflowId() {
        return workflowId;
    }
    public void setWorkflowId(String workflowId) {
        this.workflowId = workflowId;
    }
    public String getTriggeringWorkflowId() {
        return triggeringWorkflowId;
    }
    public void setTriggeringWorkflowId(String triggeringWorkflowId) {
        this.triggeringWorkflowId = triggeringWorkflowId;
    }
    // equals/hashCode cover every field so instances can be compared/deduplicated by value.
    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        StartWorkflowInput that = (StartWorkflowInput) o;
        return Objects.equals(name, that.name)
                && Objects.equals(version, that.version)
                && Objects.equals(workflowDefinition, that.workflowDefinition)
                && Objects.equals(workflowInput, that.workflowInput)
                && Objects.equals(
                        externalInputPayloadStoragePath, that.externalInputPayloadStoragePath)
                && Objects.equals(correlationId, that.correlationId)
                && Objects.equals(priority, that.priority)
                && Objects.equals(parentWorkflowId, that.parentWorkflowId)
                && Objects.equals(parentWorkflowTaskId, that.parentWorkflowTaskId)
                && Objects.equals(event, that.event)
                && Objects.equals(taskToDomain, that.taskToDomain)
                && Objects.equals(triggeringWorkflowId, that.triggeringWorkflowId)
                && Objects.equals(workflowId, that.workflowId);
    }
    @Override
    public int hashCode() {
        return Objects.hash(
                name,
                version,
                workflowDefinition,
                workflowInput,
                externalInputPayloadStoragePath,
                correlationId,
                priority,
                parentWorkflowId,
                parentWorkflowTaskId,
                event,
                taskToDomain,
                triggeringWorkflowId,
                workflowId);
    }
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java | core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java | /*
* Copyright 2024 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution;
import java.util.List;
import com.netflix.conductor.common.metadata.tasks.TaskResult;
import com.netflix.conductor.common.metadata.workflow.RerunWorkflowRequest;
import com.netflix.conductor.common.metadata.workflow.SkipTaskRequest;
import com.netflix.conductor.common.run.Workflow;
import com.netflix.conductor.core.exception.ConflictException;
import com.netflix.conductor.core.exception.NotFoundException;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
public interface WorkflowExecutor {
    /**
     * Resets callbacks for the workflow - all the scheduled tasks will be immediately ready to be
     * polled
     *
     * @param workflowId id of the workflow
     */
    void resetCallbacksForWorkflow(String workflowId);
    /**
     * Rerun a workflow.
     *
     * @param request request parameters
     * @return id of the workflow
     */
    String rerun(RerunWorkflowRequest request);
    /**
     * Restart the workflow from the beginning. If useLatestDefinitions is specified - use the
     * latest definition
     *
     * @param workflowId id of the workflow
     * @param useLatestDefinitions use latest definition if specified as true
     * @throws ConflictException if the workflow is not in terminal state
     * @throws NotFoundException if no such workflow by id
     */
    void restart(String workflowId, boolean useLatestDefinitions)
            throws ConflictException, NotFoundException;
    /**
     * Gets the last instance of each failed task and reschedule each Gets all cancelled tasks and
     * schedule all of them except JOIN (join should change status to INPROGRESS) Switch workflow
     * back to RUNNING status and call decider.
     *
     * @param workflowId the id of the workflow to be retried
     * @param resumeSubworkflowTasks Resumes the tasks inside the subworkflow if given
     */
    void retry(String workflowId, boolean resumeSubworkflowTasks);
    /**
     * Updates a task with the result reported by a worker.
     *
     * @param taskResult the task result to be updated.
     * @return the updated task
     * @throws IllegalArgumentException if the {@link TaskResult} is null.
     * @throws NotFoundException if the Task is not found.
     */
    TaskModel updateTask(TaskResult taskResult);
    /**
     * @param taskId id of the task
     * @return task
     */
    TaskModel getTask(String taskId);
    /**
     * @param workflowName name of the workflow
     * @param version version
     * @return list of running workflows
     */
    List<Workflow> getRunningWorkflows(String workflowName, int version);
    /**
     * @param name name of the workflow
     * @param version version
     * @param startTime from when
     * @param endTime till when
     * @return list of workflow ids matching criteria
     */
    List<String> getWorkflows(String name, Integer version, Long startTime, Long endTime);
    /**
     * @param workflowName name
     * @param version version
     * @return list of running workflow ids
     */
    List<String> getRunningWorkflowIds(String workflowName, int version);
    /**
     * Runs the decider on the workflow, scheduling any tasks that become ready.
     *
     * @param workflowId id of the workflow to be evaluated
     * @return updated workflow
     */
    WorkflowModel decide(String workflowId);
    /**
     * @param workflowId id of the workflow to be terminated
     * @param reason termination reason to be recorded
     */
    void terminateWorkflow(String workflowId, String reason);
    /**
     * @param workflow the workflow to be terminated
     * @param reason the reason for termination
     * @param failureWorkflow the failure workflow (if any) to be triggered as a result of this
     *     termination
     * @return the terminated workflow
     */
    WorkflowModel terminateWorkflow(WorkflowModel workflow, String reason, String failureWorkflow);
    /**
     * @param workflowId id of the workflow to be paused
     */
    void pauseWorkflow(String workflowId);
    /**
     * @param workflowId the workflow to be resumed
     * @throws IllegalStateException if the workflow is not in PAUSED state
     */
    void resumeWorkflow(String workflowId);
    /**
     * @param workflowId the id of the workflow
     * @param taskReferenceName the referenceName of the task to be skipped
     * @param skipTaskRequest the {@link SkipTaskRequest} object
     * @throws IllegalStateException if the task cannot be skipped in the workflow's current state
     */
    void skipTaskFromWorkflow(
            String workflowId, String taskReferenceName, SkipTaskRequest skipTaskRequest);
    /**
     * @param workflowId id of the workflow
     * @param includeTasks includes the tasks if specified
     * @return the workflow instance
     */
    WorkflowModel getWorkflow(String workflowId, boolean includeTasks);
    /**
     * Used by tasks such as do while
     *
     * @param task parent task
     * @param workflow workflow
     */
    void scheduleNextIteration(TaskModel task, WorkflowModel workflow);
    /**
     * Starts a new workflow execution.
     *
     * @param input the parameters of the workflow execution to start
     * @return id of the workflow
     */
    String startWorkflow(StartWorkflowInput input);
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/main/java/com/netflix/conductor/core/execution/tasks/SubWorkflow.java | core/src/main/java/com/netflix/conductor/core/execution/tasks/SubWorkflow.java | /*
* Copyright 2022 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution.tasks;
import java.util.Map;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.core.exception.NonTransientException;
import com.netflix.conductor.core.exception.TransientException;
import com.netflix.conductor.core.execution.StartWorkflowInput;
import com.netflix.conductor.core.execution.WorkflowExecutor;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import com.fasterxml.jackson.databind.ObjectMapper;
import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_SUB_WORKFLOW;
@Component(TASK_TYPE_SUB_WORKFLOW)
public class SubWorkflow extends WorkflowSystemTask {
    private static final Logger LOGGER = LoggerFactory.getLogger(SubWorkflow.class);
    // Output key kept for backwards compatibility with clients reading the sub-workflow id
    // from task output rather than from the task model.
    private static final String SUB_WORKFLOW_ID = "subWorkflowId";
    private final ObjectMapper objectMapper;
    public SubWorkflow(ObjectMapper objectMapper) {
        super(TASK_TYPE_SUB_WORKFLOW);
        this.objectMapper = objectMapper;
    }
    /**
     * Starts the sub-workflow named by the task input ("subWorkflowName"/"subWorkflowVersion",
     * or an inline "subWorkflowDefinition"), records its id on the task, and sets the task
     * status from the sub-workflow's current status.
     *
     * <p>On a transient backend error the task is left untouched so the system task can be
     * retried; any other error fails the task.
     */
    @SuppressWarnings("unchecked")
    @Override
    public void start(WorkflowModel workflow, TaskModel task, WorkflowExecutor workflowExecutor) {
        Map<String, Object> input = task.getInputData();
        String name = input.get("subWorkflowName").toString();
        int version = (int) input.get("subWorkflowVersion");
        WorkflowDef workflowDefinition = null;
        if (input.get("subWorkflowDefinition") != null) {
            // convert the value back to workflow definition object
            workflowDefinition =
                    objectMapper.convertValue(
                            input.get("subWorkflowDefinition"), WorkflowDef.class);
            // An inline definition overrides the name taken from "subWorkflowName".
            name = workflowDefinition.getName();
        }
        // Task-to-domain mapping: use the override from the task input when supplied,
        // otherwise inherit the parent workflow's mapping.
        Map<String, String> taskToDomain = workflow.getTaskToDomain();
        if (input.get("subWorkflowTaskToDomain") instanceof Map) {
            taskToDomain = (Map<String, String>) input.get("subWorkflowTaskToDomain");
        }
        // Sub-workflow input: explicit "workflowInput" when non-empty, else the whole
        // task input map.
        var wfInput = (Map<String, Object>) input.get("workflowInput");
        if (wfInput == null || wfInput.isEmpty()) {
            wfInput = input;
        }
        String correlationId = workflow.getCorrelationId();
        try {
            StartWorkflowInput startWorkflowInput = new StartWorkflowInput();
            startWorkflowInput.setWorkflowDefinition(workflowDefinition);
            startWorkflowInput.setName(name);
            startWorkflowInput.setVersion(version);
            startWorkflowInput.setWorkflowInput(wfInput);
            startWorkflowInput.setCorrelationId(correlationId);
            startWorkflowInput.setParentWorkflowId(workflow.getWorkflowId());
            startWorkflowInput.setParentWorkflowTaskId(task.getTaskId());
            startWorkflowInput.setTaskToDomain(taskToDomain);
            String subWorkflowId = workflowExecutor.startWorkflow(startWorkflowInput);
            task.setSubWorkflowId(subWorkflowId);
            // For backwards compatibility
            task.addOutput(SUB_WORKFLOW_ID, subWorkflowId);
            // Set task status based on current sub-workflow status, as the status can change in
            // recursion by the time we update here.
            WorkflowModel subWorkflow = workflowExecutor.getWorkflow(subWorkflowId, false);
            updateTaskStatus(subWorkflow, task);
        } catch (TransientException te) {
            // Leave the task status unchanged so the start is retried later.
            LOGGER.info(
                    "A transient backend error happened when task {} in {} tried to start sub workflow {}.",
                    task.getTaskId(),
                    workflow.toShortString(),
                    name);
        } catch (Exception ae) {
            task.setStatus(TaskModel.Status.FAILED);
            task.setReasonForIncompletion(ae.getMessage());
            LOGGER.error(
                    "Error starting sub workflow: {} from workflow: {}",
                    name,
                    workflow.toShortString(),
                    ae);
        }
    }
    /**
     * Polls the sub-workflow's status; returns true (task updated) only once the sub-workflow
     * reaches a terminal state.
     */
    @Override
    public boolean execute(
            WorkflowModel workflow, TaskModel task, WorkflowExecutor workflowExecutor) {
        String workflowId = task.getSubWorkflowId();
        if (StringUtils.isEmpty(workflowId)) {
            return false;
        }
        WorkflowModel subWorkflow = workflowExecutor.getWorkflow(workflowId, false);
        WorkflowModel.Status subWorkflowStatus = subWorkflow.getStatus();
        if (!subWorkflowStatus.isTerminal()) {
            return false;
        }
        updateTaskStatus(subWorkflow, task);
        return true;
    }
    /**
     * Terminates the sub-workflow when the parent task is cancelled, propagating the parent's
     * termination reason (or status) to the sub-workflow.
     */
    @Override
    public void cancel(WorkflowModel workflow, TaskModel task, WorkflowExecutor workflowExecutor) {
        String workflowId = task.getSubWorkflowId();
        if (StringUtils.isEmpty(workflowId)) {
            return;
        }
        WorkflowModel subWorkflow = workflowExecutor.getWorkflow(workflowId, true);
        subWorkflow.setStatus(WorkflowModel.Status.TERMINATED);
        String reason =
                StringUtils.isEmpty(workflow.getReasonForIncompletion())
                        ? "Parent workflow has been terminated with status " + workflow.getStatus()
                        : "Parent workflow has been terminated with reason: "
                                + workflow.getReasonForIncompletion();
        workflowExecutor.terminateWorkflow(subWorkflow, reason, null);
    }
    /**
     * Keep Subworkflow task asyncComplete. The Subworkflow task will be executed once
     * asynchronously to move to IN_PROGRESS state, and will move to termination by Subworkflow's
     * completeWorkflow logic, there by avoiding periodic polling.
     *
     * @param task the sub-workflow task (unused; all sub-workflow tasks are async-complete)
     * @return always true
     */
    @Override
    public boolean isAsyncComplete(TaskModel task) {
        return true;
    }
    // Maps the sub-workflow's status onto the task status and, for terminal sub-workflows,
    // copies the sub-workflow output (or its external storage pointer) onto the task.
    private void updateTaskStatus(WorkflowModel subworkflow, TaskModel task) {
        WorkflowModel.Status status = subworkflow.getStatus();
        switch (status) {
            case RUNNING:
            case PAUSED:
                task.setStatus(TaskModel.Status.IN_PROGRESS);
                break;
            case COMPLETED:
                task.setStatus(TaskModel.Status.COMPLETED);
                break;
            case FAILED:
                task.setStatus(TaskModel.Status.FAILED);
                break;
            case TERMINATED:
                task.setStatus(TaskModel.Status.CANCELED);
                break;
            case TIMED_OUT:
                task.setStatus(TaskModel.Status.TIMED_OUT);
                break;
            default:
                throw new NonTransientException(
                        "Subworkflow status does not conform to relevant task status.");
        }
        if (status.isTerminal()) {
            if (subworkflow.getExternalOutputPayloadStoragePath() != null) {
                // Propagate the external-storage pointer instead of inlining a large payload.
                task.setExternalOutputPayloadStoragePath(
                        subworkflow.getExternalOutputPayloadStoragePath());
            } else {
                task.addOutput(subworkflow.getOutput());
            }
            if (!status.isSuccessful()) {
                task.setReasonForIncompletion(
                        String.format(
                                "Sub workflow %s failure reason: %s",
                                subworkflow.toShortString(),
                                subworkflow.getReasonForIncompletion()));
            }
        }
    }
    /**
     * We don't need the tasks when retrieving the workflow data.
     *
     * @return false
     */
    @Override
    public boolean isTaskRetrievalRequired() {
        return false;
    }
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/main/java/com/netflix/conductor/core/execution/tasks/Inline.java | core/src/main/java/com/netflix/conductor/core/execution/tasks/Inline.java | /*
* Copyright 2022 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution.tasks;
import java.util.Map;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
import com.netflix.conductor.core.exception.TerminateWorkflowException;
import com.netflix.conductor.core.execution.WorkflowExecutor;
import com.netflix.conductor.core.execution.evaluators.Evaluator;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_INLINE;
// @formatter:off
/**
* @author X-Ultra
* <p>Task that enables execute inline script at workflow execution.
* <p>Example: { "tasks": [ { "name": "INLINE", "taskReferenceName": "inline_test", "type":
* "INLINE", "inputParameters": { "input": "${workflow.input}", "evaluatorType": "javascript",
* "expression": "if ($.input.a==1){return {testvalue: true}} else{return {testvalue: false} }"
* } } ] }
* <p>The evaluatorType parameter is optional and defaults to "javascript" for backward
* compatibility. Supported values include: - "javascript" - JavaScript evaluation using GraalJS
* engine (default) - "graaljs" - Explicit GraalJS evaluation (same as "javascript") - "python"
* - Python evaluation using GraalVM Python
* <p>To use task output, reference it as script_test.output.testvalue This is a replacement for
* the deprecated Lambda task.
*/
// @formatter:on
@Component(TASK_TYPE_INLINE)
public class Inline extends WorkflowSystemTask {
    private static final Logger LOGGER = LoggerFactory.getLogger(Inline.class);
    private static final String QUERY_EVALUATOR_TYPE = "evaluatorType";
    private static final String QUERY_EXPRESSION_PARAMETER = "expression";
    public static final String NAME = "INLINE";
    private final Map<String, Evaluator> evaluators;

    public Inline(Map<String, Evaluator> evaluators) {
        super(TASK_TYPE_INLINE);
        this.evaluators = evaluators;
    }

    /**
     * Evaluates the task's "expression" input with the evaluator named by "evaluatorType"
     * (defaulting to "javascript" when absent) and stores the result under the "result"
     * output key. Validation errors mark the task FAILED_WITH_TERMINAL_ERROR, since a retry
     * would hit the same error; any other evaluation failure marks it FAILED.
     */
    @Override
    public boolean execute(
            WorkflowModel workflow, TaskModel task, WorkflowExecutor workflowExecutor) {
        Map<String, Object> inputData = task.getInputData();
        // A missing evaluatorType falls back to "javascript" for backward compatibility.
        Object rawEvaluatorType = inputData.get(QUERY_EVALUATOR_TYPE);
        String evaluatorType =
                (rawEvaluatorType == null) ? "javascript" : (String) rawEvaluatorType;
        String expression = (String) inputData.get(QUERY_EXPRESSION_PARAMETER);
        try {
            checkEvaluatorType(evaluatorType);
            checkExpression(expression);
            Object result = evaluators.get(evaluatorType).evaluate(expression, inputData);
            task.addOutput("result", result);
            task.setStatus(TaskModel.Status.COMPLETED);
        } catch (Exception e) {
            Throwable cause = e.getCause();
            String errorMessage = (cause != null) ? cause.getMessage() : e.getMessage();
            LOGGER.error(
                    "Failed to execute Inline Task: {} in workflow: {}",
                    task.getTaskId(),
                    workflow.getWorkflowId(),
                    e);
            // TerminateWorkflowException is thrown when the script evaluation fails
            // Retry will result in the same error, so FAILED_WITH_TERMINAL_ERROR status is used.
            if (e instanceof TerminateWorkflowException) {
                task.setStatus(TaskModel.Status.FAILED_WITH_TERMINAL_ERROR);
            } else {
                task.setStatus(TaskModel.Status.FAILED);
            }
            task.setReasonForIncompletion(errorMessage);
            task.addOutput("error", errorMessage);
        }
        return true;
    }

    // Rejects blank evaluator types and types with no registered evaluator.
    private void checkEvaluatorType(String evaluatorType) {
        if (StringUtils.isBlank(evaluatorType)) {
            LOGGER.error("Empty {} in INLINE task. ", QUERY_EVALUATOR_TYPE);
            throw new TerminateWorkflowException(
                    "Empty '"
                            + QUERY_EVALUATOR_TYPE
                            + "' in INLINE task's input parameters. A non-empty String value must be provided.");
        }
        if (evaluators.get(evaluatorType) == null) {
            LOGGER.error("Evaluator {} for INLINE task not registered", evaluatorType);
            throw new TerminateWorkflowException(
                    "Unknown evaluator '" + evaluatorType + "' in INLINE task.");
        }
    }

    // Rejects blank expressions.
    private void checkExpression(String expression) {
        if (StringUtils.isBlank(expression)) {
            LOGGER.error("Empty {} in INLINE task. ", QUERY_EXPRESSION_PARAMETER);
            throw new TerminateWorkflowException(
                    "Empty '"
                            + QUERY_EXPRESSION_PARAMETER
                            + "' in Inline task's input parameters. A non-empty String value must be provided.");
        }
    }
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/main/java/com/netflix/conductor/core/execution/tasks/Terminate.java | core/src/main/java/com/netflix/conductor/core/execution/tasks/Terminate.java | /*
* Copyright 2022 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution.tasks;
import java.util.HashMap;
import java.util.Map;
import org.springframework.stereotype.Component;
import com.netflix.conductor.core.execution.WorkflowExecutor;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_TERMINATE;
import static com.netflix.conductor.common.run.Workflow.WorkflowStatus.*;
/**
* Task that can terminate a workflow with a given status and modify the workflow's output with a
* given parameter, it can act as a "return" statement for conditions where you simply want to
* terminate your workflow. For example, if you have a decision where the first condition is met,
* you want to execute some tasks, otherwise you want to finish your workflow.
*
* <pre>
* ...
* {
* "tasks": [
* {
* "name": "terminate",
* "taskReferenceName": "terminate0",
* "inputParameters": {
* "terminationStatus": "COMPLETED",
* "workflowOutput": "${task0.output}"
* },
* "type": "TERMINATE",
* "startDelay": 0,
* "optional": false
* }
* ]
* }
* ...
* </pre>
*
* This task has some validations on creation and execution, they are: - the "terminationStatus"
* parameter is mandatory and it can only receive the values "COMPLETED" or "FAILED" - the terminate
* task cannot be optional
*/
@Component(TASK_TYPE_TERMINATE)
public class Terminate extends WorkflowSystemTask {

    private static final String TERMINATION_STATUS_PARAMETER = "terminationStatus";
    private static final String TERMINATION_REASON_PARAMETER = "terminationReason";
    private static final String TERMINATION_WORKFLOW_OUTPUT = "workflowOutput";

    public Terminate() {
        super(TASK_TYPE_TERMINATE);
    }

    /**
     * Validates the {@code terminationStatus} input and, when valid, completes this task with
     * the (optional) {@code workflowOutput} input as its output; otherwise the task is failed.
     *
     * @param workflow the workflow being executed (not read here; termination is driven by the
     *     task status set below)
     * @param task the TERMINATE task instance; its status and output are mutated in place
     * @param workflowExecutor the workflow executor (not used by this task)
     * @return true if the task reached a terminal status as a result of this call
     */
    @Override
    public boolean execute(
            WorkflowModel workflow, TaskModel task, WorkflowExecutor workflowExecutor) {
        String returnStatus = (String) task.getInputData().get(TERMINATION_STATUS_PARAMETER);

        if (validateInputStatus(returnStatus)) {
            task.setOutputData(getInputFromParam(task.getInputData()));
            task.setStatus(TaskModel.Status.COMPLETED);
            return true;
        }
        task.setReasonForIncompletion("given termination status is not valid");
        task.setStatus(TaskModel.Status.FAILED);
        return false;
    }

    public static String getTerminationStatusParameter() {
        return TERMINATION_STATUS_PARAMETER;
    }

    public static String getTerminationReasonParameter() {
        return TERMINATION_REASON_PARAMETER;
    }

    public static String getTerminationWorkflowOutputParameter() {
        return TERMINATION_WORKFLOW_OUTPUT;
    }

    /**
     * @param status the raw {@code terminationStatus} input value
     * @return true only for COMPLETED, FAILED or TERMINATED. (Primitive return replaces the
     *     previous boxed {@code Boolean}; existing callers compile unchanged via autoboxing.)
     */
    public static boolean validateInputStatus(String status) {
        return COMPLETED.name().equals(status)
                || FAILED.name().equals(status)
                || TERMINATED.name().equals(status);
    }

    /**
     * Builds the task output from the {@code workflowOutput} input parameter: a missing
     * parameter yields an empty map, a Map value is copied key-by-key, and any other value is
     * wrapped under the key {@code "output"}.
     */
    @SuppressWarnings("unchecked")
    private Map<String, Object> getInputFromParam(Map<String, Object> taskInput) {
        HashMap<String, Object> output = new HashMap<>();
        Object workflowOutput = taskInput.get(TERMINATION_WORKFLOW_OUTPUT);
        if (workflowOutput == null) {
            return output;
        }
        // Accept any Map implementation. The previous "instanceof HashMap" check made
        // non-HashMap maps (e.g. TreeMap, unmodifiable maps) fall through and get wrapped
        // under "output" instead of being spread into the task output.
        if (workflowOutput instanceof Map) {
            output.putAll((Map<String, Object>) workflowOutput);
            return output;
        }
        output.put("output", workflowOutput);
        return output;
    }
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/main/java/com/netflix/conductor/core/execution/tasks/IsolatedTaskQueueProducer.java | core/src/main/java/com/netflix/conductor/core/execution/tasks/IsolatedTaskQueueProducer.java | /*
* Copyright 2022 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution.tasks;
import java.time.Duration;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.stereotype.Component;
import com.netflix.conductor.annotations.VisibleForTesting;
import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.core.utils.QueueUtils;
import com.netflix.conductor.service.MetadataService;
import static com.netflix.conductor.core.execution.tasks.SystemTaskRegistry.ASYNC_SYSTEM_TASKS_QUALIFIER;
@Component
@ConditionalOnProperty(
        name = "conductor.system-task-workers.enabled",
        havingValue = "true",
        matchIfMissing = true)
/**
 * Periodically scans the task definitions for entries that declare an isolation group id or an
 * execution namespace, and starts {@link SystemTaskWorker} polling on the corresponding
 * isolated queue for every async system task. Queues already being polled are tracked in
 * {@link #listeningQueues} so polling is started at most once per queue.
 */
public class IsolatedTaskQueueProducer {

    private static final Logger LOGGER = LoggerFactory.getLogger(IsolatedTaskQueueProducer.class);

    private final MetadataService metadataService;
    private final Set<WorkflowSystemTask> asyncSystemTasks;
    private final SystemTaskWorker systemTaskWorker;

    // Queues for which polling has already been started. Plain HashSet: assumes it is only
    // touched from the single scheduler thread below (and tests) — TODO confirm.
    private final Set<String> listeningQueues = new HashSet<>();

    /**
     * NOTE: side-effectful constructor — when {@code isolatedSystemTaskEnabled} is true it
     * schedules {@link #addTaskQueues()} on a single-threaded scheduler with an initial delay
     * of 1s and a fixed delay of {@code isolatedSystemTaskQueuePollInterval} between runs.
     * The scheduler is never shut down explicitly; it lives for the application's lifetime.
     */
    public IsolatedTaskQueueProducer(
            MetadataService metadataService,
            @Qualifier(ASYNC_SYSTEM_TASKS_QUALIFIER) Set<WorkflowSystemTask> asyncSystemTasks,
            SystemTaskWorker systemTaskWorker,
            @Value("${conductor.app.isolatedSystemTaskEnabled:false}")
                    boolean isolatedSystemTaskEnabled,
            @Value("${conductor.app.isolatedSystemTaskQueuePollInterval:10s}")
                    Duration isolatedSystemTaskQueuePollInterval) {
        this.metadataService = metadataService;
        this.asyncSystemTasks = asyncSystemTasks;
        this.systemTaskWorker = systemTaskWorker;

        if (isolatedSystemTaskEnabled) {
            LOGGER.info("Listening for isolation groups");

            Executors.newSingleThreadScheduledExecutor()
                    .scheduleWithFixedDelay(
                            this::addTaskQueues,
                            1000,
                            isolatedSystemTaskQueuePollInterval.toMillis(),
                            TimeUnit.MILLISECONDS);
        } else {
            LOGGER.info("Isolated System Task Worker DISABLED");
        }
    }

    /**
     * Fetches all task definitions and keeps those with a non-blank isolation group id or
     * execution namespace. Returns an empty set on failure — despite the log wording, no
     * sleeping happens here; the "retry" is simply the next scheduled run of
     * {@link #addTaskQueues()}.
     */
    private Set<TaskDef> getIsolationExecutionNameSpaces() {
        Set<TaskDef> isolationExecutionNameSpaces = Collections.emptySet();
        try {
            List<TaskDef> taskDefs = metadataService.getTaskDefs();
            isolationExecutionNameSpaces =
                    taskDefs.stream()
                            .filter(
                                    taskDef ->
                                            StringUtils.isNotBlank(taskDef.getIsolationGroupId())
                                                    || StringUtils.isNotBlank(
                                                            taskDef.getExecutionNameSpace()))
                            .collect(Collectors.toSet());
        } catch (RuntimeException e) {
            LOGGER.error(
                    "Unknown exception received in getting isolation groups, sleeping and retrying",
                    e);
        }
        return isolationExecutionNameSpaces;
    }

    /**
     * For every (isolated task def, async system task) pair, derives the isolated queue name
     * and starts polling it — unless polling for that queue was already started in a previous
     * run.
     */
    @VisibleForTesting
    void addTaskQueues() {
        Set<TaskDef> isolationTaskDefs = getIsolationExecutionNameSpaces();
        LOGGER.debug("Retrieved queues {}", isolationTaskDefs);

        for (TaskDef isolatedTaskDef : isolationTaskDefs) {
            for (WorkflowSystemTask systemTask : this.asyncSystemTasks) {
                String taskQueue =
                        QueueUtils.getQueueName(
                                systemTask.getTaskType(),
                                null,
                                isolatedTaskDef.getIsolationGroupId(),
                                isolatedTaskDef.getExecutionNameSpace());
                LOGGER.debug("Adding taskQueue:'{}' to system task worker coordinator", taskQueue);
                if (!listeningQueues.contains(taskQueue)) {
                    systemTaskWorker.startPolling(systemTask, taskQueue);
                    listeningQueues.add(taskQueue);
                }
            }
        }
    }
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/main/java/com/netflix/conductor/core/execution/tasks/SystemTaskRegistry.java | core/src/main/java/com/netflix/conductor/core/execution/tasks/SystemTaskRegistry.java | /*
* Copyright 2022 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution.tasks;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;
import org.springframework.stereotype.Component;
/**
* A container class that holds a mapping of system task types {@link
* com.netflix.conductor.common.metadata.tasks.TaskType} to {@link WorkflowSystemTask} instances.
*/
@Component
public class SystemTaskRegistry {

    public static final String ASYNC_SYSTEM_TASKS_QUALIFIER = "asyncSystemTasks";

    // Immutable after construction: task type -> system task implementation.
    private final Map<String, WorkflowSystemTask> registry;

    public SystemTaskRegistry(Set<WorkflowSystemTask> tasks) {
        this.registry =
                tasks.stream()
                        .collect(
                                Collectors.toMap(
                                        WorkflowSystemTask::getTaskType, Function.identity()));
    }

    /**
     * Returns the system task registered for the given task type.
     *
     * @param taskType the task type, e.g. {@code JOIN}
     * @throws IllegalStateException if no system task is registered for {@code taskType}
     */
    public WorkflowSystemTask get(String taskType) {
        return Optional.ofNullable(registry.get(taskType))
                .orElseThrow(
                        () ->
                                new IllegalStateException(
                                        // leading space restored: the message previously
                                        // rendered as "<type>not found in ..."
                                        taskType + " not found in " + getClass().getSimpleName()));
    }

    /** Returns true if the given task type is backed by a registered system task. */
    public boolean isSystemTask(String taskType) {
        return registry.containsKey(taskType);
    }
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/main/java/com/netflix/conductor/core/execution/tasks/Join.java | core/src/main/java/com/netflix/conductor/core/execution/tasks/Join.java | /*
* Copyright 2022 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution.tasks;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import java.util.stream.Collectors;
import org.springframework.stereotype.Component;
import com.netflix.conductor.annotations.VisibleForTesting;
import com.netflix.conductor.common.utils.TaskUtils;
import com.netflix.conductor.core.config.ConductorProperties;
import com.netflix.conductor.core.execution.WorkflowExecutor;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;
import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_JOIN;
@Component(TASK_TYPE_JOIN)
public class Join extends WorkflowSystemTask {

    // Base of the exponential backoff used by getEvaluationOffset.
    @VisibleForTesting static final double EVALUATION_OFFSET_BASE = 1.2;

    private final ConductorProperties properties;

    public Join(ConductorProperties properties) {
        super(TASK_TYPE_JOIN);
        this.properties = properties;
    }

    /**
     * Evaluates this JOIN against the tasks named in its {@code joinOn} input.
     *
     * <p>The join fails fast when a non-optional forked task fails (for permissive tasks the
     * failure is only acted upon once all joined tasks are terminal), completes with errors
     * when optional tasks failed, and completes normally otherwise. Outputs of the joined
     * tasks are copied into this task's output keyed by task reference name.
     *
     * @param workflow the workflow containing the forked tasks
     * @param task the JOIN task; status, output and reason-for-incompletion are mutated here
     * @param workflowExecutor the workflow executor (not used by this task)
     * @return true when the JOIN reached a terminal status; false to keep waiting
     */
    @Override
    @SuppressWarnings("unchecked")
    public boolean execute(
            WorkflowModel workflow, TaskModel task, WorkflowExecutor workflowExecutor) {
        StringBuilder failureReason = new StringBuilder();
        StringBuilder optionalTaskFailures = new StringBuilder();
        List<String> joinOn = (List<String>) task.getInputData().get("joinOn");
        if (task.isLoopOverTask()) {
            // If join is part of loop over task, wait for specific iteration to get complete
            joinOn =
                    joinOn.stream()
                            .map(name -> TaskUtils.appendIteration(name, task.getIteration()))
                            .toList();
        }

        boolean allTasksTerminal =
                joinOn.stream()
                        .map(workflow::getTaskByRefName)
                        .allMatch(t -> t != null && t.getStatus().isTerminal());

        for (String joinOnRef : joinOn) {
            TaskModel forkedTask = workflow.getTaskByRefName(joinOnRef);
            if (forkedTask == null) {
                // Continue checking other tasks if a referenced task is not yet scheduled
                continue;
            }

            TaskModel.Status taskStatus = forkedTask.getStatus();

            // Only add to task output if it's not empty
            if (!forkedTask.getOutputData().isEmpty()) {
                task.addOutput(joinOnRef, forkedTask.getOutputData());
            }

            // Determine if the join task fails immediately due to a non-optional, non-permissive
            // task failure,
            // or waits for all tasks to be terminal if the failed task is permissive.
            var isJoinFailure =
                    !taskStatus.isSuccessful()
                            && !forkedTask.getWorkflowTask().isOptional()
                            && (!forkedTask.getWorkflowTask().isPermissive() || allTasksTerminal);
            if (isJoinFailure) {
                final String failureReasons =
                        joinOn.stream()
                                .map(workflow::getTaskByRefName)
                                .filter(Objects::nonNull)
                                .filter(t -> !t.getStatus().isSuccessful())
                                .map(TaskModel::getReasonForIncompletion)
                                .collect(Collectors.joining(" "));
                failureReason.append(failureReasons);
                task.setReasonForIncompletion(failureReason.toString());
                task.setStatus(TaskModel.Status.FAILED);
                return true;
            }

            // check for optional task failures
            if (forkedTask.getWorkflowTask().isOptional()
                    && taskStatus == TaskModel.Status.COMPLETED_WITH_ERRORS) {
                optionalTaskFailures
                        .append(
                                String.format(
                                        "%s/%s",
                                        forkedTask.getTaskDefName(), forkedTask.getTaskId()))
                        .append(" ");
            }
        }

        // Finalize the join task's status based on the outcomes of all referenced tasks.
        if (allTasksTerminal) {
            if (!optionalTaskFailures.isEmpty()) {
                task.setStatus(TaskModel.Status.COMPLETED_WITH_ERRORS);
                optionalTaskFailures.append("completed with errors");
                task.setReasonForIncompletion(optionalTaskFailures.toString());
            } else {
                task.setStatus(TaskModel.Status.COMPLETED);
            }
            return true;
        }

        // Task execution not complete, waiting on more tasks to reach terminal state.
        return false;
    }

    /**
     * Returns how long (in poll counts translated to time by the caller) to postpone the next
     * evaluation: zero while under the postpone threshold, then an exponential backoff of
     * {@code EVALUATION_OFFSET_BASE^(pollCount - threshold)} capped at {@code maxOffset}.
     */
    @Override
    public Optional<Long> getEvaluationOffset(TaskModel taskModel, long maxOffset) {
        int pollCount = taskModel.getPollCount();
        // Assuming pollInterval = 50ms and evaluationOffsetThreshold = 200 this will cause
        // a JOIN task to be evaluated continuously during the first 10 seconds and the FORK/JOIN
        // will end with minimal delay.
        if (pollCount <= properties.getSystemTaskPostponeThreshold()) {
            return Optional.of(0L);
        }
        double exp = pollCount - properties.getSystemTaskPostponeThreshold();
        return Optional.of(Math.min((long) Math.pow(EVALUATION_OFFSET_BASE, exp), maxOffset));
    }

    /** JOIN is executed asynchronously. (@Override added: this overrides the base class.) */
    @Override
    public boolean isAsync() {
        return true;
    }
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/main/java/com/netflix/conductor/core/execution/tasks/Fork.java | core/src/main/java/com/netflix/conductor/core/execution/tasks/Fork.java | /*
* Copyright 2022 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution.tasks;
import org.springframework.stereotype.Component;
import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_FORK;
@Component(TASK_TYPE_FORK)
/**
 * Marker system task for the FORK task type. It defines no behavior of its own beyond
 * registering under {@code TASK_TYPE_FORK}; presumably the forking itself is handled by the
 * workflow decider rather than by this class — confirm against the executor.
 */
public class Fork extends WorkflowSystemTask {

    public Fork() {
        super(TASK_TYPE_FORK);
    }
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
conductor-oss/conductor | https://github.com/conductor-oss/conductor/blob/aa7de922578fe59d1d145881299b1a8306dde3b0/core/src/main/java/com/netflix/conductor/core/execution/tasks/ExecutionConfig.java | core/src/main/java/com/netflix/conductor/core/execution/tasks/ExecutionConfig.java | /*
* Copyright 2022 Conductor Authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.execution.tasks;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import org.apache.commons.lang3.concurrent.BasicThreadFactory;
import com.netflix.conductor.core.utils.SemaphoreUtil;
/**
 * Pairs a fixed-size worker thread pool with a semaphore sized to the same thread count —
 * presumably used by the system task workers to cap in-flight work at pool capacity
 * (confirm against the callers of {@link #getSemaphoreUtil()}).
 */
class ExecutionConfig {

    private final ExecutorService executorService;
    private final SemaphoreUtil semaphoreUtil;

    /**
     * @param threadCount number of pool threads and semaphore permits
     * @param threadNameFormat naming pattern applied to the pool's threads
     */
    ExecutionConfig(int threadCount, String threadNameFormat) {
        BasicThreadFactory threadFactory =
                new BasicThreadFactory.Builder().namingPattern(threadNameFormat).build();
        this.executorService = Executors.newFixedThreadPool(threadCount, threadFactory);
        this.semaphoreUtil = new SemaphoreUtil(threadCount);
    }

    public ExecutorService getExecutorService() {
        return executorService;
    }

    public SemaphoreUtil getSemaphoreUtil() {
        return semaphoreUtil;
    }
}
| java | Apache-2.0 | aa7de922578fe59d1d145881299b1a8306dde3b0 | 2026-01-04T14:46:58.351252Z | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.