index
int64
0
0
repo_id
stringlengths
26
205
file_path
stringlengths
51
246
content
stringlengths
8
433k
__index_level_0__
int64
0
10k
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution/tasks/SystemTaskWorkerCoordinator.java
/*
 * Copyright 2022 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.core.execution.tasks;

import java.util.Set;

import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.boot.context.event.ApplicationReadyEvent;
import org.springframework.context.event.EventListener;
import org.springframework.stereotype.Component;

import com.netflix.conductor.annotations.VisibleForTesting;
import com.netflix.conductor.core.config.ConductorProperties;
import com.netflix.conductor.core.utils.QueueUtils;

import static com.netflix.conductor.core.execution.tasks.SystemTaskRegistry.ASYNC_SYSTEM_TASKS_QUALIFIER;

/**
 * On application startup, starts the {@link SystemTaskWorker} polling loop for every async system
 * task whose queue execution namespace matches this instance's configured namespace.
 */
@Component
@ConditionalOnProperty(
        name = "conductor.system-task-workers.enabled",
        havingValue = "true",
        matchIfMissing = true)
public class SystemTaskWorkerCoordinator {

    private static final Logger LOGGER =
            LoggerFactory.getLogger(SystemTaskWorkerCoordinator.class);

    private final SystemTaskWorker systemTaskWorker;
    private final String executionNameSpace;
    private final Set<WorkflowSystemTask> asyncSystemTasks;

    /**
     * @param systemTaskWorker the worker that performs the actual queue polling
     * @param properties source of the configured execution namespace for this instance
     * @param asyncSystemTasks all registered async system tasks (qualifier-selected)
     */
    public SystemTaskWorkerCoordinator(
            SystemTaskWorker systemTaskWorker,
            ConductorProperties properties,
            @Qualifier(ASYNC_SYSTEM_TASKS_QUALIFIER) Set<WorkflowSystemTask> asyncSystemTasks) {
        this.systemTaskWorker = systemTaskWorker;
        this.asyncSystemTasks = asyncSystemTasks;
        this.executionNameSpace = properties.getSystemTaskWorkerExecutionNamespace();
    }

    /** Begins polling for each namespace-matching async task once the application is ready. */
    @EventListener(ApplicationReadyEvent.class)
    public void initSystemTaskExecutor() {
        for (WorkflowSystemTask asyncSystemTask : this.asyncSystemTasks) {
            if (isFromCoordinatorExecutionNameSpace(asyncSystemTask)) {
                this.systemTaskWorker.startPolling(asyncSystemTask);
            }
        }
        LOGGER.info(
                "{} initialized with {} async tasks",
                SystemTaskWorkerCoordinator.class.getSimpleName(),
                this.asyncSystemTasks.size());
    }

    /** True when the task's queue namespace equals this coordinator's namespace (null-safe). */
    @VisibleForTesting
    boolean isFromCoordinatorExecutionNameSpace(WorkflowSystemTask systemTask) {
        return StringUtils.equals(
                QueueUtils.getExecutionNameSpace(systemTask.getTaskType()), executionNameSpace);
    }
}
6,700
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution/tasks/Wait.java
/* * Copyright 2022 Netflix, Inc. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.core.execution.tasks; import org.springframework.stereotype.Component; import com.netflix.conductor.core.execution.WorkflowExecutor; import com.netflix.conductor.model.TaskModel; import com.netflix.conductor.model.WorkflowModel; import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_WAIT; import static com.netflix.conductor.model.TaskModel.Status.*; @Component(TASK_TYPE_WAIT) public class Wait extends WorkflowSystemTask { public static final String DURATION_INPUT = "duration"; public static final String UNTIL_INPUT = "until"; public Wait() { super(TASK_TYPE_WAIT); } @Override public void cancel(WorkflowModel workflow, TaskModel task, WorkflowExecutor workflowExecutor) { task.setStatus(TaskModel.Status.CANCELED); } @Override public boolean execute( WorkflowModel workflow, TaskModel task, WorkflowExecutor workflowExecutor) { long timeOut = task.getWaitTimeout(); if (timeOut == 0) { return false; } if (System.currentTimeMillis() > timeOut) { task.setStatus(COMPLETED); return true; } return false; } public boolean isAsync() { return true; } }
6,701
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution/mapper/KafkaPublishTaskMapper.java
/*
 * Copyright 2022 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.core.execution.mapper;

import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;

import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.tasks.TaskType;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.core.exception.TerminateWorkflowException;
import com.netflix.conductor.core.utils.ParametersUtils;
import com.netflix.conductor.dao.MetadataDAO;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;

/**
 * Maps a {@link WorkflowTask} of type {@link TaskType#KAFKA_PUBLISH} to a scheduled
 * {@link TaskModel}, resolving input parameters and applying task-definition limits.
 */
@Component
public class KafkaPublishTaskMapper implements TaskMapper {

    public static final Logger LOGGER = LoggerFactory.getLogger(KafkaPublishTaskMapper.class);

    private final ParametersUtils parametersUtils;
    private final MetadataDAO metadataDAO;

    // Fix: dropped the redundant @Autowired — Spring injects the single constructor implicitly
    // (4.3+), matching the sibling mappers in this package (Terminate/Inline/JsonJQ/Lambda).
    public KafkaPublishTaskMapper(ParametersUtils parametersUtils, MetadataDAO metadataDAO) {
        this.parametersUtils = parametersUtils;
        this.metadataDAO = metadataDAO;
    }

    @Override
    public String getTaskType() {
        return TaskType.KAFKA_PUBLISH.name();
    }

    /**
     * This method maps a {@link WorkflowTask} of type {@link TaskType#KAFKA_PUBLISH} to a {@link
     * TaskModel} in a {@link TaskModel.Status#SCHEDULED} state
     *
     * @param taskMapperContext: A wrapper class containing the {@link WorkflowTask}, {@link
     *     WorkflowDef}, {@link WorkflowModel} and a string representation of the TaskId
     * @return a List with just one Kafka task
     * @throws TerminateWorkflowException In case if the task definition does not exist
     */
    @Override
    public List<TaskModel> getMappedTasks(TaskMapperContext taskMapperContext)
            throws TerminateWorkflowException {

        LOGGER.debug("TaskMapperContext {} in KafkaPublishTaskMapper", taskMapperContext);

        WorkflowTask workflowTask = taskMapperContext.getWorkflowTask();
        WorkflowModel workflowModel = taskMapperContext.getWorkflowModel();
        String taskId = taskMapperContext.getTaskId();
        int retryCount = taskMapperContext.getRetryCount();

        // Prefer the definition embedded in the context; fall back to the metadata store.
        TaskDef taskDefinition =
                Optional.ofNullable(taskMapperContext.getTaskDefinition())
                        .orElseGet(() -> metadataDAO.getTaskDef(workflowTask.getName()));

        Map<String, Object> input =
                parametersUtils.getTaskInputV2(
                        workflowTask.getInputParameters(), workflowModel, taskId, taskDefinition);

        TaskModel kafkaPublishTask = taskMapperContext.createTaskModel();
        kafkaPublishTask.setInputData(input);
        kafkaPublishTask.setStatus(TaskModel.Status.SCHEDULED);
        kafkaPublishTask.setRetryCount(retryCount);
        kafkaPublishTask.setCallbackAfterSeconds(workflowTask.getStartDelay());

        // Rate limits / isolation only apply when a task definition exists.
        if (Objects.nonNull(taskDefinition)) {
            kafkaPublishTask.setExecutionNameSpace(taskDefinition.getExecutionNameSpace());
            kafkaPublishTask.setIsolationGroupId(taskDefinition.getIsolationGroupId());
            kafkaPublishTask.setRateLimitPerFrequency(taskDefinition.getRateLimitPerFrequency());
            kafkaPublishTask.setRateLimitFrequencyInSeconds(
                    taskDefinition.getRateLimitFrequencyInSeconds());
        }
        return Collections.singletonList(kafkaPublishTask);
    }
}
6,702
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution/mapper/TerminateTaskMapper.java
/*
 * Copyright 2022 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.core.execution.mapper;

import java.util.List;
import java.util.Map;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;

import com.netflix.conductor.common.metadata.tasks.TaskType;
import com.netflix.conductor.core.utils.ParametersUtils;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;

import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_TERMINATE;

/**
 * Maps a {@link TaskType#TERMINATE} workflow task to a single IN_PROGRESS {@link TaskModel}.
 * TERMINATE has no task definition, so inputs are resolved without one.
 */
@Component
public class TerminateTaskMapper implements TaskMapper {

    public static final Logger logger = LoggerFactory.getLogger(TerminateTaskMapper.class);

    private final ParametersUtils parametersUtils;

    public TerminateTaskMapper(ParametersUtils parametersUtils) {
        this.parametersUtils = parametersUtils;
    }

    @Override
    public String getTaskType() {
        return TaskType.TERMINATE.name();
    }

    /** Resolves the task input and returns exactly one TERMINATE task in IN_PROGRESS state. */
    @Override
    public List<TaskModel> getMappedTasks(TaskMapperContext taskMapperContext) {
        logger.debug("TaskMapperContext {} in TerminateTaskMapper", taskMapperContext);

        WorkflowModel workflowModel = taskMapperContext.getWorkflowModel();
        String taskId = taskMapperContext.getTaskId();

        // TERMINATE tasks carry no TaskDef, hence the null definition argument.
        Map<String, Object> resolvedInput =
                parametersUtils.getTaskInputV2(
                        taskMapperContext.getWorkflowTask().getInputParameters(),
                        workflowModel,
                        taskId,
                        null);

        TaskModel terminateTask = taskMapperContext.createTaskModel();
        terminateTask.setTaskType(TASK_TYPE_TERMINATE);
        terminateTask.setStartTime(System.currentTimeMillis());
        terminateTask.setInputData(resolvedInput);
        terminateTask.setStatus(TaskModel.Status.IN_PROGRESS);
        return List.of(terminateTask);
    }
}
6,703
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution/mapper/TaskMapperContext.java
/*
 * Copyright 2022 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.core.execution.mapper;

import java.util.Map;

import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.core.execution.DeciderService;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;

/**
 * Business Object class used for interaction between the DeciderService and Different Mappers.
 *
 * <p>Immutable after construction; build instances via {@link #newBuilder()} or copy an existing
 * one via {@link #newBuilder(TaskMapperContext)}.
 */
public class TaskMapperContext {

    private final WorkflowModel workflowModel;
    private final TaskDef taskDefinition;
    private final WorkflowTask workflowTask;
    private final Map<String, Object> taskInput;
    private final int retryCount;
    private final String retryTaskId;
    private final String taskId;
    private final DeciderService deciderService;

    private TaskMapperContext(Builder builder) {
        workflowModel = builder.workflowModel;
        taskDefinition = builder.taskDefinition;
        workflowTask = builder.workflowTask;
        taskInput = builder.taskInput;
        retryCount = builder.retryCount;
        retryTaskId = builder.retryTaskId;
        taskId = builder.taskId;
        deciderService = builder.deciderService;
    }

    public static Builder newBuilder() {
        return new Builder();
    }

    /** Returns a builder pre-populated with every field of {@code copy}. */
    public static Builder newBuilder(TaskMapperContext copy) {
        Builder builder = new Builder();
        builder.workflowModel = copy.getWorkflowModel();
        builder.taskDefinition = copy.getTaskDefinition();
        builder.workflowTask = copy.getWorkflowTask();
        builder.taskInput = copy.getTaskInput();
        builder.retryCount = copy.getRetryCount();
        builder.retryTaskId = copy.getRetryTaskId();
        builder.taskId = copy.getTaskId();
        builder.deciderService = copy.getDeciderService();
        return builder;
    }

    /** Convenience accessor: the definition is derived from the workflow model, not stored. */
    public WorkflowDef getWorkflowDefinition() {
        return workflowModel.getWorkflowDefinition();
    }

    public WorkflowModel getWorkflowModel() {
        return workflowModel;
    }

    public TaskDef getTaskDefinition() {
        return taskDefinition;
    }

    public WorkflowTask getWorkflowTask() {
        return workflowTask;
    }

    public int getRetryCount() {
        return retryCount;
    }

    public String getRetryTaskId() {
        return retryTaskId;
    }

    public String getTaskId() {
        return taskId;
    }

    public Map<String, Object> getTaskInput() {
        return taskInput;
    }

    public DeciderService getDeciderService() {
        return deciderService;
    }

    /**
     * Creates a new {@link TaskModel} seeded from this context: reference name, workflow
     * id/type/correlation/priority, the task id, and a scheduled time of "now". Task type and
     * definition name default to the workflow task's values; some TaskMapper implementations
     * override them afterwards.
     */
    public TaskModel createTaskModel() {
        TaskModel taskModel = new TaskModel();
        taskModel.setReferenceTaskName(workflowTask.getTaskReferenceName());
        taskModel.setWorkflowInstanceId(workflowModel.getWorkflowId());
        taskModel.setWorkflowType(workflowModel.getWorkflowName());
        taskModel.setCorrelationId(workflowModel.getCorrelationId());
        taskModel.setScheduledTime(System.currentTimeMillis());
        taskModel.setTaskId(taskId);
        taskModel.setWorkflowTask(workflowTask);
        taskModel.setWorkflowPriority(workflowModel.getPriority());
        // the following properties are overridden by some TaskMapper implementations
        taskModel.setTaskType(workflowTask.getType());
        taskModel.setTaskDefName(workflowTask.getName());
        return taskModel;
    }

    @Override
    public String toString() {
        return "TaskMapperContext{"
                + "workflowDefinition="
                + getWorkflowDefinition()
                + ", workflowModel="
                + workflowModel
                + ", workflowTask="
                + workflowTask
                + ", taskInput="
                + taskInput
                + ", retryCount="
                + retryCount
                + ", retryTaskId='"
                + retryTaskId
                + '\''
                + ", taskId='"
                + taskId
                + '\''
                + '}';
    }

    // NOTE: equals/hashCode intentionally compare the derived workflow definition plus
    // workflowModel, workflowTask, taskInput, retryCount, retryTaskId and taskId; they
    // deliberately exclude taskDefinition and deciderService.
    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (!(o instanceof TaskMapperContext)) {
            return false;
        }

        TaskMapperContext that = (TaskMapperContext) o;

        if (getRetryCount() != that.getRetryCount()) {
            return false;
        }
        if (!getWorkflowDefinition().equals(that.getWorkflowDefinition())) {
            return false;
        }
        if (!getWorkflowModel().equals(that.getWorkflowModel())) {
            return false;
        }
        if (!getWorkflowTask().equals(that.getWorkflowTask())) {
            return false;
        }
        if (!getTaskInput().equals(that.getTaskInput())) {
            return false;
        }
        // retryTaskId is nullable; null-safe comparison.
        if (getRetryTaskId() != null
                ? !getRetryTaskId().equals(that.getRetryTaskId())
                : that.getRetryTaskId() != null) {
            return false;
        }
        return getTaskId().equals(that.getTaskId());
    }

    @Override
    public int hashCode() {
        int result = getWorkflowDefinition().hashCode();
        result = 31 * result + getWorkflowModel().hashCode();
        result = 31 * result + getWorkflowTask().hashCode();
        result = 31 * result + getTaskInput().hashCode();
        result = 31 * result + getRetryCount();
        result = 31 * result + (getRetryTaskId() != null ? getRetryTaskId().hashCode() : 0);
        result = 31 * result + getTaskId().hashCode();
        return result;
    }

    /** {@code TaskMapperContext} builder static inner class. */
    public static final class Builder {
        private WorkflowModel workflowModel;
        private TaskDef taskDefinition;
        private WorkflowTask workflowTask;
        private Map<String, Object> taskInput;
        private int retryCount;
        private String retryTaskId;
        private String taskId;
        private DeciderService deciderService;

        private Builder() {}

        /** Sets the {@code workflowModel}; returns this Builder for chaining. */
        public Builder withWorkflowModel(WorkflowModel val) {
            workflowModel = val;
            return this;
        }

        /** Sets the {@code taskDefinition}; returns this Builder for chaining. */
        public Builder withTaskDefinition(TaskDef val) {
            taskDefinition = val;
            return this;
        }

        /** Sets the {@code workflowTask}; returns this Builder for chaining. */
        public Builder withWorkflowTask(WorkflowTask val) {
            workflowTask = val;
            return this;
        }

        /** Sets the {@code taskInput}; returns this Builder for chaining. */
        public Builder withTaskInput(Map<String, Object> val) {
            taskInput = val;
            return this;
        }

        /** Sets the {@code retryCount}; returns this Builder for chaining. */
        public Builder withRetryCount(int val) {
            retryCount = val;
            return this;
        }

        /** Sets the {@code retryTaskId}; returns this Builder for chaining. */
        public Builder withRetryTaskId(String val) {
            retryTaskId = val;
            return this;
        }

        /** Sets the {@code taskId}; returns this Builder for chaining. */
        public Builder withTaskId(String val) {
            taskId = val;
            return this;
        }

        /** Sets the {@code deciderService}; returns this Builder for chaining. */
        public Builder withDeciderService(DeciderService val) {
            deciderService = val;
            return this;
        }

        /**
         * Returns a {@code TaskMapperContext} built from the parameters previously set.
         *
         * @return a {@code TaskMapperContext} built with parameters of this {@code
         *     TaskMapperContext.Builder}
         */
        public TaskMapperContext build() {
            return new TaskMapperContext(this);
        }
    }
}
6,704
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution/mapper/InlineTaskMapper.java
/*
 * Copyright 2022 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.core.execution.mapper;

import java.util.List;
import java.util.Map;
import java.util.Optional;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;

import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.tasks.TaskType;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.core.utils.ParametersUtils;
import com.netflix.conductor.dao.MetadataDAO;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;

/**
 * A {@link TaskMapper} implementation that maps a {@link WorkflowTask} of type {@link
 * TaskType#INLINE} to a single {@link TaskModel} of type {@link TaskType#TASK_TYPE_INLINE} in
 * IN_PROGRESS state, with its input parameters fully resolved.
 */
@Component
public class InlineTaskMapper implements TaskMapper {

    public static final Logger LOGGER = LoggerFactory.getLogger(InlineTaskMapper.class);
    private final ParametersUtils parametersUtils;
    private final MetadataDAO metadataDAO;

    public InlineTaskMapper(ParametersUtils parametersUtils, MetadataDAO metadataDAO) {
        this.parametersUtils = parametersUtils;
        this.metadataDAO = metadataDAO;
    }

    @Override
    public String getTaskType() {
        return TaskType.INLINE.name();
    }

    /** Builds the single IN_PROGRESS inline task with resolved input parameters. */
    @Override
    public List<TaskModel> getMappedTasks(TaskMapperContext taskMapperContext) {
        LOGGER.debug("TaskMapperContext {} in InlineTaskMapper", taskMapperContext);

        WorkflowTask workflowTask = taskMapperContext.getWorkflowTask();
        WorkflowModel workflowModel = taskMapperContext.getWorkflowModel();
        String taskId = taskMapperContext.getTaskId();

        // Fall back to the metadata store when the context carries no definition.
        TaskDef taskDefinition = taskMapperContext.getTaskDefinition();
        if (taskDefinition == null) {
            taskDefinition = metadataDAO.getTaskDef(workflowTask.getName());
        }

        Map<String, Object> resolvedInput =
                parametersUtils.getTaskInputV2(
                        workflowTask.getInputParameters(), workflowModel, taskId, taskDefinition);

        TaskModel task = taskMapperContext.createTaskModel();
        task.setTaskType(TaskType.TASK_TYPE_INLINE);
        task.setStartTime(System.currentTimeMillis());
        task.setInputData(resolvedInput);
        task.setStatus(TaskModel.Status.IN_PROGRESS);
        return List.of(task);
    }
}
6,705
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution/mapper/JsonJQTransformTaskMapper.java
/*
 * Copyright 2022 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.core.execution.mapper;

import java.util.List;
import java.util.Map;
import java.util.Optional;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;

import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.tasks.TaskType;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.core.utils.ParametersUtils;
import com.netflix.conductor.dao.MetadataDAO;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;

/**
 * Maps a {@link TaskType#JSON_JQ_TRANSFORM} workflow task to a single IN_PROGRESS
 * {@link TaskModel} with resolved input parameters. The task type set by
 * {@code createTaskModel()} (the workflow task's own type) is kept as-is.
 */
@Component
public class JsonJQTransformTaskMapper implements TaskMapper {

    public static final Logger LOGGER = LoggerFactory.getLogger(JsonJQTransformTaskMapper.class);
    private final ParametersUtils parametersUtils;
    private final MetadataDAO metadataDAO;

    public JsonJQTransformTaskMapper(ParametersUtils parametersUtils, MetadataDAO metadataDAO) {
        this.parametersUtils = parametersUtils;
        this.metadataDAO = metadataDAO;
    }

    @Override
    public String getTaskType() {
        return TaskType.JSON_JQ_TRANSFORM.name();
    }

    /** Builds the single IN_PROGRESS JSON-JQ-transform task with resolved inputs. */
    @Override
    public List<TaskModel> getMappedTasks(TaskMapperContext taskMapperContext) {
        LOGGER.debug("TaskMapperContext {} in JsonJQTransformTaskMapper", taskMapperContext);

        WorkflowTask workflowTask = taskMapperContext.getWorkflowTask();
        String taskId = taskMapperContext.getTaskId();

        // Fall back to the metadata store when the context carries no definition.
        TaskDef taskDefinition = taskMapperContext.getTaskDefinition();
        if (taskDefinition == null) {
            taskDefinition = metadataDAO.getTaskDef(workflowTask.getName());
        }

        Map<String, Object> resolvedInput =
                parametersUtils.getTaskInputV2(
                        workflowTask.getInputParameters(),
                        taskMapperContext.getWorkflowModel(),
                        taskId,
                        taskDefinition);

        TaskModel jqTask = taskMapperContext.createTaskModel();
        jqTask.setStartTime(System.currentTimeMillis());
        jqTask.setInputData(resolvedInput);
        jqTask.setStatus(TaskModel.Status.IN_PROGRESS);
        return List.of(jqTask);
    }
}
6,706
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution/mapper/TaskMapper.java
/*
 * Copyright 2022 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.core.execution.mapper;

import java.util.List;

import com.netflix.conductor.core.exception.TerminateWorkflowException;
import com.netflix.conductor.model.TaskModel;

/**
 * Contract for mapping a workflow task of a specific type to one or more runnable
 * {@link TaskModel}s.
 */
public interface TaskMapper {

    /** The task type name this mapper handles (used to dispatch contexts to mappers). */
    String getTaskType();

    /**
     * Produces the {@link TaskModel}(s) for the workflow task carried in the context.
     *
     * @param taskMapperContext the workflow task, workflow model and task id to map
     * @return the mapped task models
     * @throws TerminateWorkflowException if mapping fails in a way that must terminate the
     *     workflow (e.g. a required task definition is missing)
     */
    List<TaskModel> getMappedTasks(TaskMapperContext taskMapperContext)
            throws TerminateWorkflowException;
}
6,707
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution/mapper/LambdaTaskMapper.java
/*
 * Copyright 2022 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.core.execution.mapper;

import java.util.List;
import java.util.Map;
import java.util.Optional;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;

import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.tasks.TaskType;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.core.utils.ParametersUtils;
import com.netflix.conductor.dao.MetadataDAO;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;

/**
 * Maps a {@link TaskType#LAMBDA} workflow task to a single IN_PROGRESS {@link TaskModel}.
 *
 * @author x-ultra
 * @deprecated {@link com.netflix.conductor.core.execution.tasks.Lambda} is also deprecated. Use
 *     {@link com.netflix.conductor.core.execution.tasks.Inline} and so ${@link InlineTaskMapper}
 *     will be used as a result.
 */
@Deprecated
@Component
public class LambdaTaskMapper implements TaskMapper {

    public static final Logger LOGGER = LoggerFactory.getLogger(LambdaTaskMapper.class);
    private final ParametersUtils parametersUtils;
    private final MetadataDAO metadataDAO;

    public LambdaTaskMapper(ParametersUtils parametersUtils, MetadataDAO metadataDAO) {
        this.parametersUtils = parametersUtils;
        this.metadataDAO = metadataDAO;
    }

    @Override
    public String getTaskType() {
        return TaskType.LAMBDA.name();
    }

    /** Builds the single IN_PROGRESS lambda task with resolved input parameters. */
    @Override
    public List<TaskModel> getMappedTasks(TaskMapperContext taskMapperContext) {
        LOGGER.debug("TaskMapperContext {} in LambdaTaskMapper", taskMapperContext);

        WorkflowTask workflowTask = taskMapperContext.getWorkflowTask();
        WorkflowModel workflowModel = taskMapperContext.getWorkflowModel();
        String taskId = taskMapperContext.getTaskId();

        // Fall back to the metadata store when the context carries no definition.
        TaskDef taskDefinition = taskMapperContext.getTaskDefinition();
        if (taskDefinition == null) {
            taskDefinition = metadataDAO.getTaskDef(workflowTask.getName());
        }

        Map<String, Object> resolvedInput =
                parametersUtils.getTaskInputV2(
                        workflowTask.getInputParameters(), workflowModel, taskId, taskDefinition);

        TaskModel task = taskMapperContext.createTaskModel();
        task.setTaskType(TaskType.TASK_TYPE_LAMBDA);
        task.setStartTime(System.currentTimeMillis());
        task.setInputData(resolvedInput);
        task.setStatus(TaskModel.Status.IN_PROGRESS);
        return List.of(task);
    }
}
6,708
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution/mapper/HTTPTaskMapper.java
/*
 * Copyright 2022 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.core.execution.mapper;

import java.util.*;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;

import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.tasks.TaskType;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.core.exception.TerminateWorkflowException;
import com.netflix.conductor.core.utils.ParametersUtils;
import com.netflix.conductor.dao.MetadataDAO;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;

/**
 * An implementation of {@link TaskMapper} to map a {@link WorkflowTask} of type {@link
 * TaskType#HTTP} to a {@link TaskModel} of type {@link TaskType#HTTP} with {@link
 * TaskModel.Status#SCHEDULED}
 */
@Component
public class HTTPTaskMapper implements TaskMapper {

    private static final Logger LOGGER = LoggerFactory.getLogger(HTTPTaskMapper.class);

    private final ParametersUtils parametersUtils;
    private final MetadataDAO metadataDAO;

    // Fix: dropped the redundant @Autowired — Spring injects the single constructor implicitly
    // (4.3+), matching the sibling mappers in this package.
    public HTTPTaskMapper(ParametersUtils parametersUtils, MetadataDAO metadataDAO) {
        this.parametersUtils = parametersUtils;
        this.metadataDAO = metadataDAO;
    }

    @Override
    public String getTaskType() {
        return TaskType.HTTP.name();
    }

    /**
     * This method maps a {@link WorkflowTask} of type {@link TaskType#HTTP} to a {@link TaskModel}
     * in a {@link TaskModel.Status#SCHEDULED} state
     *
     * @param taskMapperContext: A wrapper class containing the {@link WorkflowTask}, {@link
     *     WorkflowDef}, {@link WorkflowModel} and a string representation of the TaskId
     * @return a List with just one HTTP task
     * @throws TerminateWorkflowException In case if the task definition does not exist
     */
    @Override
    public List<TaskModel> getMappedTasks(TaskMapperContext taskMapperContext)
            throws TerminateWorkflowException {

        LOGGER.debug("TaskMapperContext {} in HTTPTaskMapper", taskMapperContext);

        WorkflowTask workflowTask = taskMapperContext.getWorkflowTask();
        // NOTE(review): this mutates the workflow task's input-parameter map in place to make
        // "asyncComplete" visible to parameter resolution — presumably safe because each mapping
        // works on its own task instance; confirm workflowTask is not shared.
        workflowTask.getInputParameters().put("asyncComplete", workflowTask.isAsyncComplete());
        WorkflowModel workflowModel = taskMapperContext.getWorkflowModel();
        String taskId = taskMapperContext.getTaskId();
        int retryCount = taskMapperContext.getRetryCount();

        // Prefer the definition embedded in the context; fall back to the metadata store.
        TaskDef taskDefinition =
                Optional.ofNullable(taskMapperContext.getTaskDefinition())
                        .orElseGet(() -> metadataDAO.getTaskDef(workflowTask.getName()));

        Map<String, Object> input =
                parametersUtils.getTaskInputV2(
                        workflowTask.getInputParameters(), workflowModel, taskId, taskDefinition);
        // Fix: local renamed from the misspelled "asynComplete".
        Boolean asyncComplete = (Boolean) input.get("asyncComplete");

        TaskModel httpTask = taskMapperContext.createTaskModel();
        httpTask.setInputData(input);
        httpTask.getInputData().put("asyncComplete", asyncComplete);
        httpTask.setStatus(TaskModel.Status.SCHEDULED);
        httpTask.setRetryCount(retryCount);
        httpTask.setCallbackAfterSeconds(workflowTask.getStartDelay());

        // Rate limits / isolation only apply when a task definition exists.
        if (Objects.nonNull(taskDefinition)) {
            httpTask.setRateLimitPerFrequency(taskDefinition.getRateLimitPerFrequency());
            httpTask.setRateLimitFrequencyInSeconds(
                    taskDefinition.getRateLimitFrequencyInSeconds());
            httpTask.setIsolationGroupId(taskDefinition.getIsolationGroupId());
            httpTask.setExecutionNameSpace(taskDefinition.getExecutionNameSpace());
        }
        return List.of(httpTask);
    }
}
6,709
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution/mapper/WaitTaskMapper.java
/*
 * Copyright 2022 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.core.execution.mapper;

import java.text.ParseException;
import java.time.Duration;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.Optional;

import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;

import com.netflix.conductor.common.metadata.tasks.TaskType;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.core.execution.tasks.Wait;
import com.netflix.conductor.core.utils.ParametersUtils;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;

import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_WAIT;
import static com.netflix.conductor.core.execution.tasks.Wait.DURATION_INPUT;
import static com.netflix.conductor.core.execution.tasks.Wait.UNTIL_INPUT;
import static com.netflix.conductor.core.utils.DateTimeUtils.parseDate;
import static com.netflix.conductor.core.utils.DateTimeUtils.parseDuration;
import static com.netflix.conductor.model.TaskModel.Status.FAILED_WITH_TERMINAL_ERROR;

/**
 * An implementation of {@link TaskMapper} to map a {@link WorkflowTask} of type {@link
 * TaskType#WAIT} to a {@link TaskModel} of type {@link Wait} with {@link
 * TaskModel.Status#IN_PROGRESS}
 */
@Component
public class WaitTaskMapper implements TaskMapper {

    public static final Logger LOGGER = LoggerFactory.getLogger(WaitTaskMapper.class);

    private final ParametersUtils parametersUtils;

    public WaitTaskMapper(ParametersUtils parametersUtils) {
        this.parametersUtils = parametersUtils;
    }

    @Override
    public String getTaskType() {
        return TaskType.WAIT.name();
    }

    /**
     * Maps a WAIT workflow task to a single IN_PROGRESS WAIT task. The callback/timeout is derived
     * from the optional 'duration' or 'until' inputs (see {@link #setCallbackAfter(TaskModel)}),
     * which may flip the task to FAILED_WITH_TERMINAL_ERROR if those inputs are invalid.
     *
     * @param taskMapperContext wrapper with the {@link WorkflowTask}, {@link WorkflowModel} and
     *     task id
     * @return a List with just one WAIT task
     */
    @Override
    public List<TaskModel> getMappedTasks(TaskMapperContext taskMapperContext) {
        LOGGER.debug("TaskMapperContext {} in WaitTaskMapper", taskMapperContext);

        WorkflowModel workflowModel = taskMapperContext.getWorkflowModel();
        String taskId = taskMapperContext.getTaskId();

        Map<String, Object> waitTaskInput =
                parametersUtils.getTaskInputV2(
                        taskMapperContext.getWorkflowTask().getInputParameters(),
                        workflowModel,
                        taskId,
                        null);

        TaskModel waitTask = taskMapperContext.createTaskModel();
        waitTask.setTaskType(TASK_TYPE_WAIT);
        waitTask.setInputData(waitTaskInput);
        waitTask.setStartTime(System.currentTimeMillis());
        waitTask.setStatus(TaskModel.Status.IN_PROGRESS);
        setCallbackAfter(waitTask);
        return List.of(waitTask);
    }

    /**
     * Derives the callback delay and wait timeout from the task's 'duration' or 'until' input.
     * Exactly one of the two may be supplied; supplying both, or an unparseable value, fails the
     * task with {@link TaskModel.Status#FAILED_WITH_TERMINAL_ERROR}. With neither input the task
     * waits indefinitely (callback of Integer.MAX_VALUE seconds).
     */
    void setCallbackAfter(TaskModel task) {
        String duration =
                Optional.ofNullable(task.getInputData().get(DURATION_INPUT)).orElse("").toString();
        String until =
                Optional.ofNullable(task.getInputData().get(UNTIL_INPUT)).orElse("").toString();

        if (StringUtils.isNotBlank(duration) && StringUtils.isNotBlank(until)) {
            task.setReasonForIncompletion(
                    "Both 'duration' and 'until' specified. Please provide only one input");
            task.setStatus(FAILED_WITH_TERMINAL_ERROR);
            return;
        }

        if (StringUtils.isNotBlank(duration)) {
            // Fail the task with a clear reason on a malformed duration instead of letting the
            // parse exception propagate — keeps this branch consistent with the 'until' branch.
            try {
                Duration timeDuration = parseDuration(duration);
                long seconds = timeDuration.getSeconds();
                task.setWaitTimeout(System.currentTimeMillis() + (seconds * 1000));
                task.setCallbackAfterSeconds(seconds);
            } catch (RuntimeException e) {
                task.setReasonForIncompletion(
                        "Invalid/Unsupported Wait duration format. Provided: " + duration);
                task.setStatus(FAILED_WITH_TERMINAL_ERROR);
            }
        } else if (StringUtils.isNotBlank(until)) {
            try {
                Date expiryDate = parseDate(until);
                long timeInMS = expiryDate.getTime();
                long now = System.currentTimeMillis();
                long seconds = ((timeInMS - now) / 1000);
                // An 'until' in the past means the wait is already satisfied — callback now.
                if (seconds < 0) {
                    seconds = 0;
                }
                task.setCallbackAfterSeconds(seconds);
                task.setWaitTimeout(timeInMS);
            } catch (ParseException parseException) {
                task.setReasonForIncompletion(
                        "Invalid/Unsupported Wait Until format. Provided: " + until);
                task.setStatus(FAILED_WITH_TERMINAL_ERROR);
            }
        } else {
            // If there is no time duration specified then the WAIT task should wait forever
            task.setCallbackAfterSeconds(Integer.MAX_VALUE);
        }
    }
}
6,710
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution/mapper/NoopTaskMapper.java
/*
 * Copyright 2023 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.core.execution.mapper;

import java.util.List;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;

import com.netflix.conductor.common.metadata.tasks.TaskType;
import com.netflix.conductor.model.TaskModel;

import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_NOOP;

/**
 * Maps a NOOP workflow task to a single IN_PROGRESS {@link TaskModel}; the task performs no work
 * and is completed by its system-task executor.
 */
@Component
public class NoopTaskMapper implements TaskMapper {

    public static final Logger logger = LoggerFactory.getLogger(NoopTaskMapper.class);

    @Override
    public String getTaskType() {
        return TaskType.NOOP.name();
    }

    @Override
    public List<TaskModel> getMappedTasks(TaskMapperContext taskMapperContext) {
        logger.debug("TaskMapperContext {} in NoopTaskMapper", taskMapperContext);

        TaskModel noopTask = taskMapperContext.createTaskModel();
        noopTask.setTaskType(TASK_TYPE_NOOP);
        noopTask.setStartTime(System.currentTimeMillis());
        noopTask.setStatus(TaskModel.Status.IN_PROGRESS);
        return List.of(noopTask);
    }
}
6,711
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution/mapper/SubWorkflowTaskMapper.java
/*
 * Copyright 2022 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.core.execution.mapper;

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;

import com.netflix.conductor.annotations.VisibleForTesting;
import com.netflix.conductor.common.metadata.tasks.TaskType;
import com.netflix.conductor.common.metadata.workflow.SubWorkflowParams;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.core.exception.TerminateWorkflowException;
import com.netflix.conductor.core.utils.ParametersUtils;
import com.netflix.conductor.dao.MetadataDAO;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;

import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_SUB_WORKFLOW;

/**
 * Maps a SUB_WORKFLOW workflow task to a SCHEDULED {@link TaskModel} carrying the resolved
 * sub-workflow name, version, optional inline definition and task-to-domain mapping as inputs.
 */
@Component
public class SubWorkflowTaskMapper implements TaskMapper {

    private static final Logger LOGGER = LoggerFactory.getLogger(SubWorkflowTaskMapper.class);

    private final ParametersUtils parametersUtils;
    private final MetadataDAO metadataDAO;

    public SubWorkflowTaskMapper(ParametersUtils parametersUtils, MetadataDAO metadataDAO) {
        this.parametersUtils = parametersUtils;
        this.metadataDAO = metadataDAO;
    }

    @Override
    public String getTaskType() {
        return TaskType.SUB_WORKFLOW.name();
    }

    /**
     * Maps a SUB_WORKFLOW workflow task to a single SCHEDULED task.
     *
     * @param taskMapperContext wrapper with the {@link WorkflowTask}, {@link WorkflowModel} and
     *     task id
     * @return a List with just one SUB_WORKFLOW task
     * @throws TerminateWorkflowException if subWorkflowParams are missing or no workflow
     *     definition/version can be resolved
     */
    @Override
    public List<TaskModel> getMappedTasks(TaskMapperContext taskMapperContext) {
        LOGGER.debug("TaskMapperContext {} in SubWorkflowTaskMapper", taskMapperContext);
        WorkflowTask workflowTask = taskMapperContext.getWorkflowTask();
        WorkflowModel workflowModel = taskMapperContext.getWorkflowModel();

        // Check if there are sub workflow parameters, if not throw an exception, cannot initiate a
        // sub-workflow without workflow params
        SubWorkflowParams subWorkflowParams = getSubWorkflowParams(workflowTask);

        Map<String, Object> resolvedParams =
                getSubWorkflowInputParameters(workflowModel, subWorkflowParams);

        String subWorkflowName = resolvedParams.get("name").toString();
        Integer subWorkflowVersion = getSubWorkflowVersion(resolvedParams, subWorkflowName);
        Object subWorkflowDefinition = resolvedParams.get("workflowDefinition");

        // Parameterized instead of raw Map; the actual key/value types come from the resolved
        // params and are passed through to the task input as-is.
        Map<?, ?> subWorkflowTaskToDomain = null;
        Object uncheckedTaskToDomain = resolvedParams.get("taskToDomain");
        if (uncheckedTaskToDomain instanceof Map) {
            subWorkflowTaskToDomain = (Map<?, ?>) uncheckedTaskToDomain;
        }

        TaskModel subWorkflowTask = taskMapperContext.createTaskModel();
        subWorkflowTask.setTaskType(TASK_TYPE_SUB_WORKFLOW);
        subWorkflowTask.addInput("subWorkflowName", subWorkflowName);
        subWorkflowTask.addInput("subWorkflowVersion", subWorkflowVersion);
        subWorkflowTask.addInput("subWorkflowTaskToDomain", subWorkflowTaskToDomain);
        subWorkflowTask.addInput("subWorkflowDefinition", subWorkflowDefinition);
        subWorkflowTask.addInput("workflowInput", taskMapperContext.getTaskInput());
        subWorkflowTask.setStatus(TaskModel.Status.SCHEDULED);
        subWorkflowTask.setCallbackAfterSeconds(workflowTask.getStartDelay());
        LOGGER.debug("SubWorkflowTask {} created to be Scheduled", subWorkflowTask);
        return List.of(subWorkflowTask);
    }

    /** Returns the task's subWorkflowParams or terminates the workflow if they are absent. */
    @VisibleForTesting
    SubWorkflowParams getSubWorkflowParams(WorkflowTask workflowTask) {
        return Optional.ofNullable(workflowTask.getSubWorkflowParam())
                .orElseThrow(
                        () -> {
                            String reason =
                                    String.format(
                                            "Task %s is defined as sub-workflow and is missing subWorkflowParams. "
                                                    + "Please check the workflow definition",
                                            workflowTask.getName());
                            LOGGER.error(reason);
                            return new TerminateWorkflowException(reason);
                        });
    }

    /**
     * Resolves name/version/taskToDomain against the workflow context; the inline workflow
     * definition is attached afterwards so its contents are never parameter-substituted.
     */
    private Map<String, Object> getSubWorkflowInputParameters(
            WorkflowModel workflowModel, SubWorkflowParams subWorkflowParams) {
        Map<String, Object> params = new HashMap<>();
        params.put("name", subWorkflowParams.getName());

        Integer version = subWorkflowParams.getVersion();
        if (version != null) {
            params.put("version", version);
        }
        Map<String, String> taskToDomain = subWorkflowParams.getTaskToDomain();
        if (taskToDomain != null) {
            params.put("taskToDomain", taskToDomain);
        }

        params = parametersUtils.getTaskInputV2(params, workflowModel, null, null);

        // do not resolve params inside subworkflow definition
        Object subWorkflowDefinition = subWorkflowParams.getWorkflowDefinition();
        if (subWorkflowDefinition != null) {
            params.put("workflowDefinition", subWorkflowDefinition);
        }

        return params;
    }

    /**
     * Returns the explicitly requested version, else the latest registered version of the named
     * workflow; terminates the workflow if neither is available.
     */
    private Integer getSubWorkflowVersion(
            Map<String, Object> resolvedParams, String subWorkflowName) {
        return Optional.ofNullable(resolvedParams.get("version"))
                .map(Object::toString)
                .map(Integer::parseInt)
                .orElseGet(
                        () ->
                                metadataDAO
                                        .getLatestWorkflowDef(subWorkflowName)
                                        .map(WorkflowDef::getVersion)
                                        .orElseThrow(
                                                () -> {
                                                    String reason =
                                                            String.format(
                                                                    "The Task %s defined as a sub-workflow has no workflow definition available ",
                                                                    subWorkflowName);
                                                    LOGGER.error(reason);
                                                    return new TerminateWorkflowException(reason);
                                                }));
    }
}
6,712
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution/mapper/SwitchTaskMapper.java
/*
 * Copyright 2022 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.core.execution.mapper;

import java.util.LinkedList;
import java.util.List;
import java.util.Map;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

import com.netflix.conductor.common.metadata.tasks.TaskType;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.core.exception.TerminateWorkflowException;
import com.netflix.conductor.core.execution.evaluators.Evaluator;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;

/**
 * An implementation of {@link TaskMapper} to map a {@link WorkflowTask} of type {@link
 * TaskType#SWITCH} to a List {@link TaskModel} starting with Task of type {@link TaskType#SWITCH}
 * which is marked as IN_PROGRESS, followed by the list of {@link TaskModel} based on the case
 * expression evaluation in the Switch task.
 */
@Component
public class SwitchTaskMapper implements TaskMapper {

    private static final Logger LOGGER = LoggerFactory.getLogger(SwitchTaskMapper.class);

    // Evaluator beans keyed by evaluator type (e.g. the value of WorkflowTask#getEvaluatorType).
    private final Map<String, Evaluator> evaluators;

    @Autowired
    public SwitchTaskMapper(Map<String, Evaluator> evaluators) {
        this.evaluators = evaluators;
    }

    @Override
    public String getTaskType() {
        return TaskType.SWITCH.name();
    }

    /**
     * This method gets the list of tasks that need to scheduled when the task to scheduled is of
     * type {@link TaskType#SWITCH}.
     *
     * @param taskMapperContext: A wrapper class containing the {@link WorkflowTask}, {@link
     *     WorkflowDef}, {@link WorkflowModel} and a string representation of the TaskId
     * @return List of tasks in the following order:
     *     <ul>
     *       <li>{@link TaskType#SWITCH} with {@link TaskModel.Status#IN_PROGRESS}
     *       <li>List of tasks based on the evaluation of {@link WorkflowTask#getEvaluatorType()}
     *           and {@link WorkflowTask#getExpression()} are scheduled.
     *       <li>In the case of no matching {@link WorkflowTask#getEvaluatorType()}, workflow will
     *           be terminated with error message. In case of no matching result after the
     *           evaluation of the {@link WorkflowTask#getExpression()}, the {@link
     *           WorkflowTask#getDefaultCase()} Tasks are scheduled.
     *     </ul>
     */
    @Override
    public List<TaskModel> getMappedTasks(TaskMapperContext taskMapperContext) {
        LOGGER.debug("TaskMapperContext {} in SwitchTaskMapper", taskMapperContext);
        List<TaskModel> tasksToBeScheduled = new LinkedList<>();
        WorkflowTask workflowTask = taskMapperContext.getWorkflowTask();
        WorkflowModel workflowModel = taskMapperContext.getWorkflowModel();
        Map<String, Object> taskInput = taskMapperContext.getTaskInput();
        int retryCount = taskMapperContext.getRetryCount();

        // get the expression to be evaluated
        String evaluatorType = workflowTask.getEvaluatorType();
        Evaluator evaluator = evaluators.get(evaluatorType);
        // An unknown evaluator type is a definition error — terminate the whole workflow.
        if (evaluator == null) {
            String errorMsg = String.format("No evaluator registered for type: %s", evaluatorType);
            LOGGER.error(errorMsg);
            throw new TerminateWorkflowException(errorMsg);
        }
        String evalResult = "";
        try {
            // String-concatenation converts any evaluator result (including null) to a String key.
            evalResult = "" + evaluator.evaluate(workflowTask.getExpression(), taskInput);
        } catch (Exception exception) {
            // Evaluation failures do NOT terminate the workflow; instead a single FAILED switch
            // task is returned so the failure is visible (and retryable) at the task level.
            TaskModel switchTask = taskMapperContext.createTaskModel();
            switchTask.setTaskType(TaskType.TASK_TYPE_SWITCH);
            switchTask.setTaskDefName(TaskType.TASK_TYPE_SWITCH);
            switchTask.getInputData().putAll(taskInput);
            switchTask.setStartTime(System.currentTimeMillis());
            switchTask.setStatus(TaskModel.Status.FAILED);
            switchTask.setReasonForIncompletion(exception.getMessage());
            tasksToBeScheduled.add(switchTask);
            return tasksToBeScheduled;
        }

        // Build the IN_PROGRESS SWITCH task; "case" is recorded in the input, and the evaluation
        // result is exposed twice in the output ("evaluationResult" as a list, "selectedCase").
        TaskModel switchTask = taskMapperContext.createTaskModel();
        switchTask.setTaskType(TaskType.TASK_TYPE_SWITCH);
        switchTask.setTaskDefName(TaskType.TASK_TYPE_SWITCH);
        switchTask.getInputData().putAll(taskInput);
        switchTask.getInputData().put("case", evalResult);
        switchTask.addOutput("evaluationResult", List.of(evalResult));
        switchTask.addOutput("selectedCase", evalResult);
        switchTask.setStartTime(System.currentTimeMillis());
        switchTask.setStatus(TaskModel.Status.IN_PROGRESS);
        tasksToBeScheduled.add(switchTask);

        // get the list of tasks based on the evaluated expression
        List<WorkflowTask> selectedTasks = workflowTask.getDecisionCases().get(evalResult);
        // if the tasks returned are empty based on evaluated result, then get the default case if
        // there is one
        if (selectedTasks == null || selectedTasks.isEmpty()) {
            selectedTasks = workflowTask.getDefaultCase();
        }
        // once there are selected tasks that need to proceeded as part of the switch, get the next
        // task to be scheduled by using the decider service
        if (selectedTasks != null && !selectedTasks.isEmpty()) {
            // Only the first task of the chosen branch is scheduled here; the decider walks the
            // rest of the branch as execution progresses.
            WorkflowTask selectedTask = selectedTasks.get(0);
            // Schedule the first task to be executed...
            // TODO break out this recursive call using function composition of what needs to be
            // done and then walk back the condition tree
            List<TaskModel> caseTasks =
                    taskMapperContext
                            .getDeciderService()
                            .getTasksToBeScheduled(
                                    workflowModel,
                                    selectedTask,
                                    retryCount,
                                    taskMapperContext.getRetryTaskId());
            tasksToBeScheduled.addAll(caseTasks);
            // Mutated after the switch task was added to the result list — flags that a branch
            // was scheduled under this switch.
            switchTask.getInputData().put("hasChildren", "true");
        }
        return tasksToBeScheduled;
    }
}
6,713
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution/mapper/SetVariableTaskMapper.java
/*
 * Copyright 2022 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.core.execution.mapper;

import java.util.List;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;

import com.netflix.conductor.common.metadata.tasks.TaskType;
import com.netflix.conductor.core.exception.TerminateWorkflowException;
import com.netflix.conductor.model.TaskModel;

/**
 * Maps a SET_VARIABLE workflow task to a single IN_PROGRESS {@link TaskModel} whose input carries
 * the variables to set; the system-task executor applies them to the workflow.
 */
@Component
public class SetVariableTaskMapper implements TaskMapper {

    public static final Logger LOGGER = LoggerFactory.getLogger(SetVariableTaskMapper.class);

    @Override
    public String getTaskType() {
        return TaskType.SET_VARIABLE.name();
    }

    @Override
    public List<TaskModel> getMappedTasks(TaskMapperContext taskMapperContext)
            throws TerminateWorkflowException {
        LOGGER.debug("TaskMapperContext {} in SetVariableMapper", taskMapperContext);

        TaskModel setVariableTask = taskMapperContext.createTaskModel();
        setVariableTask.setStartTime(System.currentTimeMillis());
        setVariableTask.setInputData(taskMapperContext.getTaskInput());
        setVariableTask.setStatus(TaskModel.Status.IN_PROGRESS);
        return List.of(setVariableTask);
    }
}
6,714
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution/mapper/DecisionTaskMapper.java
/*
 * Copyright 2022 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.core.execution.mapper;

import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;

import javax.script.ScriptException;

import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;

import com.netflix.conductor.annotations.VisibleForTesting;
import com.netflix.conductor.common.metadata.tasks.TaskType;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.core.events.ScriptEvaluator;
import com.netflix.conductor.core.exception.TerminateWorkflowException;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;

/**
 * An implementation of {@link TaskMapper} to map a {@link WorkflowTask} of type {@link
 * TaskType#DECISION} to a List {@link TaskModel} starting with Task of type {@link
 * TaskType#DECISION} which is marked as IN_PROGRESS, followed by the list of {@link TaskModel}
 * based on the case expression evaluation in the Decision task.
 *
 * @deprecated {@link com.netflix.conductor.core.execution.tasks.Decision} is also deprecated. Use
 *     {@link com.netflix.conductor.core.execution.tasks.Switch} and so ${@link SwitchTaskMapper}
 *     will be used as a result.
 */
@Deprecated
@Component
public class DecisionTaskMapper implements TaskMapper {

    private static final Logger LOGGER = LoggerFactory.getLogger(DecisionTaskMapper.class);

    @Override
    public String getTaskType() {
        return TaskType.DECISION.name();
    }

    /**
     * This method gets the list of tasks that need to scheduled when the task to scheduled is of
     * type {@link TaskType#DECISION}.
     *
     * @param taskMapperContext: A wrapper class containing the {@link WorkflowTask}, {@link
     *     WorkflowDef}, {@link WorkflowModel} and a string representation of the TaskId
     * @return List of tasks in the following order:
     *     <ul>
     *       <li>{@link TaskType#DECISION} with {@link TaskModel.Status#IN_PROGRESS}
     *       <li>List of task based on the evaluation of {@link WorkflowTask#getCaseExpression()}
     *           are scheduled.
     *       <li>In case of no matching result after the evaluation of the {@link
     *           WorkflowTask#getCaseExpression()}, the {@link WorkflowTask#getDefaultCase()} Tasks
     *           are scheduled.
     *     </ul>
     */
    @Override
    public List<TaskModel> getMappedTasks(TaskMapperContext taskMapperContext) {
        LOGGER.debug("TaskMapperContext {} in DecisionTaskMapper", taskMapperContext);
        List<TaskModel> tasksToBeScheduled = new LinkedList<>();
        WorkflowTask workflowTask = taskMapperContext.getWorkflowTask();
        WorkflowModel workflowModel = taskMapperContext.getWorkflowModel();
        Map<String, Object> taskInput = taskMapperContext.getTaskInput();
        int retryCount = taskMapperContext.getRetryCount();

        // get the expression to be evaluated
        String caseValue = getEvaluatedCaseValue(workflowTask, taskInput);

        // Build the IN_PROGRESS DECISION task; the chosen case goes into both the task input
        // ("case") and the output ("caseOutput", wrapped in a singleton list).
        TaskModel decisionTask = taskMapperContext.createTaskModel();
        decisionTask.setTaskType(TaskType.TASK_TYPE_DECISION);
        decisionTask.setTaskDefName(TaskType.TASK_TYPE_DECISION);
        decisionTask.addInput("case", caseValue);
        decisionTask.addOutput("caseOutput", Collections.singletonList(caseValue));
        decisionTask.setStartTime(System.currentTimeMillis());
        decisionTask.setStatus(TaskModel.Status.IN_PROGRESS);
        tasksToBeScheduled.add(decisionTask);

        // get the list of tasks based on the decision
        List<WorkflowTask> selectedTasks = workflowTask.getDecisionCases().get(caseValue);
        // if the tasks returned are empty based on evaluated case value, then get the default case
        // if there is one
        if (selectedTasks == null || selectedTasks.isEmpty()) {
            selectedTasks = workflowTask.getDefaultCase();
        }
        // once there are selected tasks that need to proceeded as part of the decision, get the
        // next task to be scheduled by using the decider service
        if (selectedTasks != null && !selectedTasks.isEmpty()) {
            // Only the first task of the chosen branch is scheduled here; the decider walks the
            // rest of the branch as execution progresses.
            WorkflowTask selectedTask = selectedTasks.get(0);
            // Schedule the first task to be executed...
            // TODO break out this recursive call using function composition of what needs to be
            // done and then walk back the condition tree
            List<TaskModel> caseTasks =
                    taskMapperContext
                            .getDeciderService()
                            .getTasksToBeScheduled(
                                    workflowModel,
                                    selectedTask,
                                    retryCount,
                                    taskMapperContext.getRetryTaskId());
            tasksToBeScheduled.addAll(caseTasks);
            // Mutated after the decision task was added to the result list — flags that a branch
            // was scheduled under this decision.
            decisionTask.addInput("hasChildren", "true");
        }
        return tasksToBeScheduled;
    }

    /**
     * This method evaluates the case expression of a decision task and returns a string
     * representation of the evaluated result.
     *
     * @param workflowTask: The decision task that has the case expression to be evaluated.
     * @param taskInput: the input which has the values that will be used in evaluating the case
     *     expression.
     * @return A String representation of the evaluated result
     * @throws TerminateWorkflowException if the case expression fails to evaluate
     */
    @VisibleForTesting
    String getEvaluatedCaseValue(WorkflowTask workflowTask, Map<String, Object> taskInput) {
        String expression = workflowTask.getCaseExpression();
        String caseValue;
        if (StringUtils.isNotBlank(expression)) {
            LOGGER.debug("Case being evaluated using decision expression: {}", expression);
            try {
                // Evaluate the expression by using the Nashorn based script evaluator
                Object returnValue = ScriptEvaluator.eval(expression, taskInput);
                // A null script result becomes the literal string "null", which can itself be a
                // decision-case key.
                caseValue = (returnValue == null) ? "null" : returnValue.toString();
            } catch (ScriptException e) {
                String errorMsg = String.format("Error while evaluating script: %s", expression);
                LOGGER.error(errorMsg, e);
                throw new TerminateWorkflowException(errorMsg);
            }
        } else {
            // In case of no case expression, get the caseValueParam and treat it as a string
            // representation of caseValue
            LOGGER.debug(
                    "No Expression available on the decision task, case value being assigned as param name");
            String paramName = workflowTask.getCaseValueParam();
            // "" + value converts a missing/null input to the string "null" rather than throwing.
            caseValue = "" + taskInput.get(paramName);
        }
        return caseValue;
    }
}
6,715
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution/mapper/JoinTaskMapper.java
/*
 * Copyright 2021 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.core.execution.mapper;

import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;

import com.netflix.conductor.common.metadata.tasks.TaskType;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;

/**
 * An implementation of {@link TaskMapper} to map a {@link WorkflowTask} of type {@link
 * TaskType#JOIN} to a {@link TaskModel} of type {@link TaskType#JOIN}
 */
@Component
public class JoinTaskMapper implements TaskMapper {

    public static final Logger LOGGER = LoggerFactory.getLogger(JoinTaskMapper.class);

    @Override
    public String getTaskType() {
        return TaskType.JOIN.name();
    }

    /**
     * Maps a JOIN workflow task to a single IN_PROGRESS {@link TaskModel} whose input records the
     * task reference names the join waits on.
     *
     * @param taskMapperContext: A wrapper class containing the {@link WorkflowTask}, {@link
     *     WorkflowDef}, {@link WorkflowModel} and a string representation of the TaskId
     * @return A {@link TaskModel} of type {@link TaskType#JOIN} in a List
     */
    @Override
    public List<TaskModel> getMappedTasks(TaskMapperContext taskMapperContext) {
        LOGGER.debug("TaskMapperContext {} in JoinTaskMapper", taskMapperContext);

        Map<String, Object> joinTaskInput = new HashMap<>();
        joinTaskInput.put("joinOn", taskMapperContext.getWorkflowTask().getJoinOn());

        TaskModel joinTask = taskMapperContext.createTaskModel();
        joinTask.setTaskType(TaskType.TASK_TYPE_JOIN);
        joinTask.setTaskDefName(TaskType.TASK_TYPE_JOIN);
        joinTask.setStartTime(System.currentTimeMillis());
        joinTask.setInputData(joinTaskInput);
        joinTask.setStatus(TaskModel.Status.IN_PROGRESS);
        return List.of(joinTask);
    }
}
6,716
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution/mapper/ExclusiveJoinTaskMapper.java
/*
 * Copyright 2022 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.core.execution.mapper;

import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;

import com.netflix.conductor.common.metadata.tasks.TaskType;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.model.TaskModel;

/**
 * Maps an EXCLUSIVE_JOIN workflow task to a single IN_PROGRESS {@link TaskModel}. The task input
 * records the joined task reference names and, when configured, the default exclusive-join task.
 */
@Component
public class ExclusiveJoinTaskMapper implements TaskMapper {

    public static final Logger LOGGER = LoggerFactory.getLogger(ExclusiveJoinTaskMapper.class);

    @Override
    public String getTaskType() {
        return TaskType.EXCLUSIVE_JOIN.name();
    }

    @Override
    public List<TaskModel> getMappedTasks(TaskMapperContext taskMapperContext) {
        LOGGER.debug("TaskMapperContext {} in ExclusiveJoinTaskMapper", taskMapperContext);

        WorkflowTask workflowTask = taskMapperContext.getWorkflowTask();

        Map<String, Object> exclusiveJoinInput = new HashMap<>();
        exclusiveJoinInput.put("joinOn", workflowTask.getJoinOn());

        // The default branch is optional — only forward it when the definition provides one.
        if (workflowTask.getDefaultExclusiveJoinTask() != null) {
            exclusiveJoinInput.put(
                    "defaultExclusiveJoinTask", workflowTask.getDefaultExclusiveJoinTask());
        }

        TaskModel exclusiveJoinTask = taskMapperContext.createTaskModel();
        exclusiveJoinTask.setTaskType(TaskType.TASK_TYPE_EXCLUSIVE_JOIN);
        exclusiveJoinTask.setTaskDefName(TaskType.TASK_TYPE_EXCLUSIVE_JOIN);
        exclusiveJoinTask.setStartTime(System.currentTimeMillis());
        exclusiveJoinTask.setInputData(exclusiveJoinInput);
        exclusiveJoinTask.setStatus(TaskModel.Status.IN_PROGRESS);
        return List.of(exclusiveJoinTask);
    }
}
6,717
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution/mapper/StartWorkflowTaskMapper.java
/*
 * Copyright 2022 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.core.execution.mapper;

import java.util.List;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;

import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.core.exception.TerminateWorkflowException;
import com.netflix.conductor.model.TaskModel;

import static com.netflix.conductor.common.metadata.tasks.TaskType.START_WORKFLOW;
import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_START_WORKFLOW;

/**
 * Maps a START_WORKFLOW workflow task to a single SCHEDULED {@link TaskModel} carrying the task
 * input; the system-task executor launches the target workflow.
 */
@Component
public class StartWorkflowTaskMapper implements TaskMapper {

    private static final Logger LOGGER = LoggerFactory.getLogger(StartWorkflowTaskMapper.class);

    @Override
    public String getTaskType() {
        return START_WORKFLOW.name();
    }

    @Override
    public List<TaskModel> getMappedTasks(TaskMapperContext taskMapperContext)
            throws TerminateWorkflowException {
        WorkflowTask workflowTask = taskMapperContext.getWorkflowTask();

        TaskModel startWorkflowTask = taskMapperContext.createTaskModel();
        startWorkflowTask.setTaskType(TASK_TYPE_START_WORKFLOW);
        startWorkflowTask.addInput(taskMapperContext.getTaskInput());
        startWorkflowTask.setStatus(TaskModel.Status.SCHEDULED);
        startWorkflowTask.setCallbackAfterSeconds(workflowTask.getStartDelay());
        LOGGER.debug("{} created", startWorkflowTask);
        return List.of(startWorkflowTask);
    }
}
6,718
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution/mapper/SimpleTaskMapper.java
/* * Copyright 2022 Netflix, Inc. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.core.execution.mapper; import java.util.List; import java.util.Map; import java.util.Optional; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.stereotype.Component; import com.netflix.conductor.common.metadata.tasks.TaskDef; import com.netflix.conductor.common.metadata.tasks.TaskType; import com.netflix.conductor.common.metadata.workflow.WorkflowDef; import com.netflix.conductor.common.metadata.workflow.WorkflowTask; import com.netflix.conductor.core.exception.TerminateWorkflowException; import com.netflix.conductor.core.utils.ParametersUtils; import com.netflix.conductor.model.TaskModel; import com.netflix.conductor.model.WorkflowModel; /** * An implementation of {@link TaskMapper} to map a {@link WorkflowTask} of type {@link * TaskType#SIMPLE} to a {@link TaskModel} with status {@link TaskModel.Status#SCHEDULED}. * <b>NOTE:</b> There is not type defined for simples task. 
*/ @Component public class SimpleTaskMapper implements TaskMapper { public static final Logger LOGGER = LoggerFactory.getLogger(SimpleTaskMapper.class); private final ParametersUtils parametersUtils; public SimpleTaskMapper(ParametersUtils parametersUtils) { this.parametersUtils = parametersUtils; } @Override public String getTaskType() { return TaskType.SIMPLE.name(); } /** * This method maps a {@link WorkflowTask} of type {@link TaskType#SIMPLE} to a {@link * TaskModel} * * @param taskMapperContext: A wrapper class containing the {@link WorkflowTask}, {@link * WorkflowDef}, {@link WorkflowModel} and a string representation of the TaskId * @throws TerminateWorkflowException In case if the task definition does not exist * @return a List with just one simple task */ @Override public List<TaskModel> getMappedTasks(TaskMapperContext taskMapperContext) throws TerminateWorkflowException { LOGGER.debug("TaskMapperContext {} in SimpleTaskMapper", taskMapperContext); WorkflowTask workflowTask = taskMapperContext.getWorkflowTask(); WorkflowModel workflowModel = taskMapperContext.getWorkflowModel(); int retryCount = taskMapperContext.getRetryCount(); String retriedTaskId = taskMapperContext.getRetryTaskId(); TaskDef taskDefinition = Optional.ofNullable(workflowTask.getTaskDefinition()) .orElseThrow( () -> { String reason = String.format( "Invalid task. 
Task %s does not have a definition", workflowTask.getName()); return new TerminateWorkflowException(reason); }); Map<String, Object> input = parametersUtils.getTaskInput( workflowTask.getInputParameters(), workflowModel, taskDefinition, taskMapperContext.getTaskId()); TaskModel simpleTask = taskMapperContext.createTaskModel(); simpleTask.setTaskType(workflowTask.getName()); simpleTask.setStartDelayInSeconds(workflowTask.getStartDelay()); simpleTask.setInputData(input); simpleTask.setStatus(TaskModel.Status.SCHEDULED); simpleTask.setRetryCount(retryCount); simpleTask.setCallbackAfterSeconds(workflowTask.getStartDelay()); simpleTask.setResponseTimeoutSeconds(taskDefinition.getResponseTimeoutSeconds()); simpleTask.setRetriedTaskId(retriedTaskId); simpleTask.setRateLimitPerFrequency(taskDefinition.getRateLimitPerFrequency()); simpleTask.setRateLimitFrequencyInSeconds(taskDefinition.getRateLimitFrequencyInSeconds()); return List.of(simpleTask); } }
6,719
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution/mapper/EventTaskMapper.java
/* * Copyright 2022 Netflix, Inc. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.core.execution.mapper; import java.util.List; import java.util.Map; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; import com.netflix.conductor.common.metadata.tasks.TaskType; import com.netflix.conductor.common.metadata.workflow.WorkflowTask; import com.netflix.conductor.core.utils.ParametersUtils; import com.netflix.conductor.model.TaskModel; import com.netflix.conductor.model.WorkflowModel; import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_EVENT; @Component public class EventTaskMapper implements TaskMapper { public static final Logger LOGGER = LoggerFactory.getLogger(EventTaskMapper.class); private final ParametersUtils parametersUtils; @Autowired public EventTaskMapper(ParametersUtils parametersUtils) { this.parametersUtils = parametersUtils; } @Override public String getTaskType() { return TaskType.EVENT.name(); } @Override public List<TaskModel> getMappedTasks(TaskMapperContext taskMapperContext) { LOGGER.debug("TaskMapperContext {} in EventTaskMapper", taskMapperContext); WorkflowTask workflowTask = taskMapperContext.getWorkflowTask(); WorkflowModel workflowModel = taskMapperContext.getWorkflowModel(); String taskId = taskMapperContext.getTaskId(); workflowTask.getInputParameters().put("sink", 
workflowTask.getSink()); workflowTask.getInputParameters().put("asyncComplete", workflowTask.isAsyncComplete()); Map<String, Object> eventTaskInput = parametersUtils.getTaskInputV2( workflowTask.getInputParameters(), workflowModel, taskId, null); String sink = (String) eventTaskInput.get("sink"); Boolean asynComplete = (Boolean) eventTaskInput.get("asyncComplete"); TaskModel eventTask = taskMapperContext.createTaskModel(); eventTask.setTaskType(TASK_TYPE_EVENT); eventTask.setStatus(TaskModel.Status.SCHEDULED); eventTask.setInputData(eventTaskInput); eventTask.getInputData().put("sink", sink); eventTask.getInputData().put("asyncComplete", asynComplete); return List.of(eventTask); } }
6,720
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution/mapper/ForkJoinTaskMapper.java
/* * Copyright 2022 Netflix, Inc. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.core.execution.mapper; import java.util.LinkedList; import java.util.List; import java.util.Map; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.stereotype.Component; import com.netflix.conductor.common.metadata.tasks.TaskType; import com.netflix.conductor.common.metadata.workflow.WorkflowDef; import com.netflix.conductor.common.metadata.workflow.WorkflowTask; import com.netflix.conductor.core.exception.TerminateWorkflowException; import com.netflix.conductor.model.TaskModel; import com.netflix.conductor.model.WorkflowModel; /** * An implementation of {@link TaskMapper} to map a {@link WorkflowTask} of type {@link * TaskType#FORK_JOIN} to a LinkedList of {@link TaskModel} beginning with a completed {@link * TaskType#TASK_TYPE_FORK}, followed by the user defined fork tasks */ @Component public class ForkJoinTaskMapper implements TaskMapper { public static final Logger LOGGER = LoggerFactory.getLogger(ForkJoinTaskMapper.class); @Override public String getTaskType() { return TaskType.FORK_JOIN.name(); } /** * This method gets the list of tasks that need to scheduled when the task to scheduled is of * type {@link TaskType#FORK_JOIN}. 
* * @param taskMapperContext: A wrapper class containing the {@link WorkflowTask}, {@link * WorkflowDef}, {@link WorkflowModel} and a string representation of the TaskId * @return List of tasks in the following order: * * <ul> * <li>{@link TaskType#TASK_TYPE_FORK} with {@link TaskModel.Status#COMPLETED} * <li>Might be any kind of task, but in most cases is a UserDefinedTask with {@link * TaskModel.Status#SCHEDULED} * </ul> * * @throws TerminateWorkflowException When the task after {@link TaskType#FORK_JOIN} is not a * {@link TaskType#JOIN} */ @Override public List<TaskModel> getMappedTasks(TaskMapperContext taskMapperContext) throws TerminateWorkflowException { LOGGER.debug("TaskMapperContext {} in ForkJoinTaskMapper", taskMapperContext); WorkflowTask workflowTask = taskMapperContext.getWorkflowTask(); Map<String, Object> taskInput = taskMapperContext.getTaskInput(); WorkflowModel workflowModel = taskMapperContext.getWorkflowModel(); int retryCount = taskMapperContext.getRetryCount(); List<TaskModel> tasksToBeScheduled = new LinkedList<>(); TaskModel forkTask = taskMapperContext.createTaskModel(); forkTask.setTaskType(TaskType.TASK_TYPE_FORK); forkTask.setTaskDefName(TaskType.TASK_TYPE_FORK); long epochMillis = System.currentTimeMillis(); forkTask.setStartTime(epochMillis); forkTask.setEndTime(epochMillis); forkTask.setInputData(taskInput); forkTask.setStatus(TaskModel.Status.COMPLETED); tasksToBeScheduled.add(forkTask); List<List<WorkflowTask>> forkTasks = workflowTask.getForkTasks(); for (List<WorkflowTask> wfts : forkTasks) { WorkflowTask wft = wfts.get(0); List<TaskModel> tasks2 = taskMapperContext .getDeciderService() .getTasksToBeScheduled(workflowModel, wft, retryCount); tasksToBeScheduled.addAll(tasks2); } WorkflowTask joinWorkflowTask = workflowModel .getWorkflowDefinition() .getNextTask(workflowTask.getTaskReferenceName()); if (joinWorkflowTask == null || !joinWorkflowTask.getType().equals(TaskType.JOIN.name())) { throw new TerminateWorkflowException( 
"Fork task definition is not followed by a join task. Check the blueprint"); } List<TaskModel> joinTask = taskMapperContext .getDeciderService() .getTasksToBeScheduled(workflowModel, joinWorkflowTask, retryCount); tasksToBeScheduled.addAll(joinTask); return tasksToBeScheduled; } }
6,721
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution/mapper/ForkJoinDynamicTaskMapper.java
/*
 * Copyright 2022 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.core.execution.mapper;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.stream.Collectors;

import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.ImmutablePair;
import org.apache.commons.lang3.tuple.Pair;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

import com.netflix.conductor.annotations.VisibleForTesting;
import com.netflix.conductor.common.metadata.tasks.TaskType;
import com.netflix.conductor.common.metadata.workflow.DynamicForkJoinTaskList;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.core.exception.TerminateWorkflowException;
import com.netflix.conductor.core.utils.IDGenerator;
import com.netflix.conductor.core.utils.ParametersUtils;
import com.netflix.conductor.dao.MetadataDAO;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;

import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;

/**
 * An implementation of {@link TaskMapper} to map a {@link WorkflowTask} of type {@link
 * TaskType#FORK_JOIN_DYNAMIC} to a LinkedList of {@link TaskModel} beginning with a {@link
 * TaskType#TASK_TYPE_FORK}, followed by the user defined dynamic tasks and a {@link TaskType#JOIN}
 * at the end.
 *
 * <p>Supports two input shapes: the current {@code dynamicForkTasksParam} style (task list plus a
 * separate input map) and the legacy {@code dynamicForkJoinTasksParam} style (a
 * {@link DynamicForkJoinTaskList} payload carrying tasks and their inputs together).
 */
@Component
public class ForkJoinDynamicTaskMapper implements TaskMapper {

    public static final Logger LOGGER = LoggerFactory.getLogger(ForkJoinDynamicTaskMapper.class);

    private final IDGenerator idGenerator;
    private final ParametersUtils parametersUtils;
    private final ObjectMapper objectMapper;
    private final MetadataDAO metadataDAO;

    // Jackson target type for deserializing the dynamic task list from the input payload.
    private static final TypeReference<List<WorkflowTask>> ListOfWorkflowTasks =
            new TypeReference<>() {};

    @Autowired
    public ForkJoinDynamicTaskMapper(
            IDGenerator idGenerator,
            ParametersUtils parametersUtils,
            ObjectMapper objectMapper,
            MetadataDAO metadataDAO) {
        this.idGenerator = idGenerator;
        this.parametersUtils = parametersUtils;
        this.objectMapper = objectMapper;
        this.metadataDAO = metadataDAO;
    }

    @Override
    public String getTaskType() {
        return TaskType.FORK_JOIN_DYNAMIC.name();
    }

    /**
     * This method gets the list of tasks that need to scheduled when the task to scheduled is of
     * type {@link TaskType#FORK_JOIN_DYNAMIC}. Creates a Fork Task, followed by the Dynamic tasks
     * and a final JOIN task.
     *
     * <p>The definitions of the dynamic forks that need to be scheduled are available in the {@link
     * WorkflowTask#getInputParameters()} which are accessed using the {@link
     * TaskMapperContext#getWorkflowTask()}. The dynamic fork task definitions are referred by a key
     * value either by {@link WorkflowTask#getDynamicForkTasksParam()} or by {@link
     * WorkflowTask#getDynamicForkJoinTasksParam()} When creating the list of tasks to be scheduled
     * a set of preconditions are validated:
     *
     * <ul>
     *   <li>If the input parameter representing the Dynamic fork tasks is available as part of
     *       {@link WorkflowTask#getDynamicForkTasksParam()} then the input for the dynamic task is
     *       validated to be a map by using {@link WorkflowTask#getDynamicForkTasksInputParamName()}
     *   <li>If the input parameter representing the Dynamic fork tasks is available as part of
     *       {@link WorkflowTask#getDynamicForkJoinTasksParam()} then the input for the dynamic
     *       tasks is available in the payload of the tasks definition.
     *   <li>A check is performed that the next following task in the {@link WorkflowDef} is a
     *       {@link TaskType#JOIN}
     * </ul>
     *
     * @param taskMapperContext: A wrapper class containing the {@link WorkflowTask}, {@link
     *     WorkflowDef}, {@link WorkflowModel} and a string representation of the TaskId
     * @return List of tasks in the following order:
     *     <ul>
     *       <li>{@link TaskType#TASK_TYPE_FORK} with {@link TaskModel.Status#COMPLETED}
     *       <li>Might be any kind of task, but this is most cases is a UserDefinedTask with {@link
     *           TaskModel.Status#SCHEDULED}
     *       <li>{@link TaskType#JOIN} with {@link TaskModel.Status#IN_PROGRESS}
     *     </ul>
     *
     * @throws TerminateWorkflowException In case of:
     *     <ul>
     *       <li>When the task after {@link TaskType#FORK_JOIN_DYNAMIC} is not a {@link
     *           TaskType#JOIN}
     *       <li>When the input parameters for the dynamic tasks are not of type {@link Map}
     *     </ul>
     */
    @Override
    public List<TaskModel> getMappedTasks(TaskMapperContext taskMapperContext)
            throws TerminateWorkflowException {
        LOGGER.debug("TaskMapperContext {} in ForkJoinDynamicTaskMapper", taskMapperContext);

        WorkflowTask workflowTask = taskMapperContext.getWorkflowTask();
        WorkflowModel workflowModel = taskMapperContext.getWorkflowModel();
        int retryCount = taskMapperContext.getRetryCount();

        List<TaskModel> mappedTasks = new LinkedList<>();
        // Get the list of dynamic tasks and the input for the tasks; prefers the
        // dynamicForkTasksParam style and falls back to the legacy dynamicForkJoinTasksParam.
        Pair<List<WorkflowTask>, Map<String, Map<String, Object>>> workflowTasksAndInputPair =
                Optional.ofNullable(workflowTask.getDynamicForkTasksParam())
                        .map(
                                dynamicForkTaskParam ->
                                        getDynamicForkTasksAndInput(
                                                workflowTask, workflowModel, dynamicForkTaskParam))
                        .orElseGet(
                                () -> getDynamicForkJoinTasksAndInput(workflowTask, workflowModel));

        List<WorkflowTask> dynForkTasks = workflowTasksAndInputPair.getLeft();
        Map<String, Map<String, Object>> tasksInput = workflowTasksAndInputPair.getRight();

        // Create Fork Task which needs to be followed by the dynamic tasks
        TaskModel forkDynamicTask = createDynamicForkTask(taskMapperContext, dynForkTasks);

        mappedTasks.add(forkDynamicTask);

        List<String> joinOnTaskRefs = new LinkedList<>();
        // Add each dynamic task to the mapped tasks and also get the last dynamic task in the list,
        // which indicates that the following task after that needs to be a join task
        for (WorkflowTask dynForkTask : dynForkTasks) {
            // TODO this is a cyclic dependency, break it out using function
            // composition
            List<TaskModel> forkedTasks =
                    taskMapperContext
                            .getDeciderService()
                            .getTasksToBeScheduled(workflowModel, dynForkTask, retryCount);

            // It's an error state if no forkedTasks can be decided upon. In the cases where we've
            // seen
            // this happen is when a dynamic task is attempting to be created here, but a task with
            // the
            // same reference name has already been created in the Workflow.
            if (forkedTasks == null || forkedTasks.isEmpty()) {
                Optional<String> existingTaskRefName =
                        workflowModel.getTasks().stream()
                                .filter(
                                        runningTask ->
                                                runningTask
                                                                .getStatus()
                                                                .equals(
                                                                        TaskModel.Status
                                                                                .IN_PROGRESS)
                                                        || runningTask.getStatus().isTerminal())
                                .map(TaskModel::getReferenceTaskName)
                                .filter(
                                        refTaskName ->
                                                refTaskName.equals(
                                                        dynForkTask.getTaskReferenceName()))
                                .findAny();

                // Construct an informative error message
                String terminateMessage =
                        "No dynamic tasks could be created for the Workflow: "
                                + workflowModel.toShortString()
                                + ", Dynamic Fork Task: "
                                + dynForkTask;
                if (existingTaskRefName.isPresent()) {
                    terminateMessage +=
                            "Attempted to create a duplicate task reference name: "
                                    + existingTaskRefName.get();
                }
                throw new TerminateWorkflowException(terminateMessage);
            }

            for (TaskModel forkedTask : forkedTasks) {
                try {
                    // Merge the per-task input (keyed by reference name) into the forked task.
                    Map<String, Object> forkedTaskInput =
                            tasksInput.get(forkedTask.getReferenceTaskName());
                    forkedTask.addInput(forkedTaskInput);
                } catch (Exception e) {
                    String reason =
                            String.format(
                                    "Tasks could not be dynamically forked due to invalid input: %s",
                                    e.getMessage());
                    throw new TerminateWorkflowException(reason);
                }
            }
            mappedTasks.addAll(forkedTasks);
            // Get the last of the dynamic tasks so that the join can be performed once this task is
            // done
            TaskModel last = forkedTasks.get(forkedTasks.size() - 1);
            joinOnTaskRefs.add(last.getReferenceTaskName());
        }

        // From the workflow definition get the next task and make sure that it is a JOIN task.
        // The dynamic fork tasks need to be followed by a join task
        WorkflowTask joinWorkflowTask =
                workflowModel
                        .getWorkflowDefinition()
                        .getNextTask(workflowTask.getTaskReferenceName());

        if (joinWorkflowTask == null || !joinWorkflowTask.getType().equals(TaskType.JOIN.name())) {
            throw new TerminateWorkflowException(
                    "Dynamic join definition is not followed by a join task. Check the workflow definition.");
        }

        // Create Join task
        HashMap<String, Object> joinInput = new HashMap<>();
        joinInput.put("joinOn", joinOnTaskRefs);
        TaskModel joinTask = createJoinTask(workflowModel, joinWorkflowTask, joinInput);
        mappedTasks.add(joinTask);

        return mappedTasks;
    }

    /**
     * This method creates a FORK task and adds the list of dynamic fork tasks keyed by
     * "forkedTaskDefs" and their names keyed by "forkedTasks" into {@link TaskModel#getInputData()}
     *
     * @param taskMapperContext: The {@link TaskMapperContext} which wraps workflowTask, workflowDef
     *     and workflowModel
     * @param dynForkTasks: The list of dynamic forked tasks, the reference names of these tasks
     *     will be added to the forkDynamicTask
     * @return A new instance of {@link TaskModel} representing a {@link TaskType#TASK_TYPE_FORK}
     */
    @VisibleForTesting
    TaskModel createDynamicForkTask(
            TaskMapperContext taskMapperContext, List<WorkflowTask> dynForkTasks) {
        TaskModel forkDynamicTask = taskMapperContext.createTaskModel();
        forkDynamicTask.setTaskType(TaskType.TASK_TYPE_FORK);
        forkDynamicTask.setTaskDefName(TaskType.TASK_TYPE_FORK);
        // The fork marker does no work: it starts and ends immediately and is COMPLETED below.
        forkDynamicTask.setStartTime(System.currentTimeMillis());
        forkDynamicTask.setEndTime(System.currentTimeMillis());
        List<String> forkedTaskNames =
                dynForkTasks.stream()
                        .map(WorkflowTask::getTaskReferenceName)
                        .collect(Collectors.toList());
        forkDynamicTask.getInputData().put("forkedTasks", forkedTaskNames);
        forkDynamicTask
                .getInputData()
                .put(
                        "forkedTaskDefs",
                        dynForkTasks); // TODO: Remove this parameter in the later releases
        forkDynamicTask.setStatus(TaskModel.Status.COMPLETED);
        return forkDynamicTask;
    }

    /**
     * This method creates a JOIN task that is used in the {@link
     * this#getMappedTasks(TaskMapperContext)} at the end to add a join task to be scheduled after
     * all the fork tasks
     *
     * @param workflowModel: A instance of the {@link WorkflowModel} which represents the workflow
     *     being executed.
     * @param joinWorkflowTask: A instance of {@link WorkflowTask} which is of type {@link
     *     TaskType#JOIN}
     * @param joinInput: The input which is set in the {@link TaskModel#setInputData(Map)}
     * @return a new instance of {@link TaskModel} representing a {@link TaskType#JOIN}
     */
    @VisibleForTesting
    TaskModel createJoinTask(
            WorkflowModel workflowModel,
            WorkflowTask joinWorkflowTask,
            HashMap<String, Object> joinInput) {
        TaskModel joinTask = new TaskModel();
        joinTask.setTaskType(TaskType.TASK_TYPE_JOIN);
        joinTask.setTaskDefName(TaskType.TASK_TYPE_JOIN);
        joinTask.setReferenceTaskName(joinWorkflowTask.getTaskReferenceName());
        joinTask.setWorkflowInstanceId(workflowModel.getWorkflowId());
        joinTask.setWorkflowType(workflowModel.getWorkflowName());
        joinTask.setCorrelationId(workflowModel.getCorrelationId());
        joinTask.setScheduledTime(System.currentTimeMillis());
        joinTask.setStartTime(System.currentTimeMillis());
        joinTask.setInputData(joinInput);
        joinTask.setTaskId(idGenerator.generate());
        // IN_PROGRESS: the join waits for the forked branches rather than being polled as work.
        joinTask.setStatus(TaskModel.Status.IN_PROGRESS);
        joinTask.setWorkflowTask(joinWorkflowTask);
        joinTask.setWorkflowPriority(workflowModel.getPriority());
        return joinTask;
    }

    /**
     * This method is used to get the List of dynamic workflow tasks and their input based on the
     * {@link WorkflowTask#getDynamicForkTasksParam()}
     *
     * @param workflowTask: The Task of type FORK_JOIN_DYNAMIC that needs to scheduled, which has
     *     the input parameters
     * @param workflowModel: The instance of the {@link WorkflowModel} which represents the workflow
     *     being executed.
     * @param dynamicForkTaskParam: The key representing the dynamic fork join json payload which is
     *     available in {@link WorkflowTask#getInputParameters()}
     * @return a {@link Pair} representing the list of dynamic fork tasks in {@link Pair#getLeft()}
     *     and the input for the dynamic fork tasks in {@link Pair#getRight()}
     * @throws TerminateWorkflowException : In case of input parameters of the dynamic fork tasks
     *     not represented as {@link Map}
     */
    @SuppressWarnings("unchecked")
    @VisibleForTesting
    Pair<List<WorkflowTask>, Map<String, Map<String, Object>>> getDynamicForkTasksAndInput(
            WorkflowTask workflowTask, WorkflowModel workflowModel, String dynamicForkTaskParam)
            throws TerminateWorkflowException {

        Map<String, Object> input =
                parametersUtils.getTaskInput(
                        workflowTask.getInputParameters(), workflowModel, null, null);
        Object dynamicForkTasksJson = input.get(dynamicForkTaskParam);
        List<WorkflowTask> dynamicForkWorkflowTasks =
                objectMapper.convertValue(dynamicForkTasksJson, ListOfWorkflowTasks);
        if (dynamicForkWorkflowTasks == null) {
            dynamicForkWorkflowTasks = new ArrayList<>();
        }
        // Backfill task definitions from the metadata store for tasks that did not embed one.
        for (WorkflowTask dynamicForkWorkflowTask : dynamicForkWorkflowTasks) {
            if ((dynamicForkWorkflowTask.getTaskDefinition() == null)
                    && StringUtils.isNotBlank(dynamicForkWorkflowTask.getName())) {
                dynamicForkWorkflowTask.setTaskDefinition(
                        metadataDAO.getTaskDef(dynamicForkWorkflowTask.getName()));
            }
        }
        Object dynamicForkTasksInput = input.get(workflowTask.getDynamicForkTasksInputParamName());
        if (!(dynamicForkTasksInput instanceof Map)) {
            throw new TerminateWorkflowException(
                    "Input to the dynamically forked tasks is not a map -> expecting a map of K,V but found "
                            + dynamicForkTasksInput);
        }
        return new ImmutablePair<>(
                dynamicForkWorkflowTasks, (Map<String, Map<String, Object>>) dynamicForkTasksInput);
    }

    /**
     * This method is used to get the List of dynamic workflow tasks and their input based on the
     * {@link WorkflowTask#getDynamicForkJoinTasksParam()}
     *
     * <p><b>NOTE:</b> This method is kept for legacy reasons, new workflows should use the {@link
     * #getDynamicForkTasksAndInput}
     *
     * @param workflowTask: The Task of type FORK_JOIN_DYNAMIC that needs to scheduled, which has
     *     the input parameters
     * @param workflowModel: The instance of the {@link WorkflowModel} which represents the workflow
     *     being executed.
     * @return {@link Pair} representing the list of dynamic fork tasks in {@link Pair#getLeft()}
     *     and the input for the dynamic fork tasks in {@link Pair#getRight()}
     * @throws TerminateWorkflowException : In case of the {@link WorkflowTask#getInputParameters()}
     *     does not have a payload that contains the list of the dynamic tasks
     */
    @VisibleForTesting
    Pair<List<WorkflowTask>, Map<String, Map<String, Object>>> getDynamicForkJoinTasksAndInput(
            WorkflowTask workflowTask, WorkflowModel workflowModel)
            throws TerminateWorkflowException {
        String dynamicForkJoinTaskParam = workflowTask.getDynamicForkJoinTasksParam();
        Map<String, Object> input =
                parametersUtils.getTaskInput(
                        workflowTask.getInputParameters(), workflowModel, null, null);
        Object paramValue = input.get(dynamicForkJoinTaskParam);
        DynamicForkJoinTaskList dynamicForkJoinTaskList =
                objectMapper.convertValue(paramValue, DynamicForkJoinTaskList.class);

        if (dynamicForkJoinTaskList == null) {
            String reason =
                    String.format(
                            "Dynamic tasks could not be created. The value of %s from task's input %s has no dynamic tasks to be scheduled",
                            dynamicForkJoinTaskParam, input);
            LOGGER.error(reason);
            throw new TerminateWorkflowException(reason);
        }

        Map<String, Map<String, Object>> dynamicForkJoinTasksInput = new HashMap<>();

        // Convert each legacy DynamicForkJoinTask into a WorkflowTask, collecting its input
        // (keyed by reference name) as a side effect of the peek below.
        List<WorkflowTask> dynamicForkJoinWorkflowTasks =
                dynamicForkJoinTaskList.getDynamicTasks().stream()
                        .peek(
                                dynamicForkJoinTask ->
                                        dynamicForkJoinTasksInput.put(
                                                dynamicForkJoinTask.getReferenceName(),
                                                dynamicForkJoinTask
                                                        .getInput())) // TODO create a custom pair
                        // collector
                        .map(
                                dynamicForkJoinTask -> {
                                    WorkflowTask dynamicForkJoinWorkflowTask = new WorkflowTask();
                                    dynamicForkJoinWorkflowTask.setTaskReferenceName(
                                            dynamicForkJoinTask.getReferenceName());
                                    dynamicForkJoinWorkflowTask.setName(
                                            dynamicForkJoinTask.getTaskName());
                                    dynamicForkJoinWorkflowTask.setType(
                                            dynamicForkJoinTask.getType());
                                    if (dynamicForkJoinWorkflowTask.getTaskDefinition() == null
                                            && StringUtils.isNotBlank(
                                                    dynamicForkJoinWorkflowTask.getName())) {
                                        dynamicForkJoinWorkflowTask.setTaskDefinition(
                                                metadataDAO.getTaskDef(
                                                        dynamicForkJoinTask.getTaskName()));
                                    }
                                    return dynamicForkJoinWorkflowTask;
                                })
                        .collect(Collectors.toCollection(LinkedList::new));

        return new ImmutablePair<>(dynamicForkJoinWorkflowTasks, dynamicForkJoinTasksInput);
    }
}
6,722
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution/mapper/HumanTaskMapper.java
/* * Copyright 2022 Netflix, Inc. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.core.execution.mapper; import java.util.List; import java.util.Map; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.stereotype.Component; import com.netflix.conductor.common.metadata.tasks.TaskType; import com.netflix.conductor.common.metadata.workflow.WorkflowTask; import com.netflix.conductor.core.execution.tasks.Human; import com.netflix.conductor.core.utils.ParametersUtils; import com.netflix.conductor.model.TaskModel; import com.netflix.conductor.model.WorkflowModel; import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_HUMAN; /** * An implementation of {@link TaskMapper} to map a {@link WorkflowTask} of type {@link * TaskType#HUMAN} to a {@link TaskModel} of type {@link Human} with {@link * TaskModel.Status#IN_PROGRESS} */ @Component public class HumanTaskMapper implements TaskMapper { public static final Logger LOGGER = LoggerFactory.getLogger(HumanTaskMapper.class); private final ParametersUtils parametersUtils; public HumanTaskMapper(ParametersUtils parametersUtils) { this.parametersUtils = parametersUtils; } @Override public String getTaskType() { return TaskType.HUMAN.name(); } @Override public List<TaskModel> getMappedTasks(TaskMapperContext taskMapperContext) { WorkflowModel workflowModel = taskMapperContext.getWorkflowModel(); String taskId = taskMapperContext.getTaskId(); Map<String, 
Object> humanTaskInput = parametersUtils.getTaskInputV2( taskMapperContext.getWorkflowTask().getInputParameters(), workflowModel, taskId, null); TaskModel humanTask = taskMapperContext.createTaskModel(); humanTask.setTaskType(TASK_TYPE_HUMAN); humanTask.setInputData(humanTaskInput); humanTask.setStartTime(System.currentTimeMillis()); humanTask.setStatus(TaskModel.Status.IN_PROGRESS); return List.of(humanTask); } }
6,723
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution/mapper/DynamicTaskMapper.java
/*
 * Copyright 2022 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.core.execution.mapper;

import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Optional;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

import com.netflix.conductor.annotations.VisibleForTesting;
import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.tasks.TaskType;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.core.exception.TerminateWorkflowException;
import com.netflix.conductor.core.utils.ParametersUtils;
import com.netflix.conductor.dao.MetadataDAO;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;

/**
 * An implementation of {@link TaskMapper} to map a {@link WorkflowTask} of type {@link
 * TaskType#DYNAMIC} to a {@link TaskModel}. The concrete task name is not known at workflow
 * definition time: it is resolved at scheduling time by reading the value of the input parameter
 * named by {@link WorkflowTask#getDynamicTaskNameParam()}.
 */
@Component
public class DynamicTaskMapper implements TaskMapper {

    private static final Logger LOGGER = LoggerFactory.getLogger(DynamicTaskMapper.class);

    private final ParametersUtils parametersUtils;
    private final MetadataDAO metadataDAO;

    @Autowired
    public DynamicTaskMapper(ParametersUtils parametersUtils, MetadataDAO metadataDAO) {
        this.parametersUtils = parametersUtils;
        this.metadataDAO = metadataDAO;
    }

    @Override
    public String getTaskType() {
        return TaskType.DYNAMIC.name();
    }

    /**
     * Maps a dynamic task to a single scheduled {@link TaskModel}.
     *
     * <p>Resolution order: the task name is read from the task input (keyed by the dynamic task
     * name param), the {@link WorkflowTask} is renamed to it, and the task definition is looked up
     * for that name. Both steps throw {@link TerminateWorkflowException} on failure.
     *
     * @param taskMapperContext: A wrapper class containing the {@link WorkflowTask}, {@link
     *     WorkflowDef}, {@link WorkflowModel} and a string representation of the TaskId
     * @return A {@link List} that contains a single {@link TaskModel} with a {@link
     *     TaskModel.Status#SCHEDULED} status
     * @throws TerminateWorkflowException if the dynamic task name cannot be resolved from the
     *     input, or no task definition exists for the resolved name
     */
    @Override
    public List<TaskModel> getMappedTasks(TaskMapperContext taskMapperContext)
            throws TerminateWorkflowException {
        LOGGER.debug("TaskMapperContext {} in DynamicTaskMapper", taskMapperContext);

        WorkflowTask workflowTask = taskMapperContext.getWorkflowTask();
        Map<String, Object> taskInput = taskMapperContext.getTaskInput();
        WorkflowModel workflowModel = taskMapperContext.getWorkflowModel();
        int retryCount = taskMapperContext.getRetryCount();
        String retriedTaskId = taskMapperContext.getRetryTaskId();

        // Resolve the concrete task name from the input and rename the WorkflowTask to it.
        String taskNameParam = workflowTask.getDynamicTaskNameParam();
        String taskName = getDynamicTaskName(taskInput, taskNameParam);
        workflowTask.setName(taskName);
        TaskDef taskDefinition = getDynamicTaskDefinition(workflowTask);
        workflowTask.setTaskDefinition(taskDefinition);

        Map<String, Object> input =
                parametersUtils.getTaskInput(
                        workflowTask.getInputParameters(),
                        workflowModel,
                        taskDefinition,
                        taskMapperContext.getTaskId());

        // IMPORTANT: The WorkflowTask that is inside TaskMapperContext is changed above
        // createTaskModel() must be called here so the changes are reflected in the created
        // TaskModel
        TaskModel dynamicTask = taskMapperContext.createTaskModel();
        dynamicTask.setStartDelayInSeconds(workflowTask.getStartDelay());
        dynamicTask.setInputData(input);
        dynamicTask.setStatus(TaskModel.Status.SCHEDULED);
        dynamicTask.setRetryCount(retryCount);
        // callbackAfterSeconds reuses the start delay so the task is not polled before the delay
        dynamicTask.setCallbackAfterSeconds(workflowTask.getStartDelay());
        dynamicTask.setResponseTimeoutSeconds(taskDefinition.getResponseTimeoutSeconds());
        // the task type becomes the resolved dynamic name, not TaskType.DYNAMIC
        dynamicTask.setTaskType(taskName);
        dynamicTask.setRetriedTaskId(retriedTaskId);
        dynamicTask.setWorkflowPriority(workflowModel.getPriority());
        return Collections.singletonList(dynamicTask);
    }

    /**
     * Helper method that looks into the input params and returns the dynamic task name.
     *
     * @param taskInput: a map which contains different input parameters and also contains the
     *     mapping between the dynamic task name param and the actual name representing the dynamic
     *     task
     * @param taskNameParam: the key that is used to look up the dynamic task name.
     * @return The name of the dynamic task
     * @throws TerminateWorkflowException : In case there is no value for the dynamic task name in
     *     the input parameters.
     */
    @VisibleForTesting
    String getDynamicTaskName(Map<String, Object> taskInput, String taskNameParam)
            throws TerminateWorkflowException {
        return Optional.ofNullable(taskInput.get(taskNameParam))
                .map(String::valueOf)
                .orElseThrow(
                        () -> {
                            String reason =
                                    String.format(
                                            "Cannot map a dynamic task based on the parameter and input. "
                                                    + "Parameter= %s, input= %s",
                                            taskNameParam, taskInput);
                            return new TerminateWorkflowException(reason);
                        });
    }

    /**
     * Gets the {@link TaskDef} for a specific {@link WorkflowTask}: the inline definition if
     * present, otherwise the one registered in the metadata store under the task's name.
     *
     * @param workflowTask: An instance of {@link WorkflowTask} whose name is used to retrieve the
     *     {@link TaskDef}.
     * @return An instance of TaskDefinition
     * @throws TerminateWorkflowException : in case no task definition is available
     */
    @VisibleForTesting
    TaskDef getDynamicTaskDefinition(WorkflowTask workflowTask) throws TerminateWorkflowException {
        // TODO this is a common pattern in code base can
        // be moved to DAO
        return Optional.ofNullable(workflowTask.getTaskDefinition())
                .orElseGet(
                        () ->
                                Optional.ofNullable(metadataDAO.getTaskDef(workflowTask.getName()))
                                        .orElseThrow(
                                                () -> {
                                                    String reason =
                                                            String.format(
                                                                    "Invalid task specified. Cannot find task by name %s in the task definitions",
                                                                    workflowTask.getName());
                                                    return new TerminateWorkflowException(reason);
                                                }));
    }
}
6,724
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution/mapper/UserDefinedTaskMapper.java
/*
 * Copyright 2022 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.core.execution.mapper;

import java.util.List;
import java.util.Map;
import java.util.Optional;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;

import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.tasks.TaskType;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.core.exception.TerminateWorkflowException;
import com.netflix.conductor.core.utils.ParametersUtils;
import com.netflix.conductor.dao.MetadataDAO;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;

/**
 * An implementation of {@link TaskMapper} to map a {@link WorkflowTask} of type {@link
 * TaskType#USER_DEFINED} to a {@link TaskModel} of type {@link TaskType#USER_DEFINED} with {@link
 * TaskModel.Status#SCHEDULED}
 */
@Component
public class UserDefinedTaskMapper implements TaskMapper {

    // NOTE(review): public visibility is inconsistent with the sibling mappers, which declare
    // their loggers private static final. Left as-is in case external code references it.
    public static final Logger LOGGER = LoggerFactory.getLogger(UserDefinedTaskMapper.class);

    private final ParametersUtils parametersUtils;
    private final MetadataDAO metadataDAO;

    public UserDefinedTaskMapper(ParametersUtils parametersUtils, MetadataDAO metadataDAO) {
        this.parametersUtils = parametersUtils;
        this.metadataDAO = metadataDAO;
    }

    @Override
    public String getTaskType() {
        return TaskType.USER_DEFINED.name();
    }

    /**
     * This method maps a {@link WorkflowTask} of type {@link TaskType#USER_DEFINED} to a {@link
     * TaskModel} in a {@link TaskModel.Status#SCHEDULED} state
     *
     * @param taskMapperContext: A wrapper class containing the {@link WorkflowTask}, {@link
     *     WorkflowDef}, {@link WorkflowModel} and a string representation of the TaskId
     * @return a List with just one User defined task
     * @throws TerminateWorkflowException In case the task definition does not exist (neither
     *     inline in the workflow task nor registered in the metadata store)
     */
    @Override
    public List<TaskModel> getMappedTasks(TaskMapperContext taskMapperContext)
            throws TerminateWorkflowException {
        LOGGER.debug("TaskMapperContext {} in UserDefinedTaskMapper", taskMapperContext);

        WorkflowTask workflowTask = taskMapperContext.getWorkflowTask();
        WorkflowModel workflowModel = taskMapperContext.getWorkflowModel();
        String taskId = taskMapperContext.getTaskId();
        int retryCount = taskMapperContext.getRetryCount();

        // Prefer the inline task definition; fall back to the metadata store, failing the
        // workflow if neither is available.
        TaskDef taskDefinition =
                Optional.ofNullable(taskMapperContext.getTaskDefinition())
                        .orElseGet(
                                () ->
                                        Optional.ofNullable(
                                                        metadataDAO.getTaskDef(
                                                                workflowTask.getName()))
                                                .orElseThrow(
                                                        () -> {
                                                            String reason =
                                                                    String.format(
                                                                            "Invalid task specified. Cannot find task by name %s in the task definitions",
                                                                            workflowTask.getName());
                                                            return new TerminateWorkflowException(
                                                                    reason);
                                                        }));

        Map<String, Object> input =
                parametersUtils.getTaskInputV2(
                        workflowTask.getInputParameters(), workflowModel, taskId, taskDefinition);

        TaskModel userDefinedTask = taskMapperContext.createTaskModel();
        userDefinedTask.setInputData(input);
        userDefinedTask.setStatus(TaskModel.Status.SCHEDULED);
        userDefinedTask.setRetryCount(retryCount);
        // delay the first poll by the configured start delay
        userDefinedTask.setCallbackAfterSeconds(workflowTask.getStartDelay());
        userDefinedTask.setRateLimitPerFrequency(taskDefinition.getRateLimitPerFrequency());
        userDefinedTask.setRateLimitFrequencyInSeconds(
                taskDefinition.getRateLimitFrequencyInSeconds());
        return List.of(userDefinedTask);
    }
}
6,725
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution/mapper/DoWhileTaskMapper.java
/*
 * Copyright 2022 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.core.execution.mapper;

import java.util.List;
import java.util.Map;
import java.util.Optional;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.tasks.TaskType;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.core.utils.ParametersUtils;
import com.netflix.conductor.dao.MetadataDAO;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;

/**
 * An implementation of {@link TaskMapper} to map a {@link WorkflowTask} of type {@link
 * TaskType#DO_WHILE} to a {@link TaskModel} of type {@link TaskType#DO_WHILE}
 */
@Component
public class DoWhileTaskMapper implements TaskMapper {

    private static final Logger LOGGER = LoggerFactory.getLogger(DoWhileTaskMapper.class);

    private final MetadataDAO metadataDAO;
    private final ParametersUtils parametersUtils;

    @Autowired
    public DoWhileTaskMapper(MetadataDAO metadataDAO, ParametersUtils parametersUtils) {
        this.metadataDAO = metadataDAO;
        this.parametersUtils = parametersUtils;
    }

    @Override
    public String getTaskType() {
        return TaskType.DO_WHILE.name();
    }

    /**
     * Maps a {@link WorkflowTask} of type {@link TaskType#DO_WHILE} to a {@link TaskModel} of type
     * {@link TaskType#DO_WHILE} with a status of {@link TaskModel.Status#IN_PROGRESS}.
     *
     * <p>If the loop task already reached a terminal state in this workflow execution, nothing is
     * scheduled and an empty list is returned.
     *
     * @param taskMapperContext: A wrapper class containing the {@link WorkflowTask}, {@link
     *     WorkflowDef}, {@link WorkflowModel} and a string representation of the TaskId
     * @return a singleton list holding the DO_WHILE {@link TaskModel}, or an empty list when the
     *     loop is already terminal
     */
    @Override
    public List<TaskModel> getMappedTasks(TaskMapperContext taskMapperContext) {
        LOGGER.debug("TaskMapperContext {} in DoWhileTaskMapper", taskMapperContext);

        WorkflowTask loopWorkflowTask = taskMapperContext.getWorkflowTask();
        WorkflowModel workflow = taskMapperContext.getWorkflowModel();

        TaskModel existingLoopTask =
                workflow.getTaskByRefName(loopWorkflowTask.getTaskReferenceName());
        if (existingLoopTask != null && existingLoopTask.getStatus().isTerminal()) {
            // Since loopTask is already completed no need to schedule task again.
            return List.of();
        }

        // Inline definition wins; otherwise consult the metadata store; otherwise use an
        // empty TaskDef so the rate-limit getters below return defaults.
        TaskDef definition = taskMapperContext.getTaskDefinition();
        if (definition == null) {
            definition = metadataDAO.getTaskDef(loopWorkflowTask.getName());
        }
        if (definition == null) {
            definition = new TaskDef();
        }

        TaskModel loopTask = taskMapperContext.createTaskModel();
        loopTask.setTaskType(TaskType.TASK_TYPE_DO_WHILE);
        loopTask.setStatus(TaskModel.Status.IN_PROGRESS);
        loopTask.setStartTime(System.currentTimeMillis());
        loopTask.setRateLimitPerFrequency(definition.getRateLimitPerFrequency());
        loopTask.setRateLimitFrequencyInSeconds(definition.getRateLimitFrequencyInSeconds());
        loopTask.setRetryCount(taskMapperContext.getRetryCount());

        Map<String, Object> resolvedInput =
                parametersUtils.getTaskInputV2(
                        loopWorkflowTask.getInputParameters(),
                        workflow,
                        loopTask.getTaskId(),
                        definition);
        loopTask.setInputData(resolvedInput);

        return List.of(loopTask);
    }
}
6,726
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution/evaluators/JavascriptEvaluator.java
/* * Copyright 2022 Netflix, Inc. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.core.execution.evaluators; import javax.script.ScriptException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.stereotype.Component; import com.netflix.conductor.core.events.ScriptEvaluator; import com.netflix.conductor.core.exception.TerminateWorkflowException; @Component(JavascriptEvaluator.NAME) public class JavascriptEvaluator implements Evaluator { public static final String NAME = "javascript"; private static final Logger LOGGER = LoggerFactory.getLogger(JavascriptEvaluator.class); @Override public Object evaluate(String expression, Object input) { LOGGER.debug("Javascript evaluator -- expression: {}", expression); try { // Evaluate the expression by using the Javascript evaluation engine. Object result = ScriptEvaluator.eval(expression, input); LOGGER.debug("Javascript evaluator -- result: {}", result); return result; } catch (ScriptException e) { LOGGER.error("Error while evaluating script: {}", expression, e); throw new TerminateWorkflowException(e.getMessage()); } } }
6,727
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution/evaluators/ValueParamEvaluator.java
/* * Copyright 2022 Netflix, Inc. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.core.execution.evaluators; import java.util.Map; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.stereotype.Component; import com.netflix.conductor.core.exception.TerminateWorkflowException; @Component(ValueParamEvaluator.NAME) public class ValueParamEvaluator implements Evaluator { public static final String NAME = "value-param"; private static final Logger LOGGER = LoggerFactory.getLogger(ValueParamEvaluator.class); @SuppressWarnings("unchecked") @Override public Object evaluate(String expression, Object input) { LOGGER.debug("ValueParam evaluator -- evaluating: {}", expression); if (input instanceof Map) { Object result = ((Map<String, Object>) input).get(expression); LOGGER.debug("ValueParam evaluator -- result: {}", result); return result; } else { String errorMsg = String.format("Input has to be a JSON object: %s", input.getClass()); LOGGER.error(errorMsg); throw new TerminateWorkflowException(errorMsg); } } }
6,728
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/execution/evaluators/Evaluator.java
/* * Copyright 2022 Netflix, Inc. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.core.execution.evaluators; public interface Evaluator { /** * Evaluate the expression using the inputs provided, if required. Evaluation of the expression * depends on the type of the evaluator. * * @param expression Expression to be evaluated. * @param input Input object to the evaluator to help evaluate the expression. * @return Return the evaluation result. */ Object evaluate(String expression, Object input); }
6,729
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/index/NoopIndexDAO.java
/*
 * Copyright 2022 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.core.index;

import java.util.Collections;
import java.util.List;
import java.util.concurrent.CompletableFuture;

import com.netflix.conductor.common.metadata.events.EventExecution;
import com.netflix.conductor.common.metadata.tasks.TaskExecLog;
import com.netflix.conductor.common.run.SearchResult;
import com.netflix.conductor.common.run.TaskSummary;
import com.netflix.conductor.common.run.WorkflowSummary;
import com.netflix.conductor.core.events.queue.Message;
import com.netflix.conductor.dao.IndexDAO;

/**
 * Dummy implementation of {@link IndexDAO} which does nothing. Nothing is ever indexed, and no
 * results are ever returned.
 *
 * <p>Contract for callers: every {@code async*} method returns an already-completed future (never
 * {@code null}), every search returns an empty {@link SearchResult}, and every list-returning
 * method returns an empty list.
 */
public class NoopIndexDAO implements IndexDAO {

    @Override
    public void setup() {}

    @Override
    public void indexWorkflow(WorkflowSummary workflowSummary) {}

    @Override
    public CompletableFuture<Void> asyncIndexWorkflow(WorkflowSummary workflowSummary) {
        return CompletableFuture.completedFuture(null);
    }

    @Override
    public void indexTask(TaskSummary taskSummary) {}

    @Override
    public CompletableFuture<Void> asyncIndexTask(TaskSummary taskSummary) {
        return CompletableFuture.completedFuture(null);
    }

    @Override
    public SearchResult<String> searchWorkflows(
            String query, String freeText, int start, int count, List<String> sort) {
        return new SearchResult<>(0, Collections.emptyList());
    }

    @Override
    public SearchResult<WorkflowSummary> searchWorkflowSummary(
            String query, String freeText, int start, int count, List<String> sort) {
        return new SearchResult<>(0, Collections.emptyList());
    }

    @Override
    public SearchResult<String> searchTasks(
            String query, String freeText, int start, int count, List<String> sort) {
        return new SearchResult<>(0, Collections.emptyList());
    }

    @Override
    public SearchResult<TaskSummary> searchTaskSummary(
            String query, String freeText, int start, int count, List<String> sort) {
        return new SearchResult<>(0, Collections.emptyList());
    }

    @Override
    public void removeWorkflow(String workflowId) {}

    @Override
    public CompletableFuture<Void> asyncRemoveWorkflow(String workflowId) {
        return CompletableFuture.completedFuture(null);
    }

    @Override
    public void updateWorkflow(String workflowInstanceId, String[] keys, Object[] values) {}

    @Override
    public CompletableFuture<Void> asyncUpdateWorkflow(
            String workflowInstanceId, String[] keys, Object[] values) {
        return CompletableFuture.completedFuture(null);
    }

    @Override
    public void removeTask(String workflowId, String taskId) {}

    @Override
    public CompletableFuture<Void> asyncRemoveTask(String workflowId, String taskId) {
        return CompletableFuture.completedFuture(null);
    }

    @Override
    public void updateTask(String workflowId, String taskId, String[] keys, Object[] values) {}

    @Override
    public CompletableFuture<Void> asyncUpdateTask(
            String workflowId, String taskId, String[] keys, Object[] values) {
        return CompletableFuture.completedFuture(null);
    }

    @Override
    public String get(String workflowInstanceId, String key) {
        return null;
    }

    @Override
    public void addTaskExecutionLogs(List<TaskExecLog> logs) {}

    @Override
    public CompletableFuture<Void> asyncAddTaskExecutionLogs(List<TaskExecLog> logs) {
        return CompletableFuture.completedFuture(null);
    }

    @Override
    public List<TaskExecLog> getTaskExecutionLogs(String taskId) {
        return Collections.emptyList();
    }

    @Override
    public void addEventExecution(EventExecution eventExecution) {}

    @Override
    public List<EventExecution> getEventExecutions(String event) {
        return Collections.emptyList();
    }

    @Override
    public CompletableFuture<Void> asyncAddEventExecution(EventExecution eventExecution) {
        // FIX: previously returned null, unlike every other async* method in this class.
        // Callers that chain on the returned future (thenRun, join, allOf, ...) would hit a
        // NullPointerException. Return a completed future for consistency and safety.
        return CompletableFuture.completedFuture(null);
    }

    @Override
    public void addMessage(String queue, Message msg) {}

    @Override
    public CompletableFuture<Void> asyncAddMessage(String queue, Message message) {
        return CompletableFuture.completedFuture(null);
    }

    @Override
    public List<Message> getMessages(String queue) {
        return Collections.emptyList();
    }

    @Override
    public List<String> searchArchivableWorkflows(String indexName, long archiveTtlDays) {
        return Collections.emptyList();
    }

    @Override
    public long getWorkflowCount(String query, String freeText) {
        return 0;
    }
}
6,730
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/index/NoopIndexDAOConfiguration.java
/* * Copyright 2022 Netflix, Inc. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.core.index; import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import com.netflix.conductor.dao.IndexDAO; @Configuration(proxyBeanMethods = false) @ConditionalOnProperty(name = "conductor.indexing.enabled", havingValue = "false") public class NoopIndexDAOConfiguration { @Bean public IndexDAO noopIndexDAO() { return new NoopIndexDAO(); } }
6,731
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/sync/Lock.java
/* * Copyright 2022 Netflix, Inc. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.core.sync; import java.util.concurrent.TimeUnit; /** * Interface implemented by a distributed lock client. * * <p>A typical usage: * * <pre> * if (acquireLock(workflowId, 5, TimeUnit.MILLISECONDS)) { * [load and execute workflow....] * ExecutionDAO.updateWorkflow(workflow); //use optimistic locking * } finally { * releaseLock(workflowId) * } * </pre> */ public interface Lock { /** * Acquires a re-entrant lock on lockId, blocks indefinitely on lockId until it succeeds * * @param lockId resource to lock on */ void acquireLock(String lockId); /** * Acquires a re-entrant lock on lockId, blocks for timeToTry duration before giving up * * @param lockId resource to lock on * @param timeToTry blocks up to timeToTry duration in attempt to acquire the lock * @param unit time unit * @return true, if successfully acquired */ boolean acquireLock(String lockId, long timeToTry, TimeUnit unit); /** * Acquires a re-entrant lock on lockId with provided leaseTime duration. Blocks for timeToTry * duration before giving up * * @param lockId resource to lock on * @param timeToTry blocks up to timeToTry duration in attempt to acquire the lock * @param leaseTime Lock lease expiration duration. 
* @param unit time unit * @return true, if successfully acquired */ boolean acquireLock(String lockId, long timeToTry, long leaseTime, TimeUnit unit); /** * Release a previously acquired lock * * @param lockId resource to lock on */ void releaseLock(String lockId); /** * Explicitly cleanup lock resources, if releasing it wouldn't do so. * * @param lockId resource to lock on */ void deleteLock(String lockId); }
6,732
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/sync
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/sync/noop/NoopLock.java
/* * Copyright 2022 Netflix, Inc. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.core.sync.noop; import java.util.concurrent.TimeUnit; import com.netflix.conductor.core.sync.Lock; public class NoopLock implements Lock { @Override public void acquireLock(String lockId) {} @Override public boolean acquireLock(String lockId, long timeToTry, TimeUnit unit) { return true; } @Override public boolean acquireLock(String lockId, long timeToTry, long leaseTime, TimeUnit unit) { return true; } @Override public void releaseLock(String lockId) {} @Override public void deleteLock(String lockId) {} }
6,733
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/sync
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/sync/local/LocalOnlyLockConfiguration.java
/* * Copyright 2020 Netflix, Inc. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.core.sync.local; import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import com.netflix.conductor.core.sync.Lock; @Configuration @ConditionalOnProperty(name = "conductor.workflow-execution-lock.type", havingValue = "local_only") public class LocalOnlyLockConfiguration { @Bean public Lock provideLock() { return new LocalOnlyLock(); } }
6,734
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/sync
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/sync/local/LocalOnlyLock.java
/*
 * Copyright 2020 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.core.sync.local;

import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.ReentrantLock;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.netflix.conductor.annotations.VisibleForTesting;
import com.netflix.conductor.core.sync.Lock;

import com.github.benmanes.caffeine.cache.CacheLoader;
import com.github.benmanes.caffeine.cache.Caffeine;
import com.github.benmanes.caffeine.cache.LoadingCache;

/**
 * JVM-local {@link Lock} implementation backed by a Caffeine cache of fair {@link ReentrantLock}s
 * keyed by lockId. Lease expiration is simulated by scheduling a {@link #deleteLock(String)} on a
 * single-threaded scheduler. All state is static, so every LocalOnlyLock instance in the JVM
 * shares the same lock table.
 */
public class LocalOnlyLock implements Lock {

    private static final Logger LOGGER = LoggerFactory.getLogger(LocalOnlyLock.class);

    // Each distinct lockId lazily gets its own fair ReentrantLock.
    private static final CacheLoader<String, ReentrantLock> LOADER =
            key -> new ReentrantLock(true);
    // Pending lease-expiration jobs, keyed by lockId.
    private static final ConcurrentHashMap<String, ScheduledFuture<?>> SCHEDULEDFUTURES =
            new ConcurrentHashMap<>();
    private static final LoadingCache<String, ReentrantLock> LOCKIDTOSEMAPHOREMAP =
            Caffeine.newBuilder().build(LOADER);
    private static final ThreadGroup THREAD_GROUP = new ThreadGroup("LocalOnlyLock-scheduler");
    private static final ThreadFactory THREAD_FACTORY =
            runnable -> new Thread(THREAD_GROUP, runnable);
    private static final ScheduledExecutorService SCHEDULER =
            Executors.newScheduledThreadPool(1, THREAD_FACTORY);

    /** Blocks until the lock for lockId is acquired (re-entrant for the owning thread). */
    @Override
    public void acquireLock(String lockId) {
        LOGGER.trace("Locking {}", lockId);
        LOCKIDTOSEMAPHOREMAP.get(lockId).lock();
    }

    /**
     * Tries to acquire the lock for up to timeToTry.
     *
     * @return true if acquired within the timeout
     */
    @Override
    public boolean acquireLock(String lockId, long timeToTry, TimeUnit unit) {
        try {
            LOGGER.trace("Locking {} with timeout {} {}", lockId, timeToTry, unit);
            return LOCKIDTOSEMAPHOREMAP.get(lockId).tryLock(timeToTry, unit);
        } catch (InterruptedException e) {
            // Restore the interrupt flag before surfacing the failure.
            Thread.currentThread().interrupt();
            throw new RuntimeException(e);
        }
    }

    /**
     * Tries to acquire the lock and, on success, schedules an automatic deleteLock after
     * leaseTime.
     *
     * <p>NOTE(review): a second leased acquire for the same lockId overwrites the previously
     * stored ScheduledFuture without cancelling it, so the earlier expiration job still fires —
     * TODO confirm whether that is intended.
     */
    @Override
    public boolean acquireLock(String lockId, long timeToTry, long leaseTime, TimeUnit unit) {
        LOGGER.trace(
                "Locking {} with timeout {} {} for {} {}",
                lockId,
                timeToTry,
                unit,
                leaseTime,
                unit);
        if (acquireLock(lockId, timeToTry, unit)) {
            LOGGER.trace("Releasing {} automatically after {} {}", lockId, leaseTime, unit);
            SCHEDULEDFUTURES.put(
                    lockId, SCHEDULER.schedule(() -> deleteLock(lockId), leaseTime, unit));
            return true;
        }
        return false;
    }

    // Cancels the pending lease-expiration job for lockId, if one exists and is cancellable.
    private void removeLeaseExpirationJob(String lockId) {
        ScheduledFuture<?> schedFuture = SCHEDULEDFUTURES.get(lockId);
        if (schedFuture != null && schedFuture.cancel(false)) {
            SCHEDULEDFUTURES.remove(lockId);
            LOGGER.trace("lockId {} removed from lease expiration job", lockId);
        }
    }

    /**
     * Releases the lock for lockId if one is tracked; silently returns when unknown.
     *
     * <p>NOTE(review): if called by a thread that does not hold the ReentrantLock, unlock() will
     * throw IllegalMonitorStateException — callers must release from the acquiring thread.
     */
    @Override
    public void releaseLock(String lockId) {
        // Synchronized to prevent race condition between semaphore check and actual release
        synchronized (LOCKIDTOSEMAPHOREMAP) {
            if (LOCKIDTOSEMAPHOREMAP.getIfPresent(lockId) == null) {
                return;
            }
            LOGGER.trace("Releasing {}", lockId);
            LOCKIDTOSEMAPHOREMAP.get(lockId).unlock();
            removeLeaseExpirationJob(lockId);
        }
    }

    /**
     * Drops the lock entry entirely.
     *
     * <p>NOTE(review): this invalidates the cache entry even while a thread holds the lock; a
     * later acquire on the same lockId loads a fresh ReentrantLock, so exclusion against the old
     * holder is lost — presumably acceptable for lease expiry, but verify.
     */
    @Override
    public void deleteLock(String lockId) {
        LOGGER.trace("Deleting {}", lockId);
        LOCKIDTOSEMAPHOREMAP.invalidate(lockId);
    }

    @VisibleForTesting
    LoadingCache<String, ReentrantLock> cache() {
        return LOCKIDTOSEMAPHOREMAP;
    }

    @VisibleForTesting
    ConcurrentHashMap<String, ScheduledFuture<?>> scheduledFutures() {
        return SCHEDULEDFUTURES;
    }
}
6,735
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/dal/ExecutionDAOFacade.java
/*
 * Copyright 2022 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.core.dal;

import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;

import javax.annotation.PreDestroy;

import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;

import com.netflix.conductor.common.metadata.events.EventExecution;
import com.netflix.conductor.common.metadata.tasks.PollData;
import com.netflix.conductor.common.metadata.tasks.Task;
import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.tasks.TaskExecLog;
import com.netflix.conductor.common.run.SearchResult;
import com.netflix.conductor.common.run.TaskSummary;
import com.netflix.conductor.common.run.Workflow;
import com.netflix.conductor.common.run.WorkflowSummary;
import com.netflix.conductor.common.utils.ExternalPayloadStorage;
import com.netflix.conductor.core.config.ConductorProperties;
import com.netflix.conductor.core.events.queue.Message;
import com.netflix.conductor.core.exception.NotFoundException;
import com.netflix.conductor.core.exception.TerminateWorkflowException;
import com.netflix.conductor.core.exception.TransientException;
import com.netflix.conductor.core.utils.ExternalPayloadStorageUtils;
import com.netflix.conductor.core.utils.QueueUtils;
import com.netflix.conductor.dao.*;
import com.netflix.conductor.metrics.Monitors;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;

import static com.netflix.conductor.core.utils.Utils.DECIDER_QUEUE;

/**
 * Service that acts as a facade for accessing execution data from the {@link ExecutionDAO}, {@link
 * RateLimitingDAO} and {@link IndexDAO} storage layers
 */
@SuppressWarnings("SpringJavaInjectionPointsAutowiringInspection")
@Component
public class ExecutionDAOFacade {

    private static final Logger LOGGER = LoggerFactory.getLogger(ExecutionDAOFacade.class);

    // Index document fields used for workflow archival in the IndexDAO.
    private static final String ARCHIVED_FIELD = "archived";
    private static final String RAW_JSON_FIELD = "rawJSON";

    private final ExecutionDAO executionDAO;
    private final QueueDAO queueDAO;
    private final IndexDAO indexDAO;
    private final RateLimitingDAO rateLimitingDao;
    private final ConcurrentExecutionLimitDAO concurrentExecutionLimitDAO;
    private final PollDataDAO pollDataDAO;
    private final ObjectMapper objectMapper;
    private final ConductorProperties properties;
    private final ExternalPayloadStorageUtils externalPayloadStorageUtils;
    // Executes DelayWorkflowUpdate jobs that defer indexing of short-running workflows.
    private final ScheduledThreadPoolExecutor scheduledThreadPoolExecutor;

    public ExecutionDAOFacade(
            ExecutionDAO executionDAO,
            QueueDAO queueDAO,
            IndexDAO indexDAO,
            RateLimitingDAO rateLimitingDao,
            ConcurrentExecutionLimitDAO concurrentExecutionLimitDAO,
            PollDataDAO pollDataDAO,
            ObjectMapper objectMapper,
            ConductorProperties properties,
            ExternalPayloadStorageUtils externalPayloadStorageUtils) {
        this.executionDAO = executionDAO;
        this.queueDAO = queueDAO;
        this.indexDAO = indexDAO;
        this.rateLimitingDao = rateLimitingDao;
        this.concurrentExecutionLimitDAO = concurrentExecutionLimitDAO;
        this.pollDataDAO = pollDataDAO;
        this.objectMapper = objectMapper;
        this.properties = properties;
        this.externalPayloadStorageUtils = externalPayloadStorageUtils;
        // Rejected delayed-index jobs are dropped (not retried) and counted via Monitors.
        this.scheduledThreadPoolExecutor =
                new ScheduledThreadPoolExecutor(
                        4,
                        (runnable, executor) -> {
                            LOGGER.warn(
                                    "Request {} to delay updating index dropped in executor {}",
                                    runnable,
                                    executor);
                            Monitors.recordDiscardedIndexingCount("delayQueue");
                        });
        this.scheduledThreadPoolExecutor.setRemoveOnCancelPolicy(true);
    }

    /**
     * Gracefully drains the delayed-indexing executor on shutdown, waiting up to the configured
     * async update delay before forcing termination.
     */
    @PreDestroy
    public void shutdownExecutorService() {
        try {
            LOGGER.info("Gracefully shutdown executor service");
            scheduledThreadPoolExecutor.shutdown();
            if (scheduledThreadPoolExecutor.awaitTermination(
                    properties.getAsyncUpdateDelay().getSeconds(), TimeUnit.SECONDS)) {
                LOGGER.debug("tasks completed, shutting down");
            } else {
                LOGGER.warn(
                        "Forcing shutdown after waiting for {} seconds",
                        properties.getAsyncUpdateDelay());
                scheduledThreadPoolExecutor.shutdownNow();
            }
        } catch (InterruptedException ie) {
            LOGGER.warn(
                    "Shutdown interrupted, invoking shutdownNow on scheduledThreadPoolExecutor for delay queue");
            scheduledThreadPoolExecutor.shutdownNow();
            Thread.currentThread().interrupt();
        }
    }

    /**
     * Fetches the workflow model and hydrates any externalized payloads (workflow and task
     * input/output stored in external payload storage) before returning it.
     */
    public WorkflowModel getWorkflowModel(String workflowId, boolean includeTasks) {
        WorkflowModel workflowModel = getWorkflowModelFromDataStore(workflowId, includeTasks);
        populateWorkflowAndTaskPayloadData(workflowModel);
        return workflowModel;
    }

    /**
     * Fetches the {@link Workflow} object from the data store given the id. Attempts to fetch from
     * {@link ExecutionDAO} first, if not found, attempts to fetch from {@link IndexDAO}.
     *
     * @param workflowId the id of the workflow to be fetched
     * @param includeTasks if true, fetches the {@link Task} data in the workflow.
     * @return the {@link Workflow} object
     * @throws NotFoundException no such {@link Workflow} is found.
     * @throws TransientException parsing the {@link Workflow} object fails.
     */
    public Workflow getWorkflow(String workflowId, boolean includeTasks) {
        return getWorkflowModelFromDataStore(workflowId, includeTasks).toWorkflow();
    }

    // Primary lookup goes to the ExecutionDAO; archived workflows are recovered from the
    // indexed raw JSON document as a fallback.
    private WorkflowModel getWorkflowModelFromDataStore(String workflowId, boolean includeTasks) {
        WorkflowModel workflow = executionDAO.getWorkflow(workflowId, includeTasks);
        if (workflow == null) {
            LOGGER.debug("Workflow {} not found in executionDAO, checking indexDAO", workflowId);
            String json = indexDAO.get(workflowId, RAW_JSON_FIELD);
            if (json == null) {
                String errorMsg = String.format("No such workflow found by id: %s", workflowId);
                LOGGER.error(errorMsg);
                throw new NotFoundException(errorMsg);
            }
            try {
                workflow = objectMapper.readValue(json, WorkflowModel.class);
                if (!includeTasks) {
                    workflow.getTasks().clear();
                }
            } catch (IOException e) {
                String errorMsg = String.format("Error reading workflow: %s", workflowId);
                LOGGER.error(errorMsg);
                throw new TransientException(errorMsg, e);
            }
        }
        return workflow;
    }

    /**
     * Retrieve all workflow executions with the given correlationId and workflow type Uses the
     * {@link IndexDAO} to search across workflows if the {@link ExecutionDAO} cannot perform
     * searches across workflows.
     *
     * @param workflowName, workflow type to be queried
     * @param correlationId the correlation id to be queried
     * @param includeTasks if true, fetches the {@link Task} data within the workflows
     * @return the list of {@link Workflow} executions matching the correlationId
     */
    public List<Workflow> getWorkflowsByCorrelationId(
            String workflowName, String correlationId, boolean includeTasks) {
        if (!executionDAO.canSearchAcrossWorkflows()) {
            String query =
                    "correlationId='"
                            + correlationId
                            + "' AND workflowType='"
                            + workflowName
                            + "'";
            SearchResult<String> result = indexDAO.searchWorkflows(query, "*", 0, 1000, null);
            return result.getResults().stream()
                    .parallel()
                    .map(
                            workflowId -> {
                                try {
                                    return getWorkflow(workflowId, includeTasks);
                                } catch (NotFoundException e) {
                                    // This might happen when the workflow archival failed and the
                                    // workflow was removed from primary datastore
                                    LOGGER.error(
                                            "Error getting the workflow: {} for correlationId: {} from datastore/index",
                                            workflowId,
                                            correlationId,
                                            e);
                                    return null;
                                }
                            })
                    .filter(Objects::nonNull)
                    .collect(Collectors.toList());
        }
        return executionDAO
                .getWorkflowsByCorrelationId(workflowName, correlationId, includeTasks)
                .stream()
                .map(WorkflowModel::toWorkflow)
                .collect(Collectors.toList());
    }

    public List<Workflow> getWorkflowsByName(String workflowName, Long startTime, Long endTime) {
        return executionDAO.getWorkflowsByType(workflowName, startTime, endTime).stream()
                .map(WorkflowModel::toWorkflow)
                .collect(Collectors.toList());
    }

    public List<Workflow> getPendingWorkflowsByName(String workflowName, int version) {
        return executionDAO.getPendingWorkflowsByType(workflowName, version).stream()
                .map(WorkflowModel::toWorkflow)
                .collect(Collectors.toList());
    }

    public List<String> getRunningWorkflowIds(String workflowName, int version) {
        return executionDAO.getRunningWorkflowIds(workflowName, version);
    }

    public long getPendingWorkflowCount(String workflowName) {
        return executionDAO.getPendingWorkflowCount(workflowName);
    }

    /**
     * Creates a new workflow in the data store
     *
     * @param workflowModel the workflow to be created
     * @return the id of the created workflow
     */
    public String createWorkflow(WorkflowModel workflowModel) {
        // Large payloads are moved to external storage before persisting.
        externalizeWorkflowData(workflowModel);
        executionDAO.createWorkflow(workflowModel);
        // Add to decider queue
        queueDAO.push(
                DECIDER_QUEUE,
                workflowModel.getWorkflowId(),
                workflowModel.getPriority(),
                properties.getWorkflowOffsetTimeout().getSeconds());
        if (properties.isAsyncIndexingEnabled()) {
            indexDAO.asyncIndexWorkflow(new WorkflowSummary(workflowModel.toWorkflow()));
        } else {
            indexDAO.indexWorkflow(new WorkflowSummary(workflowModel.toWorkflow()));
        }
        return workflowModel.getWorkflowId();
    }

    // Uploads oversized task input/output to external payload storage (in place).
    private void externalizeTaskData(TaskModel taskModel) {
        externalPayloadStorageUtils.verifyAndUpload(
                taskModel, ExternalPayloadStorage.PayloadType.TASK_INPUT);
        externalPayloadStorageUtils.verifyAndUpload(
                taskModel, ExternalPayloadStorage.PayloadType.TASK_OUTPUT);
    }

    // Uploads oversized workflow input/output to external payload storage (in place).
    private void externalizeWorkflowData(WorkflowModel workflowModel) {
        externalPayloadStorageUtils.verifyAndUpload(
                workflowModel, ExternalPayloadStorage.PayloadType.WORKFLOW_INPUT);
        externalPayloadStorageUtils.verifyAndUpload(
                workflowModel, ExternalPayloadStorage.PayloadType.WORKFLOW_OUTPUT);
    }

    /**
     * Updates the given workflow in the data store
     *
     * @param workflowModel the workflow to be updated
     * @return the id of the updated workflow
     */
    public String updateWorkflow(WorkflowModel workflowModel) {
        workflowModel.setUpdatedTime(System.currentTimeMillis());
        if (workflowModel.getStatus().isTerminal()) {
            workflowModel.setEndTime(System.currentTimeMillis());
        }
        externalizeWorkflowData(workflowModel);
        executionDAO.updateWorkflow(workflowModel);
        if (properties.isAsyncIndexingEnabled()) {
            // Short-running terminal workflows are indexed after a delay so rapid
            // intermediate updates don't flood the index.
            if (workflowModel.getStatus().isTerminal()
                    && workflowModel.getEndTime() - workflowModel.getCreateTime()
                            < properties.getAsyncUpdateShortRunningWorkflowDuration().toMillis()) {
                final String workflowId = workflowModel.getWorkflowId();
                DelayWorkflowUpdate delayWorkflowUpdate = new DelayWorkflowUpdate(workflowId);
                LOGGER.debug(
                        "Delayed updating workflow: {} in the index by {} seconds",
                        workflowId,
                        properties.getAsyncUpdateDelay());
                scheduledThreadPoolExecutor.schedule(
                        delayWorkflowUpdate,
                        properties.getAsyncUpdateDelay().getSeconds(),
                        TimeUnit.SECONDS);
                Monitors.recordWorkerQueueSize(
                        "delayQueue", scheduledThreadPoolExecutor.getQueue().size());
            } else {
                indexDAO.asyncIndexWorkflow(new WorkflowSummary(workflowModel.toWorkflow()));
            }
            // With async indexing, tasks are only indexed once the workflow reaches a
            // terminal state (see the comment in updateTask).
            if (workflowModel.getStatus().isTerminal()) {
                workflowModel
                        .getTasks()
                        .forEach(
                                taskModel ->
                                        indexDAO.asyncIndexTask(
                                                new TaskSummary(taskModel.toTask())));
            }
        } else {
            indexDAO.indexWorkflow(new WorkflowSummary(workflowModel.toWorkflow()));
        }
        return workflowModel.getWorkflowId();
    }

    public void removeFromPendingWorkflow(String workflowType, String workflowId) {
        executionDAO.removeFromPendingWorkflow(workflowType, workflowId);
    }

    /**
     * Removes the workflow from the data store.
     *
     * @param workflowId the id of the workflow to be removed
     * @param archiveWorkflow if true, the workflow and associated tasks will be archived in the
     *     {@link IndexDAO} after removal from {@link ExecutionDAO}.
     */
    public void removeWorkflow(String workflowId, boolean archiveWorkflow) {
        WorkflowModel workflow = getWorkflowModelFromDataStore(workflowId, true);

        executionDAO.removeWorkflow(workflowId);
        try {
            removeWorkflowIndex(workflow, archiveWorkflow);
        } catch (JsonProcessingException e) {
            throw new TransientException("Workflow can not be serialized to json", e);
        }

        workflow.getTasks()
                .forEach(
                        task -> {
                            try {
                                removeTaskIndex(workflow, task, archiveWorkflow);
                            } catch (JsonProcessingException e) {
                                throw new TransientException(
                                        String.format(
                                                "Task %s of workflow %s can not be serialized to json",
                                                task.getTaskId(), workflow.getWorkflowId()),
                                        e);
                            }

                            try {
                                queueDAO.remove(QueueUtils.getQueueName(task), task.getTaskId());
                            } catch (Exception e) {
                                // NOTE(review): the message placeholders read "task: {} of
                                // workflow: {}" but the arguments are (workflowId, taskId) —
                                // looks swapped; confirm and fix the argument order.
                                LOGGER.info(
                                        "Error removing task: {} of workflow: {} from {} queue",
                                        workflowId,
                                        task.getTaskId(),
                                        QueueUtils.getQueueName(task),
                                        e);
                            }
                        });

        try {
            queueDAO.remove(DECIDER_QUEUE, workflowId);
        } catch (Exception e) {
            LOGGER.info("Error removing workflow: {} from decider queue", workflowId, e);
        }
    }

    private void removeWorkflowIndex(WorkflowModel workflow, boolean archiveWorkflow)
            throws JsonProcessingException {
        if (archiveWorkflow) {
            if (workflow.getStatus().isTerminal()) {
                // Only allow archival if workflow is in terminal state
                // DO NOT archive async, since if archival errors out, workflow data will be lost
                indexDAO.updateWorkflow(
                        workflow.getWorkflowId(),
                        new String[] {RAW_JSON_FIELD, ARCHIVED_FIELD},
                        new Object[] {objectMapper.writeValueAsString(workflow), true});
            } else {
                throw new IllegalArgumentException(
                        String.format(
                                "Cannot archive workflow: %s with status: %s",
                                workflow.getWorkflowId(), workflow.getStatus()));
            }
        } else {
            // Not archiving, also remove workflow from index
            indexDAO.asyncRemoveWorkflow(workflow.getWorkflowId());
        }
    }

    /**
     * Archives (or de-indexes) the workflow and then removes it from the primary store with a TTL
     * so the data store can expire it.
     */
    public void removeWorkflowWithExpiry(
            String workflowId, boolean archiveWorkflow, int ttlSeconds) {
        try {
            WorkflowModel workflow = getWorkflowModelFromDataStore(workflowId, true);

            removeWorkflowIndex(workflow, archiveWorkflow);
            // remove workflow from DAO with TTL
            executionDAO.removeWorkflowWithExpiry(workflowId, ttlSeconds);
        } catch (Exception e) {
            Monitors.recordDaoError("executionDao", "removeWorkflow");
            throw new TransientException("Error removing workflow: " + workflowId, e);
        }
    }

    /**
     * Reset the workflow state by removing from the {@link ExecutionDAO} and removing this workflow
     * from the {@link IndexDAO}.
     *
     * @param workflowId the workflow id to be reset
     */
    public void resetWorkflow(String workflowId) {
        // Throws NotFoundException if the workflow does not exist before removal.
        getWorkflowModelFromDataStore(workflowId, true);
        executionDAO.removeWorkflow(workflowId);
        try {
            if (properties.isAsyncIndexingEnabled()) {
                indexDAO.asyncRemoveWorkflow(workflowId);
            } else {
                indexDAO.removeWorkflow(workflowId);
            }
        } catch (Exception e) {
            throw new TransientException("Error resetting workflow state: " + workflowId, e);
        }
    }

    public List<TaskModel> createTasks(List<TaskModel> tasks) {
        tasks.forEach(this::externalizeTaskData);
        return executionDAO.createTasks(tasks);
    }

    public List<Task> getTasksForWorkflow(String workflowId) {
        return executionDAO.getTasksForWorkflow(workflowId).stream()
                .map(TaskModel::toTask)
                .collect(Collectors.toList());
    }

    /** Fetches the task and hydrates externalized payloads; returns null if not found. */
    public TaskModel getTaskModel(String taskId) {
        TaskModel taskModel = getTaskFromDatastore(taskId);
        if (taskModel != null) {
            populateTaskData(taskModel);
        }
        return taskModel;
    }

    /** Fetches the task as the DTO type; payloads are NOT hydrated here. Returns null if absent. */
    public Task getTask(String taskId) {
        TaskModel taskModel = getTaskFromDatastore(taskId);
        if (taskModel != null) {
            return taskModel.toTask();
        }
        return null;
    }

    private TaskModel getTaskFromDatastore(String taskId) {
        return executionDAO.getTask(taskId);
    }

    public List<Task> getTasksByName(String taskName, String startKey, int count) {
        return executionDAO.getTasks(taskName, startKey, count).stream()
                .map(TaskModel::toTask)
                .collect(Collectors.toList());
    }

    public List<Task> getPendingTasksForTaskType(String taskType) {
        return executionDAO.getPendingTasksForTaskType(taskType).stream()
                .map(TaskModel::toTask)
                .collect(Collectors.toList());
    }

    public long getInProgressTaskCount(String taskDefName) {
        return executionDAO.getInProgressTaskCount(taskDefName);
    }

    /**
     * Sets the update time for the task. Sets the end time for the task (if task is in terminal
     * state and end time is not set). Updates the task in the {@link ExecutionDAO} first, then
     * stores it in the {@link IndexDAO}.
     *
     * @param taskModel the task to be updated in the data store
     * @throws TransientException if the {@link IndexDAO} or {@link ExecutionDAO} operations fail.
     * @throws com.netflix.conductor.core.exception.NonTransientException if the externalization of
     *     payload fails.
     */
    public void updateTask(TaskModel taskModel) {
        if (taskModel.getStatus() != null) {
            // Refresh updateTime for non-terminal updates, and for the first terminal
            // update (updateTime == 0); later terminal re-updates keep the original stamp.
            if (!taskModel.getStatus().isTerminal()
                    || (taskModel.getStatus().isTerminal() && taskModel.getUpdateTime() == 0)) {
                taskModel.setUpdateTime(System.currentTimeMillis());
            }
            if (taskModel.getStatus().isTerminal() && taskModel.getEndTime() == 0) {
                taskModel.setEndTime(System.currentTimeMillis());
            }
        }
        externalizeTaskData(taskModel);
        executionDAO.updateTask(taskModel);
        try {
            /*
             * Indexing a task for every update adds a lot of volume. That is ok but if async indexing
             * is enabled and tasks are stored in memory until a block has completed, we would lose a lot
             * of tasks on a system failure. So only index for each update if async indexing is not enabled.
             * If it *is* enabled, tasks will be indexed only when a workflow is in terminal state.
             */
            if (!properties.isAsyncIndexingEnabled()) {
                indexDAO.indexTask(new TaskSummary(taskModel.toTask()));
            }
        } catch (TerminateWorkflowException e) {
            // re-throw it so we can terminate the workflow
            throw e;
        } catch (Exception e) {
            String errorMsg =
                    String.format(
                            "Error updating task: %s in workflow: %s",
                            taskModel.getTaskId(), taskModel.getWorkflowInstanceId());
            LOGGER.error(errorMsg, e);
            throw new TransientException(errorMsg, e);
        }
    }

    public void updateTasks(List<TaskModel> tasks) {
        tasks.forEach(this::updateTask);
    }

    public void removeTask(String taskId) {
        executionDAO.removeTask(taskId);
    }

    // Archives (marks archived=true) or de-indexes a single task document.
    private void removeTaskIndex(WorkflowModel workflow, TaskModel task, boolean archiveTask)
            throws JsonProcessingException {
        if (archiveTask) {
            if (task.getStatus().isTerminal()) {
                // Only allow archival if task is in terminal state
                // DO NOT archive async, since if archival errors out, task data will be lost
                indexDAO.updateTask(
                        workflow.getWorkflowId(),
                        task.getTaskId(),
                        new String[] {ARCHIVED_FIELD},
                        new Object[] {true});
            } else {
                throw new IllegalArgumentException(
                        String.format(
                                "Cannot archive task: %s of workflow: %s with status: %s",
                                task.getTaskId(), workflow.getWorkflowId(), task.getStatus()));
            }
        } else {
            // Not archiving, remove task from index
            indexDAO.asyncRemoveTask(workflow.getWorkflowId(), task.getTaskId());
        }
    }

    /** Refreshes the task's updateTime in the data store without touching status or indexing. */
    public void extendLease(TaskModel taskModel) {
        taskModel.setUpdateTime(System.currentTimeMillis());
        executionDAO.updateTask(taskModel);
    }

    public List<PollData> getTaskPollData(String taskName) {
        return pollDataDAO.getPollData(taskName);
    }

    public List<PollData> getAllPollData() {
        return pollDataDAO.getAllPollData();
    }

    /** Best-effort lookup; returns null (and logs) if the poll-data fetch fails. */
    public PollData getTaskPollDataByDomain(String taskName, String domain) {
        try {
            return pollDataDAO.getPollData(taskName, domain);
        } catch (Exception e) {
            LOGGER.error(
                    "Error fetching pollData for task: '{}', domain: '{}'", taskName, domain, e);
            return null;
        }
    }

    /** Best-effort update of last-poll info; errors are logged and counted, never thrown. */
    public void updateTaskLastPoll(String taskName, String domain, String workerId) {
        try {
            pollDataDAO.updateLastPollData(taskName, domain, workerId);
        } catch (Exception e) {
            LOGGER.error(
                    "Error updating PollData for task: {} in domain: {} from worker: {}",
                    taskName,
                    domain,
                    workerId,
                    e);
            Monitors.error(this.getClass().getCanonicalName(), "updateTaskLastPoll");
        }
    }

    /**
     * Save the {@link EventExecution} to the data store Saves to {@link ExecutionDAO} first, if
     * this succeeds then saves to the {@link IndexDAO}.
     *
     * @param eventExecution the {@link EventExecution} to be saved
     * @return true if save succeeds, false otherwise.
     */
    public boolean addEventExecution(EventExecution eventExecution) {
        boolean added = executionDAO.addEventExecution(eventExecution);

        if (added) {
            indexEventExecution(eventExecution);
        }

        return added;
    }

    public void updateEventExecution(EventExecution eventExecution) {
        executionDAO.updateEventExecution(eventExecution);
        indexEventExecution(eventExecution);
    }

    // Indexes the event execution, honoring the indexing-enabled and async-indexing flags.
    private void indexEventExecution(EventExecution eventExecution) {
        if (properties.isEventExecutionIndexingEnabled()) {
            if (properties.isAsyncIndexingEnabled()) {
                indexDAO.asyncAddEventExecution(eventExecution);
            } else {
                indexDAO.addEventExecution(eventExecution);
            }
        }
    }

    public void removeEventExecution(EventExecution eventExecution) {
        executionDAO.removeEventExecution(eventExecution);
    }

    public boolean exceedsInProgressLimit(TaskModel task) {
        return concurrentExecutionLimitDAO.exceedsLimit(task);
    }

    public boolean exceedsRateLimitPerFrequency(TaskModel task, TaskDef taskDef) {
        return rateLimitingDao.exceedsRateLimitPerFrequency(task, taskDef);
    }

    /**
     * Indexes task execution logs, truncating the batch to the configured size limit. No-op when
     * log indexing is disabled or the list is empty.
     */
    public void addTaskExecLog(List<TaskExecLog> logs) {
        if (properties.isTaskExecLogIndexingEnabled() && !logs.isEmpty()) {
            Monitors.recordTaskExecLogSize(logs.size());
            int taskExecLogSizeLimit = properties.getTaskExecLogSizeLimit();
            if (logs.size() > taskExecLogSizeLimit) {
                LOGGER.warn(
                        "Task Execution log size: {} for taskId: {} exceeds the limit: {}",
                        logs.size(),
                        logs.get(0).getTaskId(),
                        taskExecLogSizeLimit);
                logs = logs.stream().limit(taskExecLogSizeLimit).collect(Collectors.toList());
            }
            if (properties.isAsyncIndexingEnabled()) {
                indexDAO.asyncAddTaskExecutionLogs(logs);
            } else {
                indexDAO.addTaskExecutionLogs(logs);
            }
        }
    }

    public void addMessage(String queue, Message message) {
        if (properties.isAsyncIndexingEnabled()) {
            indexDAO.asyncAddMessage(queue, message);
        } else {
            indexDAO.addMessage(queue, message);
        }
    }

    public SearchResult<String> searchWorkflows(
            String query, String freeText, int start, int count, List<String> sort) {
        return indexDAO.searchWorkflows(query, freeText, start, count, sort);
    }

    public SearchResult<WorkflowSummary> searchWorkflowSummary(
            String query, String freeText, int start, int count, List<String> sort) {
        return indexDAO.searchWorkflowSummary(query, freeText, start, count, sort);
    }

    public SearchResult<String> searchTasks(
            String query, String freeText, int start, int count, List<String> sort) {
        return indexDAO.searchTasks(query, freeText, start, count, sort);
    }

    public SearchResult<TaskSummary> searchTaskSummary(
            String query, String freeText, int start, int count, List<String> sort) {
        return indexDAO.searchTaskSummary(query, freeText, start, count, sort);
    }

    public List<TaskExecLog> getTaskExecutionLogs(String taskId) {
        return properties.isTaskExecLogIndexingEnabled()
                ? indexDAO.getTaskExecutionLogs(taskId)
                : Collections.emptyList();
    }

    /**
     * Populates the workflow input data and the tasks input/output data if stored in external
     * payload storage.
     *
     * @param workflowModel the workflowModel for which the payload data needs to be populated from
     *     external storage (if applicable)
     */
    public void populateWorkflowAndTaskPayloadData(WorkflowModel workflowModel) {
        if (StringUtils.isNotBlank(workflowModel.getExternalInputPayloadStoragePath())) {
            Map<String, Object> workflowInputParams =
                    externalPayloadStorageUtils.downloadPayload(
                            workflowModel.getExternalInputPayloadStoragePath());
            Monitors.recordExternalPayloadStorageUsage(
                    workflowModel.getWorkflowName(),
                    ExternalPayloadStorage.Operation.READ.toString(),
                    ExternalPayloadStorage.PayloadType.WORKFLOW_INPUT.toString());
            workflowModel.internalizeInput(workflowInputParams);
        }

        if (StringUtils.isNotBlank(workflowModel.getExternalOutputPayloadStoragePath())) {
            Map<String, Object> workflowOutputParams =
                    externalPayloadStorageUtils.downloadPayload(
                            workflowModel.getExternalOutputPayloadStoragePath());
            Monitors.recordExternalPayloadStorageUsage(
                    workflowModel.getWorkflowName(),
                    ExternalPayloadStorage.Operation.READ.toString(),
                    ExternalPayloadStorage.PayloadType.WORKFLOW_OUTPUT.toString());
            workflowModel.internalizeOutput(workflowOutputParams);
        }

        workflowModel.getTasks().forEach(this::populateTaskData);
    }

    /** Downloads and internalizes the task's externally stored input/output, if any. */
    public void populateTaskData(TaskModel taskModel) {
        if (StringUtils.isNotBlank(taskModel.getExternalOutputPayloadStoragePath())) {
            Map<String, Object> outputData =
                    externalPayloadStorageUtils.downloadPayload(
                            taskModel.getExternalOutputPayloadStoragePath());
            taskModel.internalizeOutput(outputData);
            Monitors.recordExternalPayloadStorageUsage(
                    taskModel.getTaskDefName(),
                    ExternalPayloadStorage.Operation.READ.toString(),
                    ExternalPayloadStorage.PayloadType.TASK_OUTPUT.toString());
        }
        if (StringUtils.isNotBlank(taskModel.getExternalInputPayloadStoragePath())) {
            Map<String, Object> inputData =
                    externalPayloadStorageUtils.downloadPayload(
                            taskModel.getExternalInputPayloadStoragePath());
            taskModel.internalizeInput(inputData);
            Monitors.recordExternalPayloadStorageUsage(
                    taskModel.getTaskDefName(),
                    ExternalPayloadStorage.Operation.READ.toString(),
                    ExternalPayloadStorage.PayloadType.TASK_INPUT.toString());
        }
    }

    // Deferred re-index job: re-reads the workflow at execution time so the freshest
    // state is indexed; failures are logged, never propagated to the scheduler.
    class DelayWorkflowUpdate implements Runnable {
        private final String workflowId;

        DelayWorkflowUpdate(String workflowId) {
            this.workflowId = workflowId;
        }

        @Override
        public void run() {
            try {
                WorkflowModel workflowModel = executionDAO.getWorkflow(workflowId, false);
                indexDAO.asyncIndexWorkflow(new WorkflowSummary(workflowModel.toWorkflow()));
            } catch (Exception e) {
                LOGGER.error("Unable to update workflow: {}", workflowId, e);
            }
        }
    }
}
6,736
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/events/DefaultEventProcessor.java
/* * Copyright 2022 Netflix, Inc. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.core.events; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.concurrent.CompletableFuture; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.ThreadFactory; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.concurrent.BasicThreadFactory; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.retry.support.RetryTemplate; import org.springframework.stereotype.Component; import org.springframework.util.CollectionUtils; import com.netflix.conductor.common.metadata.events.EventExecution; import com.netflix.conductor.common.metadata.events.EventExecution.Status; import com.netflix.conductor.common.metadata.events.EventHandler; import com.netflix.conductor.common.metadata.events.EventHandler.Action; import com.netflix.conductor.core.config.ConductorProperties; import com.netflix.conductor.core.events.queue.Message; import com.netflix.conductor.core.events.queue.ObservableQueue; import com.netflix.conductor.core.exception.TransientException; import com.netflix.conductor.core.execution.evaluators.Evaluator; import 
com.netflix.conductor.core.utils.JsonUtils; import com.netflix.conductor.metrics.Monitors; import com.netflix.conductor.service.ExecutionService; import com.netflix.conductor.service.MetadataService; import com.fasterxml.jackson.databind.ObjectMapper; import com.spotify.futures.CompletableFutures; import static com.netflix.conductor.core.utils.Utils.isTransientException; /** * Event Processor is used to dispatch actions configured in the event handlers, based on incoming * events to the event queues. * * <p><code>Set conductor.default-event-processor.enabled=false</code> to disable event processing. */ @Component @ConditionalOnProperty( name = "conductor.default-event-processor.enabled", havingValue = "true", matchIfMissing = true) public class DefaultEventProcessor { private static final Logger LOGGER = LoggerFactory.getLogger(DefaultEventProcessor.class); private final MetadataService metadataService; private final ExecutionService executionService; private final ActionProcessor actionProcessor; private final ExecutorService eventActionExecutorService; private final ObjectMapper objectMapper; private final JsonUtils jsonUtils; private final boolean isEventMessageIndexingEnabled; private final Map<String, Evaluator> evaluators; private final RetryTemplate retryTemplate; public DefaultEventProcessor( ExecutionService executionService, MetadataService metadataService, ActionProcessor actionProcessor, JsonUtils jsonUtils, ConductorProperties properties, ObjectMapper objectMapper, Map<String, Evaluator> evaluators, @Qualifier("onTransientErrorRetryTemplate") RetryTemplate retryTemplate) { this.executionService = executionService; this.metadataService = metadataService; this.actionProcessor = actionProcessor; this.objectMapper = objectMapper; this.jsonUtils = jsonUtils; this.evaluators = evaluators; this.retryTemplate = retryTemplate; if (properties.getEventProcessorThreadCount() <= 0) { throw new IllegalStateException( "Cannot set event processor thread count to <=0. 
To disable event " + "processing, set conductor.default-event-processor.enabled=false."); } ThreadFactory threadFactory = new BasicThreadFactory.Builder() .namingPattern("event-action-executor-thread-%d") .build(); eventActionExecutorService = Executors.newFixedThreadPool( properties.getEventProcessorThreadCount(), threadFactory); this.isEventMessageIndexingEnabled = properties.isEventMessageIndexingEnabled(); LOGGER.info("Event Processing is ENABLED"); } public void handle(ObservableQueue queue, Message msg) { List<EventExecution> transientFailures = null; boolean executionFailed = false; try { if (isEventMessageIndexingEnabled) { executionService.addMessage(queue.getName(), msg); } String event = queue.getType() + ":" + queue.getName(); LOGGER.debug("Evaluating message: {} for event: {}", msg.getId(), event); transientFailures = executeEvent(event, msg); } catch (Exception e) { executionFailed = true; LOGGER.error("Error handling message: {} on queue:{}", msg, queue.getName(), e); Monitors.recordEventQueueMessagesError(queue.getType(), queue.getName()); } finally { if (!executionFailed && CollectionUtils.isEmpty(transientFailures)) { queue.ack(Collections.singletonList(msg)); LOGGER.debug("Message: {} acked on queue: {}", msg.getId(), queue.getName()); } else if (queue.rePublishIfNoAck() || !CollectionUtils.isEmpty(transientFailures)) { // re-submit this message to the queue, to be retried later // This is needed for queues with no unack timeout, since messages are removed // from the queue queue.publish(Collections.singletonList(msg)); LOGGER.debug("Message: {} published to queue: {}", msg.getId(), queue.getName()); } else { queue.nack(Collections.singletonList(msg)); LOGGER.debug("Message: {} nacked on queue: {}", msg.getId(), queue.getName()); } Monitors.recordEventQueueMessagesHandled(queue.getType(), queue.getName()); } } /** * Executes all the actions configured on all the event handlers triggered by the {@link * Message} on the queue If any of the actions 
on an event handler fails due to a transient * failure, the execution is not persisted such that it can be retried * * @return a list of {@link EventExecution} that failed due to transient failures. */ protected List<EventExecution> executeEvent(String event, Message msg) throws Exception { List<EventHandler> eventHandlerList; List<EventExecution> transientFailures = new ArrayList<>(); try { eventHandlerList = metadataService.getEventHandlersForEvent(event, true); } catch (TransientException transientException) { transientFailures.add(new EventExecution(event, msg.getId())); return transientFailures; } Object payloadObject = getPayloadObject(msg.getPayload()); for (EventHandler eventHandler : eventHandlerList) { String condition = eventHandler.getCondition(); String evaluatorType = eventHandler.getEvaluatorType(); // Set default to true so that if condition is not specified, it falls through // to process the event. boolean success = true; if (StringUtils.isNotEmpty(condition) && evaluators.get(evaluatorType) != null) { Object result = evaluators .get(evaluatorType) .evaluate(condition, jsonUtils.expand(payloadObject)); success = ScriptEvaluator.toBoolean(result); } else if (StringUtils.isNotEmpty(condition)) { LOGGER.debug("Checking condition: {} for event: {}", condition, event); success = ScriptEvaluator.evalBool(condition, jsonUtils.expand(payloadObject)); } if (!success) { String id = msg.getId() + "_" + 0; EventExecution eventExecution = new EventExecution(id, msg.getId()); eventExecution.setCreated(System.currentTimeMillis()); eventExecution.setEvent(eventHandler.getEvent()); eventExecution.setName(eventHandler.getName()); eventExecution.setStatus(Status.SKIPPED); eventExecution.getOutput().put("msg", msg.getPayload()); eventExecution.getOutput().put("condition", condition); executionService.addEventExecution(eventExecution); LOGGER.debug( "Condition: {} not successful for event: {} with payload: {}", condition, eventHandler.getEvent(), msg.getPayload()); 
continue; } CompletableFuture<List<EventExecution>> future = executeActionsForEventHandler(eventHandler, msg); future.whenComplete( (result, error) -> result.forEach( eventExecution -> { if (error != null || eventExecution.getStatus() == Status.IN_PROGRESS) { transientFailures.add(eventExecution); } else { executionService.updateEventExecution( eventExecution); } })) .get(); } return processTransientFailures(transientFailures); } /** * Remove the event executions which failed temporarily. * * @param eventExecutions The event executions which failed with a transient error. * @return The event executions which failed with a transient error. */ protected List<EventExecution> processTransientFailures(List<EventExecution> eventExecutions) { eventExecutions.forEach(executionService::removeEventExecution); return eventExecutions; } /** * @param eventHandler the {@link EventHandler} for which the actions are to be executed * @param msg the {@link Message} that triggered the event * @return a {@link CompletableFuture} holding a list of {@link EventExecution}s for the {@link * Action}s executed in the event handler */ protected CompletableFuture<List<EventExecution>> executeActionsForEventHandler( EventHandler eventHandler, Message msg) { List<CompletableFuture<EventExecution>> futuresList = new ArrayList<>(); int i = 0; for (Action action : eventHandler.getActions()) { String id = msg.getId() + "_" + i++; EventExecution eventExecution = new EventExecution(id, msg.getId()); eventExecution.setCreated(System.currentTimeMillis()); eventExecution.setEvent(eventHandler.getEvent()); eventExecution.setName(eventHandler.getName()); eventExecution.setAction(action.getAction()); eventExecution.setStatus(Status.IN_PROGRESS); if (executionService.addEventExecution(eventExecution)) { futuresList.add( CompletableFuture.supplyAsync( () -> execute( eventExecution, action, getPayloadObject(msg.getPayload())), eventActionExecutorService)); } else { LOGGER.warn("Duplicate delivery/execution of 
message: {}", msg.getId()); } } return CompletableFutures.allAsList(futuresList); } /** * @param eventExecution the instance of {@link EventExecution} * @param action the {@link Action} to be executed for the event * @param payload the {@link Message#getPayload()} * @return the event execution updated with execution output, if the execution is * completed/failed with non-transient error the input event execution, if the execution * failed due to transient error */ protected EventExecution execute(EventExecution eventExecution, Action action, Object payload) { try { LOGGER.debug( "Executing action: {} for event: {} with messageId: {} with payload: {}", action.getAction(), eventExecution.getId(), eventExecution.getMessageId(), payload); // TODO: Switch to @Retryable annotation on SimpleActionProcessor.execute() Map<String, Object> output = retryTemplate.execute( context -> actionProcessor.execute( action, payload, eventExecution.getEvent(), eventExecution.getMessageId())); if (output != null) { eventExecution.getOutput().putAll(output); } eventExecution.setStatus(Status.COMPLETED); Monitors.recordEventExecutionSuccess( eventExecution.getEvent(), eventExecution.getName(), eventExecution.getAction().name()); } catch (RuntimeException e) { LOGGER.error( "Error executing action: {} for event: {} with messageId: {}", action.getAction(), eventExecution.getEvent(), eventExecution.getMessageId(), e); if (!isTransientException(e)) { // not a transient error, fail the event execution eventExecution.setStatus(Status.FAILED); eventExecution.getOutput().put("exception", e.getMessage()); Monitors.recordEventExecutionError( eventExecution.getEvent(), eventExecution.getName(), eventExecution.getAction().name(), e.getClass().getSimpleName()); } } return eventExecution; } private Object getPayloadObject(String payload) { Object payloadObject = null; if (payload != null) { try { payloadObject = objectMapper.readValue(payload, Object.class); } catch (Exception e) { payloadObject = 
payload; } } return payloadObject; } }
6,737
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/events/EventQueueProvider.java
/*
 * Copyright 2022 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.core.events;

import org.springframework.lang.NonNull;

import com.netflix.conductor.core.events.queue.ObservableQueue;

/**
 * Factory abstraction for a single queue technology: implementations resolve a queue URI into an
 * {@link ObservableQueue} that Conductor can listen on.
 */
public interface EventQueueProvider {

    /**
     * @return the queue type identifier handled by this provider (used to select a provider for a
     *     given event; see the registry of providers keyed by type).
     */
    String getQueueType();

    /**
     * Creates or reads the {@link ObservableQueue} for the given <code>queueURI</code>.
     *
     * @param queueURI The URI of the queue.
     * @return The {@link ObservableQueue} implementation for the <code>queueURI</code>.
     * @throws IllegalArgumentException thrown when an {@link ObservableQueue} can not be created
     *     for the <code>queueURI</code>.
     */
    @NonNull
    ObservableQueue getQueue(String queueURI) throws IllegalArgumentException;
}
6,738
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/events/ActionProcessor.java
/*
 * Copyright 2022 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.core.events;

import java.util.Map;

import com.netflix.conductor.common.metadata.events.EventHandler;

/** Executes a single event-handler {@link EventHandler.Action} triggered by an incoming event. */
public interface ActionProcessor {

    /**
     * Runs the given action against the message payload.
     *
     * @param action the action configured on the event handler
     * @param payloadObject the (already deserialized) payload of the message that fired the event
     * @param event the event name that triggered the action
     * @param messageId the id of the queue message that carried the event
     * @return the action's output map (implementation-defined keys)
     */
    Map<String, Object> execute(
            EventHandler.Action action, Object payloadObject, String event, String messageId);
}
6,739
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/events/EventQueues.java
/* * Copyright 2022 Netflix, Inc. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.core.events; import java.util.List; import java.util.Map; import java.util.stream.Collectors; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.lang.NonNull; import org.springframework.stereotype.Component; import com.netflix.conductor.core.events.queue.ObservableQueue; import com.netflix.conductor.core.utils.ParametersUtils; /** Holders for internal event queues */ @Component public class EventQueues { public static final String EVENT_QUEUE_PROVIDERS_QUALIFIER = "EventQueueProviders"; private static final Logger LOGGER = LoggerFactory.getLogger(EventQueues.class); private final ParametersUtils parametersUtils; private final Map<String, EventQueueProvider> providers; @Autowired public EventQueues( @Qualifier(EVENT_QUEUE_PROVIDERS_QUALIFIER) Map<String, EventQueueProvider> providers, ParametersUtils parametersUtils) { this.providers = providers; this.parametersUtils = parametersUtils; } public List<String> getProviders() { return providers.values().stream() .map(p -> p.getClass().getName()) .collect(Collectors.toList()); } @NonNull public ObservableQueue getQueue(String eventType) { String event = parametersUtils.replace(eventType).toString(); int index = event.indexOf(':'); if (index == -1) { throw new 
IllegalArgumentException("Illegal event " + event); } String type = event.substring(0, index); String queueURI = event.substring(index + 1); EventQueueProvider provider = providers.get(type); if (provider != null) { return provider.getQueue(queueURI); } else { throw new IllegalArgumentException("Unknown queue type " + type); } } }
6,740
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/events/DefaultEventQueueManager.java
/* * Copyright 2022 Netflix, Inc. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.core.events; import java.util.*; import java.util.concurrent.ConcurrentHashMap; import java.util.stream.Collectors; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.context.Lifecycle; import org.springframework.scheduling.annotation.Scheduled; import org.springframework.stereotype.Component; import com.netflix.conductor.common.metadata.events.EventHandler; import com.netflix.conductor.core.LifecycleAwareComponent; import com.netflix.conductor.core.events.queue.DefaultEventQueueProcessor; import com.netflix.conductor.core.events.queue.Message; import com.netflix.conductor.core.events.queue.ObservableQueue; import com.netflix.conductor.dao.EventHandlerDAO; import com.netflix.conductor.metrics.Monitors; import com.netflix.conductor.model.TaskModel.Status; /** * Manages the event queues registered in the system and sets up listeners for these. 
* * <p>Manages the lifecycle of - * * <ul> * <li>Queues registered with event handlers * <li>Default event queues that Conductor listens on * </ul> * * @see DefaultEventQueueProcessor */ @Component @ConditionalOnProperty( name = "conductor.default-event-processor.enabled", havingValue = "true", matchIfMissing = true) public class DefaultEventQueueManager extends LifecycleAwareComponent implements EventQueueManager { private static final Logger LOGGER = LoggerFactory.getLogger(DefaultEventQueueManager.class); private final EventHandlerDAO eventHandlerDAO; private final EventQueues eventQueues; private final DefaultEventProcessor defaultEventProcessor; private final Map<String, ObservableQueue> eventToQueueMap = new ConcurrentHashMap<>(); private final Map<Status, ObservableQueue> defaultQueues; public DefaultEventQueueManager( Map<Status, ObservableQueue> defaultQueues, EventHandlerDAO eventHandlerDAO, EventQueues eventQueues, DefaultEventProcessor defaultEventProcessor) { this.defaultQueues = defaultQueues; this.eventHandlerDAO = eventHandlerDAO; this.eventQueues = eventQueues; this.defaultEventProcessor = defaultEventProcessor; } /** * @return Returns a map of queues which are active. 
Key is event name and value is queue URI */ @Override public Map<String, String> getQueues() { Map<String, String> queues = new HashMap<>(); eventToQueueMap.forEach((key, value) -> queues.put(key, value.getName())); return queues; } @Override public Map<String, Map<String, Long>> getQueueSizes() { Map<String, Map<String, Long>> queues = new HashMap<>(); eventToQueueMap.forEach( (key, value) -> { Map<String, Long> size = new HashMap<>(); size.put(value.getName(), value.size()); queues.put(key, size); }); return queues; } @Override public void doStart() { eventToQueueMap.forEach( (event, queue) -> { LOGGER.info("Start listening for events: {}", event); queue.start(); }); defaultQueues.forEach( (status, queue) -> { LOGGER.info( "Start listening on default queue {} for status {}", queue.getName(), status); queue.start(); }); } @Override public void doStop() { eventToQueueMap.forEach( (event, queue) -> { LOGGER.info("Stop listening for events: {}", event); queue.stop(); }); defaultQueues.forEach( (status, queue) -> { LOGGER.info( "Stop listening on default queue {} for status {}", status, queue.getName()); queue.stop(); }); } @Scheduled(fixedDelay = 60_000) public void refreshEventQueues() { try { Set<String> events = eventHandlerDAO.getAllEventHandlers().stream() .filter(EventHandler::isActive) .map(EventHandler::getEvent) .collect(Collectors.toSet()); List<ObservableQueue> createdQueues = new LinkedList<>(); events.forEach( event -> eventToQueueMap.computeIfAbsent( event, s -> { ObservableQueue q = eventQueues.getQueue(event); createdQueues.add(q); return q; })); // start listening on all of the created queues createdQueues.stream() .filter(Objects::nonNull) .peek(Lifecycle::start) .forEach(this::listen); Set<String> removed = new HashSet<>(eventToQueueMap.keySet()); removed.removeAll(events); removed.forEach( key -> { ObservableQueue queue = eventToQueueMap.remove(key); try { queue.stop(); } catch (Exception e) { LOGGER.error("Failed to stop queue: " + queue, e); } 
}); LOGGER.debug("Event queues: {}", eventToQueueMap.keySet()); LOGGER.debug("Stored queue: {}", events); LOGGER.debug("Removed queue: {}", removed); } catch (Exception e) { Monitors.error(getClass().getSimpleName(), "refresh"); LOGGER.error("refresh event queues failed", e); } } private void listen(ObservableQueue queue) { queue.observe().subscribe((Message msg) -> defaultEventProcessor.handle(queue, msg)); } }
6,741
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/events/SimpleActionProcessor.java
/*
 * Copyright 2020 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.core.events;

import java.util.*;
import java.util.stream.Collectors;

import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;

import com.netflix.conductor.common.metadata.events.EventHandler.Action;
import com.netflix.conductor.common.metadata.events.EventHandler.StartWorkflow;
import com.netflix.conductor.common.metadata.events.EventHandler.TaskDetails;
import com.netflix.conductor.common.metadata.tasks.TaskResult;
import com.netflix.conductor.common.utils.TaskUtils;
import com.netflix.conductor.core.execution.StartWorkflowInput;
import com.netflix.conductor.core.execution.WorkflowExecutor;
import com.netflix.conductor.core.operation.StartWorkflowOperation;
import com.netflix.conductor.core.utils.JsonUtils;
import com.netflix.conductor.core.utils.ParametersUtils;
import com.netflix.conductor.metrics.Monitors;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;

/**
 * Action Processor subscribes to the Event Actions queue and processes the actions (e.g. start
 * workflow etc)
 */
@Component
public class SimpleActionProcessor implements ActionProcessor {

    private static final Logger LOGGER = LoggerFactory.getLogger(SimpleActionProcessor.class);

    private final WorkflowExecutor workflowExecutor;
    private final ParametersUtils parametersUtils;
    private final JsonUtils jsonUtils;
    private final StartWorkflowOperation startWorkflowOperation;

    public SimpleActionProcessor(
            WorkflowExecutor workflowExecutor,
            ParametersUtils parametersUtils,
            JsonUtils jsonUtils,
            StartWorkflowOperation startWorkflowOperation) {
        this.workflowExecutor = workflowExecutor;
        this.parametersUtils = parametersUtils;
        this.jsonUtils = jsonUtils;
        this.startWorkflowOperation = startWorkflowOperation;
    }

    /**
     * Dispatches the configured action type to the matching handler.
     *
     * @param action the action configured on the event handler
     * @param payloadObject the deserialized message payload
     * @param event the event name that triggered the action
     * @param messageId the id of the queue message that carried the event
     * @return the output of the executed action
     * @throws UnsupportedOperationException for action types not handled here
     */
    public Map<String, Object> execute(
            Action action, Object payloadObject, String event, String messageId) {

        LOGGER.debug(
                "Executing action: {} for event: {} with messageId:{}",
                action.getAction(),
                event,
                messageId);

        Object jsonObject = payloadObject;
        // Optionally expand embedded JSON strings in the payload into objects before templating.
        if (action.isExpandInlineJSON()) {
            jsonObject = jsonUtils.expand(payloadObject);
        }

        switch (action.getAction()) {
            case start_workflow:
                return startWorkflow(action, jsonObject, event, messageId);
            case complete_task:
                return completeTask(
                        action,
                        jsonObject,
                        action.getComplete_task(),
                        TaskModel.Status.COMPLETED,
                        event,
                        messageId);
            case fail_task:
                return completeTask(
                        action,
                        jsonObject,
                        action.getFail_task(),
                        TaskModel.Status.FAILED,
                        event,
                        messageId);
            default:
                break;
        }
        // Reached only for action types with no case above.
        throw new UnsupportedOperationException(
                "Action not supported " + action.getAction() + " for event " + event);
    }

    /**
     * Marks a task as COMPLETED/FAILED. The task is located either by taskId directly, or by
     * (workflowId, taskRefName); templated values in the task details are resolved against the
     * message payload first.
     *
     * @return the resolved parameter map, augmented with an "error" entry on lookup failure
     */
    private Map<String, Object> completeTask(
            Action action,
            Object payload,
            TaskDetails taskDetails,
            TaskModel.Status status,
            String event,
            String messageId) {

        Map<String, Object> input = new HashMap<>();
        input.put("workflowId", taskDetails.getWorkflowId());
        input.put("taskId", taskDetails.getTaskId());
        input.put("taskRefName", taskDetails.getTaskRefName());
        input.putAll(taskDetails.getOutput());

        // Resolve ${...} templates in the identifiers and output against the payload.
        Map<String, Object> replaced = parametersUtils.replace(input, payload);
        String workflowId = (String) replaced.get("workflowId");
        String taskId = (String) replaced.get("taskId");
        String taskRefName = (String) replaced.get("taskRefName");

        TaskModel taskModel = null;
        if (StringUtils.isNotEmpty(taskId)) {
            // Direct lookup by task id takes precedence.
            taskModel = workflowExecutor.getTask(taskId);
        } else if (StringUtils.isNotEmpty(workflowId) && StringUtils.isNotEmpty(taskRefName)) {
            WorkflowModel workflow = workflowExecutor.getWorkflow(workflowId, true);
            if (workflow == null) {
                replaced.put("error", "No workflow found with ID: " + workflowId);
                return replaced;
            }
            taskModel = workflow.getTaskByRefName(taskRefName);
            // Task can be loopover task.In such case find corresponding task and update
            List<TaskModel> loopOverTaskList =
                    workflow.getTasks().stream()
                            .filter(
                                    t ->
                                            TaskUtils.removeIterationFromTaskRefName(
                                                            t.getReferenceTaskName())
                                                    .equals(taskRefName))
                            .collect(Collectors.toList());
            if (!loopOverTaskList.isEmpty()) {
                // Find loopover task with the highest iteration value
                taskModel =
                        loopOverTaskList.stream()
                                .sorted(Comparator.comparingInt(TaskModel::getIteration).reversed())
                                .findFirst()
                                .get();
            }
        }

        if (taskModel == null) {
            replaced.put(
                    "error",
                    "No task found with taskId: "
                            + taskId
                            + ", reference name: "
                            + taskRefName
                            + ", workflowId: "
                            + workflowId);
            return replaced;
        }

        taskModel.setStatus(status);
        taskModel.setOutputData(replaced);
        taskModel.setOutputMessage(taskDetails.getOutputMessage());
        // Record provenance of the update in the task output.
        taskModel.addOutput("conductor.event.messageId", messageId);
        taskModel.addOutput("conductor.event.name", event);

        try {
            workflowExecutor.updateTask(new TaskResult(taskModel.toTask()));
            LOGGER.debug(
                    "Updated task: {} in workflow:{} with status: {} for event: {} for message:{}",
                    taskId,
                    workflowId,
                    status,
                    event,
                    messageId);
        } catch (RuntimeException e) {
            Monitors.recordEventActionError(
                    action.getAction().name(), taskModel.getTaskType(), event);
            LOGGER.error(
                    "Error updating task: {} in workflow: {} in action: {} for event: {} for message: {}",
                    taskDetails.getTaskRefName(),
                    taskDetails.getWorkflowId(),
                    action.getAction(),
                    event,
                    messageId,
                    e);
            replaced.put("error", e.getMessage());
            throw e;
        }
        return replaced;
    }

    /**
     * Starts a new workflow from the action's StartWorkflow parameters, with templated input and
     * correlation id resolved against the payload.
     *
     * @return a map containing the started "workflowId", or an "error" entry on failure (the
     *     triggering exception is rethrown)
     */
    private Map<String, Object> startWorkflow(
            Action action, Object payload, String event, String messageId) {
        StartWorkflow params = action.getStart_workflow();
        Map<String, Object> output = new HashMap<>();
        try {
            Map<String, Object> inputParams = params.getInput();
            Map<String, Object> workflowInput = parametersUtils.replace(inputParams, payload);

            Map<String, Object> paramsMap = new HashMap<>();
            Optional.ofNullable(params.getCorrelationId())
                    .ifPresent(value -> paramsMap.put("correlationId", value));
            Map<String, Object> replaced = parametersUtils.replace(paramsMap, payload);

            // Record provenance of the start request in the workflow input.
            workflowInput.put("conductor.event.messageId", messageId);
            workflowInput.put("conductor.event.name", event);

            StartWorkflowInput startWorkflowInput = new StartWorkflowInput();
            startWorkflowInput.setName(params.getName());
            startWorkflowInput.setVersion(params.getVersion());
            // Prefer the correlation id resolved from the payload; fall back to the raw config.
            startWorkflowInput.setCorrelationId(
                    Optional.ofNullable(replaced.get("correlationId"))
                            .map(Object::toString)
                            .orElse(params.getCorrelationId()));
            startWorkflowInput.setWorkflowInput(workflowInput);
            startWorkflowInput.setEvent(event);
            startWorkflowInput.setTaskToDomain(params.getTaskToDomain());

            String workflowId = startWorkflowOperation.execute(startWorkflowInput);

            output.put("workflowId", workflowId);
            LOGGER.debug(
                    "Started workflow: {}/{}/{} for event: {} for message:{}",
                    params.getName(),
                    params.getVersion(),
                    workflowId,
                    event,
                    messageId);

        } catch (RuntimeException e) {
            Monitors.recordEventActionError(action.getAction().name(), params.getName(), event);
            LOGGER.error(
                    "Error starting workflow: {}, version: {}, for event: {} for message: {}",
                    params.getName(),
                    params.getVersion(),
                    event,
                    messageId,
                    e);
            output.put("error", e.getMessage());
            throw e;
        }
        return output;
    }
}
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/events/EventQueueManager.java
/* * Copyright 2022 Netflix, Inc. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.core.events; import java.util.Map; public interface EventQueueManager { Map<String, String> getQueues(); Map<String, Map<String, Long>> getQueueSizes(); }
6,743
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/events/ScriptEvaluator.java
/* * Copyright 2022 Netflix, Inc. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.core.events; import javax.script.Bindings; import javax.script.ScriptEngine; import javax.script.ScriptEngineManager; import javax.script.ScriptException; public class ScriptEvaluator { private static ScriptEngine engine; private ScriptEvaluator() {} /** * Evaluates the script with the help of input provided but converts the result to a boolean * value. * * @param script Script to be evaluated. * @param input Input parameters. * @throws ScriptException * @return True or False based on the result of the evaluated expression. */ public static Boolean evalBool(String script, Object input) throws ScriptException { return toBoolean(eval(script, input)); } /** * Evaluates the script with the help of input provided. * * @param script Script to be evaluated. * @param input Input parameters. * @throws ScriptException * @return Generic object, the result of the evaluated expression. */ public static Object eval(String script, Object input) throws ScriptException { if (engine == null) { engine = new ScriptEngineManager().getEngineByName("Nashorn"); } if (engine == null) { throw new RuntimeException( "missing nashorn engine. Ensure you are running supported JVM"); } Bindings bindings = engine.createBindings(); bindings.put("$", input); return engine.eval(script, bindings); } /** * Converts a generic object into boolean value. 
Checks if the Object is of type Boolean and * returns the value of the Boolean object. Checks if the Object is of type Number and returns * True if the value is greater than 0. * * @param input Generic object that will be inspected to return a boolean value. * @return True or False based on the input provided. */ public static Boolean toBoolean(Object input) { if (input instanceof Boolean) { return ((Boolean) input); } else if (input instanceof Number) { return ((Number) input).doubleValue() > 0; } return false; } }
6,744
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/events
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/events/queue/Message.java
/* * Copyright 2022 Netflix, Inc. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.core.events.queue; import java.util.Objects; public class Message { private String payload; private String id; private String receipt; private int priority; public Message() {} public Message(String id, String payload, String receipt) { this.payload = payload; this.id = id; this.receipt = receipt; } public Message(String id, String payload, String receipt, int priority) { this.payload = payload; this.id = id; this.receipt = receipt; this.priority = priority; } /** * @return the payload */ public String getPayload() { return payload; } /** * @param payload the payload to set */ public void setPayload(String payload) { this.payload = payload; } /** * @return the id */ public String getId() { return id; } /** * @param id the id to set */ public void setId(String id) { this.id = id; } /** * @return Receipt attached to the message */ public String getReceipt() { return receipt; } /** * @param receipt Receipt attached to the message */ public void setReceipt(String receipt) { this.receipt = receipt; } /** * Gets the message priority * * @return priority of message. */ public int getPriority() { return priority; } /** * Sets the message priority (between 0 and 99). Higher priority message is retrieved ahead of * lower priority ones. 
* * @param priority the priority of message (between 0 and 99) */ public void setPriority(int priority) { this.priority = priority; } @Override public String toString() { return id; } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } Message message = (Message) o; return Objects.equals(payload, message.payload) && Objects.equals(id, message.id) && Objects.equals(priority, message.priority) && Objects.equals(receipt, message.receipt); } @Override public int hashCode() { return Objects.hash(payload, id, receipt, priority); } }
6,745
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/events
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/events/queue/DefaultEventQueueProcessor.java
/*
 * Copyright 2022 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.core.events.queue;

import java.util.*;

import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.stereotype.Component;

import com.netflix.conductor.common.metadata.tasks.Task;
import com.netflix.conductor.common.metadata.tasks.TaskResult;
import com.netflix.conductor.common.utils.TaskUtils;
import com.netflix.conductor.core.exception.NotFoundException;
import com.netflix.conductor.core.execution.WorkflowExecutor;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.TaskModel.Status;
import com.netflix.conductor.model.WorkflowModel;

import com.fasterxml.jackson.core.JsonParseException;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_WAIT;

/**
 * Monitors and processes messages on the default event queues that Conductor listens on.
 *
 * <p>The default event queue type is controlled using the property: <code>
 * conductor.default-event-queue.type</code>
 *
 * <p>One queue is registered per {@link Status}; a message arriving on a queue marks the
 * referenced task with that queue's status. Messages must carry an {@code externalId} JSON
 * payload identifying the target workflow and task (by {@code workflowId} plus either
 * {@code taskId} or {@code taskRefName}).
 */
@Component
@ConditionalOnProperty(
        name = "conductor.default-event-queue-processor.enabled",
        havingValue = "true",
        matchIfMissing = true)
public class DefaultEventQueueProcessor {

    private static final Logger LOGGER = LoggerFactory.getLogger(DefaultEventQueueProcessor.class);
    // Map from the task status a queue represents to the queue being observed.
    private final Map<Status, ObservableQueue> queues;
    private final WorkflowExecutor workflowExecutor;
    // Reusable Jackson target type for converting the message payload into task output data.
    private static final TypeReference<Map<String, Object>> _mapType = new TypeReference<>() {};
    private final ObjectMapper objectMapper;

    /**
     * Wires the status-to-queue map and starts a subscription on every queue immediately.
     *
     * @param queues one observable queue per task {@link Status} to be applied
     * @param workflowExecutor used to look up workflows and push task updates
     * @param objectMapper used for payload parsing/conversion
     */
    public DefaultEventQueueProcessor(
            Map<Status, ObservableQueue> queues,
            WorkflowExecutor workflowExecutor,
            ObjectMapper objectMapper) {
        this.queues = queues;
        this.workflowExecutor = workflowExecutor;
        this.objectMapper = objectMapper;
        queues.forEach(this::startMonitor);
        LOGGER.info(
                "DefaultEventQueueProcessor initialized with {} queues", queues.entrySet().size());
    }

    /**
     * Subscribes to a single queue. Each message is parsed, matched to a pending task, and the
     * task is updated to {@code status}. Malformed or unmatchable messages are logged and ack'ed
     * so they do not poison the queue; unexpected errors are logged WITHOUT ack so the message can
     * be redelivered.
     */
    private void startMonitor(Status status, ObservableQueue queue) {
        queue.observe()
                .subscribe(
                        (Message msg) -> {
                            try {
                                LOGGER.debug("Got message {}", msg.getPayload());
                                String payload = msg.getPayload();
                                JsonNode payloadJSON = objectMapper.readTree(payload);
                                String externalId = getValue("externalId", payloadJSON);
                                // Without an externalId there is no way to locate the task; drop.
                                if (externalId == null || "".equals(externalId)) {
                                    LOGGER.error("No external Id found in the payload {}", payload);
                                    queue.ack(Collections.singletonList(msg));
                                    return;
                                }

                                // externalId is itself a JSON document with the addressing info.
                                JsonNode json = objectMapper.readTree(externalId);
                                String workflowId = getValue("workflowId", json);
                                String taskRefName = getValue("taskRefName", json);
                                String taskId = getValue("taskId", json);
                                if (workflowId == null || "".equals(workflowId)) {
                                    // This is a bad message, we cannot process it
                                    LOGGER.error("No workflow id found in the message. {}", payload);
                                    queue.ack(Collections.singletonList(msg));
                                    return;
                                }
                                WorkflowModel workflow =
                                        workflowExecutor.getWorkflow(workflowId, true);
                                Optional<TaskModel> optionalTaskModel;
                                // Resolution order: explicit taskId wins, then (if no ref name)
                                // fall back to the first pending WAIT task, else match by ref name.
                                if (StringUtils.isNotEmpty(taskId)) {
                                    optionalTaskModel =
                                            workflow.getTasks().stream()
                                                    .filter(
                                                            task ->
                                                                    !task.getStatus().isTerminal()
                                                                            && task.getTaskId()
                                                                                    .equals(taskId))
                                                    .findFirst();
                                } else if (StringUtils.isEmpty(taskRefName)) {
                                    LOGGER.error(
                                            "No taskRefName found in the message. If there is only one WAIT task, will mark it as completed. {}",
                                            payload);
                                    optionalTaskModel =
                                            workflow.getTasks().stream()
                                                    .filter(
                                                            task ->
                                                                    !task.getStatus().isTerminal()
                                                                            && task.getTaskType()
                                                                                    .equals(
                                                                                            TASK_TYPE_WAIT))
                                                    .findFirst();
                                } else {
                                    // Iteration suffix is stripped so loop tasks can be matched by
                                    // their plain reference name.
                                    optionalTaskModel =
                                            workflow.getTasks().stream()
                                                    .filter(
                                                            task ->
                                                                    !task.getStatus().isTerminal()
                                                                            && TaskUtils
                                                                                    .removeIterationFromTaskRefName(
                                                                                            task
                                                                                                    .getReferenceTaskName())
                                                                                    .equals(
                                                                                            taskRefName))
                                                    .findFirst();
                                }

                                if (optionalTaskModel.isEmpty()) {
                                    LOGGER.error(
                                            "No matching tasks found to be marked as completed for workflow {}, taskRefName {}, taskId {}",
                                            workflowId,
                                            taskRefName,
                                            taskId);
                                    queue.ack(Collections.singletonList(msg));
                                    return;
                                }

                                // Apply the queue's status and merge the whole message payload
                                // into the task's output data before updating.
                                Task task = optionalTaskModel.get().toTask();
                                task.setStatus(TaskModel.mapToTaskStatus(status));
                                task.getOutputData()
                                        .putAll(objectMapper.convertValue(payloadJSON, _mapType));
                                workflowExecutor.updateTask(new TaskResult(task));

                                List<String> failures = queue.ack(Collections.singletonList(msg));
                                if (!failures.isEmpty()) {
                                    LOGGER.error("Not able to ack the messages {}", failures);
                                }
                            } catch (JsonParseException e) {
                                // Unparseable payload can never succeed; ack to discard.
                                LOGGER.error("Bad message? : {} ", msg, e);
                                queue.ack(Collections.singletonList(msg));
                            } catch (NotFoundException nfe) {
                                // Unknown workflow id; ack to discard.
                                LOGGER.error(
                                        "Workflow ID specified is not valid for this environment");
                                queue.ack(Collections.singletonList(msg));
                            } catch (Exception e) {
                                // Transient/unexpected failure: deliberately NOT ack'ed so the
                                // message can be retried.
                                LOGGER.error("Error processing message: {}", msg, e);
                            }
                        },
                        (Throwable t) -> LOGGER.error(t.getMessage(), t));
        LOGGER.info("QueueListener::STARTED...listening for " + queue.getName());
    }

    /** Returns the text value of {@code fieldName} anywhere in {@code json}, or null if absent. */
    private String getValue(String fieldName, JsonNode json) {
        JsonNode node = json.findValue(fieldName);
        if (node == null) {
            return null;
        }
        return node.textValue();
    }

    /** @return queue name to pending-message count, for all monitored queues */
    public Map<String, Long> size() {
        Map<String, Long> size = new HashMap<>();
        queues.forEach((key, queue) -> size.put(queue.getName(), queue.size()));
        return size;
    }

    /** @return status to queue URI, for all monitored queues */
    public Map<Status, String> queues() {
        Map<Status, String> size = new HashMap<>();
        queues.forEach((key, queue) -> size.put(key, queue.getURI()));
        return size;
    }

    /**
     * Publishes a status-update message addressed by workflow id + task reference name.
     *
     * @param workflowId id of the workflow containing the task
     * @param taskRefName reference name of the task to update
     * @param output data merged into the task's output
     * @param status target status; must have a registered queue
     * @throws Exception on serialization failure or if no queue exists for {@code status}
     */
    public void updateByTaskRefName(
            String workflowId, String taskRefName, Map<String, Object> output, Status status)
            throws Exception {
        Map<String, Object> externalIdMap = new HashMap<>();
        externalIdMap.put("workflowId", workflowId);
        externalIdMap.put("taskRefName", taskRefName);
        update(externalIdMap, output, status);
    }

    /**
     * Publishes a status-update message addressed by workflow id + task id.
     *
     * @param workflowId id of the workflow containing the task
     * @param taskId id of the task to update
     * @param output data merged into the task's output
     * @param status target status; must have a registered queue
     * @throws Exception on serialization failure or if no queue exists for {@code status}
     */
    public void updateByTaskId(
            String workflowId, String taskId, Map<String, Object> output, Status status)
            throws Exception {
        Map<String, Object> externalIdMap = new HashMap<>();
        externalIdMap.put("workflowId", workflowId);
        externalIdMap.put("taskId", taskId);
        update(externalIdMap, output, status);
    }

    /**
     * Builds a message whose payload is {@code output} plus a serialized {@code externalId}
     * addressing block, and publishes it on the queue registered for {@code status}.
     */
    private void update(
            Map<String, Object> externalIdMap, Map<String, Object> output, Status status)
            throws Exception {
        Map<String, Object> outputMap = new HashMap<>();
        outputMap.put("externalId", objectMapper.writeValueAsString(externalIdMap));
        outputMap.putAll(output);

        Message msg =
                new Message(
                        UUID.randomUUID().toString(),
                        objectMapper.writeValueAsString(outputMap),
                        null);
        ObservableQueue queue = queues.get(status);
        if (queue == null) {
            throw new IllegalArgumentException(
                    "There is no queue for handling " + status.toString() + " status");
        }
        queue.publish(Collections.singletonList(msg));
    }
}
6,746
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/events
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/events/queue/ConductorObservableQueue.java
/* * Copyright 2022 Netflix, Inc. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.core.events.queue; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.netflix.conductor.core.config.ConductorProperties; import com.netflix.conductor.dao.QueueDAO; import com.netflix.conductor.metrics.Monitors; import rx.Observable; import rx.Observable.OnSubscribe; import rx.Scheduler; /** * An {@link ObservableQueue} implementation using the underlying {@link QueueDAO} implementation. 
*/ public class ConductorObservableQueue implements ObservableQueue { private static final Logger LOGGER = LoggerFactory.getLogger(ConductorObservableQueue.class); private static final String QUEUE_TYPE = "conductor"; private final String queueName; private final QueueDAO queueDAO; private final long pollTimeMS; private final int longPollTimeout; private final int pollCount; private final Scheduler scheduler; private volatile boolean running; ConductorObservableQueue( String queueName, QueueDAO queueDAO, ConductorProperties properties, Scheduler scheduler) { this.queueName = queueName; this.queueDAO = queueDAO; this.pollTimeMS = properties.getEventQueuePollInterval().toMillis(); this.pollCount = properties.getEventQueuePollCount(); this.longPollTimeout = (int) properties.getEventQueueLongPollTimeout().toMillis(); this.scheduler = scheduler; } @Override public Observable<Message> observe() { OnSubscribe<Message> subscriber = getOnSubscribe(); return Observable.create(subscriber); } @Override public List<String> ack(List<Message> messages) { for (Message msg : messages) { queueDAO.ack(queueName, msg.getId()); } return messages.stream().map(Message::getId).collect(Collectors.toList()); } public void setUnackTimeout(Message message, long unackTimeout) { queueDAO.setUnackTimeout(queueName, message.getId(), unackTimeout); } @Override public void publish(List<Message> messages) { queueDAO.push(queueName, messages); } @Override public long size() { return queueDAO.getSize(queueName); } @Override public String getType() { return QUEUE_TYPE; } @Override public String getName() { return queueName; } @Override public String getURI() { return queueName; } private List<Message> receiveMessages() { try { List<Message> messages = queueDAO.pollMessages(queueName, pollCount, longPollTimeout); Monitors.recordEventQueueMessagesProcessed(QUEUE_TYPE, queueName, messages.size()); Monitors.recordEventQueuePollSize(queueName, messages.size()); return messages; } catch (Exception exception) 
{ LOGGER.error("Exception while getting messages from queueDAO", exception); Monitors.recordObservableQMessageReceivedErrors(QUEUE_TYPE); } return new ArrayList<>(); } private OnSubscribe<Message> getOnSubscribe() { return subscriber -> { Observable<Long> interval = Observable.interval(pollTimeMS, TimeUnit.MILLISECONDS, scheduler); interval.flatMap( (Long x) -> { if (!isRunning()) { LOGGER.debug( "Component stopped, skip listening for messages from Conductor Queue"); return Observable.from(Collections.emptyList()); } List<Message> messages = receiveMessages(); return Observable.from(messages); }) .subscribe(subscriber::onNext, subscriber::onError); }; } @Override public void start() { LOGGER.info("Started listening to {}:{}", getClass().getSimpleName(), queueName); running = true; } @Override public void stop() { LOGGER.info("Stopped listening to {}:{}", getClass().getSimpleName(), queueName); running = false; } @Override public boolean isRunning() { return running; } }
6,747
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/events
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/events/queue/ConductorEventQueueProvider.java
/* * Copyright 2022 Netflix, Inc. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.core.events.queue; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.lang.NonNull; import org.springframework.stereotype.Component; import com.netflix.conductor.core.config.ConductorProperties; import com.netflix.conductor.core.events.EventQueueProvider; import com.netflix.conductor.dao.QueueDAO; import rx.Scheduler; /** * Default provider for {@link com.netflix.conductor.core.events.queue.ObservableQueue} that listens * on the <i>conductor</i> queue prefix. * * <p><code>Set conductor.event-queues.default.enabled=false</code> to disable the default queue. 
* * @see ConductorObservableQueue */ @Component @ConditionalOnProperty( name = "conductor.event-queues.default.enabled", havingValue = "true", matchIfMissing = true) public class ConductorEventQueueProvider implements EventQueueProvider { private static final Logger LOGGER = LoggerFactory.getLogger(ConductorEventQueueProvider.class); private final Map<String, ObservableQueue> queues = new ConcurrentHashMap<>(); private final QueueDAO queueDAO; private final ConductorProperties properties; private final Scheduler scheduler; public ConductorEventQueueProvider( QueueDAO queueDAO, ConductorProperties properties, Scheduler scheduler) { this.queueDAO = queueDAO; this.properties = properties; this.scheduler = scheduler; } @Override public String getQueueType() { return "conductor"; } @Override @NonNull public ObservableQueue getQueue(String queueURI) { return queues.computeIfAbsent( queueURI, q -> new ConductorObservableQueue(queueURI, queueDAO, properties, scheduler)); } }
6,748
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/events
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/events/queue/ObservableQueue.java
/*
 * Copyright 2022 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.core.events.queue;

import java.util.List;

import org.springframework.context.Lifecycle;

import rx.Observable;

/**
 * Abstraction over an event queue that Conductor can observe, publish to, and acknowledge
 * messages on. Implementations also participate in the Spring {@link Lifecycle} so observation
 * can be started and stopped with the application context.
 */
public interface ObservableQueue extends Lifecycle {

    /**
     * @return An observable for the given queue
     */
    Observable<Message> observe();

    /**
     * @return Type of the queue
     */
    String getType();

    /**
     * @return Name of the queue
     */
    String getName();

    /**
     * @return URI identifier for the queue.
     */
    String getURI();

    /**
     * Acknowledges (completes) the given messages on the underlying queue.
     *
     * @param messages to be ack'ed
     * @return the id of the ones which could not be ack'ed
     */
    List<String> ack(List<Message> messages);

    /**
     * Negatively acknowledges the given messages. Default is a no-op; implementations whose
     * backing queue supports nack may override.
     *
     * @param messages to be Nack'ed
     */
    default void nack(List<Message> messages) {}

    /**
     * @param messages Messages to be published
     */
    void publish(List<Message> messages);

    /**
     * Used to determine if the queue supports unack/visibility timeout such that the messages will
     * re-appear on the queue after a specific period and are available to be picked up again and
     * retried.
     *
     * @return - false if the queue message need not be re-published to the queue for retriability -
     *     true if the message must be re-published to the queue for retriability
     */
    default boolean rePublishIfNoAck() {
        return false;
    }

    /**
     * Extend the lease of the unacknowledged message for longer period.
     *
     * @param message Message for which the timeout has to be changed
     * @param unackTimeout timeout in milliseconds for which the unack lease should be extended.
     *     (replaces the current value with this value)
     */
    void setUnackTimeout(Message message, long unackTimeout);

    /**
     * @return Size of the queue - no. messages pending. Note: Depending upon the implementation,
     *     this can be an approximation
     */
    long size();

    /** Used to close queue instance prior to remove from queues */
    default void close() {}
}
6,749
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/exception/TerminateWorkflowException.java
/* * Copyright 2022 Netflix, Inc. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.core.exception; import com.netflix.conductor.model.TaskModel; import com.netflix.conductor.model.WorkflowModel; import static com.netflix.conductor.model.WorkflowModel.Status.FAILED; public class TerminateWorkflowException extends RuntimeException { private final WorkflowModel.Status workflowStatus; private final TaskModel task; public TerminateWorkflowException(String reason) { this(reason, FAILED); } public TerminateWorkflowException(String reason, WorkflowModel.Status workflowStatus) { this(reason, workflowStatus, null); } public TerminateWorkflowException( String reason, WorkflowModel.Status workflowStatus, TaskModel task) { super(reason); this.workflowStatus = workflowStatus; this.task = task; } public WorkflowModel.Status getWorkflowStatus() { return workflowStatus; } public TaskModel getTask() { return task; } }
6,750
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/exception/ConflictException.java
/* * Copyright 2022 Netflix, Inc. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.core.exception; public class ConflictException extends RuntimeException { public ConflictException(String message) { super(message); } public ConflictException(String message, Object... args) { super(String.format(message, args)); } public ConflictException(String message, Throwable cause) { super(message, cause); } }
6,751
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/exception/TransientException.java
/* * Copyright 2022 Netflix, Inc. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.core.exception; public class TransientException extends RuntimeException { public TransientException(String message) { super(message); } public TransientException(String message, Throwable cause) { super(message, cause); } }
6,752
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/exception/NotFoundException.java
/* * Copyright 2022 Netflix, Inc. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.core.exception; public class NotFoundException extends RuntimeException { public NotFoundException(String message) { super(message); } public NotFoundException(String message, Object... args) { super(String.format(message, args)); } public NotFoundException(String message, Throwable cause) { super(message, cause); } }
6,753
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/exception/NonTransientException.java
/* * Copyright 2022 Netflix, Inc. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.core.exception; public class NonTransientException extends RuntimeException { public NonTransientException(String message) { super(message); } public NonTransientException(String message, Throwable cause) { super(message, cause); } }
6,754
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/event/WorkflowCreationEvent.java
/*
 * Copyright 2022 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.core.event;

import java.io.Serializable;

import com.netflix.conductor.core.execution.StartWorkflowInput;

/**
 * Event signaling that a workflow should be created, carrying the input required to start it.
 */
public class WorkflowCreationEvent implements Serializable {

    // Explicit serialVersionUID: the class is Serializable, and relying on the compiler-computed
    // UID makes the serialized form fragile across recompilation (Effective Java, Item 87).
    private static final long serialVersionUID = 1L;

    // NOTE(review): assumes StartWorkflowInput is itself Serializable — verify, otherwise
    // serializing this event would throw NotSerializableException.
    private final StartWorkflowInput startWorkflowInput;

    /** @param startWorkflowInput the input with which the workflow should be started */
    public WorkflowCreationEvent(StartWorkflowInput startWorkflowInput) {
        this.startWorkflowInput = startWorkflowInput;
    }

    /** @return the input with which the workflow should be started */
    public StartWorkflowInput getStartWorkflowInput() {
        return startWorkflowInput;
    }
}
6,755
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/event/WorkflowEvaluationEvent.java
/*
 * Copyright 2022 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.core.event;

import java.io.Serializable;

import com.netflix.conductor.model.WorkflowModel;

/**
 * Event signaling that the given workflow should be (re-)evaluated by the decider.
 */
public final class WorkflowEvaluationEvent implements Serializable {

    // Explicit serialVersionUID: the class is Serializable, and relying on the compiler-computed
    // UID makes the serialized form fragile across recompilation (Effective Java, Item 87).
    private static final long serialVersionUID = 1L;

    // NOTE(review): assumes WorkflowModel is itself Serializable — verify, otherwise
    // serializing this event would throw NotSerializableException.
    private final WorkflowModel workflowModel;

    /** @param workflowModel the workflow to be evaluated */
    public WorkflowEvaluationEvent(WorkflowModel workflowModel) {
        this.workflowModel = workflowModel;
    }

    /** @return the workflow to be evaluated */
    public WorkflowModel getWorkflowModel() {
        return workflowModel;
    }
}
6,756
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core
Create_ds/conductor/core/src/main/java/com/netflix/conductor/core/metadata/MetadataMapperService.java
/*
 * Copyright 2022 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.core.metadata;

import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;

import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;

import com.netflix.conductor.annotations.VisibleForTesting;
import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.tasks.TaskType;
import com.netflix.conductor.common.metadata.workflow.SubWorkflowParams;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.core.WorkflowContext;
import com.netflix.conductor.core.exception.NotFoundException;
import com.netflix.conductor.core.exception.TerminateWorkflowException;
import com.netflix.conductor.core.utils.Utils;
import com.netflix.conductor.dao.MetadataDAO;
import com.netflix.conductor.metrics.Monitors;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;

/**
 * Populates metadata definitions within workflow objects. Benefits of loading and populating
 * metadata definitions upfront could be:
 *
 * <ul>
 *   <li>Immutable definitions within a workflow execution with the added benefit of guaranteeing
 *       consistency at runtime.
 *   <li>Stress is reduced on the storage layer
 * </ul>
 */
@Component
public class MetadataMapperService {

    public static final Logger LOGGER = LoggerFactory.getLogger(MetadataMapperService.class);

    private final MetadataDAO metadataDAO;

    public MetadataMapperService(MetadataDAO metadataDAO) {
        this.metadataDAO = metadataDAO;
    }

    /**
     * Resolves a workflow definition by name, using the latest version when {@code version} is
     * null.
     *
     * @param name workflow name; must not be blank
     * @param version specific version, or null for the latest
     * @return the resolved definition
     * @throws NotFoundException if no matching definition exists
     */
    public WorkflowDef lookupForWorkflowDefinition(String name, Integer version) {
        Optional<WorkflowDef> potentialDef =
                version == null
                        ? lookupLatestWorkflowDefinition(name)
                        : lookupWorkflowDefinition(name, version);

        // Check if the workflow definition is valid
        return potentialDef.orElseThrow(
                () -> {
                    LOGGER.error(
                            "There is no workflow defined with name {} and version {}",
                            name,
                            version);
                    return new NotFoundException(
                            "No such workflow defined. name=%s, version=%s", name, version);
                });
    }

    /** Looks up a specific version of a workflow definition from the DAO. */
    @VisibleForTesting
    Optional<WorkflowDef> lookupWorkflowDefinition(String workflowName, int workflowVersion) {
        Utils.checkArgument(
                StringUtils.isNotBlank(workflowName),
                "Workflow name must be specified when searching for a definition");
        return metadataDAO.getWorkflowDef(workflowName, workflowVersion);
    }

    /** Looks up the latest version of a workflow definition from the DAO. */
    @VisibleForTesting
    Optional<WorkflowDef> lookupLatestWorkflowDefinition(String workflowName) {
        Utils.checkArgument(
                StringUtils.isNotBlank(workflowName),
                "Workflow name must be specified when searching for a definition");
        return metadataDAO.getLatestWorkflowDef(workflowName);
    }

    /**
     * Ensures {@code workflow} carries its workflow definition (looking it up by name/version if
     * absent) and that every task in the definition has its task definition populated.
     *
     * @param workflow workflow to enrich; mutated in place and also returned
     * @throws IllegalArgumentException if any SIMPLE task still lacks a definition afterwards
     */
    public WorkflowModel populateWorkflowWithDefinitions(WorkflowModel workflow) {
        Utils.checkNotNull(workflow, "workflow cannot be null");
        WorkflowDef workflowDefinition =
                Optional.ofNullable(workflow.getWorkflowDefinition())
                        .orElseGet(
                                () -> {
                                    // Definition not embedded on the workflow; resolve it from
                                    // metadata and attach it so it is immutable for this execution.
                                    WorkflowDef wd =
                                            lookupForWorkflowDefinition(
                                                    workflow.getWorkflowName(),
                                                    workflow.getWorkflowVersion());
                                    workflow.setWorkflowDefinition(wd);
                                    return wd;
                                });

        workflowDefinition.collectTasks().forEach(this::populateWorkflowTaskWithDefinition);
        checkNotEmptyDefinitions(workflowDefinition);

        return workflow;
    }

    /**
     * Populates task definitions for every task in the given workflow definition and verifies no
     * SIMPLE task is left without one.
     */
    public WorkflowDef populateTaskDefinitions(WorkflowDef workflowDefinition) {
        Utils.checkNotNull(workflowDefinition, "workflowDefinition cannot be null");
        workflowDefinition.collectTasks().forEach(this::populateWorkflowTaskWithDefinition);
        checkNotEmptyDefinitions(workflowDefinition);
        return workflowDefinition;
    }

    /**
     * Attaches the stored task definition to {@code workflowTask} if it has none. SIMPLE tasks
     * with no stored definition get an ad-hoc default one; SUB_WORKFLOW tasks additionally get
     * their sub-workflow version resolved.
     */
    private void populateWorkflowTaskWithDefinition(WorkflowTask workflowTask) {
        Utils.checkNotNull(workflowTask, "WorkflowTask cannot be null");
        if (shouldPopulateTaskDefinition(workflowTask)) {
            workflowTask.setTaskDefinition(metadataDAO.getTaskDef(workflowTask.getName()));
            if (workflowTask.getTaskDefinition() == null
                    && workflowTask.getType().equals(TaskType.SIMPLE.name())) {
                // ad-hoc task def
                workflowTask.setTaskDefinition(new TaskDef(workflowTask.getName()));
            }
        }
        if (workflowTask.getType().equals(TaskType.SUB_WORKFLOW.name())) {
            populateVersionForSubWorkflow(workflowTask);
        }
    }

    /**
     * Resolves a missing sub-workflow version to the latest registered version of the named
     * sub-workflow.
     *
     * @throws TerminateWorkflowException if the sub-workflow has no registered definition
     */
    private void populateVersionForSubWorkflow(WorkflowTask workflowTask) {
        Utils.checkNotNull(workflowTask, "WorkflowTask cannot be null");
        SubWorkflowParams subworkflowParams = workflowTask.getSubWorkflowParam();
        if (subworkflowParams.getVersion() == null) {
            String subWorkflowName = subworkflowParams.getName();
            Integer subWorkflowVersion =
                    metadataDAO
                            .getLatestWorkflowDef(subWorkflowName)
                            .map(WorkflowDef::getVersion)
                            .orElseThrow(
                                    () -> {
                                        String reason =
                                                String.format(
                                                        "The Task %s defined as a sub-workflow has no workflow definition available ",
                                                        subWorkflowName);
                                        LOGGER.error(reason);
                                        return new TerminateWorkflowException(reason);
                                    });
            subworkflowParams.setVersion(subWorkflowVersion);
        }
    }

    /**
     * Verifies that every SIMPLE task in the definition has a task definition attached; records a
     * workflow-start-error metric and throws otherwise.
     *
     * @throws IllegalArgumentException listing all task names with missing definitions
     */
    private void checkNotEmptyDefinitions(WorkflowDef workflowDefinition) {
        Utils.checkNotNull(workflowDefinition, "WorkflowDefinition cannot be null");

        // Obtain the names of the tasks with missing definitions
        Set<String> missingTaskDefinitionNames =
                workflowDefinition.collectTasks().stream()
                        .filter(
                                workflowTask ->
                                        workflowTask.getType().equals(TaskType.SIMPLE.name()))
                        .filter(this::shouldPopulateTaskDefinition)
                        .map(WorkflowTask::getName)
                        .collect(Collectors.toSet());

        if (!missingTaskDefinitionNames.isEmpty()) {
            LOGGER.error(
                    "Cannot find the task definitions for the following tasks used in workflow: {}",
                    missingTaskDefinitionNames);
            Monitors.recordWorkflowStartError(
                    workflowDefinition.getName(), WorkflowContext.get().getClientApp());
            throw new IllegalArgumentException(
                    "Cannot find the task definitions for the following tasks used in workflow: "
                            + missingTaskDefinitionNames);
        }
    }

    /** Populates the definition of a single task's underlying workflow task; returns the task. */
    public TaskModel populateTaskWithDefinition(TaskModel task) {
        Utils.checkNotNull(task, "Task cannot be null");
        populateWorkflowTaskWithDefinition(task.getWorkflowTask());
        return task;
    }

    /**
     * A task needs its definition populated only when it has none yet and carries a non-blank
     * name to look one up by.
     */
    @VisibleForTesting
    boolean shouldPopulateTaskDefinition(WorkflowTask workflowTask) {
        Utils.checkNotNull(workflowTask, "WorkflowTask cannot be null");
        Utils.checkNotNull(workflowTask.getType(), "WorkflowTask type cannot be null");
        return workflowTask.getTaskDefinition() == null
                && StringUtils.isNotBlank(workflowTask.getName());
    }
}
6,757
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor
Create_ds/conductor/core/src/main/java/com/netflix/conductor/dao/EventHandlerDAO.java
/*
 * Copyright 2022 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.dao;

import java.util.List;

import com.netflix.conductor.common.metadata.events.EventHandler;

/** An abstraction to enable different Event Handler store implementations */
public interface EventHandlerDAO {

    /**
     * Registers a new event handler.
     *
     * @param eventHandler Event handler to be added.
     *     <p><em>NOTE:</em> Will throw an exception if an event handler already exists with the
     *     name
     */
    void addEventHandler(EventHandler eventHandler);

    /**
     * Replaces the stored handler matching the given handler's name.
     *
     * @param eventHandler Event handler to be updated.
     */
    void updateEventHandler(EventHandler eventHandler);

    /**
     * @param name Removes the event handler from the system
     */
    void removeEventHandler(String name);

    /**
     * @return All the event handlers registered in the system
     */
    List<EventHandler> getAllEventHandlers();

    /**
     * @param event name of the event
     * @param activeOnly if true, returns only the active handlers
     * @return Returns the list of all the event handlers for a given event
     */
    List<EventHandler> getEventHandlersForEvent(String event, boolean activeOnly);
}
6,758
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor
Create_ds/conductor/core/src/main/java/com/netflix/conductor/dao/IndexDAO.java
/*
 * Copyright 2022 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.dao;

import java.util.List;
import java.util.concurrent.CompletableFuture;

import com.netflix.conductor.common.metadata.events.EventExecution;
import com.netflix.conductor.common.metadata.tasks.TaskExecLog;
import com.netflix.conductor.common.run.SearchResult;
import com.netflix.conductor.common.run.TaskSummary;
import com.netflix.conductor.common.run.WorkflowSummary;
import com.netflix.conductor.core.events.queue.Message;

/**
 * DAO to index workflow and task details for searching.
 *
 * <p>Implementations back the search/archival features; many mutating operations come in a
 * synchronous and an {@code async*} variant returning a {@link CompletableFuture}.
 */
public interface IndexDAO {

    /** Setup method in charge of initializing/populating the index. */
    void setup() throws Exception;

    /**
     * Indexes (or re-indexes) the given workflow summary.
     *
     * @param workflow Workflow to be indexed
     */
    void indexWorkflow(WorkflowSummary workflow);

    /**
     * Indexes the given workflow summary asynchronously.
     *
     * @param workflow Workflow to be indexed
     * @return CompletableFuture of type void
     */
    CompletableFuture<Void> asyncIndexWorkflow(WorkflowSummary workflow);

    /**
     * @param task Task to be indexed
     */
    void indexTask(TaskSummary task);

    /**
     * @param task Task to be indexed asynchronously
     * @return CompletableFuture of type void
     */
    CompletableFuture<Void> asyncIndexTask(TaskSummary task);

    /**
     * @param query SQL-like query for workflow search parameters
     * @param freeText additional query in free text (Lucene syntax)
     * @param start start index for pagination
     * @param count number of workflow ids to be returned
     * @param sort sort options
     * @return List of workflow ids for the matching query
     */
    SearchResult<String> searchWorkflows(
            String query, String freeText, int start, int count, List<String> sort);

    /**
     * @param query SQL-like query for workflow search parameters
     * @param freeText additional query in free text (Lucene syntax)
     * @param start start index for pagination
     * @param count number of workflows to be returned
     * @param sort sort options
     * @return List of workflow summaries for the matching query
     */
    SearchResult<WorkflowSummary> searchWorkflowSummary(
            String query, String freeText, int start, int count, List<String> sort);

    /**
     * @param query SQL-like query for task search parameters
     * @param freeText additional query in free text (Lucene syntax)
     * @param start start index for pagination
     * @param count number of task ids to be returned
     * @param sort sort options
     * @return List of task ids for the matching query
     */
    SearchResult<String> searchTasks(
            String query, String freeText, int start, int count, List<String> sort);

    /**
     * @param query SQL-like query for task search parameters
     * @param freeText additional query in free text (Lucene syntax)
     * @param start start index for pagination
     * @param count number of tasks to be returned
     * @param sort sort options
     * @return List of task summaries for the matching query
     */
    SearchResult<TaskSummary> searchTaskSummary(
            String query, String freeText, int start, int count, List<String> sort);

    /**
     * Remove the workflow index
     *
     * @param workflowId workflow to be removed
     */
    void removeWorkflow(String workflowId);

    /**
     * Remove the workflow index asynchronously
     *
     * @param workflowId workflow to be removed
     * @return CompletableFuture of type void
     */
    CompletableFuture<Void> asyncRemoveWorkflow(String workflowId);

    /**
     * Updates the index
     *
     * @param workflowInstanceId id of the workflow
     * @param keys keys to be updated
     * @param values values. Number of keys and values MUST match.
     */
    void updateWorkflow(String workflowInstanceId, String[] keys, Object[] values);

    /**
     * Updates the index asynchronously
     *
     * @param workflowInstanceId id of the workflow
     * @param keys keys to be updated
     * @param values values. Number of keys and values MUST match.
     * @return CompletableFuture of type void
     */
    CompletableFuture<Void> asyncUpdateWorkflow(
            String workflowInstanceId, String[] keys, Object[] values);

    /**
     * Remove the task index
     *
     * @param workflowId workflow containing task
     * @param taskId task to be removed
     */
    void removeTask(String workflowId, String taskId);

    /**
     * Remove the task index asynchronously
     *
     * @param workflowId workflow containing task
     * @param taskId task to be removed
     * @return CompletableFuture of type void
     */
    CompletableFuture<Void> asyncRemoveTask(String workflowId, String taskId);

    /**
     * Updates the task index
     *
     * @param workflowId id of the workflow
     * @param taskId id of the task
     * @param keys keys to be updated
     * @param values values. Number of keys and values MUST match.
     */
    void updateTask(String workflowId, String taskId, String[] keys, Object[] values);

    /**
     * Updates the task index asynchronously
     *
     * @param workflowId id of the workflow
     * @param taskId id of the task
     * @param keys keys to be updated
     * @param values values. Number of keys and values MUST match.
     * @return CompletableFuture of type void
     */
    CompletableFuture<Void> asyncUpdateTask(
            String workflowId, String taskId, String[] keys, Object[] values);

    /**
     * Retrieves a specific field from the index
     *
     * @param workflowInstanceId id of the workflow
     * @param key field to be retrieved
     * @return value of the field as string
     */
    String get(String workflowInstanceId, String key);

    /**
     * @param logs Task Execution logs to be indexed
     */
    void addTaskExecutionLogs(List<TaskExecLog> logs);

    /**
     * @param logs Task Execution logs to be indexed asynchronously
     * @return CompletableFuture of type void
     */
    CompletableFuture<Void> asyncAddTaskExecutionLogs(List<TaskExecLog> logs);

    /**
     * @param taskId Id of the task for which to fetch the execution logs
     * @return the task execution logs for the given task id
     */
    List<TaskExecLog> getTaskExecutionLogs(String taskId);

    /**
     * @param eventExecution Event Execution to be indexed
     */
    void addEventExecution(EventExecution eventExecution);

    // NOTE(review): no Javadoc in original; presumably returns all executions recorded for the
    // given event handler name — confirm against implementations.
    List<EventExecution> getEventExecutions(String event);

    /**
     * @param eventExecution Event Execution to be indexed asynchronously
     * @return CompletableFuture of type void
     */
    CompletableFuture<Void> asyncAddEventExecution(EventExecution eventExecution);

    /**
     * Adds an incoming external message into the index
     *
     * @param queue Name of the registered queue
     * @param msg Message
     */
    void addMessage(String queue, Message msg);

    /**
     * Adds an incoming external message into the index asynchronously
     *
     * @param queue Name of the registered queue
     * @param message {@link Message}
     * @return CompletableFuture of type Void
     */
    CompletableFuture<Void> asyncAddMessage(String queue, Message message);

    // NOTE(review): no Javadoc in original; presumably returns indexed messages for the queue.
    List<Message> getMessages(String queue);

    /**
     * Search for workflows completed or failed beyond archiveTtlDays
     *
     * @param indexName Name of the index to search
     * @param archiveTtlDays Archival Time to Live
     * @return List of workflow ids matching the pattern
     */
    List<String> searchArchivableWorkflows(String indexName, long archiveTtlDays);

    /**
     * Get total workflow counts that match the query
     *
     * @param query SQL-like query for workflow search parameters
     * @param freeText additional query in free text (Lucene syntax)
     * @return Number of matches for the query
     */
    long getWorkflowCount(String query, String freeText);
}
6,759
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor
Create_ds/conductor/core/src/main/java/com/netflix/conductor/dao/QueueDAO.java
/*
 * Copyright 2022 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.dao;

import java.util.List;
import java.util.Map;

import com.netflix.conductor.core.events.queue.Message;

/** DAO responsible for managing queuing for the tasks. */
public interface QueueDAO {

    /**
     * Pushes a message onto the named queue.
     *
     * @param queueName name of the queue
     * @param id message id
     * @param offsetTimeInSecond time in seconds, after which the message should be marked visible.
     *     (for timed queues)
     */
    void push(String queueName, String id, long offsetTimeInSecond);

    /**
     * Pushes a message with an explicit priority onto the named queue.
     *
     * @param queueName name of the queue
     * @param id message id
     * @param priority message priority (between 0 and 99)
     * @param offsetTimeInSecond time in seconds, after which the message should be marked visible.
     *     (for timed queues)
     */
    void push(String queueName, String id, int priority, long offsetTimeInSecond);

    /**
     * Pushes a batch of messages onto the named queue.
     *
     * @param queueName Name of the queue
     * @param messages messages to be pushed
     */
    void push(String queueName, List<Message> messages);

    /**
     * @param queueName Name of the queue
     * @param id message id
     * @param offsetTimeInSecond time in seconds, after which the message should be marked visible.
     *     (for timed queues)
     * @return true if the element was added to the queue. false otherwise indicating the element
     *     already exists in the queue.
     */
    boolean pushIfNotExists(String queueName, String id, long offsetTimeInSecond);

    /**
     * @param queueName Name of the queue
     * @param id message id
     * @param priority message priority (between 0 and 99)
     * @param offsetTimeInSecond time in seconds, after which the message should be marked visible.
     *     (for timed queues)
     * @return true if the element was added to the queue. false otherwise indicating the element
     *     already exists in the queue.
     */
    boolean pushIfNotExists(String queueName, String id, int priority, long offsetTimeInSecond);

    /**
     * @param queueName Name of the queue
     * @param count number of messages to be read from the queue
     * @param timeout timeout in milliseconds
     * @return list of message ids from the named queue
     */
    List<String> pop(String queueName, int count, int timeout);

    /**
     * @param queueName Name of the queue
     * @param count number of messages to be read from the queue
     * @param timeout timeout in milliseconds
     * @return list of messages from the named queue
     */
    List<Message> pollMessages(String queueName, int count, int timeout);

    /**
     * Removes a message from the queue.
     *
     * @param queueName Name of the queue
     * @param messageId Message id
     */
    void remove(String queueName, String messageId);

    /**
     * @param queueName Name of the queue
     * @return size of the queue
     */
    int getSize(String queueName);

    /**
     * Acknowledges a previously-polled message.
     *
     * @param queueName Name of the queue
     * @param messageId Message Id
     * @return true if the message was found and ack'ed
     */
    boolean ack(String queueName, String messageId);

    /**
     * Extend the lease of the unacknowledged message for longer period.
     *
     * @param queueName Name of the queue
     * @param messageId Message Id
     * @param unackTimeout timeout in milliseconds for which the unack lease should be extended.
     *     (replaces the current value with this value)
     * @return true if the message was updated with extended lease. false otherwise.
     */
    boolean setUnackTimeout(String queueName, String messageId, long unackTimeout);

    /**
     * Removes all messages from the queue.
     *
     * @param queueName Name of the queue
     */
    void flush(String queueName);

    /**
     * @return key : queue name, value: size of the queue
     */
    Map<String, Long> queuesDetail();

    /**
     * @return key : queue name, value: map of shard name to size and unack queue size
     */
    Map<String, Map<String, Map<String, Long>>> queuesDetailVerbose();

    // Default no-op: implementations that track unacknowledged messages override this to
    // requeue/expire them.
    default void processUnacks(String queueName) {}

    /**
     * Resets the offsetTime on a message to 0, without pulling out the message from the queue
     *
     * @param queueName name of the queue
     * @param id message id
     * @return true if the message is in queue and the change was successful else returns false
     */
    boolean resetOffsetTime(String queueName, String id);

    /**
     * Postpone a given message with postponeDurationInSeconds, so that the message won't be
     * available for further polls until specified duration. By default, the message is removed and
     * pushed backed with postponeDurationInSeconds to be backwards compatible.
     *
     * @param queueName name of the queue
     * @param messageId message id
     * @param priority message priority (between 0 and 99)
     * @param postponeDurationInSeconds duration in seconds by which the message is to be postponed
     */
    default boolean postpone(
            String queueName, String messageId, int priority, long postponeDurationInSeconds) {
        // Remove-then-push is not atomic; a concurrent poller could observe the queue without the
        // message in between. Implementations may override with an atomic postpone.
        remove(queueName, messageId);
        push(queueName, messageId, priority, postponeDurationInSeconds);
        return true;
    }

    /**
     * Check if the message with given messageId exists in the Queue.
     *
     * @param queueName name of the queue
     * @param messageId message id to look up
     * @return true if the message is present in the queue
     */
    default boolean containsMessage(String queueName, String messageId) {
        throw new UnsupportedOperationException(
                "Please ensure your provided Queue implementation overrides and implements this method.");
    }
}
6,760
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor
Create_ds/conductor/core/src/main/java/com/netflix/conductor/dao/ConcurrentExecutionLimitDAO.java
/*
 * Copyright 2022 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.dao;

import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.model.TaskModel;

/**
 * A contract to support concurrency limits of tasks.
 *
 * <p>The add/remove defaults throw {@link UnsupportedOperationException}; implementations that
 * track in-progress tasks override them.
 *
 * @since v3.3.5.
 */
public interface ConcurrentExecutionLimitDAO {

    /**
     * Registers the task as counting toward its definition's concurrency limit.
     *
     * @param task the task entering execution
     */
    default void addTaskToLimit(TaskModel task) {
        throw new UnsupportedOperationException(
                getClass() + " does not support addTaskToLimit method.");
    }

    /**
     * Removes the task from its definition's concurrency-limit accounting.
     *
     * @param task the task leaving execution
     */
    default void removeTaskFromLimit(TaskModel task) {
        throw new UnsupportedOperationException(
                getClass() + " does not support removeTaskFromLimit method.");
    }

    /**
     * Checks if the number of tasks in progress for the given taskDef will exceed the limit if the
     * task is scheduled to be in progress (given to the worker or for system tasks start() method
     * called)
     *
     * @param task The task to be executed. Limit is set in the Task's definition
     * @return true if by executing this task, the limit is breached. false otherwise.
     * @see TaskDef#concurrencyLimit()
     */
    boolean exceedsLimit(TaskModel task);
}
6,761
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor
Create_ds/conductor/core/src/main/java/com/netflix/conductor/dao/ExecutionDAO.java
/*
 * Copyright 2022 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.dao;

import java.util.List;

import com.netflix.conductor.common.metadata.events.EventExecution;
import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.model.TaskModel;
import com.netflix.conductor.model.WorkflowModel;

/** Data access layer for storing workflow executions */
public interface ExecutionDAO {

    /**
     * @param taskName Name of the task
     * @param workflowId Workflow instance id
     * @return List of pending tasks (in_progress)
     */
    List<TaskModel> getPendingTasksByWorkflow(String taskName, String workflowId);

    /**
     * @param taskType Type of task
     * @param startKey start key for pagination
     * @param count number of tasks to return
     * @return List of tasks starting from startKey
     */
    List<TaskModel> getTasks(String taskType, String startKey, int count);

    /**
     * @param tasks tasks to be created
     * @return List of tasks that were created.
     *     <p><b>Note on the primary key constraint</b>
     *     <p>For a given task reference name and retryCount should be considered unique/primary
     *     key. Given two tasks with the same reference name and retryCount only one should be added
     *     to the database.
     */
    List<TaskModel> createTasks(List<TaskModel> tasks);

    /**
     * @param task Task to be updated
     */
    void updateTask(TaskModel task);

    /**
     * Checks if the number of tasks in progress for the given taskDef will exceed the limit if the
     * task is scheduled to be in progress (given to the worker or for system tasks start() method
     * called)
     *
     * @param task The task to be executed. Limit is set in the Task's definition
     * @return true if by executing this task, the limit is breached. false otherwise.
     * @see TaskDef#concurrencyLimit()
     * @deprecated Since v3.3.5. Use {@link ConcurrentExecutionLimitDAO#exceedsLimit(TaskModel)}.
     */
    @Deprecated
    default boolean exceedsInProgressLimit(TaskModel task) {
        // Fixed: the original message concatenated the class name directly onto "does", producing
        // e.g. "interface FooDAOdoes not support ...". A leading space keeps it readable.
        throw new UnsupportedOperationException(
                getClass() + " does not support exceedsInProgressLimit");
    }

    /**
     * @param taskId id of the task to be removed.
     * @return true if the deletion is successful, false otherwise.
     */
    boolean removeTask(String taskId);

    /**
     * @param taskId Task instance id
     * @return Task
     */
    TaskModel getTask(String taskId);

    /**
     * @param taskIds Task instance ids
     * @return List of tasks
     */
    List<TaskModel> getTasks(List<String> taskIds);

    /**
     * @param taskType Type of the task for which to retrieve the list of pending tasks
     * @return List of pending tasks
     */
    List<TaskModel> getPendingTasksForTaskType(String taskType);

    /**
     * @param workflowId Workflow instance id
     * @return List of tasks for the given workflow instance id
     */
    List<TaskModel> getTasksForWorkflow(String workflowId);

    /**
     * @param workflow Workflow to be created
     * @return Id of the newly created workflow
     */
    String createWorkflow(WorkflowModel workflow);

    /**
     * @param workflow Workflow to be updated
     * @return Id of the updated workflow
     */
    String updateWorkflow(WorkflowModel workflow);

    /**
     * @param workflowId workflow instance id
     * @return true if the deletion is successful, false otherwise
     */
    boolean removeWorkflow(String workflowId);

    /**
     * Removes the workflow with ttl seconds
     *
     * @param workflowId workflow instance id
     * @param ttlSeconds time to live in seconds.
     * @return true if the removal/expiry was scheduled successfully, false otherwise
     */
    boolean removeWorkflowWithExpiry(String workflowId, int ttlSeconds);

    /**
     * @param workflowType Workflow Type
     * @param workflowId workflow instance id
     */
    void removeFromPendingWorkflow(String workflowType, String workflowId);

    /**
     * @param workflowId workflow instance id
     * @return Workflow
     */
    WorkflowModel getWorkflow(String workflowId);

    /**
     * @param workflowId workflow instance id
     * @param includeTasks if set, includes the tasks (pending and completed) sorted by Task
     *     Sequence number in Workflow.
     * @return Workflow instance details
     */
    WorkflowModel getWorkflow(String workflowId, boolean includeTasks);

    /**
     * @param workflowName name of the workflow
     * @param version the workflow version
     * @return List of workflow ids which are running
     */
    List<String> getRunningWorkflowIds(String workflowName, int version);

    /**
     * @param workflowName Name of the workflow
     * @param version the workflow version
     * @return List of workflows that are running
     */
    List<WorkflowModel> getPendingWorkflowsByType(String workflowName, int version);

    /**
     * @param workflowName Name of the workflow
     * @return No. of running workflows
     */
    long getPendingWorkflowCount(String workflowName);

    /**
     * @param taskDefName Name of the task
     * @return Number of tasks currently in IN_PROGRESS status
     */
    long getInProgressTaskCount(String taskDefName);

    /**
     * @param workflowName Name of the workflow
     * @param startTime epoch time
     * @param endTime epoch time
     * @return List of workflows between start and end time
     */
    List<WorkflowModel> getWorkflowsByType(String workflowName, Long startTime, Long endTime);

    /**
     * @param workflowName workflow name
     * @param correlationId Correlation Id
     * @param includeTasks Option to includeTasks in results
     * @return List of workflows by correlation id
     */
    List<WorkflowModel> getWorkflowsByCorrelationId(
            String workflowName, String correlationId, boolean includeTasks);

    /**
     * @return true, if the DAO implementation is capable of searching across workflows false, if
     *     the DAO implementation cannot perform searches across workflows (and needs to use
     *     indexDAO)
     */
    boolean canSearchAcrossWorkflows();

    // Events

    /**
     * @param eventExecution Event Execution to be stored
     * @return true if the event was added. false otherwise when the event by id is already stored.
     */
    boolean addEventExecution(EventExecution eventExecution);

    /**
     * @param eventExecution Event execution to be updated
     */
    void updateEventExecution(EventExecution eventExecution);

    /**
     * @param eventExecution Event execution to be removed
     */
    void removeEventExecution(EventExecution eventExecution);
}
6,762
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor
Create_ds/conductor/core/src/main/java/com/netflix/conductor/dao/MetadataDAO.java
/*
 * Copyright 2022 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.dao;

import java.util.List;
import java.util.Optional;

import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;

/** Data access layer for the workflow metadata - task definitions and workflow definitions */
public interface MetadataDAO {

    /**
     * @param taskDef task definition to be created
     * @return the created task definition
     */
    TaskDef createTaskDef(TaskDef taskDef);

    /**
     * @param taskDef task definition to be updated.
     * @return the updated task definition
     */
    TaskDef updateTaskDef(TaskDef taskDef);

    /**
     * @param name Name of the task
     * @return Task Definition
     */
    TaskDef getTaskDef(String name);

    /**
     * @return All the task definitions
     */
    List<TaskDef> getAllTaskDefs();

    /**
     * @param name Name of the task definition to be removed
     */
    void removeTaskDef(String name);

    /**
     * @param def workflow definition to be created
     */
    void createWorkflowDef(WorkflowDef def);

    /**
     * @param def workflow definition to be updated
     */
    void updateWorkflowDef(WorkflowDef def);

    /**
     * @param name Name of the workflow
     * @return Workflow Definition with the highest version, if any
     */
    Optional<WorkflowDef> getLatestWorkflowDef(String name);

    /**
     * @param name Name of the workflow
     * @param version version
     * @return workflow definition, if present
     */
    Optional<WorkflowDef> getWorkflowDef(String name, int version);

    /**
     * @param name Name of the workflow definition to be removed
     * @param version Version of the workflow definition to be removed
     */
    void removeWorkflowDef(String name, Integer version);

    /**
     * @return List of all the workflow definitions
     */
    List<WorkflowDef> getAllWorkflowDefs();

    /**
     * @return List of the latest versions of the workflow definitions
     */
    List<WorkflowDef> getAllWorkflowDefsLatestVersions();
}
6,763
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor
Create_ds/conductor/core/src/main/java/com/netflix/conductor/dao/RateLimitingDAO.java
/*
 * Copyright 2022 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.dao;

import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.model.TaskModel;

/** An abstraction to enable different Rate Limiting implementations */
public interface RateLimitingDAO {

    /**
     * Checks if the Task is rate limited or not based on the {@link
     * TaskModel#getRateLimitPerFrequency()} and {@link TaskModel#getRateLimitFrequencyInSeconds()}
     *
     * @param task the task to be evaluated for rate limiting
     * @param taskDef the task definition supplying rate-limit configuration
     * @return true if the {@link TaskModel} is rate limited, false otherwise
     */
    boolean exceedsRateLimitPerFrequency(TaskModel task, TaskDef taskDef);
}
6,764
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor
Create_ds/conductor/core/src/main/java/com/netflix/conductor/dao/PollDataDAO.java
/*
 * Copyright 2022 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.dao;

import java.util.List;

import com.netflix.conductor.common.metadata.tasks.PollData;

/** An abstraction to enable different PollData store implementations */
public interface PollDataDAO {

    /**
     * Updates the {@link PollData} information with the most recently polled data for a task queue.
     *
     * @param taskDefName name of the task as specified in the task definition
     * @param domain domain in which this task is being polled from
     * @param workerId the identifier of the worker polling for this task
     */
    void updateLastPollData(String taskDefName, String domain, String workerId);

    /**
     * Retrieve the {@link PollData} for the given task in the given domain.
     *
     * @param taskDefName name of the task as specified in the task definition
     * @param domain domain for which {@link PollData} is being requested
     * @return the {@link PollData} for the given task queue in the specified domain
     */
    PollData getPollData(String taskDefName, String domain);

    /**
     * Retrieve the {@link PollData} for the given task across all domains.
     *
     * @param taskDefName name of the task as specified in the task definition
     * @return the {@link PollData} for the given task queue in all domains
     */
    List<PollData> getPollData(String taskDefName);

    /**
     * Retrieve the {@link PollData} for all task types.
     *
     * <p>Optional operation: the default implementation throws, so callers must only use it with
     * stores known to support it.
     *
     * @return the {@link PollData} for all task types
     */
    default List<PollData> getAllPollData() {
        throw new UnsupportedOperationException(
                "The selected PollDataDAO ("
                        + this.getClass().getSimpleName()
                        + ") does not implement the getAllPollData() method");
    }
}
6,765
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor
Create_ds/conductor/core/src/main/java/com/netflix/conductor/validations/WorkflowTaskTypeConstraint.java
/* * Copyright 2022 Netflix, Inc. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.validations; import java.lang.annotation.Documented; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; import java.text.ParseException; import java.time.format.DateTimeParseException; import java.util.Optional; import javax.validation.Constraint; import javax.validation.ConstraintValidator; import javax.validation.ConstraintValidatorContext; import javax.validation.Payload; import org.apache.commons.lang3.StringUtils; import com.netflix.conductor.common.metadata.tasks.TaskDef; import com.netflix.conductor.common.metadata.tasks.TaskType; import com.netflix.conductor.common.metadata.workflow.WorkflowTask; import com.netflix.conductor.core.utils.DateTimeUtils; import static com.netflix.conductor.core.execution.tasks.Terminate.getTerminationStatusParameter; import static com.netflix.conductor.core.execution.tasks.Terminate.validateInputStatus; import static com.netflix.conductor.core.execution.tasks.Wait.DURATION_INPUT; import static com.netflix.conductor.core.execution.tasks.Wait.UNTIL_INPUT; import static java.lang.annotation.ElementType.ANNOTATION_TYPE; import static java.lang.annotation.ElementType.TYPE; /** * This constraint class validates following things. 1. Correct parameters are set depending on task * type. 
*/ @Documented @Constraint(validatedBy = WorkflowTaskTypeConstraint.WorkflowTaskValidator.class) @Target({TYPE, ANNOTATION_TYPE}) @Retention(RetentionPolicy.RUNTIME) public @interface WorkflowTaskTypeConstraint { String message() default ""; Class<?>[] groups() default {}; Class<? extends Payload>[] payload() default {}; class WorkflowTaskValidator implements ConstraintValidator<WorkflowTaskTypeConstraint, WorkflowTask> { final String PARAM_REQUIRED_STRING_FORMAT = "%s field is required for taskType: %s taskName: %s"; @Override public void initialize(WorkflowTaskTypeConstraint constraintAnnotation) {} @Override public boolean isValid(WorkflowTask workflowTask, ConstraintValidatorContext context) { context.disableDefaultConstraintViolation(); boolean valid = true; // depending on task type check if required parameters are set or not switch (workflowTask.getType()) { case TaskType.TASK_TYPE_EVENT: valid = isEventTaskValid(workflowTask, context); break; case TaskType.TASK_TYPE_DECISION: valid = isDecisionTaskValid(workflowTask, context); break; case TaskType.TASK_TYPE_SWITCH: valid = isSwitchTaskValid(workflowTask, context); break; case TaskType.TASK_TYPE_DYNAMIC: valid = isDynamicTaskValid(workflowTask, context); break; case TaskType.TASK_TYPE_FORK_JOIN_DYNAMIC: valid = isDynamicForkJoinValid(workflowTask, context); break; case TaskType.TASK_TYPE_HTTP: valid = isHttpTaskValid(workflowTask, context); break; case TaskType.TASK_TYPE_FORK_JOIN: valid = isForkJoinTaskValid(workflowTask, context); break; case TaskType.TASK_TYPE_TERMINATE: valid = isTerminateTaskValid(workflowTask, context); break; case TaskType.TASK_TYPE_KAFKA_PUBLISH: valid = isKafkaPublishTaskValid(workflowTask, context); break; case TaskType.TASK_TYPE_DO_WHILE: valid = isDoWhileTaskValid(workflowTask, context); break; case TaskType.TASK_TYPE_SUB_WORKFLOW: valid = isSubWorkflowTaskValid(workflowTask, context); break; case TaskType.TASK_TYPE_JSON_JQ_TRANSFORM: valid = 
isJSONJQTransformTaskValid(workflowTask, context); break; case TaskType.TASK_TYPE_WAIT: valid = isWaitTaskValid(workflowTask, context); break; } return valid; } private boolean isEventTaskValid( WorkflowTask workflowTask, ConstraintValidatorContext context) { boolean valid = true; if (workflowTask.getSink() == null) { String message = String.format( PARAM_REQUIRED_STRING_FORMAT, "sink", TaskType.TASK_TYPE_EVENT, workflowTask.getName()); context.buildConstraintViolationWithTemplate(message).addConstraintViolation(); valid = false; } return valid; } private boolean isDecisionTaskValid( WorkflowTask workflowTask, ConstraintValidatorContext context) { boolean valid = true; if (workflowTask.getCaseValueParam() == null && workflowTask.getCaseExpression() == null) { String message = String.format( PARAM_REQUIRED_STRING_FORMAT, "caseValueParam or caseExpression", TaskType.DECISION, workflowTask.getName()); context.buildConstraintViolationWithTemplate(message).addConstraintViolation(); valid = false; } if (workflowTask.getDecisionCases() == null) { String message = String.format( PARAM_REQUIRED_STRING_FORMAT, "decisionCases", TaskType.DECISION, workflowTask.getName()); context.buildConstraintViolationWithTemplate(message).addConstraintViolation(); valid = false; } else if ((workflowTask.getDecisionCases() != null || workflowTask.getCaseExpression() != null) && (workflowTask.getDecisionCases().size() == 0)) { String message = String.format( "decisionCases should have atleast one task for taskType: %s taskName: %s", TaskType.DECISION, workflowTask.getName()); context.buildConstraintViolationWithTemplate(message).addConstraintViolation(); valid = false; } return valid; } private boolean isSwitchTaskValid( WorkflowTask workflowTask, ConstraintValidatorContext context) { boolean valid = true; if (workflowTask.getEvaluatorType() == null) { String message = String.format( PARAM_REQUIRED_STRING_FORMAT, "evaluatorType", TaskType.SWITCH, workflowTask.getName()); 
context.buildConstraintViolationWithTemplate(message).addConstraintViolation(); valid = false; } else if (workflowTask.getExpression() == null) { String message = String.format( PARAM_REQUIRED_STRING_FORMAT, "expression", TaskType.SWITCH, workflowTask.getName()); context.buildConstraintViolationWithTemplate(message).addConstraintViolation(); valid = false; } if (workflowTask.getDecisionCases() == null) { String message = String.format( PARAM_REQUIRED_STRING_FORMAT, "decisionCases", TaskType.SWITCH, workflowTask.getName()); context.buildConstraintViolationWithTemplate(message).addConstraintViolation(); valid = false; } else if (workflowTask.getDecisionCases() != null && workflowTask.getDecisionCases().size() == 0) { String message = String.format( "decisionCases should have atleast one task for taskType: %s taskName: %s", TaskType.SWITCH, workflowTask.getName()); context.buildConstraintViolationWithTemplate(message).addConstraintViolation(); valid = false; } return valid; } private boolean isDoWhileTaskValid( WorkflowTask workflowTask, ConstraintValidatorContext context) { boolean valid = true; if (workflowTask.getLoopCondition() == null) { String message = String.format( PARAM_REQUIRED_STRING_FORMAT, "loopExpression", TaskType.DO_WHILE, workflowTask.getName()); context.buildConstraintViolationWithTemplate(message).addConstraintViolation(); valid = false; } if (workflowTask.getLoopOver() == null || workflowTask.getLoopOver().size() == 0) { String message = String.format( PARAM_REQUIRED_STRING_FORMAT, "loopover", TaskType.DO_WHILE, workflowTask.getName()); context.buildConstraintViolationWithTemplate(message).addConstraintViolation(); valid = false; } return valid; } private boolean isDynamicTaskValid( WorkflowTask workflowTask, ConstraintValidatorContext context) { boolean valid = true; if (workflowTask.getDynamicTaskNameParam() == null) { String message = String.format( PARAM_REQUIRED_STRING_FORMAT, "dynamicTaskNameParam", TaskType.DYNAMIC, workflowTask.getName()); 
context.buildConstraintViolationWithTemplate(message).addConstraintViolation(); valid = false; } return valid; } private boolean isWaitTaskValid( WorkflowTask workflowTask, ConstraintValidatorContext context) { boolean valid = true; String duration = Optional.ofNullable(workflowTask.getInputParameters().get(DURATION_INPUT)) .orElse("") .toString(); String until = Optional.ofNullable(workflowTask.getInputParameters().get(UNTIL_INPUT)) .orElse("") .toString(); if (StringUtils.isNotBlank(duration) && StringUtils.isNotBlank(until)) { String message = "Both 'duration' and 'until' specified. Please provide only one input"; context.buildConstraintViolationWithTemplate(message).addConstraintViolation(); valid = false; } try { if (StringUtils.isNotBlank(duration) && !(duration.startsWith("${"))) { DateTimeUtils.parseDuration(duration); } else if (StringUtils.isNotBlank(until) && !(until.startsWith("${"))) { DateTimeUtils.parseDate(until); } } catch (DateTimeParseException e) { String message = "Unable to parse date "; context.buildConstraintViolationWithTemplate(message).addConstraintViolation(); valid = false; } catch (IllegalArgumentException e) { String message = "Either date or duration is passed as null "; context.buildConstraintViolationWithTemplate(message).addConstraintViolation(); valid = false; } catch (ParseException e) { String message = "Unable to parse date "; context.buildConstraintViolationWithTemplate(message).addConstraintViolation(); valid = false; } catch (Exception e) { String message = "Wait time specified is invalid. The duration must be in "; context.buildConstraintViolationWithTemplate(message).addConstraintViolation(); valid = false; } return valid; } private boolean isDynamicForkJoinValid( WorkflowTask workflowTask, ConstraintValidatorContext context) { boolean valid = true; // For DYNAMIC_FORK_JOIN_TASK support dynamicForkJoinTasksParam or combination of // dynamicForkTasksParam and dynamicForkTasksInputParamName. // Both are not allowed. 
if (workflowTask.getDynamicForkJoinTasksParam() != null && (workflowTask.getDynamicForkTasksParam() != null || workflowTask.getDynamicForkTasksInputParamName() != null)) { String message = String.format( "dynamicForkJoinTasksParam or combination of dynamicForkTasksInputParamName and dynamicForkTasksParam cam be used for taskType: %s taskName: %s", TaskType.FORK_JOIN_DYNAMIC, workflowTask.getName()); context.buildConstraintViolationWithTemplate(message).addConstraintViolation(); return false; } if (workflowTask.getDynamicForkJoinTasksParam() != null) { return valid; } else { if (workflowTask.getDynamicForkTasksParam() == null) { String message = String.format( PARAM_REQUIRED_STRING_FORMAT, "dynamicForkTasksParam", TaskType.FORK_JOIN_DYNAMIC, workflowTask.getName()); context.buildConstraintViolationWithTemplate(message).addConstraintViolation(); valid = false; } if (workflowTask.getDynamicForkTasksInputParamName() == null) { String message = String.format( PARAM_REQUIRED_STRING_FORMAT, "dynamicForkTasksInputParamName", TaskType.FORK_JOIN_DYNAMIC, workflowTask.getName()); context.buildConstraintViolationWithTemplate(message).addConstraintViolation(); valid = false; } } return valid; } private boolean isHttpTaskValid( WorkflowTask workflowTask, ConstraintValidatorContext context) { boolean valid = true; boolean isInputParameterSet = false; boolean isInputTemplateSet = false; // Either http_request in WorkflowTask inputParam should be set or in inputTemplate // Taskdef should be set if (workflowTask.getInputParameters() != null && workflowTask.getInputParameters().containsKey("http_request")) { isInputParameterSet = true; } TaskDef taskDef = Optional.ofNullable(workflowTask.getTaskDefinition()) .orElse( ValidationContext.getMetadataDAO() .getTaskDef(workflowTask.getName())); if (taskDef != null && taskDef.getInputTemplate() != null && taskDef.getInputTemplate().containsKey("http_request")) { isInputTemplateSet = true; } if (!(isInputParameterSet || isInputTemplateSet)) 
{ String message = String.format( PARAM_REQUIRED_STRING_FORMAT, "inputParameters.http_request", TaskType.HTTP, workflowTask.getName()); context.buildConstraintViolationWithTemplate(message).addConstraintViolation(); valid = false; } return valid; } private boolean isForkJoinTaskValid( WorkflowTask workflowTask, ConstraintValidatorContext context) { boolean valid = true; if (workflowTask.getForkTasks() != null && (workflowTask.getForkTasks().size() == 0)) { String message = String.format( "forkTasks should have atleast one task for taskType: %s taskName: %s", TaskType.FORK_JOIN, workflowTask.getName()); context.buildConstraintViolationWithTemplate(message).addConstraintViolation(); valid = false; } return valid; } private boolean isTerminateTaskValid( WorkflowTask workflowTask, ConstraintValidatorContext context) { boolean valid = true; Object inputStatusParam = workflowTask.getInputParameters().get(getTerminationStatusParameter()); if (workflowTask.isOptional()) { String message = String.format( "terminate task cannot be optional, taskName: %s", workflowTask.getName()); context.buildConstraintViolationWithTemplate(message).addConstraintViolation(); valid = false; } if (inputStatusParam == null || !validateInputStatus(inputStatusParam.toString())) { String message = String.format( "terminate task must have an %s parameter and must be set to COMPLETED or FAILED, taskName: %s", getTerminationStatusParameter(), workflowTask.getName()); context.buildConstraintViolationWithTemplate(message).addConstraintViolation(); valid = false; } return valid; } private boolean isKafkaPublishTaskValid( WorkflowTask workflowTask, ConstraintValidatorContext context) { boolean valid = true; boolean isInputParameterSet = false; boolean isInputTemplateSet = false; // Either kafka_request in WorkflowTask inputParam should be set or in inputTemplate // Taskdef should be set if (workflowTask.getInputParameters() != null && workflowTask.getInputParameters().containsKey("kafka_request")) { 
isInputParameterSet = true; } TaskDef taskDef = Optional.ofNullable(workflowTask.getTaskDefinition()) .orElse( ValidationContext.getMetadataDAO() .getTaskDef(workflowTask.getName())); if (taskDef != null && taskDef.getInputTemplate() != null && taskDef.getInputTemplate().containsKey("kafka_request")) { isInputTemplateSet = true; } if (!(isInputParameterSet || isInputTemplateSet)) { String message = String.format( PARAM_REQUIRED_STRING_FORMAT, "inputParameters.kafka_request", TaskType.KAFKA_PUBLISH, workflowTask.getName()); context.buildConstraintViolationWithTemplate(message).addConstraintViolation(); valid = false; } return valid; } private boolean isSubWorkflowTaskValid( WorkflowTask workflowTask, ConstraintValidatorContext context) { boolean valid = true; if (workflowTask.getSubWorkflowParam() == null) { String message = String.format( PARAM_REQUIRED_STRING_FORMAT, "subWorkflowParam", TaskType.SUB_WORKFLOW, workflowTask.getName()); context.buildConstraintViolationWithTemplate(message).addConstraintViolation(); valid = false; } return valid; } private boolean isJSONJQTransformTaskValid( WorkflowTask workflowTask, ConstraintValidatorContext context) { boolean valid = true; boolean isInputParameterSet = false; boolean isInputTemplateSet = false; // Either queryExpression in WorkflowTask inputParam should be set or in inputTemplate // Taskdef should be set if (workflowTask.getInputParameters() != null && workflowTask.getInputParameters().containsKey("queryExpression")) { isInputParameterSet = true; } TaskDef taskDef = Optional.ofNullable(workflowTask.getTaskDefinition()) .orElse( ValidationContext.getMetadataDAO() .getTaskDef(workflowTask.getName())); if (taskDef != null && taskDef.getInputTemplate() != null && taskDef.getInputTemplate().containsKey("queryExpression")) { isInputTemplateSet = true; } if (!(isInputParameterSet || isInputTemplateSet)) { String message = String.format( PARAM_REQUIRED_STRING_FORMAT, "inputParameters.queryExpression", 
TaskType.JSON_JQ_TRANSFORM, workflowTask.getName()); context.buildConstraintViolationWithTemplate(message).addConstraintViolation(); valid = false; } return valid; } } }
6,766
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor
Create_ds/conductor/core/src/main/java/com/netflix/conductor/validations/ValidationContext.java
/*
 * Copyright 2022 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.validations;

import com.netflix.conductor.dao.MetadataDAO;

/**
 * Static holder that makes a {@link MetadataDAO} reachable from inside the {@link
 * WorkflowTaskTypeConstraint} validator, which is instantiated by the Bean Validation framework and
 * therefore cannot receive the DAO through normal dependency injection.
 *
 * <p>Call {@link #initialize(MetadataDAO)} once at application startup before any {@link
 * com.netflix.conductor.common.metadata.workflow.WorkflowTask} validation is performed.
 */
public class ValidationContext {

    // Shared DAO reference; set once during bootstrap via initialize().
    private static MetadataDAO metadataDao;

    /**
     * Installs the DAO instance that validators will use for task-definition lookups.
     *
     * @param metadataDAO the metadata DAO to expose to constraint validators
     */
    public static void initialize(MetadataDAO metadataDAO) {
        metadataDao = metadataDAO;
    }

    /**
     * @return the DAO previously installed via {@link #initialize(MetadataDAO)}, or {@code null}
     *     if initialization has not happened yet
     */
    public static MetadataDAO getMetadataDAO() {
        return metadataDao;
    }
}
6,767
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor
Create_ds/conductor/core/src/main/java/com/netflix/conductor/annotations/VisibleForTesting.java
/* * Copyright 2022 Netflix, Inc. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.annotations; import java.lang.annotation.*; /** * Annotates a program element that exists, or is more widely visible than otherwise necessary, only * for use in test code. */ @Retention(RetentionPolicy.CLASS) @Target({ElementType.FIELD, ElementType.TYPE, ElementType.METHOD}) @Documented public @interface VisibleForTesting {}
6,768
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor
Create_ds/conductor/core/src/main/java/com/netflix/conductor/annotations/Audit.java
/* * Copyright 2022 Netflix, Inc. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.annotations; import java.lang.annotation.Retention; import java.lang.annotation.Target; import static java.lang.annotation.ElementType.TYPE; import static java.lang.annotation.RetentionPolicy.RUNTIME; /** Mark service for custom audit implementation */ @Target({TYPE}) @Retention(RUNTIME) public @interface Audit {}
6,769
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor
Create_ds/conductor/core/src/main/java/com/netflix/conductor/annotations/Trace.java
/* * Copyright 2022 Netflix, Inc. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.annotations; import java.lang.annotation.Retention; import java.lang.annotation.Target; import static java.lang.annotation.ElementType.TYPE; import static java.lang.annotation.RetentionPolicy.RUNTIME; @Target({TYPE}) @Retention(RUNTIME) public @interface Trace {}
6,770
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor
Create_ds/conductor/core/src/main/java/com/netflix/conductor/model/TaskModel.java
/* * Copyright 2022 Netflix, Inc. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.model; import java.util.HashMap; import java.util.Map; import java.util.Objects; import java.util.Optional; import org.apache.commons.lang3.StringUtils; import org.springframework.beans.BeanUtils; import com.netflix.conductor.common.metadata.tasks.Task; import com.netflix.conductor.common.metadata.tasks.TaskDef; import com.netflix.conductor.common.metadata.workflow.WorkflowTask; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.protobuf.Any; public class TaskModel { public enum Status { IN_PROGRESS(false, true, true), CANCELED(true, false, false), FAILED(true, false, true), FAILED_WITH_TERMINAL_ERROR(true, false, false), COMPLETED(true, true, true), COMPLETED_WITH_ERRORS(true, true, true), SCHEDULED(false, true, true), TIMED_OUT(true, false, true), SKIPPED(true, true, false); private final boolean terminal; private final boolean successful; private final boolean retriable; Status(boolean terminal, boolean successful, boolean retriable) { this.terminal = terminal; this.successful = successful; this.retriable = retriable; } public boolean isTerminal() { return terminal; } public boolean isSuccessful() { return successful; } public boolean isRetriable() { return retriable; } } private String taskType; private Status status; private String referenceTaskName; private int retryCount; private int 
seq; private String correlationId; private int pollCount; private String taskDefName; /** Time when the task was scheduled */ private long scheduledTime; /** Time when the task was first polled */ private long startTime; /** Time when the task completed executing */ private long endTime; /** Time when the task was last updated */ private long updateTime; private int startDelayInSeconds; private String retriedTaskId; private boolean retried; private boolean executed; private boolean callbackFromWorker = true; private long responseTimeoutSeconds; private String workflowInstanceId; private String workflowType; private String taskId; private String reasonForIncompletion; private long callbackAfterSeconds; private String workerId; private WorkflowTask workflowTask; private String domain; private Any inputMessage; private Any outputMessage; private int rateLimitPerFrequency; private int rateLimitFrequencyInSeconds; private String externalInputPayloadStoragePath; private String externalOutputPayloadStoragePath; private int workflowPriority; private String executionNameSpace; private String isolationGroupId; private int iteration; private String subWorkflowId; // Timeout after which the wait task should be marked as completed private long waitTimeout; /** * Used to note that a sub workflow associated with SUB_WORKFLOW task has an action performed on * it directly. 
*/ private boolean subworkflowChanged; @JsonIgnore private Map<String, Object> inputPayload = new HashMap<>(); @JsonIgnore private Map<String, Object> outputPayload = new HashMap<>(); @JsonIgnore private Map<String, Object> inputData = new HashMap<>(); @JsonIgnore private Map<String, Object> outputData = new HashMap<>(); public String getTaskType() { return taskType; } public void setTaskType(String taskType) { this.taskType = taskType; } public Status getStatus() { return status; } public void setStatus(Status status) { this.status = status; } @JsonIgnore public Map<String, Object> getInputData() { if (!inputPayload.isEmpty() && !inputData.isEmpty()) { inputData.putAll(inputPayload); inputPayload = new HashMap<>(); return inputData; } else if (inputPayload.isEmpty()) { return inputData; } else { return inputPayload; } } @JsonIgnore public void setInputData(Map<String, Object> inputData) { if (inputData == null) { inputData = new HashMap<>(); } this.inputData = inputData; } /** * @deprecated Used only for JSON serialization and deserialization. */ @JsonProperty("inputData") @Deprecated public void setRawInputData(Map<String, Object> inputData) { setInputData(inputData); } /** * @deprecated Used only for JSON serialization and deserialization. 
*/ @JsonProperty("inputData") @Deprecated public Map<String, Object> getRawInputData() { return inputData; } public String getReferenceTaskName() { return referenceTaskName; } public void setReferenceTaskName(String referenceTaskName) { this.referenceTaskName = referenceTaskName; } public int getRetryCount() { return retryCount; } public void setRetryCount(int retryCount) { this.retryCount = retryCount; } public int getSeq() { return seq; } public void setSeq(int seq) { this.seq = seq; } public String getCorrelationId() { return correlationId; } public void setCorrelationId(String correlationId) { this.correlationId = correlationId; } public int getPollCount() { return pollCount; } public void setPollCount(int pollCount) { this.pollCount = pollCount; } public String getTaskDefName() { if (taskDefName == null || "".equals(taskDefName)) { taskDefName = taskType; } return taskDefName; } public void setTaskDefName(String taskDefName) { this.taskDefName = taskDefName; } public long getScheduledTime() { return scheduledTime; } public void setScheduledTime(long scheduledTime) { this.scheduledTime = scheduledTime; } public long getStartTime() { return startTime; } public void setStartTime(long startTime) { this.startTime = startTime; } public long getEndTime() { return endTime; } public void setEndTime(long endTime) { this.endTime = endTime; } public long getUpdateTime() { return updateTime; } public void setUpdateTime(long updateTime) { this.updateTime = updateTime; } public int getStartDelayInSeconds() { return startDelayInSeconds; } public void setStartDelayInSeconds(int startDelayInSeconds) { this.startDelayInSeconds = startDelayInSeconds; } public String getRetriedTaskId() { return retriedTaskId; } public void setRetriedTaskId(String retriedTaskId) { this.retriedTaskId = retriedTaskId; } public boolean isRetried() { return retried; } public void setRetried(boolean retried) { this.retried = retried; } public boolean isExecuted() { return executed; } public void 
setExecuted(boolean executed) { this.executed = executed; } public boolean isCallbackFromWorker() { return callbackFromWorker; } public void setCallbackFromWorker(boolean callbackFromWorker) { this.callbackFromWorker = callbackFromWorker; } public long getResponseTimeoutSeconds() { return responseTimeoutSeconds; } public void setResponseTimeoutSeconds(long responseTimeoutSeconds) { this.responseTimeoutSeconds = responseTimeoutSeconds; } public String getWorkflowInstanceId() { return workflowInstanceId; } public void setWorkflowInstanceId(String workflowInstanceId) { this.workflowInstanceId = workflowInstanceId; } public String getWorkflowType() { return workflowType; } public void setWorkflowType(String workflowType) { this.workflowType = workflowType; } public String getTaskId() { return taskId; } public void setTaskId(String taskId) { this.taskId = taskId; } public String getReasonForIncompletion() { return reasonForIncompletion; } public void setReasonForIncompletion(String reasonForIncompletion) { this.reasonForIncompletion = reasonForIncompletion; } public long getCallbackAfterSeconds() { return callbackAfterSeconds; } public void setCallbackAfterSeconds(long callbackAfterSeconds) { this.callbackAfterSeconds = callbackAfterSeconds; } public String getWorkerId() { return workerId; } public void setWorkerId(String workerId) { this.workerId = workerId; } @JsonIgnore public Map<String, Object> getOutputData() { if (!outputPayload.isEmpty() && !outputData.isEmpty()) { // Combine payload + data // data has precedence over payload because: // with external storage enabled, payload contains the old values // while data contains the latest and if payload took precedence, it // would remove latest outputs outputPayload.forEach(outputData::putIfAbsent); outputPayload = new HashMap<>(); return outputData; } else if (outputPayload.isEmpty()) { return outputData; } else { return outputPayload; } } @JsonIgnore public void setOutputData(Map<String, Object> outputData) { if 
(outputData == null) { outputData = new HashMap<>(); } this.outputData = outputData; } /** * @deprecated Used only for JSON serialization and deserialization. */ @JsonProperty("outputData") @Deprecated public void setRawOutputData(Map<String, Object> inputData) { setOutputData(inputData); } /** * @deprecated Used only for JSON serialization and deserialization. */ @JsonProperty("outputData") @Deprecated public Map<String, Object> getRawOutputData() { return outputData; } public WorkflowTask getWorkflowTask() { return workflowTask; } public void setWorkflowTask(WorkflowTask workflowTask) { this.workflowTask = workflowTask; } public String getDomain() { return domain; } public void setDomain(String domain) { this.domain = domain; } public Any getInputMessage() { return inputMessage; } public void setInputMessage(Any inputMessage) { this.inputMessage = inputMessage; } public Any getOutputMessage() { return outputMessage; } public void setOutputMessage(Any outputMessage) { this.outputMessage = outputMessage; } public int getRateLimitPerFrequency() { return rateLimitPerFrequency; } public void setRateLimitPerFrequency(int rateLimitPerFrequency) { this.rateLimitPerFrequency = rateLimitPerFrequency; } public int getRateLimitFrequencyInSeconds() { return rateLimitFrequencyInSeconds; } public void setRateLimitFrequencyInSeconds(int rateLimitFrequencyInSeconds) { this.rateLimitFrequencyInSeconds = rateLimitFrequencyInSeconds; } public String getExternalInputPayloadStoragePath() { return externalInputPayloadStoragePath; } public void setExternalInputPayloadStoragePath(String externalInputPayloadStoragePath) { this.externalInputPayloadStoragePath = externalInputPayloadStoragePath; } public String getExternalOutputPayloadStoragePath() { return externalOutputPayloadStoragePath; } public void setExternalOutputPayloadStoragePath(String externalOutputPayloadStoragePath) { this.externalOutputPayloadStoragePath = externalOutputPayloadStoragePath; } public int getWorkflowPriority() { 
return workflowPriority; } public void setWorkflowPriority(int workflowPriority) { this.workflowPriority = workflowPriority; } public String getExecutionNameSpace() { return executionNameSpace; } public void setExecutionNameSpace(String executionNameSpace) { this.executionNameSpace = executionNameSpace; } public String getIsolationGroupId() { return isolationGroupId; } public void setIsolationGroupId(String isolationGroupId) { this.isolationGroupId = isolationGroupId; } public int getIteration() { return iteration; } public void setIteration(int iteration) { this.iteration = iteration; } public String getSubWorkflowId() { // For backwards compatibility if (StringUtils.isNotBlank(subWorkflowId)) { return subWorkflowId; } else { return this.getOutputData() != null && this.getOutputData().get("subWorkflowId") != null ? (String) this.getOutputData().get("subWorkflowId") : this.getInputData() != null ? (String) this.getInputData().get("subWorkflowId") : null; } } public void setSubWorkflowId(String subWorkflowId) { this.subWorkflowId = subWorkflowId; // For backwards compatibility if (this.outputData != null && this.outputData.containsKey("subWorkflowId")) { this.outputData.put("subWorkflowId", subWorkflowId); } } public boolean isSubworkflowChanged() { return subworkflowChanged; } public void setSubworkflowChanged(boolean subworkflowChanged) { this.subworkflowChanged = subworkflowChanged; } public void incrementPollCount() { ++this.pollCount; } /** * @return {@link Optional} containing the task definition if available */ public Optional<TaskDef> getTaskDefinition() { return Optional.ofNullable(this.getWorkflowTask()).map(WorkflowTask::getTaskDefinition); } public boolean isLoopOverTask() { return iteration > 0; } public long getWaitTimeout() { return waitTimeout; } public void setWaitTimeout(long waitTimeout) { this.waitTimeout = waitTimeout; } /** * @return the queueWaitTime */ public long getQueueWaitTime() { if (this.startTime > 0 && this.scheduledTime > 0) { if 
(this.updateTime > 0 && getCallbackAfterSeconds() > 0) { long waitTime = System.currentTimeMillis() - (this.updateTime + (getCallbackAfterSeconds() * 1000)); return waitTime > 0 ? waitTime : 0; } else { return this.startTime - this.scheduledTime; } } return 0L; } /** * @return a copy of the task instance */ public TaskModel copy() { TaskModel copy = new TaskModel(); BeanUtils.copyProperties(this, copy); return copy; } public void externalizeInput(String path) { this.inputPayload = this.inputData; this.inputData = new HashMap<>(); this.externalInputPayloadStoragePath = path; } public void externalizeOutput(String path) { this.outputPayload = this.outputData; this.outputData = new HashMap<>(); this.externalOutputPayloadStoragePath = path; } public void internalizeInput(Map<String, Object> data) { this.inputData = new HashMap<>(); this.inputPayload = data; } public void internalizeOutput(Map<String, Object> data) { this.outputData = new HashMap<>(); this.outputPayload = data; } @Override public String toString() { return "TaskModel{" + "taskType='" + taskType + '\'' + ", status=" + status + ", inputData=" + inputData + ", referenceTaskName='" + referenceTaskName + '\'' + ", retryCount=" + retryCount + ", seq=" + seq + ", correlationId='" + correlationId + '\'' + ", pollCount=" + pollCount + ", taskDefName='" + taskDefName + '\'' + ", scheduledTime=" + scheduledTime + ", startTime=" + startTime + ", endTime=" + endTime + ", updateTime=" + updateTime + ", startDelayInSeconds=" + startDelayInSeconds + ", retriedTaskId='" + retriedTaskId + '\'' + ", retried=" + retried + ", executed=" + executed + ", callbackFromWorker=" + callbackFromWorker + ", responseTimeoutSeconds=" + responseTimeoutSeconds + ", workflowInstanceId='" + workflowInstanceId + '\'' + ", workflowType='" + workflowType + '\'' + ", taskId='" + taskId + '\'' + ", reasonForIncompletion='" + reasonForIncompletion + '\'' + ", callbackAfterSeconds=" + callbackAfterSeconds + ", workerId='" + workerId + '\'' + ", 
outputData=" + outputData + ", workflowTask=" + workflowTask + ", domain='" + domain + '\'' + ", waitTimeout='" + waitTimeout + '\'' + ", inputMessage=" + inputMessage + ", outputMessage=" + outputMessage + ", rateLimitPerFrequency=" + rateLimitPerFrequency + ", rateLimitFrequencyInSeconds=" + rateLimitFrequencyInSeconds + ", externalInputPayloadStoragePath='" + externalInputPayloadStoragePath + '\'' + ", externalOutputPayloadStoragePath='" + externalOutputPayloadStoragePath + '\'' + ", workflowPriority=" + workflowPriority + ", executionNameSpace='" + executionNameSpace + '\'' + ", isolationGroupId='" + isolationGroupId + '\'' + ", iteration=" + iteration + ", subWorkflowId='" + subWorkflowId + '\'' + ", subworkflowChanged=" + subworkflowChanged + '}'; } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; TaskModel taskModel = (TaskModel) o; return getRetryCount() == taskModel.getRetryCount() && getSeq() == taskModel.getSeq() && getPollCount() == taskModel.getPollCount() && getScheduledTime() == taskModel.getScheduledTime() && getStartTime() == taskModel.getStartTime() && getEndTime() == taskModel.getEndTime() && getUpdateTime() == taskModel.getUpdateTime() && getStartDelayInSeconds() == taskModel.getStartDelayInSeconds() && isRetried() == taskModel.isRetried() && isExecuted() == taskModel.isExecuted() && isCallbackFromWorker() == taskModel.isCallbackFromWorker() && getResponseTimeoutSeconds() == taskModel.getResponseTimeoutSeconds() && getCallbackAfterSeconds() == taskModel.getCallbackAfterSeconds() && getRateLimitPerFrequency() == taskModel.getRateLimitPerFrequency() && getRateLimitFrequencyInSeconds() == taskModel.getRateLimitFrequencyInSeconds() && getWorkflowPriority() == taskModel.getWorkflowPriority() && getIteration() == taskModel.getIteration() && isSubworkflowChanged() == taskModel.isSubworkflowChanged() && Objects.equals(getTaskType(), taskModel.getTaskType()) && getStatus() 
== taskModel.getStatus() && Objects.equals(getInputData(), taskModel.getInputData()) && Objects.equals(getReferenceTaskName(), taskModel.getReferenceTaskName()) && Objects.equals(getCorrelationId(), taskModel.getCorrelationId()) && Objects.equals(getTaskDefName(), taskModel.getTaskDefName()) && Objects.equals(getRetriedTaskId(), taskModel.getRetriedTaskId()) && Objects.equals(getWorkflowInstanceId(), taskModel.getWorkflowInstanceId()) && Objects.equals(getWorkflowType(), taskModel.getWorkflowType()) && Objects.equals(getTaskId(), taskModel.getTaskId()) && Objects.equals(getReasonForIncompletion(), taskModel.getReasonForIncompletion()) && Objects.equals(getWorkerId(), taskModel.getWorkerId()) && Objects.equals(getWaitTimeout(), taskModel.getWaitTimeout()) && Objects.equals(outputData, taskModel.outputData) && Objects.equals(outputPayload, taskModel.outputPayload) && Objects.equals(getWorkflowTask(), taskModel.getWorkflowTask()) && Objects.equals(getDomain(), taskModel.getDomain()) && Objects.equals(getInputMessage(), taskModel.getInputMessage()) && Objects.equals(getOutputMessage(), taskModel.getOutputMessage()) && Objects.equals( getExternalInputPayloadStoragePath(), taskModel.getExternalInputPayloadStoragePath()) && Objects.equals( getExternalOutputPayloadStoragePath(), taskModel.getExternalOutputPayloadStoragePath()) && Objects.equals(getExecutionNameSpace(), taskModel.getExecutionNameSpace()) && Objects.equals(getIsolationGroupId(), taskModel.getIsolationGroupId()) && Objects.equals(getSubWorkflowId(), taskModel.getSubWorkflowId()); } @Override public int hashCode() { return Objects.hash( getTaskType(), getStatus(), getInputData(), getReferenceTaskName(), getRetryCount(), getSeq(), getCorrelationId(), getPollCount(), getTaskDefName(), getScheduledTime(), getStartTime(), getEndTime(), getUpdateTime(), getStartDelayInSeconds(), getRetriedTaskId(), isRetried(), isExecuted(), isCallbackFromWorker(), getResponseTimeoutSeconds(), getWorkflowInstanceId(), 
getWorkflowType(), getTaskId(), getReasonForIncompletion(), getCallbackAfterSeconds(), getWorkerId(), getWaitTimeout(), outputData, outputPayload, getWorkflowTask(), getDomain(), getInputMessage(), getOutputMessage(), getRateLimitPerFrequency(), getRateLimitFrequencyInSeconds(), getExternalInputPayloadStoragePath(), getExternalOutputPayloadStoragePath(), getWorkflowPriority(), getExecutionNameSpace(), getIsolationGroupId(), getIteration(), getSubWorkflowId(), isSubworkflowChanged()); } public Task toTask() { Task task = new Task(); BeanUtils.copyProperties(this, task); task.setStatus(Task.Status.valueOf(status.name())); // ensure that input/output is properly represented if (externalInputPayloadStoragePath != null) { task.setInputData(new HashMap<>()); } if (externalOutputPayloadStoragePath != null) { task.setOutputData(new HashMap<>()); } return task; } public static Task.Status mapToTaskStatus(TaskModel.Status status) { return Task.Status.valueOf(status.name()); } public void addInput(String key, Object value) { this.inputData.put(key, value); } public void addInput(Map<String, Object> inputData) { if (inputData != null) { this.inputData.putAll(inputData); } } public void addOutput(String key, Object value) { this.outputData.put(key, value); } public void addOutput(Map<String, Object> outputData) { if (outputData != null) { this.outputData.putAll(outputData); } } public void clearOutput() { this.outputData.clear(); this.outputPayload.clear(); this.externalOutputPayloadStoragePath = null; } }
6,771
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor
Create_ds/conductor/core/src/main/java/com/netflix/conductor/model/WorkflowModel.java
/*
 * Copyright 2022 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.model;

import java.util.*;
import java.util.stream.Collectors;

import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.BeanUtils;

import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.run.Workflow;
import com.netflix.conductor.core.utils.Utils;

import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;

/**
 * Server-side model of a workflow execution: status, tasks, input/output payloads (which may be
 * externalized to payload storage), and bookkeeping metadata. Convertible to the client-facing
 * {@link Workflow} DTO via {@link #toWorkflow()}.
 *
 * <p>NOTE(review): {@link #getInput()} and {@link #getOutput()} are not pure getters — they merge
 * an internalized payload into the backing map on first access (see comments there).
 */
public class WorkflowModel {

    /** Lifecycle states; each carries whether it is terminal and whether it counts as success. */
    public enum Status {
        RUNNING(false, false),
        COMPLETED(true, true),
        FAILED(true, false),
        TIMED_OUT(true, false),
        TERMINATED(true, false),
        PAUSED(false, true);

        private final boolean terminal;
        private final boolean successful;

        Status(boolean terminal, boolean successful) {
            this.terminal = terminal;
            this.successful = successful;
        }

        /** @return true if no further transitions are possible from this status */
        public boolean isTerminal() {
            return terminal;
        }

        /** @return true if this status represents a non-failure outcome (COMPLETED or PAUSED) */
        public boolean isSuccessful() {
            return successful;
        }
    }

    private Status status = Status.RUNNING;
    private long endTime;
    private String workflowId;
    private String parentWorkflowId;
    private String parentWorkflowTaskId;
    private List<TaskModel> tasks = new LinkedList<>();
    private String correlationId;
    private String reRunFromWorkflowId;
    private String reasonForIncompletion;
    private String event;
    private Map<String, String> taskToDomain = new HashMap<>();

    @JsonInclude(JsonInclude.Include.NON_EMPTY)
    private Set<String> failedReferenceTaskNames = new HashSet<>();

    @JsonInclude(JsonInclude.Include.NON_EMPTY)
    private Set<String> failedTaskNames = new HashSet<>();

    private WorkflowDef workflowDefinition;
    private String externalInputPayloadStoragePath;
    private String externalOutputPayloadStoragePath;
    private int priority;
    private Map<String, Object> variables = new HashMap<>();
    private long lastRetriedTime;
    private String ownerApp;
    private Long createTime;
    private Long updatedTime;
    private String createdBy;
    private String updatedBy;

    // Capture the failed taskId if the workflow execution failed because of task failure
    private String failedTaskId;

    // Previous status, tracked automatically by setStatus() on every status change.
    private Status previousStatus;

    // input/output hold the in-line payloads; inputPayload/outputPayload hold payloads that were
    // internalized from external storage (see internalizeInput/internalizeOutput). All four are
    // excluded from default JSON serialization; the deprecated raw accessors expose input/output.
    @JsonIgnore private Map<String, Object> input = new HashMap<>();

    @JsonIgnore private Map<String, Object> output = new HashMap<>();

    @JsonIgnore private Map<String, Object> inputPayload = new HashMap<>();

    @JsonIgnore private Map<String, Object> outputPayload = new HashMap<>();

    public Status getPreviousStatus() {
        return previousStatus;
    }

    public void setPreviousStatus(Status status) {
        this.previousStatus = status;
    }

    public Status getStatus() {
        return status;
    }

    public void setStatus(Status status) {
        // update previous status if current status changed
        if (this.status != status) {
            setPreviousStatus(this.status);
        }
        this.status = status;
    }

    public long getEndTime() {
        return endTime;
    }

    public void setEndTime(long endTime) {
        this.endTime = endTime;
    }

    public String getWorkflowId() {
        return workflowId;
    }

    public void setWorkflowId(String workflowId) {
        this.workflowId = workflowId;
    }

    public String getParentWorkflowId() {
        return parentWorkflowId;
    }

    public void setParentWorkflowId(String parentWorkflowId) {
        this.parentWorkflowId = parentWorkflowId;
    }

    public String getParentWorkflowTaskId() {
        return parentWorkflowTaskId;
    }

    public void setParentWorkflowTaskId(String parentWorkflowTaskId) {
        this.parentWorkflowTaskId = parentWorkflowTaskId;
    }

    public List<TaskModel> getTasks() {
        return tasks;
    }

    public void setTasks(List<TaskModel> tasks) {
        this.tasks = tasks;
    }

    /**
     * Returns the effective workflow input. Side effect: if a payload was internalized AND in-line
     * input also exists, the internalized payload is merged into {@code input} (internalized keys
     * win) and the internalized copy is cleared, so subsequent calls see the merged map.
     */
    @JsonIgnore
    public Map<String, Object> getInput() {
        if (!inputPayload.isEmpty() && !input.isEmpty()) {
            input.putAll(inputPayload);
            inputPayload = new HashMap<>();
            return input;
        } else if (inputPayload.isEmpty()) {
            return input;
        } else {
            return inputPayload;
        }
    }

    /** Sets the in-line input; a null argument is normalized to an empty map. */
    @JsonIgnore
    public void setInput(Map<String, Object> input) {
        if (input == null) {
            input = new HashMap<>();
        }
        this.input = input;
    }

    /**
     * Returns the effective workflow output. Same merge-on-first-access semantics as
     * {@link #getInput()}, applied to {@code output}/{@code outputPayload}.
     */
    @JsonIgnore
    public Map<String, Object> getOutput() {
        if (!outputPayload.isEmpty() && !output.isEmpty()) {
            output.putAll(outputPayload);
            outputPayload = new HashMap<>();
            return output;
        } else if (outputPayload.isEmpty()) {
            return output;
        } else {
            return outputPayload;
        }
    }

    /** Sets the in-line output; a null argument is normalized to an empty map. */
    @JsonIgnore
    public void setOutput(Map<String, Object> output) {
        if (output == null) {
            output = new HashMap<>();
        }
        this.output = output;
    }

    /**
     * @deprecated Used only for JSON serialization and deserialization.
     */
    @Deprecated
    @JsonProperty("input")
    public Map<String, Object> getRawInput() {
        return input;
    }

    /**
     * @deprecated Used only for JSON serialization and deserialization.
     */
    @Deprecated
    @JsonProperty("input")
    public void setRawInput(Map<String, Object> input) {
        setInput(input);
    }

    /**
     * @deprecated Used only for JSON serialization and deserialization.
     */
    @Deprecated
    @JsonProperty("output")
    public Map<String, Object> getRawOutput() {
        return output;
    }

    /**
     * @deprecated Used only for JSON serialization and deserialization.
     */
    @Deprecated
    @JsonProperty("output")
    public void setRawOutput(Map<String, Object> output) {
        setOutput(output);
    }

    public String getCorrelationId() {
        return correlationId;
    }

    public void setCorrelationId(String correlationId) {
        this.correlationId = correlationId;
    }

    public String getReRunFromWorkflowId() {
        return reRunFromWorkflowId;
    }

    public void setReRunFromWorkflowId(String reRunFromWorkflowId) {
        this.reRunFromWorkflowId = reRunFromWorkflowId;
    }

    public String getReasonForIncompletion() {
        return reasonForIncompletion;
    }

    public void setReasonForIncompletion(String reasonForIncompletion) {
        this.reasonForIncompletion = reasonForIncompletion;
    }

    public String getEvent() {
        return event;
    }

    public void setEvent(String event) {
        this.event = event;
    }

    public Map<String, String> getTaskToDomain() {
        return taskToDomain;
    }

    public void setTaskToDomain(Map<String, String> taskToDomain) {
        this.taskToDomain = taskToDomain;
    }

    public Set<String> getFailedReferenceTaskNames() {
        return failedReferenceTaskNames;
    }

    public void setFailedReferenceTaskNames(Set<String> failedReferenceTaskNames) {
        this.failedReferenceTaskNames = failedReferenceTaskNames;
    }

    public Set<String> getFailedTaskNames() {
        return failedTaskNames;
    }

    public void setFailedTaskNames(Set<String> failedTaskNames) {
        this.failedTaskNames = failedTaskNames;
    }

    public WorkflowDef getWorkflowDefinition() {
        return workflowDefinition;
    }

    public void setWorkflowDefinition(WorkflowDef workflowDefinition) {
        this.workflowDefinition = workflowDefinition;
    }

    public String getExternalInputPayloadStoragePath() {
        return externalInputPayloadStoragePath;
    }

    public void setExternalInputPayloadStoragePath(String externalInputPayloadStoragePath) {
        this.externalInputPayloadStoragePath = externalInputPayloadStoragePath;
    }

    public String getExternalOutputPayloadStoragePath() {
        return externalOutputPayloadStoragePath;
    }

    public void setExternalOutputPayloadStoragePath(String externalOutputPayloadStoragePath) {
        this.externalOutputPayloadStoragePath = externalOutputPayloadStoragePath;
    }

    public int getPriority() {
        return priority;
    }

    /**
     * @param priority execution priority
     * @throws IllegalArgumentException if priority is outside [0, 99]
     */
    public void setPriority(int priority) {
        if (priority < 0 || priority > 99) {
            throw new IllegalArgumentException("priority MUST be between 0 and 99 (inclusive)");
        }
        this.priority = priority;
    }

    public Map<String, Object> getVariables() {
        return variables;
    }

    public void setVariables(Map<String, Object> variables) {
        this.variables = variables;
    }

    public long getLastRetriedTime() {
        return lastRetriedTime;
    }

    public void setLastRetriedTime(long lastRetriedTime) {
        this.lastRetriedTime = lastRetriedTime;
    }

    public String getOwnerApp() {
        return ownerApp;
    }

    public void setOwnerApp(String ownerApp) {
        this.ownerApp = ownerApp;
    }

    public Long getCreateTime() {
        return createTime;
    }

    public void setCreateTime(Long createTime) {
        this.createTime = createTime;
    }

    public Long getUpdatedTime() {
        return updatedTime;
    }

    public void setUpdatedTime(Long updatedTime) {
        this.updatedTime = updatedTime;
    }

    public String getCreatedBy() {
        return createdBy;
    }

    public void setCreatedBy(String createdBy) {
        this.createdBy = createdBy;
    }

    public String getUpdatedBy() {
        return updatedBy;
    }

    public void setUpdatedBy(String updatedBy) {
        this.updatedBy = updatedBy;
    }

    public String getFailedTaskId() {
        return failedTaskId;
    }

    public void setFailedTaskId(String failedTaskId) {
        this.failedTaskId = failedTaskId;
    }

    /**
     * Convenience method for accessing the workflow definition name.
     *
     * @return the workflow definition name.
     */
    public String getWorkflowName() {
        Utils.checkNotNull(workflowDefinition, "Workflow definition is null");
        return workflowDefinition.getName();
    }

    /**
     * Convenience method for accessing the workflow definition version.
     *
     * @return the workflow definition version.
     */
    public int getWorkflowVersion() {
        Utils.checkNotNull(workflowDefinition, "Workflow definition is null");
        return workflowDefinition.getVersion();
    }

    /** @return true if this execution was spawned by a parent workflow */
    public boolean hasParent() {
        return StringUtils.isNotEmpty(parentWorkflowId);
    }

    /**
     * A string representation of all relevant fields that identify this workflow. Intended for use
     * in log and other system generated messages.
     */
    public String toShortString() {
        String name = workflowDefinition != null ? workflowDefinition.getName() : null;
        Integer version = workflowDefinition != null ? workflowDefinition.getVersion() : null;
        return String.format("%s.%s/%s", name, version, workflowId);
    }

    /**
     * Finds the task with the given reference name; if the task was retried, the most recent
     * attempt (last occurrence in {@code tasks}) is returned.
     *
     * @param refName task reference name; must not be null
     * @return the latest matching task, or null if no task has that reference name
     * @throws RuntimeException if refName is null, or any task lacks a reference name
     */
    public TaskModel getTaskByRefName(String refName) {
        if (refName == null) {
            throw new RuntimeException(
                    "refName passed is null. Check the workflow execution. For dynamic tasks, make sure referenceTaskName is set to a not null value");
        }
        LinkedList<TaskModel> found = new LinkedList<>();
        for (TaskModel task : tasks) {
            if (task.getReferenceTaskName() == null) {
                throw new RuntimeException(
                        "Task "
                                + task.getTaskDefName()
                                + ", seq="
                                + task.getSeq()
                                + " does not have reference name specified.");
            }
            if (task.getReferenceTaskName().equals(refName)) {
                found.add(task);
            }
        }
        if (found.isEmpty()) {
            return null;
        }
        return found.getLast();
    }

    /** Moves the in-line input aside and records the external storage path it was written to. */
    public void externalizeInput(String path) {
        this.inputPayload = this.input;
        this.input = new HashMap<>();
        this.externalInputPayloadStoragePath = path;
    }

    /** Moves the in-line output aside and records the external storage path it was written to. */
    public void externalizeOutput(String path) {
        this.outputPayload = this.output;
        this.output = new HashMap<>();
        this.externalOutputPayloadStoragePath = path;
    }

    /** Loads input fetched from external storage; clears any in-line input. */
    public void internalizeInput(Map<String, Object> data) {
        this.input = new HashMap<>();
        this.inputPayload = data;
    }

    /** Loads output fetched from external storage; clears any in-line output. */
    public void internalizeOutput(Map<String, Object> data) {
        this.output = new HashMap<>();
        this.outputPayload = data;
    }

    @Override
    public String toString() {
        String name = workflowDefinition != null ? workflowDefinition.getName() : null;
        Integer version = workflowDefinition != null ? workflowDefinition.getVersion() : null;
        return String.format("%s.%s/%s.%s", name, version, workflowId, status);
    }

    // NOTE(review): equals/hashCode call getInput(), which can mutate internal payload state as a
    // side effect of comparison/hashing (see getInput()). Preserved as-is; confirm before relying
    // on these in hash-based collections.
    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        WorkflowModel that = (WorkflowModel) o;
        return getEndTime() == that.getEndTime()
                && getPriority() == that.getPriority()
                && getLastRetriedTime() == that.getLastRetriedTime()
                && getStatus() == that.getStatus()
                && Objects.equals(getWorkflowId(), that.getWorkflowId())
                && Objects.equals(getParentWorkflowId(), that.getParentWorkflowId())
                && Objects.equals(getParentWorkflowTaskId(), that.getParentWorkflowTaskId())
                && Objects.equals(getTasks(), that.getTasks())
                && Objects.equals(getInput(), that.getInput())
                && Objects.equals(output, that.output)
                && Objects.equals(outputPayload, that.outputPayload)
                && Objects.equals(getCorrelationId(), that.getCorrelationId())
                && Objects.equals(getReRunFromWorkflowId(), that.getReRunFromWorkflowId())
                && Objects.equals(getReasonForIncompletion(), that.getReasonForIncompletion())
                && Objects.equals(getEvent(), that.getEvent())
                && Objects.equals(getTaskToDomain(), that.getTaskToDomain())
                && Objects.equals(getFailedReferenceTaskNames(), that.getFailedReferenceTaskNames())
                && Objects.equals(getFailedTaskNames(), that.getFailedTaskNames())
                && Objects.equals(getWorkflowDefinition(), that.getWorkflowDefinition())
                && Objects.equals(
                        getExternalInputPayloadStoragePath(),
                        that.getExternalInputPayloadStoragePath())
                && Objects.equals(
                        getExternalOutputPayloadStoragePath(),
                        that.getExternalOutputPayloadStoragePath())
                && Objects.equals(getVariables(), that.getVariables())
                && Objects.equals(getOwnerApp(), that.getOwnerApp())
                && Objects.equals(getCreateTime(), that.getCreateTime())
                && Objects.equals(getUpdatedTime(), that.getUpdatedTime())
                && Objects.equals(getCreatedBy(), that.getCreatedBy())
                && Objects.equals(getUpdatedBy(), that.getUpdatedBy());
    }

    @Override
    public int hashCode() {
        return Objects.hash(
                getStatus(),
                getEndTime(),
                getWorkflowId(),
                getParentWorkflowId(),
                getParentWorkflowTaskId(),
                getTasks(),
                getInput(),
                output,
                outputPayload,
                getCorrelationId(),
                getReRunFromWorkflowId(),
                getReasonForIncompletion(),
                getEvent(),
                getTaskToDomain(),
                getFailedReferenceTaskNames(),
                getFailedTaskNames(),
                getWorkflowDefinition(),
                getExternalInputPayloadStoragePath(),
                getExternalOutputPayloadStoragePath(),
                getPriority(),
                getVariables(),
                getLastRetriedTime(),
                getOwnerApp(),
                getCreateTime(),
                getUpdatedTime(),
                getCreatedBy(),
                getUpdatedBy());
    }

    /**
     * Converts this server-side model to the client-facing {@link Workflow} DTO. Bean properties
     * are copied by name; status and tasks are mapped explicitly; when a payload has been
     * externalized, the corresponding DTO map is emptied so only the storage path is exposed.
     */
    public Workflow toWorkflow() {
        Workflow workflow = new Workflow();
        BeanUtils.copyProperties(this, workflow);
        workflow.setStatus(Workflow.WorkflowStatus.valueOf(this.status.name()));
        workflow.setTasks(tasks.stream().map(TaskModel::toTask).collect(Collectors.toList()));
        workflow.setUpdateTime(this.updatedTime);

        // ensure that input/output is properly represented
        if (externalInputPayloadStoragePath != null) {
            workflow.setInput(new HashMap<>());
        }

        if (externalOutputPayloadStoragePath != null) {
            workflow.setOutput(new HashMap<>());
        }
        return workflow;
    }

    public void addInput(String key, Object value) {
        this.input.put(key, value);
    }

    public void addInput(Map<String, Object> inputData) {
        if (inputData != null) {
            this.input.putAll(inputData);
        }
    }

    public void addOutput(String key, Object value) {
        this.output.put(key, value);
    }

    public void addOutput(Map<String, Object> outputData) {
        if (outputData != null) {
            this.output.putAll(outputData);
        }
    }
}
6,772
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor
Create_ds/conductor/core/src/main/java/com/netflix/conductor/service/ExecutionLockService.java
/*
 * Copyright 2022 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.service;

import java.util.concurrent.TimeUnit;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import com.netflix.conductor.annotations.Trace;
import com.netflix.conductor.core.config.ConductorProperties;
import com.netflix.conductor.core.sync.Lock;
import com.netflix.conductor.metrics.Monitors;

/**
 * Thin wrapper around the configured {@link Lock} implementation. Every operation is a no-op when
 * workflow execution locking is disabled via {@link ConductorProperties}.
 */
@Service
@Trace
public class ExecutionLockService {

    private static final Logger LOGGER = LoggerFactory.getLogger(ExecutionLockService.class);

    private final ConductorProperties properties;
    private final Lock lock;
    private final long lockLeaseTime;
    private final long lockTimeToTry;

    @Autowired
    public ExecutionLockService(ConductorProperties properties, Lock lock) {
        this.properties = properties;
        this.lock = lock;
        this.lockLeaseTime = properties.getLockLeaseTime().toMillis();
        this.lockTimeToTry = properties.getLockTimeToTry().toMillis();
    }

    /**
     * Tries to acquire the lock using the configured time-to-try and lease durations, returning
     * immediately rather than blocking. Workflow decides can be triggered from multiple entry
     * points (and periodically by the sweeper), and their ordering does not matter, so a caller
     * that fails to get the lock simply exits.
     *
     * @param lockId id of the lock to acquire
     * @return true if the lock was acquired (or locking is disabled), false otherwise
     */
    public boolean acquireLock(String lockId) {
        return acquireLock(lockId, lockTimeToTry, lockLeaseTime);
    }

    /**
     * Tries to acquire the lock with a caller-supplied time-to-try and the configured lease time.
     *
     * @param lockId id of the lock to acquire
     * @param timeToTryMs how long to try for the lock, in milliseconds
     * @return true if the lock was acquired (or locking is disabled), false otherwise
     */
    public boolean acquireLock(String lockId, long timeToTryMs) {
        return acquireLock(lockId, timeToTryMs, lockLeaseTime);
    }

    /**
     * Tries to acquire the lock with caller-supplied time-to-try and lease durations.
     *
     * @param lockId id of the lock to acquire
     * @param timeToTryMs how long to try for the lock, in milliseconds
     * @param leaseTimeMs how long the lock is held before expiring, in milliseconds
     * @return true if the lock was acquired (or locking is disabled), false otherwise
     */
    public boolean acquireLock(String lockId, long timeToTryMs, long leaseTimeMs) {
        if (!properties.isWorkflowExecutionLockEnabled()) {
            // Locking disabled: report success without touching the underlying lock.
            return true;
        }
        boolean acquired =
                lock.acquireLock(lockId, timeToTryMs, leaseTimeMs, TimeUnit.MILLISECONDS);
        if (!acquired) {
            LOGGER.debug(
                    "Thread {} failed to acquire lock to lockId {}.",
                    Thread.currentThread().getId(),
                    lockId);
            Monitors.recordAcquireLockUnsuccessful();
            return false;
        }
        LOGGER.debug(
                "Thread {} acquired lock to lockId {}.", Thread.currentThread().getId(), lockId);
        return true;
    }

    /**
     * Blocks until it gets the lock for workflowId
     *
     * @param lockId id of the lock to wait for
     */
    public void waitForLock(String lockId) {
        if (!properties.isWorkflowExecutionLockEnabled()) {
            return;
        }
        lock.acquireLock(lockId);
        LOGGER.debug(
                "Thread {} acquired lock to lockId {}.", Thread.currentThread().getId(), lockId);
    }

    /**
     * Releases a previously acquired lock.
     *
     * @param lockId id of the lock to release
     */
    public void releaseLock(String lockId) {
        if (!properties.isWorkflowExecutionLockEnabled()) {
            return;
        }
        lock.releaseLock(lockId);
        LOGGER.debug(
                "Thread {} released lock to lockId {}.", Thread.currentThread().getId(), lockId);
    }

    /**
     * Deletes the lock resource itself (not just the hold on it).
     *
     * @param lockId id of the lock to delete
     */
    public void deleteLock(String lockId) {
        if (!properties.isWorkflowExecutionLockEnabled()) {
            return;
        }
        lock.deleteLock(lockId);
        LOGGER.debug("Thread {} deleted lockId {}.", Thread.currentThread().getId(), lockId);
    }
}
6,773
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor
Create_ds/conductor/core/src/main/java/com/netflix/conductor/service/AdminService.java
/*
 * Copyright 2022 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.service;

import java.util.List;
import java.util.Map;

import javax.validation.constraints.NotEmpty;

import org.springframework.validation.annotation.Validated;

import com.netflix.conductor.common.metadata.tasks.Task;

/** Administrative operations: configuration inspection, pending-task listing, and workflow repair. */
@Validated
public interface AdminService {

    /**
     * Queue up all the running workflows for sweep.
     *
     * @param workflowId Id of the workflow
     * @return the id of the workflow instance that can be used for tracking.
     */
    String requeueSweep(
            @NotEmpty(message = "WorkflowId cannot be null or empty.") String workflowId);

    /**
     * Get all the configuration parameters.
     *
     * @return all the configuration parameters.
     */
    Map<String, Object> getAllConfig();

    /**
     * Get the list of pending tasks for a given task type.
     *
     * @param taskType Name of the task
     * @param start Start index of pagination
     * @param count Number of entries to return
     * @return list of pending {@link Task}
     */
    List<Task> getListOfPendingTask(
            @NotEmpty(message = "TaskType cannot be null or empty.") String taskType,
            Integer start,
            Integer count);

    /**
     * Verify that the Workflow is consistent, and run repairs as needed.
     *
     * @param workflowId id of the workflow to verify and repair
     * @return true, if repair was successful
     */
    boolean verifyAndRepairWorkflowConsistency(
            @NotEmpty(message = "WorkflowId cannot be null or empty.") String workflowId);

    /**
     * Get registered event queues.
     *
     * @param verbose when true, include additional queue details in the result
     * @return map of event queues
     */
    Map<String, ?> getEventQueues(boolean verbose);
}
6,774
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor
Create_ds/conductor/core/src/main/java/com/netflix/conductor/service/WorkflowBulkServiceImpl.java
/*
 * Copyright 2022 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.service;

import java.util.List;
import java.util.function.Consumer;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Service;

import com.netflix.conductor.annotations.Audit;
import com.netflix.conductor.annotations.Trace;
import com.netflix.conductor.common.model.BulkResponse;
import com.netflix.conductor.core.execution.WorkflowExecutor;

/**
 * Applies a workflow operation (pause, resume, restart, retry, terminate) to a list of workflow
 * ids. Failures are collected per workflow id rather than aborting the batch.
 */
@Audit
@Trace
@Service
public class WorkflowBulkServiceImpl implements WorkflowBulkService {

    // Fix: logger was previously keyed to the interface (WorkflowBulkService.class),
    // mislabeling this class's log category.
    private static final Logger LOGGER = LoggerFactory.getLogger(WorkflowBulkServiceImpl.class);

    private final WorkflowExecutor workflowExecutor;

    public WorkflowBulkServiceImpl(WorkflowExecutor workflowExecutor) {
        this.workflowExecutor = workflowExecutor;
    }

    /**
     * Pause the list of workflows.
     *
     * @param workflowIds - list of workflow Ids to perform pause operation on
     * @return bulk response object containing a list of succeeded workflows and a list of failed
     *     ones with errors
     */
    public BulkResponse pauseWorkflow(List<String> workflowIds) {
        return executeBulkOperation(workflowIds, "pauseWorkflow", workflowExecutor::pauseWorkflow);
    }

    /**
     * Resume the list of workflows.
     *
     * @param workflowIds - list of workflow Ids to perform resume operation on
     * @return bulk response object containing a list of succeeded workflows and a list of failed
     *     ones with errors
     */
    public BulkResponse resumeWorkflow(List<String> workflowIds) {
        return executeBulkOperation(
                workflowIds, "resumeWorkflow", workflowExecutor::resumeWorkflow);
    }

    /**
     * Restart the list of workflows.
     *
     * @param workflowIds - list of workflow Ids to perform restart operation on
     * @param useLatestDefinitions if true, use latest workflow and task definitions upon restart
     * @return bulk response object containing a list of succeeded workflows and a list of failed
     *     ones with errors
     */
    public BulkResponse restart(List<String> workflowIds, boolean useLatestDefinitions) {
        return executeBulkOperation(
                workflowIds,
                "restart",
                workflowId -> workflowExecutor.restart(workflowId, useLatestDefinitions));
    }

    /**
     * Retry the last failed task for each workflow from the list.
     *
     * @param workflowIds - list of workflow Ids to perform retry operation on
     * @return bulk response object containing a list of succeeded workflows and a list of failed
     *     ones with errors
     */
    public BulkResponse retry(List<String> workflowIds) {
        return executeBulkOperation(
                workflowIds, "retry", workflowId -> workflowExecutor.retry(workflowId, false));
    }

    /**
     * Terminate workflows execution.
     *
     * @param workflowIds - list of workflow Ids to perform terminate operation on
     * @param reason - description to be specified for the terminated workflow for future
     *     references.
     * @return bulk response object containing a list of succeeded workflows and a list of failed
     *     ones with errors
     */
    public BulkResponse terminate(List<String> workflowIds, String reason) {
        return executeBulkOperation(
                workflowIds,
                "terminate",
                workflowId -> workflowExecutor.terminateWorkflow(workflowId, reason));
    }

    /**
     * Shared loop for all bulk operations: applies {@code operation} to each workflow id,
     * recording a success or a per-id failure (with the exception message) in the response.
     * Rendered log output is identical to the previous per-method log statements.
     *
     * @param workflowIds list of workflow ids to operate on
     * @param operationName short name of the operation, used only for logging
     * @param operation the action to apply to each workflow id
     * @return bulk response with succeeded ids and failed ids with error messages
     */
    private BulkResponse executeBulkOperation(
            List<String> workflowIds, String operationName, Consumer<String> operation) {
        BulkResponse bulkResponse = new BulkResponse();
        for (String workflowId : workflowIds) {
            try {
                operation.accept(workflowId);
                bulkResponse.appendSuccessResponse(workflowId);
            } catch (Exception e) {
                LOGGER.error(
                        "bulk {} exception, workflowId {}, message: {} ",
                        operationName,
                        workflowId,
                        e.getMessage(),
                        e);
                bulkResponse.appendFailedResponse(workflowId, e.getMessage());
            }
        }
        return bulkResponse;
    }
}
6,775
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor
Create_ds/conductor/core/src/main/java/com/netflix/conductor/service/WorkflowServiceImpl.java
/* * Copyright 2022 Netflix, Inc. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.service; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Optional; import org.springframework.stereotype.Service; import com.netflix.conductor.annotations.Audit; import com.netflix.conductor.annotations.Trace; import com.netflix.conductor.common.metadata.workflow.RerunWorkflowRequest; import com.netflix.conductor.common.metadata.workflow.SkipTaskRequest; import com.netflix.conductor.common.metadata.workflow.StartWorkflowRequest; import com.netflix.conductor.common.metadata.workflow.WorkflowDef; import com.netflix.conductor.common.run.ExternalStorageLocation; import com.netflix.conductor.common.run.SearchResult; import com.netflix.conductor.common.run.Workflow; import com.netflix.conductor.common.run.WorkflowSummary; import com.netflix.conductor.core.exception.NotFoundException; import com.netflix.conductor.core.execution.StartWorkflowInput; import com.netflix.conductor.core.execution.WorkflowExecutor; import com.netflix.conductor.core.operation.StartWorkflowOperation; import com.netflix.conductor.core.utils.Utils; @Audit @Trace @Service public class WorkflowServiceImpl implements WorkflowService { private final WorkflowExecutor workflowExecutor; private final ExecutionService executionService; private final MetadataService metadataService; private final StartWorkflowOperation startWorkflowOperation; public 
WorkflowServiceImpl( WorkflowExecutor workflowExecutor, ExecutionService executionService, MetadataService metadataService, StartWorkflowOperation startWorkflowOperation) { this.workflowExecutor = workflowExecutor; this.executionService = executionService; this.metadataService = metadataService; this.startWorkflowOperation = startWorkflowOperation; } /** * Start a new workflow with StartWorkflowRequest, which allows task to be executed in a domain. * * @param startWorkflowRequest StartWorkflow request for the workflow you want to start. * @return the id of the workflow instance that can be use for tracking. */ public String startWorkflow(StartWorkflowRequest startWorkflowRequest) { return startWorkflowOperation.execute(new StartWorkflowInput(startWorkflowRequest)); } /** * Start a new workflow with StartWorkflowRequest, which allows task to be executed in a domain. * * @param name Name of the workflow you want to start. * @param version Version of the workflow you want to start. * @param correlationId CorrelationID of the workflow you want to start. * @param priority Priority of the workflow you want to start. * @param input Input to the workflow you want to start. * @param externalInputPayloadStoragePath the relative path in external storage where input * * payload is located * @param taskToDomain the task to domain mapping * @param workflowDef - workflow definition * @return the id of the workflow instance that can be use for tracking. 
*/ public String startWorkflow( String name, Integer version, String correlationId, Integer priority, Map<String, Object> input, String externalInputPayloadStoragePath, Map<String, String> taskToDomain, WorkflowDef workflowDef) { StartWorkflowInput startWorkflowInput = new StartWorkflowInput(); startWorkflowInput.setName(name); startWorkflowInput.setVersion(version); startWorkflowInput.setCorrelationId(correlationId); startWorkflowInput.setPriority(priority); startWorkflowInput.setWorkflowInput(input); startWorkflowInput.setExternalInputPayloadStoragePath(externalInputPayloadStoragePath); startWorkflowInput.setTaskToDomain(taskToDomain); startWorkflowInput.setWorkflowDefinition(workflowDef); return startWorkflowOperation.execute(startWorkflowInput); } /** * Start a new workflow. Returns the ID of the workflow instance that can be later used for * tracking. * * @param name Name of the workflow you want to start. * @param version Version of the workflow you want to start. * @param correlationId CorrelationID of the workflow you want to start. * @param priority Priority of the workflow you want to start. * @param input Input to the workflow you want to start. * @return the id of the workflow instance that can be use for tracking. 
*/ public String startWorkflow( String name, Integer version, String correlationId, Integer priority, Map<String, Object> input) { WorkflowDef workflowDef = metadataService.getWorkflowDef(name, version); if (workflowDef == null) { throw new NotFoundException( "No such workflow found by name: %s, version: %d", name, version); } StartWorkflowInput startWorkflowInput = new StartWorkflowInput(); startWorkflowInput.setName(workflowDef.getName()); startWorkflowInput.setVersion(workflowDef.getVersion()); startWorkflowInput.setCorrelationId(correlationId); startWorkflowInput.setPriority(priority); startWorkflowInput.setWorkflowInput(input); return startWorkflowOperation.execute(startWorkflowInput); } /** * Lists workflows for the given correlation id. * * @param name Name of the workflow. * @param correlationId CorrelationID of the workflow you want to start. * @param includeClosed IncludeClosed workflow which are not running. * @param includeTasks Includes tasks associated with workflows. * @return a list of {@link Workflow} */ public List<Workflow> getWorkflows( String name, String correlationId, boolean includeClosed, boolean includeTasks) { return executionService.getWorkflowInstances( name, correlationId, includeClosed, includeTasks); } /** * Lists workflows for the given correlation id. * * @param name Name of the workflow. * @param includeClosed CorrelationID of the workflow you want to start. * @param includeTasks IncludeClosed workflow which are not running. * @param correlationIds Includes tasks associated with workflows. 
* @return a {@link Map} of {@link String} as key and a list of {@link Workflow} as value */ public Map<String, List<Workflow>> getWorkflows( String name, boolean includeClosed, boolean includeTasks, List<String> correlationIds) { Map<String, List<Workflow>> workflowMap = new HashMap<>(); for (String correlationId : correlationIds) { List<Workflow> workflows = executionService.getWorkflowInstances( name, correlationId, includeClosed, includeTasks); workflowMap.put(correlationId, workflows); } return workflowMap; } /** * Gets the workflow by workflow id. * * @param workflowId id of the workflow. * @param includeTasks Includes tasks associated with workflow. * @return an instance of {@link Workflow} */ public Workflow getExecutionStatus(String workflowId, boolean includeTasks) { Workflow workflow = executionService.getExecutionStatus(workflowId, includeTasks); if (workflow == null) { throw new NotFoundException("Workflow with id: %s not found.", workflowId); } return workflow; } /** * Removes the workflow from the system. * * @param workflowId WorkflowID of the workflow you want to remove from system. * @param archiveWorkflow Archives the workflow and associated tasks instead of removing them. */ public void deleteWorkflow(String workflowId, boolean archiveWorkflow) { executionService.removeWorkflow(workflowId, archiveWorkflow); } /** * Retrieves all the running workflows. * * @param workflowName Name of the workflow. * @param version Version of the workflow. * @param startTime start time of the workflow. * @param endTime EndTime of the workflow * @return a list of workflow Ids. 
*/ public List<String> getRunningWorkflows( String workflowName, Integer version, Long startTime, Long endTime) { if (Optional.ofNullable(startTime).orElse(0L) != 0 && Optional.ofNullable(endTime).orElse(0L) != 0) { return workflowExecutor.getWorkflows(workflowName, version, startTime, endTime); } else { version = Optional.ofNullable(version) .orElseGet( () -> { WorkflowDef workflowDef = metadataService.getWorkflowDef(workflowName, null); return workflowDef.getVersion(); }); return workflowExecutor.getRunningWorkflowIds(workflowName, version); } } /** * Starts the decision task for a workflow. * * @param workflowId WorkflowId of the workflow. */ public void decideWorkflow(String workflowId) { workflowExecutor.decide(workflowId); } /** * Pauses the workflow given a workflowId. * * @param workflowId WorkflowId of the workflow. */ public void pauseWorkflow(String workflowId) { workflowExecutor.pauseWorkflow(workflowId); } /** * Resumes the workflow. * * @param workflowId WorkflowId of the workflow. */ public void resumeWorkflow(String workflowId) { workflowExecutor.resumeWorkflow(workflowId); } /** * Skips a given task from a current running workflow. * * @param workflowId WorkflowId of the workflow. * @param taskReferenceName The task reference name. * @param skipTaskRequest {@link SkipTaskRequest} for task you want to skip. */ public void skipTaskFromWorkflow( String workflowId, String taskReferenceName, SkipTaskRequest skipTaskRequest) { workflowExecutor.skipTaskFromWorkflow(workflowId, taskReferenceName, skipTaskRequest); } /** * Reruns the workflow from a specific task. * * @param workflowId WorkflowId of the workflow you want to rerun. * @param request (@link RerunWorkflowRequest) for the workflow. * @return WorkflowId of the rerun workflow. */ public String rerunWorkflow(String workflowId, RerunWorkflowRequest request) { request.setReRunFromWorkflowId(workflowId); return workflowExecutor.rerun(request); } /** * Restarts a completed workflow. 
* * @param workflowId WorkflowId of the workflow. * @param useLatestDefinitions if true, use the latest workflow and task definitions upon * restart */ public void restartWorkflow(String workflowId, boolean useLatestDefinitions) { workflowExecutor.restart(workflowId, useLatestDefinitions); } /** * Retries the last failed task. * * @param workflowId WorkflowId of the workflow. */ public void retryWorkflow(String workflowId, boolean resumeSubworkflowTasks) { workflowExecutor.retry(workflowId, resumeSubworkflowTasks); } /** * Resets callback times of all non-terminal SIMPLE tasks to 0. * * @param workflowId WorkflowId of the workflow. */ public void resetWorkflow(String workflowId) { workflowExecutor.resetCallbacksForWorkflow(workflowId); } /** * Terminate workflow execution. * * @param workflowId WorkflowId of the workflow. * @param reason Reason for terminating the workflow. */ public void terminateWorkflow(String workflowId, String reason) { workflowExecutor.terminateWorkflow(workflowId, reason); } /** * Search for workflows based on payload and given parameters. Use sort options as sort ASCor * DESC e.g. sort=name or sort=workflowId:DESC. If order is not specified, defaults to ASC. * * @param start Start index of pagination * @param size Number of entries * @param sort Sorting type ASC|DESC * @param freeText Text you want to search * @param query Query you want to search * @return instance of {@link SearchResult} */ public SearchResult<WorkflowSummary> searchWorkflows( int start, int size, String sort, String freeText, String query) { return executionService.search( query, freeText, start, size, Utils.convertStringToList(sort)); } /** * Search for workflows based on payload and given parameters. Use sort options as sort ASCor * DESC e.g. sort=name or sort=workflowId:DESC. If order is not specified, defaults to ASC. 
* * @param start Start index of pagination * @param size Number of entries * @param sort Sorting type ASC|DESC * @param freeText Text you want to search * @param query Query you want to search * @return instance of {@link SearchResult} */ public SearchResult<Workflow> searchWorkflowsV2( int start, int size, String sort, String freeText, String query) { return executionService.searchV2( query, freeText, start, size, Utils.convertStringToList(sort)); } /** * Search for workflows based on payload and given parameters. Use sort options as sort ASCor * DESC e.g. sort=name or sort=workflowId:DESC. If order is not specified, defaults to ASC. * * @param start Start index of pagination * @param size Number of entries * @param sort list of sorting options, separated by "|" delimiter * @param freeText Text you want to search * @param query Query you want to search * @return instance of {@link SearchResult} */ public SearchResult<WorkflowSummary> searchWorkflows( int start, int size, List<String> sort, String freeText, String query) { return executionService.search(query, freeText, start, size, sort); } /** * Search for workflows based on payload and given parameters. Use sort options as sort ASCor * DESC e.g. sort=name or sort=workflowId:DESC. If order is not specified, defaults to ASC. * * @param start Start index of pagination * @param size Number of entries * @param sort list of sorting options, separated by "|" delimiter * @param freeText Text you want to search * @param query Query you want to search * @return instance of {@link SearchResult} */ public SearchResult<Workflow> searchWorkflowsV2( int start, int size, List<String> sort, String freeText, String query) { return executionService.searchV2(query, freeText, start, size, sort); } /** * Search for workflows based on task parameters. Use sort options as sort ASC or DESC e.g. * sort=name or sort=workflowId:DESC. If order is not specified, defaults to ASC. 
* * @param start Start index of pagination * @param size Number of entries * @param sort Sorting type ASC|DESC * @param freeText Text you want to search * @param query Query you want to search * @return instance of {@link SearchResult} */ public SearchResult<WorkflowSummary> searchWorkflowsByTasks( int start, int size, String sort, String freeText, String query) { return executionService.searchWorkflowByTasks( query, freeText, start, size, Utils.convertStringToList(sort)); } /** * Search for workflows based on task parameters. Use sort options as sort ASC or DESC e.g. * sort=name or sort=workflowId:DESC. If order is not specified, defaults to ASC. * * @param start Start index of pagination * @param size Number of entries * @param sort Sorting type ASC|DESC * @param freeText Text you want to search * @param query Query you want to search * @return instance of {@link SearchResult} */ public SearchResult<Workflow> searchWorkflowsByTasksV2( int start, int size, String sort, String freeText, String query) { return executionService.searchWorkflowByTasksV2( query, freeText, start, size, Utils.convertStringToList(sort)); } /** * Search for workflows based on task parameters. Use sort options as sort ASC or DESC e.g. * sort=name or sort=workflowId:DESC. If order is not specified, defaults to ASC. * * @param start Start index of pagination * @param size Number of entries * @param sort list of sorting options, separated by "|" delimiter * @param freeText Text you want to search * @param query Query you want to search * @return instance of {@link SearchResult} */ public SearchResult<WorkflowSummary> searchWorkflowsByTasks( int start, int size, List<String> sort, String freeText, String query) { return executionService.searchWorkflowByTasks(query, freeText, start, size, sort); } /** * Search for workflows based on task parameters. Use sort options as sort ASC or DESC e.g. * sort=name or sort=workflowId:DESC. If order is not specified, defaults to ASC. 
* * @param start Start index of pagination * @param size Number of entries * @param sort list of sorting options, separated by "|" delimiter * @param freeText Text you want to search * @param query Query you want to search * @return instance of {@link SearchResult} */ public SearchResult<Workflow> searchWorkflowsByTasksV2( int start, int size, List<String> sort, String freeText, String query) { return executionService.searchWorkflowByTasksV2(query, freeText, start, size, sort); } /** * Get the external storage location where the workflow input payload is stored/to be stored * * @param path the path for which the external storage location is to be populated * @param operation the operation to be performed (read or write) * @param type the type of payload (input or output) * @return {@link ExternalStorageLocation} containing the uri and the path to the payload is * stored in external storage */ public ExternalStorageLocation getExternalStorageLocation( String path, String operation, String type) { return executionService.getExternalStorageLocation(path, operation, type); } }
6,776
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor
Create_ds/conductor/core/src/main/java/com/netflix/conductor/service/WorkflowService.java
/* * Copyright 2022 Netflix, Inc. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.service; import java.util.List; import java.util.Map; import javax.validation.Valid; import javax.validation.constraints.Max; import javax.validation.constraints.Min; import javax.validation.constraints.NotEmpty; import javax.validation.constraints.NotNull; import org.springframework.validation.annotation.Validated; import com.netflix.conductor.common.metadata.workflow.RerunWorkflowRequest; import com.netflix.conductor.common.metadata.workflow.SkipTaskRequest; import com.netflix.conductor.common.metadata.workflow.StartWorkflowRequest; import com.netflix.conductor.common.metadata.workflow.WorkflowDef; import com.netflix.conductor.common.run.ExternalStorageLocation; import com.netflix.conductor.common.run.SearchResult; import com.netflix.conductor.common.run.Workflow; import com.netflix.conductor.common.run.WorkflowSummary; @Validated public interface WorkflowService { /** * Start a new workflow with StartWorkflowRequest, which allows task to be executed in a domain. * * @param startWorkflowRequest StartWorkflow request for the workflow you want to start. * @return the id of the workflow instance that can be use for tracking. */ String startWorkflow( @NotNull(message = "StartWorkflowRequest cannot be null") @Valid StartWorkflowRequest startWorkflowRequest); /** * Start a new workflow. Returns the ID of the workflow instance that can be later used for * tracking. 
* * @param name Name of the workflow you want to start. * @param version Version of the workflow you want to start. * @param correlationId CorrelationID of the workflow you want to start. * @param priority Priority of the workflow you want to start. * @param input Input to the workflow you want to start. * @return the id of the workflow instance that can be use for tracking. */ String startWorkflow( @NotEmpty(message = "Workflow name cannot be null or empty") String name, Integer version, String correlationId, @Min(value = 0, message = "0 is the minimum priority value") @Max(value = 99, message = "99 is the maximum priority value") Integer priority, Map<String, Object> input); /** * Start a new workflow. Returns the ID of the workflow instance that can be later used for * tracking. * * @param name Name of the workflow you want to start. * @param version Version of the workflow you want to start. * @param correlationId CorrelationID of the workflow you want to start. * @param priority Priority of the workflow you want to start. * @param input Input to the workflow you want to start. * @param externalInputPayloadStoragePath * @param taskToDomain * @param workflowDef - workflow definition * @return the id of the workflow instance that can be use for tracking. */ String startWorkflow( String name, Integer version, String correlationId, Integer priority, Map<String, Object> input, String externalInputPayloadStoragePath, Map<String, String> taskToDomain, WorkflowDef workflowDef); /** * Lists workflows for the given correlation id. * * @param name Name of the workflow. * @param correlationId CorrelationID of the workflow you want to list. * @param includeClosed IncludeClosed workflow which are not running. * @param includeTasks Includes tasks associated with workflows. 
* @return a list of {@link Workflow}
*/
List<Workflow> getWorkflows(
        @NotEmpty(message = "Workflow name cannot be null or empty") String name,
        String correlationId,
        boolean includeClosed,
        boolean includeTasks);

/**
 * Lists workflows for the given correlation ids.
 *
 * @param name Name of the workflow.
 * @param includeClosed Includes workflows that are no longer running (closed).
 * @param includeTasks Includes tasks associated with each workflow.
 * @param correlationIds List of correlation ids of the workflows to fetch.
 * @return a {@link Map} of {@link String} as key and a list of {@link Workflow} as value
 */
Map<String, List<Workflow>> getWorkflows(
        @NotEmpty(message = "Workflow name cannot be null or empty") String name,
        boolean includeClosed,
        boolean includeTasks,
        List<String> correlationIds);

/**
 * Gets the workflow by workflow Id.
 *
 * @param workflowId Id of the workflow.
 * @param includeTasks Includes tasks associated with workflow.
 * @return an instance of {@link Workflow}
 */
Workflow getExecutionStatus(
        @NotEmpty(message = "WorkflowId cannot be null or empty.") String workflowId,
        boolean includeTasks);

/**
 * Removes the workflow from the system.
 *
 * @param workflowId WorkflowID of the workflow you want to remove from system.
 * @param archiveWorkflow Archives the workflow and associated tasks instead of removing them.
 */
void deleteWorkflow(
        @NotEmpty(message = "WorkflowId cannot be null or empty.") String workflowId,
        boolean archiveWorkflow);

/**
 * Retrieves all the running workflows.
 *
 * @param workflowName Name of the workflow.
 * @param version Version of the workflow.
 * @param startTime Start time of the workflow.
 * @param endTime End time of the workflow.
 * @return a list of workflow Ids.
 */
List<String> getRunningWorkflows(
        @NotEmpty(message = "Workflow name cannot be null or empty.") String workflowName,
        Integer version,
        Long startTime,
        Long endTime);

/**
 * Starts the decision task for a workflow.
 *
 * @param workflowId WorkflowId of the workflow.
*/
void decideWorkflow(
        @NotEmpty(message = "WorkflowId cannot be null or empty.") String workflowId);

/**
 * Pauses the workflow given a workflowId.
 *
 * @param workflowId WorkflowId of the workflow.
 */
void pauseWorkflow(
        @NotEmpty(message = "WorkflowId cannot be null or empty.") String workflowId);

/**
 * Resumes the workflow.
 *
 * @param workflowId WorkflowId of the workflow.
 */
void resumeWorkflow(
        @NotEmpty(message = "WorkflowId cannot be null or empty.") String workflowId);

/**
 * Skips a given task from a current running workflow.
 *
 * @param workflowId WorkflowId of the workflow.
 * @param taskReferenceName The task reference name.
 * @param skipTaskRequest {@link SkipTaskRequest} for task you want to skip.
 */
void skipTaskFromWorkflow(
        @NotEmpty(message = "WorkflowId name cannot be null or empty.") String workflowId,
        @NotEmpty(message = "TaskReferenceName cannot be null or empty.")
                String taskReferenceName,
        SkipTaskRequest skipTaskRequest);

/**
 * Reruns the workflow from a specific task.
 *
 * @param workflowId WorkflowId of the workflow you want to rerun.
 * @param request {@link RerunWorkflowRequest} for the workflow.
 * @return WorkflowId of the rerun workflow.
 */
String rerunWorkflow(
        @NotEmpty(message = "WorkflowId cannot be null or empty.") String workflowId,
        @NotNull(message = "RerunWorkflowRequest cannot be null.")
                RerunWorkflowRequest request);

/**
 * Restarts a completed workflow.
 *
 * @param workflowId WorkflowId of the workflow.
 * @param useLatestDefinitions if true, use the latest workflow and task definitions upon
 *     restart
 */
void restartWorkflow(
        @NotEmpty(message = "WorkflowId cannot be null or empty.") String workflowId,
        boolean useLatestDefinitions);

/**
 * Retries the last failed task.
 *
 * @param workflowId WorkflowId of the workflow.
 * @param resumeSubworkflowTasks if true, also resumes tasks of in-progress sub-workflows
 */
void retryWorkflow(
        @NotEmpty(message = "WorkflowId cannot be null or empty.") String workflowId,
        boolean resumeSubworkflowTasks);

/**
 * Resets callback times of all non-terminal SIMPLE tasks to 0.
* * @param workflowId WorkflowId of the workflow. */ void resetWorkflow( @NotEmpty(message = "WorkflowId cannot be null or empty.") String workflowId); /** * Terminate workflow execution. * * @param workflowId WorkflowId of the workflow. * @param reason Reason for terminating the workflow. */ void terminateWorkflow( @NotEmpty(message = "WorkflowId cannot be null or empty.") String workflowId, String reason); /** * Search for workflows based on payload and given parameters. Use sort options as sort ASCor * DESC e.g. sort=name or sort=workflowId:DESC. If order is not specified, defaults to ASC. * * @param start Start index of pagination * @param size Number of entries * @param sort Sorting type ASC|DESC * @param freeText Text you want to search * @param query Query you want to search * @return instance of {@link SearchResult} */ SearchResult<WorkflowSummary> searchWorkflows( int start, @Max( value = 5_000, message = "Cannot return more than {value} workflows. Please use pagination.") int size, String sort, String freeText, String query); /** * Search for workflows based on payload and given parameters. Use sort options as sort ASCor * DESC e.g. sort=name or sort=workflowId:DESC. If order is not specified, defaults to ASC. * * @param start Start index of pagination * @param size Number of entries * @param sort Sorting type ASC|DESC * @param freeText Text you want to search * @param query Query you want to search * @return instance of {@link SearchResult} */ SearchResult<Workflow> searchWorkflowsV2( int start, @Max( value = 5_000, message = "Cannot return more than {value} workflows. Please use pagination.") int size, String sort, String freeText, String query); /** * Search for workflows based on payload and given parameters. Use sort options as sort ASCor * DESC e.g. sort=name or sort=workflowId:DESC. If order is not specified, defaults to ASC. 
* * @param start Start index of pagination * @param size Number of entries * @param sort list of sorting options, separated by "|" delimiter * @param freeText Text you want to search * @param query Query you want to search * @return instance of {@link SearchResult} */ SearchResult<WorkflowSummary> searchWorkflows( int start, @Max( value = 5_000, message = "Cannot return more than {value} workflows. Please use pagination.") int size, List<String> sort, String freeText, String query); /** * Search for workflows based on payload and given parameters. Use sort options as sort ASCor * DESC e.g. sort=name or sort=workflowId:DESC. If order is not specified, defaults to ASC. * * @param start Start index of pagination * @param size Number of entries * @param sort list of sorting options, separated by "|" delimiter * @param freeText Text you want to search * @param query Query you want to search * @return instance of {@link SearchResult} */ SearchResult<Workflow> searchWorkflowsV2( int start, @Max( value = 5_000, message = "Cannot return more than {value} workflows. Please use pagination.") int size, List<String> sort, String freeText, String query); /** * Search for workflows based on task parameters. Use sort options as sort ASC or DESC e.g. * sort=name or sort=workflowId:DESC. If order is not specified, defaults to ASC. * * @param start Start index of pagination * @param size Number of entries * @param sort Sorting type ASC|DESC * @param freeText Text you want to search * @param query Query you want to search * @return instance of {@link SearchResult} */ SearchResult<WorkflowSummary> searchWorkflowsByTasks( int start, int size, String sort, String freeText, String query); /** * Search for workflows based on task parameters. Use sort options as sort ASC or DESC e.g. * sort=name or sort=workflowId:DESC. If order is not specified, defaults to ASC. 
* * @param start Start index of pagination * @param size Number of entries * @param sort Sorting type ASC|DESC * @param freeText Text you want to search * @param query Query you want to search * @return instance of {@link SearchResult} */ SearchResult<Workflow> searchWorkflowsByTasksV2( int start, int size, String sort, String freeText, String query); /** * Search for workflows based on task parameters. Use sort options as sort ASC or DESC e.g. * sort=name or sort=workflowId:DESC. If order is not specified, defaults to ASC. * * @param start Start index of pagination * @param size Number of entries * @param sort list of sorting options, separated by "|" delimiter * @param freeText Text you want to search * @param query Query you want to search * @return instance of {@link SearchResult} */ SearchResult<WorkflowSummary> searchWorkflowsByTasks( int start, int size, List<String> sort, String freeText, String query); /** * Search for workflows based on task parameters. Use sort options as sort ASC or DESC e.g. * sort=name or sort=workflowId:DESC. If order is not specified, defaults to ASC. 
* * @param start Start index of pagination * @param size Number of entries * @param sort list of sorting options, separated by "|" delimiter * @param freeText Text you want to search * @param query Query you want to search * @return instance of {@link SearchResult} */ SearchResult<Workflow> searchWorkflowsByTasksV2( int start, int size, List<String> sort, String freeText, String query); /** * Get the external storage location where the workflow input payload is stored/to be stored * * @param path the path for which the external storage location is to be populated * @param operation the operation to be performed (read or write) * @param payloadType the type of payload (input or output) * @return {@link ExternalStorageLocation} containing the uri and the path to the payload is * stored in external storage */ ExternalStorageLocation getExternalStorageLocation( String path, String operation, String payloadType); }
6,777
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor
Create_ds/conductor/core/src/main/java/com/netflix/conductor/service/ExecutionService.java
/* * Copyright 2022 Netflix, Inc. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.service; import java.util.*; import java.util.stream.Collectors; import org.apache.commons.lang3.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.stereotype.Service; import com.netflix.conductor.annotations.Trace; import com.netflix.conductor.common.metadata.events.EventExecution; import com.netflix.conductor.common.metadata.tasks.*; import com.netflix.conductor.common.run.*; import com.netflix.conductor.common.utils.ExternalPayloadStorage; import com.netflix.conductor.common.utils.ExternalPayloadStorage.Operation; import com.netflix.conductor.common.utils.ExternalPayloadStorage.PayloadType; import com.netflix.conductor.core.config.ConductorProperties; import com.netflix.conductor.core.dal.ExecutionDAOFacade; import com.netflix.conductor.core.events.queue.Message; import com.netflix.conductor.core.exception.NotFoundException; import com.netflix.conductor.core.execution.WorkflowExecutor; import com.netflix.conductor.core.execution.tasks.SystemTaskRegistry; import com.netflix.conductor.core.utils.QueueUtils; import com.netflix.conductor.core.utils.Utils; import com.netflix.conductor.dao.QueueDAO; import com.netflix.conductor.metrics.Monitors; import com.netflix.conductor.model.TaskModel; @Trace @Service public class ExecutionService { private static final Logger LOGGER = 
LoggerFactory.getLogger(ExecutionService.class); private final WorkflowExecutor workflowExecutor; private final ExecutionDAOFacade executionDAOFacade; private final QueueDAO queueDAO; private final ExternalPayloadStorage externalPayloadStorage; private final SystemTaskRegistry systemTaskRegistry; private final long queueTaskMessagePostponeSecs; private static final int MAX_POLL_TIMEOUT_MS = 5000; private static final int POLL_COUNT_ONE = 1; private static final int POLLING_TIMEOUT_IN_MS = 100; public ExecutionService( WorkflowExecutor workflowExecutor, ExecutionDAOFacade executionDAOFacade, QueueDAO queueDAO, ConductorProperties properties, ExternalPayloadStorage externalPayloadStorage, SystemTaskRegistry systemTaskRegistry) { this.workflowExecutor = workflowExecutor; this.executionDAOFacade = executionDAOFacade; this.queueDAO = queueDAO; this.externalPayloadStorage = externalPayloadStorage; this.queueTaskMessagePostponeSecs = properties.getTaskExecutionPostponeDuration().getSeconds(); this.systemTaskRegistry = systemTaskRegistry; } public Task poll(String taskType, String workerId) { return poll(taskType, workerId, null); } public Task poll(String taskType, String workerId, String domain) { List<Task> tasks = poll(taskType, workerId, domain, 1, 100); if (tasks.isEmpty()) { return null; } return tasks.get(0); } public List<Task> poll(String taskType, String workerId, int count, int timeoutInMilliSecond) { return poll(taskType, workerId, null, count, timeoutInMilliSecond); } public List<Task> poll( String taskType, String workerId, String domain, int count, int timeoutInMilliSecond) { if (timeoutInMilliSecond > MAX_POLL_TIMEOUT_MS) { throw new IllegalArgumentException( "Long Poll Timeout value cannot be more than 5 seconds"); } String queueName = QueueUtils.getQueueName(taskType, domain, null, null); List<String> taskIds = new LinkedList<>(); List<Task> tasks = new LinkedList<>(); try { taskIds = queueDAO.pop(queueName, count, timeoutInMilliSecond); } catch 
(Exception e) { LOGGER.error( "Error polling for task: {} from worker: {} in domain: {}, count: {}", taskType, workerId, domain, count, e); Monitors.error(this.getClass().getCanonicalName(), "taskPoll"); Monitors.recordTaskPollError(taskType, domain, e.getClass().getSimpleName()); } for (String taskId : taskIds) { try { TaskModel taskModel = executionDAOFacade.getTaskModel(taskId); if (taskModel == null || taskModel.getStatus().isTerminal()) { // Remove taskId(s) without a valid Task/terminal state task from the queue queueDAO.remove(queueName, taskId); LOGGER.debug("Removed task: {} from the queue: {}", taskId, queueName); continue; } if (executionDAOFacade.exceedsInProgressLimit(taskModel)) { // Postpone this message, so that it would be available for poll again. queueDAO.postpone( queueName, taskId, taskModel.getWorkflowPriority(), queueTaskMessagePostponeSecs); LOGGER.debug( "Postponed task: {} in queue: {} by {} seconds", taskId, queueName, queueTaskMessagePostponeSecs); continue; } TaskDef taskDef = taskModel.getTaskDefinition().isPresent() ? taskModel.getTaskDefinition().get() : null; if (taskModel.getRateLimitPerFrequency() > 0 && executionDAOFacade.exceedsRateLimitPerFrequency(taskModel, taskDef)) { // Postpone this message, so that it would be available for poll again. 
queueDAO.postpone( queueName, taskId, taskModel.getWorkflowPriority(), queueTaskMessagePostponeSecs); LOGGER.debug( "RateLimit Execution limited for {}:{}, limit:{}", taskId, taskModel.getTaskDefName(), taskModel.getRateLimitPerFrequency()); continue; } taskModel.setStatus(TaskModel.Status.IN_PROGRESS); if (taskModel.getStartTime() == 0) { taskModel.setStartTime(System.currentTimeMillis()); Monitors.recordQueueWaitTime( taskModel.getTaskDefName(), taskModel.getQueueWaitTime()); } taskModel.setCallbackAfterSeconds( 0); // reset callbackAfterSeconds when giving the task to the worker taskModel.setWorkerId(workerId); taskModel.incrementPollCount(); executionDAOFacade.updateTask(taskModel); tasks.add(taskModel.toTask()); } catch (Exception e) { // db operation failed for dequeued message, re-enqueue with a delay LOGGER.warn( "DB operation failed for task: {}, postponing task in queue", taskId, e); Monitors.recordTaskPollError(taskType, domain, e.getClass().getSimpleName()); queueDAO.postpone(queueName, taskId, 0, queueTaskMessagePostponeSecs); } } executionDAOFacade.updateTaskLastPoll(taskType, domain, workerId); Monitors.recordTaskPoll(queueName); tasks.forEach(this::ackTaskReceived); return tasks; } public Task getLastPollTask(String taskType, String workerId, String domain) { List<Task> tasks = poll(taskType, workerId, domain, POLL_COUNT_ONE, POLLING_TIMEOUT_IN_MS); if (tasks.isEmpty()) { LOGGER.debug( "No Task available for the poll: /tasks/poll/{}?{}&{}", taskType, workerId, domain); return null; } Task task = tasks.get(0); ackTaskReceived(task); LOGGER.debug( "The Task {} being returned for /tasks/poll/{}?{}&{}", task, taskType, workerId, domain); return task; } public List<PollData> getPollData(String taskType) { return executionDAOFacade.getTaskPollData(taskType); } public List<PollData> getAllPollData() { try { return executionDAOFacade.getAllPollData(); } catch (UnsupportedOperationException uoe) { List<PollData> allPollData = new ArrayList<>(); Map<String, 
Long> queueSizes = queueDAO.queuesDetail(); queueSizes .keySet() .forEach( queueName -> { try { if (!queueName.contains(QueueUtils.DOMAIN_SEPARATOR)) { allPollData.addAll( getPollData( QueueUtils.getQueueNameWithoutDomain( queueName))); } } catch (Exception e) { LOGGER.error("Unable to fetch all poll data!", e); } }); return allPollData; } } public void terminateWorkflow(String workflowId, String reason) { workflowExecutor.terminateWorkflow(workflowId, reason); } public void updateTask(TaskResult taskResult) { workflowExecutor.updateTask(taskResult); } public List<Task> getTasks(String taskType, String startKey, int count) { return executionDAOFacade.getTasksByName(taskType, startKey, count); } public Task getTask(String taskId) { return executionDAOFacade.getTask(taskId); } public Task getPendingTaskForWorkflow(String taskReferenceName, String workflowId) { return executionDAOFacade.getTasksForWorkflow(workflowId).stream() .filter(task -> !task.getStatus().isTerminal()) .filter(task -> task.getReferenceTaskName().equals(taskReferenceName)) .findFirst() // There can only be one task by a given reference name running at a // time. 
.orElse(null); } /** * This method removes the task from the un-acked Queue * * @param taskId: the taskId that needs to be updated and removed from the unacked queue * @return True in case of successful removal of the taskId from the un-acked queue */ public boolean ackTaskReceived(String taskId) { return Optional.ofNullable(getTask(taskId)).map(this::ackTaskReceived).orElse(false); } public boolean ackTaskReceived(Task task) { return queueDAO.ack(QueueUtils.getQueueName(task), task.getTaskId()); } public Map<String, Integer> getTaskQueueSizes(List<String> taskDefNames) { Map<String, Integer> sizes = new HashMap<>(); for (String taskDefName : taskDefNames) { sizes.put(taskDefName, getTaskQueueSize(taskDefName)); } return sizes; } public Integer getTaskQueueSize(String queueName) { return queueDAO.getSize(queueName); } public void removeTaskFromQueue(String taskId) { Task task = getTask(taskId); if (task == null) { throw new NotFoundException("No such task found by taskId: %s", taskId); } queueDAO.remove(QueueUtils.getQueueName(task), taskId); } public int requeuePendingTasks(String taskType) { int count = 0; List<Task> tasks = getPendingTasksForTaskType(taskType); for (Task pending : tasks) { if (systemTaskRegistry.isSystemTask(pending.getTaskType())) { continue; } if (pending.getStatus().isTerminal()) { continue; } LOGGER.debug( "Requeuing Task: {} of taskType: {} in Workflow: {}", pending.getTaskId(), pending.getTaskType(), pending.getWorkflowInstanceId()); boolean pushed = requeue(pending); if (pushed) { count++; } } return count; } private boolean requeue(Task pending) { long callback = pending.getCallbackAfterSeconds(); if (callback < 0) { callback = 0; } queueDAO.remove(QueueUtils.getQueueName(pending), pending.getTaskId()); long now = System.currentTimeMillis(); callback = callback - ((now - pending.getUpdateTime()) / 1000); if (callback < 0) { callback = 0; } return queueDAO.pushIfNotExists( QueueUtils.getQueueName(pending), pending.getTaskId(), 
pending.getWorkflowPriority(), callback); } public List<Workflow> getWorkflowInstances( String workflowName, String correlationId, boolean includeClosed, boolean includeTasks) { List<Workflow> workflows = executionDAOFacade.getWorkflowsByCorrelationId(workflowName, correlationId, false); return workflows.stream() .parallel() .filter( workflow -> { if (includeClosed || workflow.getStatus() .equals(Workflow.WorkflowStatus.RUNNING)) { // including tasks for subset of workflows to increase performance if (includeTasks) { List<Task> tasks = executionDAOFacade.getTasksForWorkflow( workflow.getWorkflowId()); tasks.sort(Comparator.comparingInt(Task::getSeq)); workflow.setTasks(tasks); } return true; } else { return false; } }) .collect(Collectors.toList()); } public Workflow getExecutionStatus(String workflowId, boolean includeTasks) { return executionDAOFacade.getWorkflow(workflowId, includeTasks); } public List<String> getRunningWorkflows(String workflowName, int version) { return executionDAOFacade.getRunningWorkflowIds(workflowName, version); } public void removeWorkflow(String workflowId, boolean archiveWorkflow) { executionDAOFacade.removeWorkflow(workflowId, archiveWorkflow); } public SearchResult<WorkflowSummary> search( String query, String freeText, int start, int size, List<String> sortOptions) { return executionDAOFacade.searchWorkflowSummary(query, freeText, start, size, sortOptions); } public SearchResult<Workflow> searchV2( String query, String freeText, int start, int size, List<String> sortOptions) { SearchResult<String> result = executionDAOFacade.searchWorkflows(query, freeText, start, size, sortOptions); List<Workflow> workflows = result.getResults().stream() .parallel() .map( workflowId -> { try { return executionDAOFacade.getWorkflow(workflowId, false); } catch (Exception e) { LOGGER.error( "Error fetching workflow by id: {}", workflowId, e); return null; } }) .filter(Objects::nonNull) .collect(Collectors.toList()); int missing = 
result.getResults().size() - workflows.size(); long totalHits = result.getTotalHits() - missing; return new SearchResult<>(totalHits, workflows); } public SearchResult<WorkflowSummary> searchWorkflowByTasks( String query, String freeText, int start, int size, List<String> sortOptions) { SearchResult<TaskSummary> taskSummarySearchResult = searchTaskSummary(query, freeText, start, size, sortOptions); List<WorkflowSummary> workflowSummaries = taskSummarySearchResult.getResults().stream() .parallel() .map( taskSummary -> { try { String workflowId = taskSummary.getWorkflowId(); return new WorkflowSummary( executionDAOFacade.getWorkflow(workflowId, false)); } catch (Exception e) { LOGGER.error( "Error fetching workflow by id: {}", taskSummary.getWorkflowId(), e); return null; } }) .filter(Objects::nonNull) .distinct() .collect(Collectors.toList()); int missing = taskSummarySearchResult.getResults().size() - workflowSummaries.size(); long totalHits = taskSummarySearchResult.getTotalHits() - missing; return new SearchResult<>(totalHits, workflowSummaries); } public SearchResult<Workflow> searchWorkflowByTasksV2( String query, String freeText, int start, int size, List<String> sortOptions) { SearchResult<TaskSummary> taskSummarySearchResult = searchTasks(query, freeText, start, size, sortOptions); List<Workflow> workflows = taskSummarySearchResult.getResults().stream() .parallel() .map( taskSummary -> { try { String workflowId = taskSummary.getWorkflowId(); return executionDAOFacade.getWorkflow(workflowId, false); } catch (Exception e) { LOGGER.error( "Error fetching workflow by id: {}", taskSummary.getWorkflowId(), e); return null; } }) .filter(Objects::nonNull) .distinct() .collect(Collectors.toList()); int missing = taskSummarySearchResult.getResults().size() - workflows.size(); long totalHits = taskSummarySearchResult.getTotalHits() - missing; return new SearchResult<>(totalHits, workflows); } public SearchResult<TaskSummary> searchTasks( String query, String freeText, 
int start, int size, List<String> sortOptions) { SearchResult<String> result = executionDAOFacade.searchTasks(query, freeText, start, size, sortOptions); List<TaskSummary> workflows = result.getResults().stream() .parallel() .map( task -> { try { return new TaskSummary(executionDAOFacade.getTask(task)); } catch (Exception e) { LOGGER.error("Error fetching task by id: {}", task, e); return null; } }) .filter(Objects::nonNull) .collect(Collectors.toList()); int missing = result.getResults().size() - workflows.size(); long totalHits = result.getTotalHits() - missing; return new SearchResult<>(totalHits, workflows); } public SearchResult<TaskSummary> searchTaskSummary( String query, String freeText, int start, int size, List<String> sortOptions) { return executionDAOFacade.searchTaskSummary(query, freeText, start, size, sortOptions); } public SearchResult<TaskSummary> getSearchTasks( String query, String freeText, int start, /*@Max(value = MAX_SEARCH_SIZE, message = "Cannot return more than {value} workflows." 
+ " Please use pagination.")*/ int size, String sortString) { return searchTaskSummary( query, freeText, start, size, Utils.convertStringToList(sortString)); } public SearchResult<Task> getSearchTasksV2( String query, String freeText, int start, int size, String sortString) { SearchResult<String> result = executionDAOFacade.searchTasks( query, freeText, start, size, Utils.convertStringToList(sortString)); List<Task> tasks = result.getResults().stream() .parallel() .map( task -> { try { return executionDAOFacade.getTask(task); } catch (Exception e) { LOGGER.error("Error fetching task by id: {}", task, e); return null; } }) .filter(Objects::nonNull) .collect(Collectors.toList()); int missing = result.getResults().size() - tasks.size(); long totalHits = result.getTotalHits() - missing; return new SearchResult<>(totalHits, tasks); } public List<Task> getPendingTasksForTaskType(String taskType) { return executionDAOFacade.getPendingTasksForTaskType(taskType); } public boolean addEventExecution(EventExecution eventExecution) { return executionDAOFacade.addEventExecution(eventExecution); } public void removeEventExecution(EventExecution eventExecution) { executionDAOFacade.removeEventExecution(eventExecution); } public void updateEventExecution(EventExecution eventExecution) { executionDAOFacade.updateEventExecution(eventExecution); } /** * @param queue Name of the registered queueDAO * @param msg Message */ public void addMessage(String queue, Message msg) { executionDAOFacade.addMessage(queue, msg); } /** * Adds task logs * * @param taskId Id of the task * @param log logs */ public void log(String taskId, String log) { TaskExecLog executionLog = new TaskExecLog(); executionLog.setTaskId(taskId); executionLog.setLog(log); executionLog.setCreatedTime(System.currentTimeMillis()); executionDAOFacade.addTaskExecLog(Collections.singletonList(executionLog)); } /** * @param taskId Id of the task for which to retrieve logs * @return Execution Logs (logged by the worker) */ 
public List<TaskExecLog> getTaskLogs(String taskId) { return executionDAOFacade.getTaskExecutionLogs(taskId); } /** * Get external uri for the payload * * @param path the path for which the external storage location is to be populated * @param operation the type of {@link Operation} to be performed * @param type the {@link PayloadType} at the external uri * @return the external uri at which the payload is stored/to be stored */ public ExternalStorageLocation getExternalStorageLocation( String path, String operation, String type) { try { ExternalPayloadStorage.Operation payloadOperation = ExternalPayloadStorage.Operation.valueOf(StringUtils.upperCase(operation)); ExternalPayloadStorage.PayloadType payloadType = ExternalPayloadStorage.PayloadType.valueOf(StringUtils.upperCase(type)); return externalPayloadStorage.getLocation(payloadOperation, payloadType, path); } catch (Exception e) { String errorMsg = String.format( "Invalid input - Operation: %s, PayloadType: %s", operation, type); LOGGER.error(errorMsg); throw new IllegalArgumentException(errorMsg); } } }
6,778
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor
Create_ds/conductor/core/src/main/java/com/netflix/conductor/service/MetadataService.java
/*
 * Copyright 2022 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.service;

import java.util.List;
import java.util.Map;
import java.util.Optional;

import javax.validation.Valid;
import javax.validation.constraints.NotEmpty;
import javax.validation.constraints.NotNull;
import javax.validation.constraints.Size;

import org.springframework.validation.annotation.Validated;

import com.netflix.conductor.common.metadata.events.EventHandler;
import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowDefSummary;
import com.netflix.conductor.common.model.BulkResponse;

/**
 * Service contract for managing workflow/task metadata (definitions) and event handlers.
 * Arguments are validated via Bean Validation annotations ({@code @Validated}).
 */
@Validated
public interface MetadataService {

    /**
     * Registers a batch of new task definitions.
     *
     * @param taskDefinitions Task Definitions to register
     */
    void registerTaskDef(
            @NotNull(message = "TaskDefList cannot be empty or null")
                    @Size(min = 1, message = "TaskDefList is empty")
                    List<@Valid TaskDef> taskDefinitions);

    /**
     * Updates an existing task definition.
     *
     * @param taskDefinition Task Definition to be updated
     */
    void updateTaskDef(@NotNull(message = "TaskDef cannot be null") @Valid TaskDef taskDefinition);

    /**
     * Removes a task definition by type.
     *
     * @param taskType Remove task definition
     */
    void unregisterTaskDef(@NotEmpty(message = "TaskName cannot be null or empty") String taskType);

    /**
     * @return List of all the registered tasks
     */
    List<TaskDef> getTaskDefs();

    /**
     * @param taskType Task to retrieve
     * @return Task Definition
     */
    TaskDef getTaskDef(@NotEmpty(message = "TaskType cannot be null or empty") String taskType);

    /**
     * @param def Workflow definition to be updated
     */
    void updateWorkflowDef(@NotNull(message = "WorkflowDef cannot be null") @Valid WorkflowDef def);

    /**
     * Bulk variant of {@code updateWorkflowDef}; per-item success/failure is reported in the
     * returned {@link BulkResponse}.
     *
     * @param workflowDefList Workflow definitions to be updated.
     */
    BulkResponse updateWorkflowDef(
            @NotNull(message = "WorkflowDef list name cannot be null or empty")
                    @Size(min = 1, message = "WorkflowDefList is empty")
                    List<@NotNull(message = "WorkflowDef cannot be null") @Valid WorkflowDef>
                            workflowDefList);

    /**
     * @param name Name of the workflow to retrieve
     * @param version Optional. Version. If null, then retrieves the latest
     * @return Workflow definition
     */
    WorkflowDef getWorkflowDef(
            @NotEmpty(message = "Workflow name cannot be null or empty") String name,
            Integer version);

    /**
     * @param name Name of the workflow to retrieve
     * @return Latest version of the workflow definition
     */
    Optional<WorkflowDef> getLatestWorkflow(
            @NotEmpty(message = "Workflow name cannot be null or empty") String name);

    /**
     * @return Returns all workflow defs (all versions)
     */
    List<WorkflowDef> getWorkflowDefs();

    /**
     * @return Returns workflow names and versions only (no definition bodies)
     */
    Map<String, ? extends Iterable<WorkflowDefSummary>> getWorkflowNamesAndVersions();

    /**
     * Registers a new workflow definition.
     *
     * @param workflowDef the definition to register
     */
    void registerWorkflowDef(
            @NotNull(message = "WorkflowDef cannot be null") @Valid WorkflowDef workflowDef);

    /**
     * Validates a {@link WorkflowDef}.
     *
     * @param workflowDef The {@link WorkflowDef} object.
     */
    default void validateWorkflowDef(
            @NotNull(message = "WorkflowDef cannot be null") @Valid WorkflowDef workflowDef) {
        // do nothing, WorkflowDef is annotated with @Valid and calling this method will validate it
    }

    /**
     * @param name Name of the workflow definition to be removed
     * @param version Version of the workflow definition to be removed
     */
    void unregisterWorkflowDef(
            @NotEmpty(message = "Workflow name cannot be null or empty") String name,
            @NotNull(message = "Version cannot be null") Integer version);

    /**
     * @param eventHandler Event handler to be added. Will throw an exception if an event handler
     *     already exists with the name
     */
    void addEventHandler(
            @NotNull(message = "EventHandler cannot be null") @Valid EventHandler eventHandler);

    /**
     * @param eventHandler Event handler to be updated.
     */
    void updateEventHandler(
            @NotNull(message = "EventHandler cannot be null") @Valid EventHandler eventHandler);

    /**
     * @param name Removes the event handler from the system
     */
    void removeEventHandlerStatus(
            @NotEmpty(message = "EventName cannot be null or empty") String name);

    /**
     * @return All the event handlers registered in the system
     */
    List<EventHandler> getAllEventHandlers();

    /**
     * @param event name of the event
     * @param activeOnly if true, returns only the active handlers
     * @return Returns the list of all the event handlers for a given event
     */
    List<EventHandler> getEventHandlersForEvent(
            @NotEmpty(message = "EventName cannot be null or empty") String event,
            boolean activeOnly);

    /**
     * @return the latest version of every registered workflow definition
     */
    List<WorkflowDef> getWorkflowDefsLatestVersions();
}
6,779
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor
Create_ds/conductor/core/src/main/java/com/netflix/conductor/service/EventServiceImpl.java
/*
 * Copyright 2022 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.service;

import java.util.List;

import org.springframework.stereotype.Service;

import com.netflix.conductor.common.metadata.events.EventHandler;
import com.netflix.conductor.core.events.EventQueues;

/**
 * Default {@link EventService} implementation; every operation delegates to
 * {@link MetadataService}, where event-handler persistence lives.
 */
@Service
public class EventServiceImpl implements EventService {

    private final MetadataService metadataService;

    // NOTE(review): eventQueues is injected but never stored or used in this class —
    // presumably requested so the EventQueues bean is constructed eagerly; confirm
    // before removing the parameter (it is part of the Spring wiring).
    public EventServiceImpl(MetadataService metadataService, EventQueues eventQueues) {
        this.metadataService = metadataService;
    }

    /**
     * Add a new event handler.
     *
     * @param eventHandler Instance of {@link EventHandler}
     */
    public void addEventHandler(EventHandler eventHandler) {
        metadataService.addEventHandler(eventHandler);
    }

    /**
     * Update an existing event handler.
     *
     * @param eventHandler Instance of {@link EventHandler}
     */
    public void updateEventHandler(EventHandler eventHandler) {
        metadataService.updateEventHandler(eventHandler);
    }

    /**
     * Remove an event handler.
     *
     * @param name Event name
     */
    public void removeEventHandlerStatus(String name) {
        metadataService.removeEventHandlerStatus(name);
    }

    /**
     * Get all the event handlers.
     *
     * @return list of {@link EventHandler}
     */
    public List<EventHandler> getEventHandlers() {
        return metadataService.getAllEventHandlers();
    }

    /**
     * Get event handlers for a given event.
     *
     * @param event Event Name
     * @param activeOnly `true|false` for active only events
     * @return list of {@link EventHandler}
     */
    public List<EventHandler> getEventHandlersForEvent(String event, boolean activeOnly) {
        return metadataService.getEventHandlersForEvent(event, activeOnly);
    }
}
6,780
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor
Create_ds/conductor/core/src/main/java/com/netflix/conductor/service/WorkflowBulkService.java
/*
 * Copyright 2022 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.service;

import java.util.List;

import javax.validation.constraints.NotEmpty;
import javax.validation.constraints.Size;

import org.springframework.validation.annotation.Validated;

import com.netflix.conductor.common.model.BulkResponse;

/**
 * Bulk lifecycle operations over batches of workflow ids. Each request is capped at
 * {@link #MAX_REQUEST_ITEMS} ids; per-item outcomes are reported in the returned
 * {@link BulkResponse}.
 */
@Validated
public interface WorkflowBulkService {

    // Upper bound on the number of workflow ids accepted by a single call.
    int MAX_REQUEST_ITEMS = 1000;

    /**
     * Pauses each of the given workflows.
     *
     * @param workflowIds ids of the workflows to pause
     * @return per-workflow success/failure
     */
    BulkResponse pauseWorkflow(
            @NotEmpty(message = "WorkflowIds list cannot be null.")
                    @Size(
                            max = MAX_REQUEST_ITEMS,
                            message =
                                    "Cannot process more than {max} workflows. Please use multiple requests.")
                    List<String> workflowIds);

    /**
     * Resumes each of the given (paused) workflows.
     *
     * @param workflowIds ids of the workflows to resume
     * @return per-workflow success/failure
     */
    BulkResponse resumeWorkflow(
            @NotEmpty(message = "WorkflowIds list cannot be null.")
                    @Size(
                            max = MAX_REQUEST_ITEMS,
                            message =
                                    "Cannot process more than {max} workflows. Please use multiple requests.")
                    List<String> workflowIds);

    /**
     * Restarts each of the given workflows from the beginning.
     *
     * @param workflowIds ids of the workflows to restart
     * @param useLatestDefinitions if true, restart against the latest workflow definition
     * @return per-workflow success/failure
     */
    BulkResponse restart(
            @NotEmpty(message = "WorkflowIds list cannot be null.")
                    @Size(
                            max = MAX_REQUEST_ITEMS,
                            message =
                                    "Cannot process more than {max} workflows. Please use multiple requests.")
                    List<String> workflowIds,
            boolean useLatestDefinitions);

    /**
     * Retries the last failed task of each of the given workflows.
     *
     * @param workflowIds ids of the workflows to retry
     * @return per-workflow success/failure
     */
    BulkResponse retry(
            @NotEmpty(message = "WorkflowIds list cannot be null.")
                    @Size(
                            max = MAX_REQUEST_ITEMS,
                            message =
                                    "Cannot process more than {max} workflows. Please use multiple requests.")
                    List<String> workflowIds);

    /**
     * Terminates each of the given workflows.
     *
     * @param workflowIds ids of the workflows to terminate
     * @param reason reason recorded for the termination
     * @return per-workflow success/failure
     */
    BulkResponse terminate(
            @NotEmpty(message = "WorkflowIds list cannot be null.")
                    @Size(
                            max = MAX_REQUEST_ITEMS,
                            message =
                                    "Cannot process more than {max} workflows. Please use multiple requests.")
                    List<String> workflowIds,
            String reason);
}
6,781
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor
Create_ds/conductor/core/src/main/java/com/netflix/conductor/service/EventService.java
/*
 * Copyright 2022 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.service;

import java.util.List;

import javax.validation.Valid;
import javax.validation.constraints.NotEmpty;
import javax.validation.constraints.NotNull;

import org.springframework.validation.annotation.Validated;

import com.netflix.conductor.common.metadata.events.EventHandler;

/**
 * Service contract for CRUD operations on {@link EventHandler}s. Arguments are validated via
 * Bean Validation annotations ({@code @Validated}).
 */
@Validated
public interface EventService {

    /**
     * Add a new event handler.
     *
     * @param eventHandler Instance of {@link EventHandler}
     */
    void addEventHandler(
            @NotNull(message = "EventHandler cannot be null.") @Valid EventHandler eventHandler);

    /**
     * Update an existing event handler.
     *
     * @param eventHandler Instance of {@link EventHandler}
     */
    void updateEventHandler(
            @NotNull(message = "EventHandler cannot be null.") @Valid EventHandler eventHandler);

    /**
     * Remove an event handler.
     *
     * @param name Event name
     */
    void removeEventHandlerStatus(
            @NotEmpty(message = "EventHandler name cannot be null or empty.") String name);

    /**
     * Get all the event handlers.
     *
     * @return list of {@link EventHandler}
     */
    List<EventHandler> getEventHandlers();

    /**
     * Get event handlers for a given event.
     *
     * @param event Event Name
     * @param activeOnly `true|false` for active only events
     * @return list of {@link EventHandler}
     */
    List<EventHandler> getEventHandlersForEvent(
            @NotEmpty(message = "Event cannot be null or empty.") String event, boolean activeOnly);
}
6,782
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor
Create_ds/conductor/core/src/main/java/com/netflix/conductor/service/MetadataServiceImpl.java
/*
 * Copyright 2022 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.service;

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.TreeSet;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Service;

import com.netflix.conductor.common.constraints.OwnerEmailMandatoryConstraint;
import com.netflix.conductor.common.metadata.events.EventHandler;
import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowDefSummary;
import com.netflix.conductor.common.model.BulkResponse;
import com.netflix.conductor.core.WorkflowContext;
import com.netflix.conductor.core.config.ConductorProperties;
import com.netflix.conductor.core.exception.NotFoundException;
import com.netflix.conductor.dao.EventHandlerDAO;
import com.netflix.conductor.dao.MetadataDAO;
import com.netflix.conductor.validations.ValidationContext;

/**
 * Default {@link MetadataService} implementation backed by {@link MetadataDAO} (task/workflow
 * definitions) and {@link EventHandlerDAO} (event handlers).
 */
@Service
public class MetadataServiceImpl implements MetadataService {

    private static final Logger LOGGER = LoggerFactory.getLogger(MetadataServiceImpl.class);

    private final MetadataDAO metadataDAO;
    private final EventHandlerDAO eventHandlerDAO;

    public MetadataServiceImpl(
            MetadataDAO metadataDAO,
            EventHandlerDAO eventHandlerDAO,
            ConductorProperties properties) {
        this.metadataDAO = metadataDAO;
        this.eventHandlerDAO = eventHandlerDAO;

        // Side effects at construction time: wire the validation framework to the DAO and
        // propagate the owner-email policy from configuration.
        ValidationContext.initialize(metadataDAO);
        OwnerEmailMandatoryConstraint.WorkflowTaskValidValidator.setOwnerEmailMandatory(
                properties.isOwnerEmailMandatory());
    }

    /**
     * Registers new task definitions, stamping audit fields (creator and creation time) and
     * clearing any caller-supplied update fields.
     *
     * @param taskDefinitions Task Definitions to register
     */
    public void registerTaskDef(List<TaskDef> taskDefinitions) {
        for (TaskDef taskDefinition : taskDefinitions) {
            taskDefinition.setCreatedBy(WorkflowContext.get().getClientApp());
            taskDefinition.setCreateTime(System.currentTimeMillis());
            taskDefinition.setUpdatedBy(null);
            taskDefinition.setUpdateTime(null);
            metadataDAO.createTaskDef(taskDefinition);
        }
    }

    @Override
    public void validateWorkflowDef(WorkflowDef workflowDef) {
        // do nothing, WorkflowDef is annotated with @Valid and calling this method will validate it
    }

    /**
     * Updates an existing task definition, stamping update audit fields.
     *
     * @param taskDefinition Task Definition to be updated
     * @throws NotFoundException if no task definition exists with the given name
     */
    public void updateTaskDef(TaskDef taskDefinition) {
        TaskDef existing = metadataDAO.getTaskDef(taskDefinition.getName());
        if (existing == null) {
            throw new NotFoundException("No such task by name %s", taskDefinition.getName());
        }
        taskDefinition.setUpdatedBy(WorkflowContext.get().getClientApp());
        taskDefinition.setUpdateTime(System.currentTimeMillis());
        metadataDAO.updateTaskDef(taskDefinition);
    }

    /**
     * @param taskType Remove task definition
     */
    public void unregisterTaskDef(String taskType) {
        metadataDAO.removeTaskDef(taskType);
    }

    /**
     * @return List of all the registered tasks
     */
    public List<TaskDef> getTaskDefs() {
        return metadataDAO.getAllTaskDefs();
    }

    /**
     * @param taskType Task to retrieve
     * @return Task Definition
     * @throws NotFoundException if no task definition exists with the given type
     */
    public TaskDef getTaskDef(String taskType) {
        TaskDef taskDef = metadataDAO.getTaskDef(taskType);
        if (taskDef == null) {
            throw new NotFoundException("No such taskType found by name: %s", taskType);
        }
        return taskDef;
    }

    /**
     * Updates a workflow definition, stamping the update time.
     *
     * @param workflowDef Workflow definition to be updated
     */
    public void updateWorkflowDef(WorkflowDef workflowDef) {
        workflowDef.setUpdateTime(System.currentTimeMillis());
        metadataDAO.updateWorkflowDef(workflowDef);
    }

    /**
     * Bulk update; failures are collected per definition rather than aborting the batch.
     *
     * @param workflowDefList Workflow definitions to be updated.
     */
    public BulkResponse updateWorkflowDef(List<WorkflowDef> workflowDefList) {
        BulkResponse bulkResponse = new BulkResponse();
        for (WorkflowDef workflowDef : workflowDefList) {
            try {
                updateWorkflowDef(workflowDef);
                bulkResponse.appendSuccessResponse(workflowDef.getName());
            } catch (Exception e) {
                LOGGER.error("bulk update workflow def failed, name {} ", workflowDef.getName(), e);
                bulkResponse.appendFailedResponse(workflowDef.getName(), e.getMessage());
            }
        }
        return bulkResponse;
    }

    /**
     * @param name Name of the workflow to retrieve
     * @param version Optional. Version. If null, then retrieves the latest
     * @return Workflow definition
     * @throws NotFoundException if no matching definition exists
     */
    public WorkflowDef getWorkflowDef(String name, Integer version) {
        Optional<WorkflowDef> workflowDef;
        if (version == null) {
            workflowDef = metadataDAO.getLatestWorkflowDef(name);
        } else {
            workflowDef = metadataDAO.getWorkflowDef(name, version);
        }
        return workflowDef.orElseThrow(
                () ->
                        new NotFoundException(
                                "No such workflow found by name: %s, version: %d", name, version));
    }

    /**
     * @param name Name of the workflow to retrieve
     * @return Latest version of the workflow definition
     */
    public Optional<WorkflowDef> getLatestWorkflow(String name) {
        return metadataDAO.getLatestWorkflowDef(name);
    }

    /**
     * @return all registered workflow definitions (all versions)
     */
    public List<WorkflowDef> getWorkflowDefs() {
        return metadataDAO.getAllWorkflowDefs();
    }

    /**
     * Registers a new workflow definition, stamping the creation time.
     *
     * @param workflowDef the definition to register
     */
    public void registerWorkflowDef(WorkflowDef workflowDef) {
        workflowDef.setCreateTime(System.currentTimeMillis());
        metadataDAO.createWorkflowDef(workflowDef);
    }

    /**
     * @param name Name of the workflow definition to be removed
     * @param version Version of the workflow definition to be removed
     */
    public void unregisterWorkflowDef(String name, Integer version) {
        metadataDAO.removeWorkflowDef(name, version);
    }

    /**
     * @param eventHandler Event handler to be added. Will throw an exception if an event handler
     *     already exists with the name
     */
    public void addEventHandler(EventHandler eventHandler) {
        eventHandlerDAO.addEventHandler(eventHandler);
    }

    /**
     * @param eventHandler Event handler to be updated.
     */
    public void updateEventHandler(EventHandler eventHandler) {
        eventHandlerDAO.updateEventHandler(eventHandler);
    }

    /**
     * @param name Removes the event handler from the system
     */
    public void removeEventHandlerStatus(String name) {
        eventHandlerDAO.removeEventHandler(name);
    }

    /**
     * @return All the event handlers registered in the system
     */
    public List<EventHandler> getAllEventHandlers() {
        return eventHandlerDAO.getAllEventHandlers();
    }

    /**
     * @param event name of the event
     * @param activeOnly if true, returns only the active handlers
     * @return Returns the list of all the event handlers for a given event
     */
    public List<EventHandler> getEventHandlersForEvent(String event, boolean activeOnly) {
        return eventHandlerDAO.getEventHandlersForEvent(event, activeOnly);
    }

    @Override
    public List<WorkflowDef> getWorkflowDefsLatestVersions() {
        return metadataDAO.getAllWorkflowDefsLatestVersions();
    }

    /**
     * Groups all workflow definitions by name into sorted sets of lightweight summaries.
     *
     * @return map of workflow name to its version summaries (sorted via {@link TreeSet})
     */
    public Map<String, ? extends Iterable<WorkflowDefSummary>> getWorkflowNamesAndVersions() {
        List<WorkflowDef> workflowDefs = metadataDAO.getAllWorkflowDefs();
        Map<String, TreeSet<WorkflowDefSummary>> retval = new HashMap<>();
        for (WorkflowDef def : workflowDefs) {
            // computeIfAbsent replaces the previous putIfAbsent + get pair.
            retval.computeIfAbsent(def.getName(), k -> new TreeSet<>()).add(fromWorkflowDef(def));
        }
        return retval;
    }

    // Builds a summary (name, version, create time) from a full definition.
    private WorkflowDefSummary fromWorkflowDef(WorkflowDef def) {
        WorkflowDefSummary summary = new WorkflowDefSummary();
        summary.setName(def.getName());
        summary.setVersion(def.getVersion());
        summary.setCreateTime(def.getCreateTime());
        return summary;
    }
}
6,783
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor
Create_ds/conductor/core/src/main/java/com/netflix/conductor/service/TaskService.java
/*
 * Copyright 2022 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.service;

import java.util.List;
import java.util.Map;

import javax.validation.Valid;
import javax.validation.constraints.NotEmpty;
import javax.validation.constraints.NotNull;

import org.springframework.validation.annotation.Validated;

import com.netflix.conductor.common.metadata.tasks.PollData;
import com.netflix.conductor.common.metadata.tasks.Task;
import com.netflix.conductor.common.metadata.tasks.TaskExecLog;
import com.netflix.conductor.common.metadata.tasks.TaskResult;
import com.netflix.conductor.common.run.ExternalStorageLocation;
import com.netflix.conductor.common.run.SearchResult;
import com.netflix.conductor.common.run.TaskSummary;

/**
 * Service contract for worker-facing task operations: polling, acking, updating, queue
 * management, logs and search. Arguments are validated via Bean Validation annotations.
 */
@Validated
public interface TaskService {

    /**
     * Poll for a task of a certain type.
     *
     * @param taskType Task name
     * @param workerId Id of the workflow
     * @param domain Domain of the workflow
     * @return polled {@link Task}
     */
    Task poll(
            @NotEmpty(message = "TaskType cannot be null or empty.") String taskType,
            String workerId,
            String domain);

    /**
     * Batch Poll for a task of a certain type.
     *
     * @param taskType Task Name
     * @param workerId Id of the workflow
     * @param domain Domain of the workflow
     * @param count Number of tasks
     * @param timeout Timeout for polling in milliseconds
     * @return list of {@link Task}
     */
    List<Task> batchPoll(
            @NotEmpty(message = "TaskType cannot be null or empty.") String taskType,
            String workerId,
            String domain,
            Integer count,
            Integer timeout);

    /**
     * Get in progress tasks. The results are paginated.
     *
     * @param taskType Task Name
     * @param startKey Start index of pagination
     * @param count Number of entries
     * @return list of {@link Task}
     */
    List<Task> getTasks(
            @NotEmpty(message = "TaskType cannot be null or empty.") String taskType,
            String startKey,
            Integer count);

    /**
     * Get in progress task for a given workflow id.
     *
     * @param workflowId Id of the workflow
     * @param taskReferenceName Task reference name.
     * @return instance of {@link Task}
     */
    Task getPendingTaskForWorkflow(
            @NotEmpty(message = "WorkflowId cannot be null or empty.") String workflowId,
            @NotEmpty(message = "TaskReferenceName cannot be null or empty.")
                    String taskReferenceName);

    /**
     * Updates a task.
     *
     * @param taskResult Instance of {@link TaskResult}
     * @return task Id of the updated task.
     */
    String updateTask(
            @NotNull(message = "TaskResult cannot be null or empty.") @Valid TaskResult taskResult);

    /**
     * Ack Task is received.
     *
     * @param taskId Id of the task
     * @param workerId Id of the worker
     * @return `true|false` if task if received or not
     */
    String ackTaskReceived(
            @NotEmpty(message = "TaskId cannot be null or empty.") String taskId, String workerId);

    /**
     * Ack Task is received.
     *
     * @param taskId Id of the task
     * @return `true|false` if task if received or not
     */
    boolean ackTaskReceived(@NotEmpty(message = "TaskId cannot be null or empty.") String taskId);

    /**
     * Log Task Execution Details.
     *
     * @param taskId Id of the task
     * @param log Details you want to log
     */
    void log(@NotEmpty(message = "TaskId cannot be null or empty.") String taskId, String log);

    /**
     * Get Task Execution Logs.
     *
     * @param taskId Id of the task.
     * @return list of {@link TaskExecLog}
     */
    List<TaskExecLog> getTaskLogs(
            @NotEmpty(message = "TaskId cannot be null or empty.") String taskId);

    /**
     * Get task by Id.
     *
     * @param taskId Id of the task.
     * @return instance of {@link Task}
     */
    Task getTask(@NotEmpty(message = "TaskId cannot be null or empty.") String taskId);

    /**
     * Remove Task from a Task type queue.
     *
     * @param taskType Task Name
     * @param taskId ID of the task
     */
    void removeTaskFromQueue(
            @NotEmpty(message = "TaskType cannot be null or empty.") String taskType,
            @NotEmpty(message = "TaskId cannot be null or empty.") String taskId);

    /**
     * Remove Task from a Task type queue.
     *
     * @param taskId ID of the task
     */
    void removeTaskFromQueue(@NotEmpty(message = "TaskId cannot be null or empty.") String taskId);

    /**
     * Get Task type queue sizes.
     *
     * @param taskTypes List of task types.
     * @return map of task type as Key and queue size as value.
     */
    Map<String, Integer> getTaskQueueSizes(List<String> taskTypes);

    /**
     * Get the queue size for a Task Type. The input can optionally include <code>domain</code>,
     * <code>isolationGroupId</code> and <code>executionNamespace</code>.
     *
     * @return the queue size for the given task type (scoped by the optional parameters)
     */
    Integer getTaskQueueSize(
            String taskType, String domain, String isolationGroupId, String executionNamespace);

    /**
     * Get the details about each queue.
     *
     * @return map of queue details.
     */
    Map<String, Map<String, Map<String, Long>>> allVerbose();

    /**
     * Get the details about each queue.
     *
     * @return map of details about each queue.
     */
    Map<String, Long> getAllQueueDetails();

    /**
     * Get the last poll data for a given task type.
     *
     * @param taskType Task Name
     * @return list of {@link PollData}
     */
    List<PollData> getPollData(
            @NotEmpty(message = "TaskType cannot be null or empty.") String taskType);

    /**
     * Get the last poll data for all task types.
     *
     * @return list of {@link PollData}
     */
    List<PollData> getAllPollData();

    /**
     * Requeue pending tasks.
     *
     * @param taskType Task name.
     * @return number of tasks requeued.
     */
    String requeuePendingTask(
            @NotEmpty(message = "TaskType cannot be null or empty.") String taskType);

    /**
     * Search for tasks based in payload and other parameters. Use sort options as ASC or DESC e.g.
     * sort=name or sort=workflowId. If order is not specified, defaults to ASC.
     *
     * @param start Start index of pagination
     * @param size Number of entries
     * @param sort Sorting type ASC|DESC
     * @param freeText Text you want to search
     * @param query Query you want to search
     * @return instance of {@link SearchResult}
     */
    SearchResult<TaskSummary> search(
            int start, int size, String sort, String freeText, String query);

    /**
     * Search for tasks based in payload and other parameters. Use sort options as ASC or DESC e.g.
     * sort=name or sort=workflowId. If order is not specified, defaults to ASC.
     *
     * @param start Start index of pagination
     * @param size Number of entries
     * @param sort Sorting type ASC|DESC
     * @param freeText Text you want to search
     * @param query Query you want to search
     * @return instance of {@link SearchResult}
     */
    SearchResult<Task> searchV2(int start, int size, String sort, String freeText, String query);

    /**
     * Get the external storage location where the task output payload is stored/to be stored
     *
     * @param path the path for which the external storage location is to be populated
     * @param operation the operation to be performed (read or write)
     * @param payloadType the type of payload (input or output)
     * @return {@link ExternalStorageLocation} containing the uri and the path to the payload is
     *     stored in external storage
     */
    ExternalStorageLocation getExternalStorageLocation(
            String path, String operation, String payloadType);
}
6,784
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor
Create_ds/conductor/core/src/main/java/com/netflix/conductor/service/WorkflowTestService.java
/*
 * Copyright 2023 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.service;

import java.util.*;
import java.util.stream.Collectors;
import java.util.stream.Stream;

import org.springframework.stereotype.Component;

import com.netflix.conductor.common.metadata.tasks.Task;
import com.netflix.conductor.common.metadata.tasks.TaskResult;
import com.netflix.conductor.common.metadata.tasks.TaskType;
import com.netflix.conductor.common.run.Workflow;
import com.netflix.conductor.common.run.WorkflowTestRequest;
import com.netflix.conductor.dao.ExecutionDAO;
import com.netflix.conductor.model.TaskModel;

/**
 * Executes a workflow in "test mode": the workflow is started with a random task-to-domain
 * mapping so no real worker picks it up, and task outputs are fed from the mock data supplied
 * in the {@link WorkflowTestRequest}.
 */
@Component
public class WorkflowTestService {

    // Safety valve so a workflow that never reaches a terminal state cannot loop forever.
    private static final int MAX_LOOPS = 20_000;

    // Task types executed by the server itself; these never consume mock output.
    private static final Set<String> operators = new HashSet<>();

    static {
        operators.add(TaskType.TASK_TYPE_JOIN);
        operators.add(TaskType.TASK_TYPE_DO_WHILE);
        operators.add(TaskType.TASK_TYPE_SET_VARIABLE);
        operators.add(TaskType.TASK_TYPE_FORK);
        operators.add(TaskType.TASK_TYPE_INLINE);
        operators.add(TaskType.TASK_TYPE_TERMINATE);
        operators.add(TaskType.TASK_TYPE_DECISION);
        operators.add(TaskType.TASK_TYPE_DYNAMIC);
        operators.add(TaskType.TASK_TYPE_FORK_JOIN);
        operators.add(TaskType.TASK_TYPE_FORK_JOIN_DYNAMIC);
        operators.add(TaskType.TASK_TYPE_SWITCH);
        operators.add(TaskType.TASK_TYPE_SUB_WORKFLOW);
    }

    private final WorkflowService workflowService;
    private final ExecutionDAO executionDAO;
    private final ExecutionService workflowExecutionService;

    public WorkflowTestService(
            WorkflowService workflowService,
            ExecutionDAO executionDAO,
            ExecutionService workflowExecutionService) {
        this.workflowService = workflowService;
        this.executionDAO = executionDAO;
        this.workflowExecutionService = workflowExecutionService;
    }

    /**
     * Starts the workflow described by the request and drives it to completion using the mock
     * task outputs in the request.
     *
     * @param request test request carrying the workflow identity and mock outputs
     * @return the (usually terminal) workflow execution
     */
    public Workflow testWorkflow(WorkflowTestRequest request) {
        // The original also contained no-op self-assignments of name/version here
        // (request.setName(request.getName())); removed as dead code.
        String domain = UUID.randomUUID().toString();
        // Ensure the workflows started for the testing are not picked by any workers
        request.getTaskToDomain().put("*", domain);
        String workflowId = workflowService.startWorkflow(request);
        return testWorkflow(request, workflowId);
    }

    /**
     * Repeatedly inspects the running workflow, completing each pending task either from mock
     * data (via task update) or by asking the server to decide, until the workflow terminates,
     * mock data runs out, or a pending non-operator task has no mock output.
     */
    private Workflow testWorkflow(WorkflowTestRequest request, String workflowId) {
        Map<String, List<WorkflowTestRequest.TaskMock>> mockData = request.getTaskRefToMockOutput();
        Workflow workflow;
        int loopCount = 0;
        do {
            loopCount++;
            workflow = workflowService.getExecutionStatus(workflowId, true);
            if (loopCount > MAX_LOOPS) {
                // Short circuit to avoid large loops
                return workflow;
            }
            // Pending non-operator tasks for which no mock output was supplied: we cannot
            // make progress on these, so stop driving the workflow.
            List<String> runningTasksMissingInput =
                    workflow.getTasks().stream()
                            .filter(task -> !operators.contains(task.getTaskType()))
                            .filter(t -> !t.getStatus().isTerminal())
                            .filter(t2 -> !mockData.containsKey(t2.getReferenceTaskName()))
                            .map(Task::getReferenceTaskName)
                            .collect(Collectors.toList());
            if (!runningTasksMissingInput.isEmpty()) {
                break;
            }

            Stream<Task> runningTasks =
                    workflow.getTasks().stream().filter(t -> !t.getStatus().isTerminal());
            runningTasks.forEach(
                    running -> {
                        if (running.getTaskType().equals(TaskType.SUB_WORKFLOW.name())) {
                            // Recursively drive the sub-workflow with its own test request.
                            String subWorkflowId = running.getSubWorkflowId();
                            WorkflowTestRequest subWorkflowTestRequest =
                                    request.getSubWorkflowTestRequest()
                                            .get(running.getReferenceTaskName());
                            if (subWorkflowId != null && subWorkflowTestRequest != null) {
                                testWorkflow(subWorkflowTestRequest, subWorkflowId);
                            }
                        }
                        String refName = running.getReferenceTaskName();
                        List<WorkflowTestRequest.TaskMock> taskMock = mockData.get(refName);
                        if (taskMock == null
                                || taskMock.isEmpty()
                                || operators.contains(running.getTaskType())) {
                            // No mock output (or operator task): let the server decide.
                            mockData.remove(refName);
                            workflowService.decideWorkflow(workflowId);
                        } else {
                            // Consume the next mock output for this task reference.
                            WorkflowTestRequest.TaskMock task = taskMock.remove(0);
                            if (task.getExecutionTime() > 0 || task.getQueueWaitTime() > 0) {
                                // Simulate execution/queue times by back-dating the stored
                                // task, then let the server decide.
                                TaskModel existing = executionDAO.getTask(running.getTaskId());
                                existing.setScheduledTime(
                                        System.currentTimeMillis()
                                                - (task.getExecutionTime()
                                                        + task.getQueueWaitTime()));
                                existing.setStartTime(
                                        System.currentTimeMillis() - task.getExecutionTime());
                                existing.setStatus(
                                        TaskModel.Status.valueOf(task.getStatus().name()));
                                existing.getOutputData().putAll(task.getOutput());
                                executionDAO.updateTask(existing);
                                workflowService.decideWorkflow(workflowId);
                            } else {
                                // Normal path: report the mock result as a task update.
                                TaskResult taskResult = new TaskResult(running);
                                taskResult.setStatus(task.getStatus());
                                taskResult.getOutputData().putAll(task.getOutput());
                                workflowExecutionService.updateTask(taskResult);
                            }
                        }
                    });
        } while (!workflow.getStatus().isTerminal() && !mockData.isEmpty());
        return workflow;
    }
}
6,785
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor
Create_ds/conductor/core/src/main/java/com/netflix/conductor/service/TaskServiceImpl.java
/* * Copyright 2022 Netflix, Inc. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.service; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.concurrent.atomic.AtomicBoolean; import java.util.stream.Collectors; import org.apache.commons.lang3.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.stereotype.Service; import com.netflix.conductor.annotations.Audit; import com.netflix.conductor.annotations.Trace; import com.netflix.conductor.common.metadata.tasks.PollData; import com.netflix.conductor.common.metadata.tasks.Task; import com.netflix.conductor.common.metadata.tasks.TaskExecLog; import com.netflix.conductor.common.metadata.tasks.TaskResult; import com.netflix.conductor.common.run.ExternalStorageLocation; import com.netflix.conductor.common.run.SearchResult; import com.netflix.conductor.common.run.TaskSummary; import com.netflix.conductor.core.utils.QueueUtils; import com.netflix.conductor.dao.QueueDAO; import com.netflix.conductor.metrics.Monitors; @Audit @Trace @Service public class TaskServiceImpl implements TaskService { private static final Logger LOGGER = LoggerFactory.getLogger(TaskServiceImpl.class); private final ExecutionService executionService; private final QueueDAO queueDAO; public TaskServiceImpl(ExecutionService executionService, QueueDAO queueDAO) { this.executionService = executionService; this.queueDAO = 
queueDAO; } /** * Poll for a task of a certain type. * * @param taskType Task name * @param workerId id of the workflow * @param domain Domain of the workflow * @return polled {@link Task} */ public Task poll(String taskType, String workerId, String domain) { LOGGER.debug("Task being polled: /tasks/poll/{}?{}&{}", taskType, workerId, domain); Task task = executionService.getLastPollTask(taskType, workerId, domain); if (task != null) { LOGGER.debug( "The Task {} being returned for /tasks/poll/{}?{}&{}", task, taskType, workerId, domain); } Monitors.recordTaskPollCount(taskType, domain, 1); return task; } /** * Batch Poll for a task of a certain type. * * @param taskType Task Name * @param workerId id of the workflow * @param domain Domain of the workflow * @param count Number of tasks * @param timeout Timeout for polling in milliseconds * @return list of {@link Task} */ public List<Task> batchPoll( String taskType, String workerId, String domain, Integer count, Integer timeout) { List<Task> polledTasks = executionService.poll(taskType, workerId, domain, count, timeout); LOGGER.debug( "The Tasks {} being returned for /tasks/poll/{}?{}&{}", polledTasks.stream().map(Task::getTaskId).collect(Collectors.toList()), taskType, workerId, domain); Monitors.recordTaskPollCount(taskType, domain, polledTasks.size()); return polledTasks; } /** * Get in progress tasks. The results are paginated. * * @param taskType Task Name * @param startKey Start index of pagination * @param count Number of entries * @return list of {@link Task} */ public List<Task> getTasks(String taskType, String startKey, Integer count) { return executionService.getTasks(taskType, startKey, count); } /** * Get in progress task for a given workflow id. * * @param workflowId id of the workflow * @param taskReferenceName Task reference name. 
* @return instance of {@link Task} */ public Task getPendingTaskForWorkflow(String workflowId, String taskReferenceName) { return executionService.getPendingTaskForWorkflow(taskReferenceName, workflowId); } /** * Updates a task. * * @param taskResult Instance of {@link TaskResult} * @return task Id of the updated task. */ public String updateTask(TaskResult taskResult) { LOGGER.debug( "Update Task: {} with callback time: {}", taskResult, taskResult.getCallbackAfterSeconds()); executionService.updateTask(taskResult); LOGGER.debug( "Task: {} updated successfully with callback time: {}", taskResult, taskResult.getCallbackAfterSeconds()); return taskResult.getTaskId(); } /** * Ack Task is received. * * @param taskId id of the task * @param workerId id of the worker * @return `true|false` if task is received or not */ public String ackTaskReceived(String taskId, String workerId) { LOGGER.debug("Ack received for task: {} from worker: {}", taskId, workerId); return String.valueOf(ackTaskReceived(taskId)); } /** * Ack Task is received. * * @param taskId id of the task * @return `true|false` if task is received or not */ public boolean ackTaskReceived(String taskId) { LOGGER.debug("Ack received for task: {}", taskId); AtomicBoolean ackResult = new AtomicBoolean(false); try { ackResult.set(executionService.ackTaskReceived(taskId)); } catch (Exception e) { // Fail the task and let decide reevaluate the workflow, thereby preventing workflow // being stuck from transient ack errors. String errorMsg = String.format("Error when trying to ack task %s", taskId); LOGGER.error(errorMsg, e); Task task = executionService.getTask(taskId); Monitors.recordAckTaskError(task.getTaskType()); failTask(task, errorMsg); ackResult.set(false); } return ackResult.get(); } /** Updates the task with FAILED status; On exception, fails the workflow. 
*/ private void failTask(Task task, String errorMsg) { try { TaskResult taskResult = new TaskResult(); taskResult.setStatus(TaskResult.Status.FAILED); taskResult.setTaskId(task.getTaskId()); taskResult.setWorkflowInstanceId(task.getWorkflowInstanceId()); taskResult.setReasonForIncompletion(errorMsg); executionService.updateTask(taskResult); } catch (Exception e) { LOGGER.error( "Unable to fail task: {} in workflow: {}", task.getTaskId(), task.getWorkflowInstanceId(), e); executionService.terminateWorkflow( task.getWorkflowInstanceId(), "Failed to ack task: " + task.getTaskId()); } } /** * Log Task Execution Details. * * @param taskId id of the task * @param log Details you want to log */ public void log(String taskId, String log) { executionService.log(taskId, log); } /** * Get Task Execution Logs. * * @param taskId id of the task. * @return list of {@link TaskExecLog} */ public List<TaskExecLog> getTaskLogs(String taskId) { return executionService.getTaskLogs(taskId); } /** * Get task by Id. * * @param taskId id of the task. * @return instance of {@link Task} */ public Task getTask(String taskId) { return executionService.getTask(taskId); } /** * Remove Task from a Task type queue. * * @param taskType Task Name * @param taskId ID of the task */ public void removeTaskFromQueue(String taskType, String taskId) { executionService.removeTaskFromQueue(taskId); } /** * Remove Task from a Task type queue. * * @param taskId ID of the task */ public void removeTaskFromQueue(String taskId) { executionService.removeTaskFromQueue(taskId); } /** * Get Task type queue sizes. * * @param taskTypes List of task types. * @return map of task type as Key and queue size as value. 
*/ public Map<String, Integer> getTaskQueueSizes(List<String> taskTypes) { return executionService.getTaskQueueSizes(taskTypes); } @Override public Integer getTaskQueueSize( String taskType, String domain, String isolationGroupId, String executionNamespace) { String queueName = QueueUtils.getQueueName( taskType, StringUtils.trimToNull(domain), StringUtils.trimToNull(isolationGroupId), StringUtils.trimToNull(executionNamespace)); return executionService.getTaskQueueSize(queueName); } /** * Get the details about each queue. * * @return map of queue details. */ public Map<String, Map<String, Map<String, Long>>> allVerbose() { return queueDAO.queuesDetailVerbose(); } /** * Get the details about each queue. * * @return map of details about each queue. */ public Map<String, Long> getAllQueueDetails() { return queueDAO.queuesDetail().entrySet().stream() .sorted(Entry.comparingByKey()) .collect( Collectors.toMap( Entry::getKey, Entry::getValue, (v1, v2) -> v1, LinkedHashMap::new)); } /** * Get the last poll data for a given task type. * * @param taskType Task Name * @return list of {@link PollData} */ public List<PollData> getPollData(String taskType) { return executionService.getPollData(taskType); } /** * Get the last poll data for all task types. * * @return list of {@link PollData} */ public List<PollData> getAllPollData() { return executionService.getAllPollData(); } /** * Requeue pending tasks. * * @param taskType Task name. * @return number of tasks requeued. */ public String requeuePendingTask(String taskType) { return String.valueOf(executionService.requeuePendingTasks(taskType)); } /** * Search for tasks based in payload and other parameters. Use sort options as ASC or DESC e.g. * sort=name or sort=workflowId. If order is not specified, defaults to ASC. 
* * @param start Start index of pagination * @param size Number of entries * @param sort Sorting type ASC|DESC * @param freeText Text you want to search * @param query Query you want to search * @return instance of {@link SearchResult} */ public SearchResult<TaskSummary> search( int start, int size, String sort, String freeText, String query) { return executionService.getSearchTasks(query, freeText, start, size, sort); } /** * Search for tasks based in payload and other parameters. Use sort options as ASC or DESC e.g. * sort=name or sort=workflowId. If order is not specified, defaults to ASC. * * @param start Start index of pagination * @param size Number of entries * @param sort Sorting type ASC|DESC * @param freeText Text you want to search * @param query Query you want to search * @return instance of {@link SearchResult} */ public SearchResult<Task> searchV2( int start, int size, String sort, String freeText, String query) { return executionService.getSearchTasksV2(query, freeText, start, size, sort); } /** * Get the external storage location where the task output payload is stored/to be stored * * @param path the path for which the external storage location is to be populated * @param operation the operation to be performed (read or write) * @param type the type of payload (input or output) * @return {@link ExternalStorageLocation} containing the uri and the path to the payload is * stored in external storage */ public ExternalStorageLocation getExternalStorageLocation( String path, String operation, String type) { return executionService.getExternalStorageLocation(path, operation, type); } }
6,786
0
Create_ds/conductor/core/src/main/java/com/netflix/conductor
Create_ds/conductor/core/src/main/java/com/netflix/conductor/service/AdminServiceImpl.java
/* * Copyright 2022 Netflix, Inc. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.service; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Optional; import org.springframework.boot.info.BuildProperties; import org.springframework.stereotype.Service; import com.netflix.conductor.annotations.Audit; import com.netflix.conductor.annotations.Trace; import com.netflix.conductor.common.metadata.tasks.Task; import com.netflix.conductor.core.config.ConductorProperties; import com.netflix.conductor.core.events.EventQueueManager; import com.netflix.conductor.core.reconciliation.WorkflowRepairService; import com.netflix.conductor.core.utils.Utils; import com.netflix.conductor.dao.QueueDAO; @Audit @Trace @Service public class AdminServiceImpl implements AdminService { private final ConductorProperties properties; private final ExecutionService executionService; private final QueueDAO queueDAO; private final WorkflowRepairService workflowRepairService; private final EventQueueManager eventQueueManager; private final BuildProperties buildProperties; public AdminServiceImpl( ConductorProperties properties, ExecutionService executionService, QueueDAO queueDAO, Optional<WorkflowRepairService> workflowRepairService, Optional<EventQueueManager> eventQueueManager, Optional<BuildProperties> buildProperties) { this.properties = properties; this.executionService = executionService; this.queueDAO = 
queueDAO; this.workflowRepairService = workflowRepairService.orElse(null); this.eventQueueManager = eventQueueManager.orElse(null); this.buildProperties = buildProperties.orElse(null); } /** * Get all the configuration parameters. * * @return all the configuration parameters. */ public Map<String, Object> getAllConfig() { Map<String, Object> configs = properties.getAll(); configs.putAll(getBuildProperties()); return configs; } /** * Get all build properties * * @return all the build properties. */ private Map<String, Object> getBuildProperties() { if (buildProperties == null) return Collections.emptyMap(); Map<String, Object> buildProps = new HashMap<>(); buildProps.put("version", buildProperties.getVersion()); buildProps.put("buildDate", buildProperties.getTime()); return buildProps; } /** * Get the list of pending tasks for a given task type. * * @param taskType Name of the task * @param start Start index of pagination * @param count Number of entries * @return list of pending {@link Task} */ public List<Task> getListOfPendingTask(String taskType, Integer start, Integer count) { List<Task> tasks = executionService.getPendingTasksForTaskType(taskType); int total = start + count; total = Math.min(tasks.size(), total); if (start > tasks.size()) { start = tasks.size(); } return tasks.subList(start, total); } @Override public boolean verifyAndRepairWorkflowConsistency(String workflowId) { if (workflowRepairService == null) { throw new IllegalStateException( WorkflowRepairService.class.getSimpleName() + " is disabled."); } return workflowRepairService.verifyAndRepairWorkflow(workflowId, true); } /** * Queue up the workflow for sweep. * * @param workflowId Id of the workflow * @return the id of the workflow instance that can be use for tracking. */ public String requeueSweep(String workflowId) { boolean pushed = queueDAO.pushIfNotExists( Utils.DECIDER_QUEUE, workflowId, properties.getWorkflowOffsetTimeout().getSeconds()); return pushed + "." 
+ workflowId; } /** * Get registered queues. * * @param verbose `true|false` for verbose logs * @return map of event queues */ public Map<String, ?> getEventQueues(boolean verbose) { if (eventQueueManager == null) { throw new IllegalStateException("Event processing is DISABLED"); } return (verbose ? eventQueueManager.getQueueSizes() : eventQueueManager.getQueues()); } }
6,787
0
Create_ds/conductor/awss3-storage/src/main/java/com/netflix/conductor/s3
Create_ds/conductor/awss3-storage/src/main/java/com/netflix/conductor/s3/config/S3Configuration.java
/* * Copyright 2022 Netflix, Inc. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.s3.config; import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.boot.context.properties.EnableConfigurationProperties; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import com.netflix.conductor.common.utils.ExternalPayloadStorage; import com.netflix.conductor.core.utils.IDGenerator; import com.netflix.conductor.s3.storage.S3PayloadStorage; import com.amazonaws.services.s3.AmazonS3; import com.amazonaws.services.s3.AmazonS3ClientBuilder; @Configuration @EnableConfigurationProperties(S3Properties.class) @ConditionalOnProperty(name = "conductor.external-payload-storage.type", havingValue = "s3") public class S3Configuration { @Bean public ExternalPayloadStorage s3ExternalPayloadStorage( IDGenerator idGenerator, S3Properties properties, AmazonS3 s3Client) { return new S3PayloadStorage(idGenerator, properties, s3Client); } @ConditionalOnProperty( name = "conductor.external-payload-storage.s3.use_default_client", havingValue = "true", matchIfMissing = true) @Bean public AmazonS3 amazonS3(S3Properties properties) { return AmazonS3ClientBuilder.standard().withRegion(properties.getRegion()).build(); } }
6,788
0
Create_ds/conductor/awss3-storage/src/main/java/com/netflix/conductor/s3
Create_ds/conductor/awss3-storage/src/main/java/com/netflix/conductor/s3/config/S3Properties.java
/*
 * Copyright 2022 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.s3.config;

import java.time.Duration;
import java.time.temporal.ChronoUnit;

import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.boot.convert.DurationUnit;

/**
 * Configuration properties for the S3 external payload storage, bound from the
 * {@code conductor.external-payload-storage.s3.*} property namespace.
 */
@ConfigurationProperties("conductor.external-payload-storage.s3")
public class S3Properties {

    /** The s3 bucket name where the payloads will be stored */
    private String bucketName = "conductor_payloads";

    /** The time (in seconds) for which the signed url will be valid */
    @DurationUnit(ChronoUnit.SECONDS)
    private Duration signedUrlExpirationDuration = Duration.ofSeconds(5);

    /** The AWS region of the s3 bucket */
    private String region = "us-east-1";

    public String getBucketName() {
        return bucketName;
    }

    public void setBucketName(String bucketName) {
        this.bucketName = bucketName;
    }

    public Duration getSignedUrlExpirationDuration() {
        return signedUrlExpirationDuration;
    }

    public void setSignedUrlExpirationDuration(Duration signedUrlExpirationDuration) {
        this.signedUrlExpirationDuration = signedUrlExpirationDuration;
    }

    public String getRegion() {
        return region;
    }

    public void setRegion(String region) {
        this.region = region;
    }
}
6,789
0
Create_ds/conductor/awss3-storage/src/main/java/com/netflix/conductor/s3
Create_ds/conductor/awss3-storage/src/main/java/com/netflix/conductor/s3/storage/S3PayloadStorage.java
/* * Copyright 2022 Netflix, Inc. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.s3.storage; import java.io.InputStream; import java.net.URISyntaxException; import java.util.Date; import org.apache.commons.lang3.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.netflix.conductor.common.run.ExternalStorageLocation; import com.netflix.conductor.common.utils.ExternalPayloadStorage; import com.netflix.conductor.core.exception.NonTransientException; import com.netflix.conductor.core.exception.TransientException; import com.netflix.conductor.core.utils.IDGenerator; import com.netflix.conductor.s3.config.S3Properties; import com.amazonaws.HttpMethod; import com.amazonaws.SdkClientException; import com.amazonaws.services.s3.AmazonS3; import com.amazonaws.services.s3.model.*; /** * An implementation of {@link ExternalPayloadStorage} using AWS S3 for storing large JSON payload * data. 
* * <p><em>NOTE: The S3 client assumes that access to S3 is configured on the instance.</em> * * @see <a * href="https://docs.aws.amazon.com/AWSJavaSDK/latest/javadoc/index.html?com/amazonaws/auth/DefaultAWSCredentialsProviderChain.html">DefaultAWSCredentialsProviderChain</a> */ public class S3PayloadStorage implements ExternalPayloadStorage { private static final Logger LOGGER = LoggerFactory.getLogger(S3PayloadStorage.class); private static final String CONTENT_TYPE = "application/json"; private final IDGenerator idGenerator; private final AmazonS3 s3Client; private final String bucketName; private final long expirationSec; public S3PayloadStorage(IDGenerator idGenerator, S3Properties properties, AmazonS3 s3Client) { this.idGenerator = idGenerator; this.s3Client = s3Client; bucketName = properties.getBucketName(); expirationSec = properties.getSignedUrlExpirationDuration().getSeconds(); } /** * @param operation the type of {@link Operation} to be performed * @param payloadType the {@link PayloadType} that is being accessed * @return a {@link ExternalStorageLocation} object which contains the pre-signed URL and the s3 * object key for the json payload */ @Override public ExternalStorageLocation getLocation( Operation operation, PayloadType payloadType, String path) { try { ExternalStorageLocation externalStorageLocation = new ExternalStorageLocation(); Date expiration = new Date(); long expTimeMillis = expiration.getTime() + 1000 * expirationSec; expiration.setTime(expTimeMillis); HttpMethod httpMethod = HttpMethod.GET; if (operation == Operation.WRITE) { httpMethod = HttpMethod.PUT; } String objectKey; if (StringUtils.isNotBlank(path)) { objectKey = path; } else { objectKey = getObjectKey(payloadType); } externalStorageLocation.setPath(objectKey); GeneratePresignedUrlRequest generatePresignedUrlRequest = new GeneratePresignedUrlRequest(bucketName, objectKey) .withMethod(httpMethod) .withExpiration(expiration); externalStorageLocation.setUri( 
s3Client.generatePresignedUrl(generatePresignedUrlRequest) .toURI() .toASCIIString()); return externalStorageLocation; } catch (SdkClientException e) { String msg = String.format( "Error communicating with S3 - operation:%s, payloadType: %s, path: %s", operation, payloadType, path); LOGGER.error(msg, e); throw new TransientException(msg, e); } catch (URISyntaxException e) { String msg = "Invalid URI Syntax"; LOGGER.error(msg, e); throw new NonTransientException(msg, e); } } /** * Uploads the payload to the given s3 object key. It is expected that the caller retrieves the * object key using {@link #getLocation(Operation, PayloadType, String)} before making this * call. * * @param path the s3 key of the object to be uploaded * @param payload an {@link InputStream} containing the json payload which is to be uploaded * @param payloadSize the size of the json payload in bytes */ @Override public void upload(String path, InputStream payload, long payloadSize) { try { ObjectMetadata objectMetadata = new ObjectMetadata(); objectMetadata.setContentType(CONTENT_TYPE); objectMetadata.setContentLength(payloadSize); PutObjectRequest request = new PutObjectRequest(bucketName, path, payload, objectMetadata); s3Client.putObject(request); } catch (SdkClientException e) { String msg = String.format( "Error uploading to S3 - path:%s, payloadSize: %d", path, payloadSize); LOGGER.error(msg, e); throw new TransientException(msg, e); } } /** * Downloads the payload stored in the s3 object. * * @param path the S3 key of the object * @return an input stream containing the contents of the object Caller is expected to close the * input stream. 
*/ @Override public InputStream download(String path) { try { S3Object s3Object = s3Client.getObject(new GetObjectRequest(bucketName, path)); return s3Object.getObjectContent(); } catch (SdkClientException e) { String msg = String.format("Error downloading from S3 - path:%s", path); LOGGER.error(msg, e); throw new TransientException(msg, e); } } private String getObjectKey(PayloadType payloadType) { StringBuilder stringBuilder = new StringBuilder(); switch (payloadType) { case WORKFLOW_INPUT: stringBuilder.append("workflow/input/"); break; case WORKFLOW_OUTPUT: stringBuilder.append("workflow/output/"); break; case TASK_INPUT: stringBuilder.append("task/input/"); break; case TASK_OUTPUT: stringBuilder.append("task/output/"); break; } stringBuilder.append(idGenerator.generate()).append(".json"); return stringBuilder.toString(); } }
6,790
0
Create_ds/conductor/client-spring/src/test/java/com/netflix/conductor/client
Create_ds/conductor/client-spring/src/test/java/com/netflix/conductor/client/spring/Workers.java
/* * Copyright 2023 Netflix, Inc. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.client.spring; import java.util.Date; import org.springframework.stereotype.Component; import com.netflix.conductor.sdk.workflow.executor.task.TaskContext; import com.netflix.conductor.sdk.workflow.task.InputParam; import com.netflix.conductor.sdk.workflow.task.WorkerTask; @Component public class Workers { @WorkerTask(value = "hello", threadCount = 3) public String helloWorld(@InputParam("name") String name) { TaskContext context = TaskContext.get(); System.out.println(new Date() + ":: Poll count: " + context.getPollCount()); if (context.getPollCount() < 5) { context.addLog("Not ready yet, poll count is only " + context.getPollCount()); context.setCallbackAfter(1); } return "Hello, " + name; } @WorkerTask(value = "hello_again", pollingInterval = 333) public String helloAgain(@InputParam("name") String name) { TaskContext context = TaskContext.get(); System.out.println(new Date() + ":: Poll count: " + context.getPollCount()); if (context.getPollCount() < 5) { context.addLog("Not ready yet, poll count is only " + context.getPollCount()); context.setCallbackAfter(1); } return "Hello (again), " + name; } }
6,791
0
Create_ds/conductor/client-spring/src/test/java/com/netflix/conductor/client
Create_ds/conductor/client-spring/src/test/java/com/netflix/conductor/client/spring/ExampleClient.java
/* * Copyright 2020 Netflix, Inc. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.client.spring; import org.springframework.boot.SpringApplication; import org.springframework.boot.autoconfigure.SpringBootApplication; import org.springframework.context.annotation.Bean; import com.netflix.conductor.client.worker.Worker; import com.netflix.conductor.common.metadata.tasks.Task; import com.netflix.conductor.common.metadata.tasks.TaskResult; @SpringBootApplication public class ExampleClient { public static void main(String[] args) { SpringApplication.run(ExampleClient.class, args); } @Bean public Worker worker() { return new Worker() { @Override public String getTaskDefName() { return "taskDef"; } @Override public TaskResult execute(Task task) { return new TaskResult(task); } }; } }
6,792
0
Create_ds/conductor/client-spring/src/main/java/com/netflix/conductor/client
Create_ds/conductor/client-spring/src/main/java/com/netflix/conductor/client/spring/ClientProperties.java
/*
 * Copyright 2020 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.client.spring;

import java.time.Duration;
import java.util.HashMap;
import java.util.Map;

import org.springframework.boot.context.properties.ConfigurationProperties;

/**
 * Configuration bean bound to the {@code conductor.client.*} property namespace. The getter/setter
 * names define the binding contract, so they must not be renamed.
 */
@ConfigurationProperties("conductor.client")
public class ClientProperties {

    // Base URI of the Conductor server API; no default — must be configured.
    private String rootUri;

    // Thread-name pattern for worker threads; %d is replaced with a thread index.
    private String workerNamePrefix = "workflow-worker-%d";

    // Default worker thread count used when no per-task count is given.
    private int threadCount = 1;

    // How long to sleep before retrying a failed poll/update.
    private Duration sleepWhenRetryDuration = Duration.ofMillis(500);

    // Number of times a task-result update is retried.
    private int updateRetryCount = 3;

    // Maps task type -> domain, for domain-scoped task polling.
    private Map<String, String> taskToDomain = new HashMap<>();

    // Maps task type -> dedicated thread count, overriding threadCount per task.
    private Map<String, Integer> taskThreadCount = new HashMap<>();

    // Grace period, in seconds, allowed for workers to finish on shutdown.
    private int shutdownGracePeriodSeconds = 10;

    public String getRootUri() {
        return rootUri;
    }

    public void setRootUri(String rootUri) {
        this.rootUri = rootUri;
    }

    public String getWorkerNamePrefix() {
        return workerNamePrefix;
    }

    public void setWorkerNamePrefix(String workerNamePrefix) {
        this.workerNamePrefix = workerNamePrefix;
    }

    public int getThreadCount() {
        return threadCount;
    }

    public void setThreadCount(int threadCount) {
        this.threadCount = threadCount;
    }

    public Duration getSleepWhenRetryDuration() {
        return sleepWhenRetryDuration;
    }

    public void setSleepWhenRetryDuration(Duration sleepWhenRetryDuration) {
        this.sleepWhenRetryDuration = sleepWhenRetryDuration;
    }

    public int getUpdateRetryCount() {
        return updateRetryCount;
    }

    public void setUpdateRetryCount(int updateRetryCount) {
        this.updateRetryCount = updateRetryCount;
    }

    public Map<String, String> getTaskToDomain() {
        return taskToDomain;
    }

    public void setTaskToDomain(Map<String, String> taskToDomain) {
        this.taskToDomain = taskToDomain;
    }

    public int getShutdownGracePeriodSeconds() {
        return shutdownGracePeriodSeconds;
    }

    public void setShutdownGracePeriodSeconds(int shutdownGracePeriodSeconds) {
        this.shutdownGracePeriodSeconds = shutdownGracePeriodSeconds;
    }

    public Map<String, Integer> getTaskThreadCount() {
        return taskThreadCount;
    }

    public void setTaskThreadCount(Map<String, Integer> taskThreadCount) {
        this.taskThreadCount = taskThreadCount;
    }
}
6,793
0
Create_ds/conductor/client-spring/src/main/java/com/netflix/conductor/client
Create_ds/conductor/client-spring/src/main/java/com/netflix/conductor/client/spring/ConductorClientAutoConfiguration.java
/*
 * Copyright 2020 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.client.spring;

import java.util.ArrayList;
import java.util.List;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

import com.netflix.conductor.client.automator.TaskRunnerConfigurer;
import com.netflix.conductor.client.http.TaskClient;
import com.netflix.conductor.client.worker.Worker;
import com.netflix.conductor.sdk.workflow.executor.task.AnnotatedWorkerExecutor;
import com.netflix.discovery.EurekaClient;

/**
 * Spring Boot auto-configuration for the Conductor client: wires a {@link TaskClient}, an
 * {@link AnnotatedWorkerExecutor}, and a {@link TaskRunnerConfigurer} from {@link ClientProperties}.
 * Each bean is created only when the application has not defined its own ({@code
 * @ConditionalOnMissingBean}). Bean-method names are also the bean names; do not rename.
 */
@Configuration(proxyBeanMethods = false)
@EnableConfigurationProperties(ClientProperties.class)
public class ConductorClientAutoConfiguration {

    // Optional: stays null when the application does not run with Eureka discovery.
    // NOTE(review): passed to the builder below even when null — presumably the builder
    // tolerates a null Eureka client; confirm against TaskRunnerConfigurer.Builder.
    @Autowired(required = false)
    private EurekaClient eurekaClient;

    // All Worker beans in the context; initialized so it is an empty list when none exist.
    @Autowired(required = false)
    private List<Worker> workers = new ArrayList<>();

    /**
     * HTTP client for the Conductor task API, pointed at {@code conductor.client.root-uri}.
     */
    @ConditionalOnMissingBean
    @Bean
    public TaskClient taskClient(ClientProperties clientProperties) {
        TaskClient taskClient = new TaskClient();
        taskClient.setRootURI(clientProperties.getRootUri());
        return taskClient;
    }

    /**
     * Executor for {@code @WorkerTask}-annotated beans, sharing the same {@link TaskClient}.
     */
    @ConditionalOnMissingBean
    @Bean
    public AnnotatedWorkerExecutor annotatedWorkerExecutor(TaskClient taskClient) {
        return new AnnotatedWorkerExecutor(taskClient);
    }

    /**
     * Polling/execution coordinator for the discovered {@link Worker} beans. Spring calls
     * {@code init()} after construction and {@code shutdown()} on context close.
     * Note: the sleep-when-retry Duration is narrowed to an int of milliseconds here.
     */
    @ConditionalOnMissingBean
    @Bean(initMethod = "init", destroyMethod = "shutdown")
    public TaskRunnerConfigurer taskRunnerConfigurer(
            TaskClient taskClient, ClientProperties clientProperties) {
        return new TaskRunnerConfigurer.Builder(taskClient, workers)
                .withTaskThreadCount(clientProperties.getTaskThreadCount())
                .withThreadCount(clientProperties.getThreadCount())
                .withSleepWhenRetry((int) clientProperties.getSleepWhenRetryDuration().toMillis())
                .withUpdateRetryCount(clientProperties.getUpdateRetryCount())
                .withTaskToDomain(clientProperties.getTaskToDomain())
                .withShutdownGracePeriodSeconds(clientProperties.getShutdownGracePeriodSeconds())
                .withEurekaClient(eurekaClient)
                .build();
    }
}
6,794
0
Create_ds/conductor/client-spring/src/main/java/com/netflix/conductor/client
Create_ds/conductor/client-spring/src/main/java/com/netflix/conductor/client/spring/ConductorWorkerAutoConfiguration.java
/*
 * Copyright 2023 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.client.spring;

import java.util.Map;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationListener;
import org.springframework.context.event.ContextRefreshedEvent;
import org.springframework.core.env.Environment;
import org.springframework.stereotype.Component;

import com.netflix.conductor.client.http.TaskClient;
import com.netflix.conductor.sdk.workflow.executor.task.AnnotatedWorkerExecutor;
import com.netflix.conductor.sdk.workflow.executor.task.WorkerConfiguration;

/**
 * Once the Spring context has been refreshed, scans every {@code @Component} bean for annotated
 * worker methods, registers them with an {@link AnnotatedWorkerExecutor}, and starts polling.
 * Worker settings are resolved from the Spring {@link Environment} via
 * {@link SpringWorkerConfiguration}.
 */
@Component
public class ConductorWorkerAutoConfiguration
        implements ApplicationListener<ContextRefreshedEvent> {

    @Autowired private TaskClient taskClient;

    /**
     * Builds and starts the worker executor.
     *
     * <p>NOTE(review): {@code ContextRefreshedEvent} can fire more than once (e.g. on a context
     * refresh or in hierarchical contexts), which would create a second executor and start polling
     * again — confirm whether a one-shot guard is needed here.
     *
     * @param refreshedEvent the refresh event carrying the application context to scan
     */
    @Override
    public void onApplicationEvent(ContextRefreshedEvent refreshedEvent) {
        ApplicationContext applicationContext = refreshedEvent.getApplicationContext();
        Environment environment = applicationContext.getEnvironment();
        WorkerConfiguration configuration = new SpringWorkerConfiguration(environment);

        AnnotatedWorkerExecutor annotatedWorkerExecutor =
                new AnnotatedWorkerExecutor(taskClient, configuration);

        // Register every @Component bean; the executor picks out annotated worker methods.
        Map<String, Object> beans = applicationContext.getBeansWithAnnotation(Component.class);
        beans.values().forEach(annotatedWorkerExecutor::addBean);

        annotatedWorkerExecutor.startPolling();
    }
}
6,795
0
Create_ds/conductor/client-spring/src/main/java/com/netflix/conductor/client
Create_ds/conductor/client-spring/src/main/java/com/netflix/conductor/client/spring/SpringWorkerConfiguration.java
/* * Copyright 2023 Netflix, Inc. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.client.spring; import org.springframework.core.env.Environment; import com.netflix.conductor.sdk.workflow.executor.task.WorkerConfiguration; public class SpringWorkerConfiguration extends WorkerConfiguration { private final Environment environment; public SpringWorkerConfiguration(Environment environment) { this.environment = environment; } @Override public int getPollingInterval(String taskName) { String key = "conductor.worker." + taskName + ".pollingInterval"; return environment.getProperty(key, Integer.class, 0); } @Override public int getThreadCount(String taskName) { String key = "conductor.worker." + taskName + ".threadCount"; return environment.getProperty(key, Integer.class, 0); } @Override public String getDomain(String taskName) { String key = "conductor.worker." + taskName + ".domain"; return environment.getProperty(key, String.class, null); } }
6,796
0
Create_ds/conductor/grpc/src/test/java/com/netflix/conductor
Create_ds/conductor/grpc/src/test/java/com/netflix/conductor/grpc/TestProtoMapper.java
/* * Copyright 2020 Netflix, Inc. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.grpc; import com.netflix.conductor.common.metadata.workflow.WorkflowTask; import com.netflix.conductor.proto.WorkflowTaskPb; import org.junit.Test; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNull; public class TestProtoMapper { private final ProtoMapper mapper = ProtoMapper.INSTANCE; @Test public void workflowTaskToProto() { final WorkflowTask taskWithDefaultRetryCount = new WorkflowTask(); final WorkflowTask taskWith1RetryCount = new WorkflowTask(); taskWith1RetryCount.setRetryCount(1); final WorkflowTask taskWithNoRetryCount = new WorkflowTask(); taskWithNoRetryCount.setRetryCount(0); assertEquals(-1, mapper.toProto(taskWithDefaultRetryCount).getRetryCount()); assertEquals(1, mapper.toProto(taskWith1RetryCount).getRetryCount()); assertEquals(0, mapper.toProto(taskWithNoRetryCount).getRetryCount()); } @Test public void workflowTaskFromProto() { final WorkflowTaskPb.WorkflowTask taskWithDefaultRetryCount = WorkflowTaskPb.WorkflowTask.newBuilder().build(); final WorkflowTaskPb.WorkflowTask taskWith1RetryCount = WorkflowTaskPb.WorkflowTask.newBuilder().setRetryCount(1).build(); final WorkflowTaskPb.WorkflowTask taskWithNoRetryCount = WorkflowTaskPb.WorkflowTask.newBuilder().setRetryCount(-1).build(); assertEquals(new Integer(0), mapper.fromProto(taskWithDefaultRetryCount).getRetryCount()); assertEquals(1, 
mapper.fromProto(taskWith1RetryCount).getRetryCount().intValue()); assertNull(mapper.fromProto(taskWithNoRetryCount).getRetryCount()); } }
6,797
0
Create_ds/conductor/grpc/src/main/java/com/netflix/conductor
Create_ds/conductor/grpc/src/main/java/com/netflix/conductor/grpc/AbstractProtoMapper.java
package com.netflix.conductor.grpc; import com.google.protobuf.Any; import com.google.protobuf.Value; import com.netflix.conductor.common.metadata.events.EventExecution; import com.netflix.conductor.common.metadata.events.EventHandler; import com.netflix.conductor.common.metadata.tasks.PollData; import com.netflix.conductor.common.metadata.tasks.Task; import com.netflix.conductor.common.metadata.tasks.TaskDef; import com.netflix.conductor.common.metadata.tasks.TaskExecLog; import com.netflix.conductor.common.metadata.tasks.TaskResult; import com.netflix.conductor.common.metadata.workflow.DynamicForkJoinTask; import com.netflix.conductor.common.metadata.workflow.DynamicForkJoinTaskList; import com.netflix.conductor.common.metadata.workflow.RerunWorkflowRequest; import com.netflix.conductor.common.metadata.workflow.SkipTaskRequest; import com.netflix.conductor.common.metadata.workflow.StartWorkflowRequest; import com.netflix.conductor.common.metadata.workflow.SubWorkflowParams; import com.netflix.conductor.common.metadata.workflow.WorkflowDef; import com.netflix.conductor.common.metadata.workflow.WorkflowDefSummary; import com.netflix.conductor.common.metadata.workflow.WorkflowTask; import com.netflix.conductor.common.run.TaskSummary; import com.netflix.conductor.common.run.Workflow; import com.netflix.conductor.common.run.WorkflowSummary; import com.netflix.conductor.proto.DynamicForkJoinTaskListPb; import com.netflix.conductor.proto.DynamicForkJoinTaskPb; import com.netflix.conductor.proto.EventExecutionPb; import com.netflix.conductor.proto.EventHandlerPb; import com.netflix.conductor.proto.PollDataPb; import com.netflix.conductor.proto.RerunWorkflowRequestPb; import com.netflix.conductor.proto.SkipTaskRequestPb; import com.netflix.conductor.proto.StartWorkflowRequestPb; import com.netflix.conductor.proto.SubWorkflowParamsPb; import com.netflix.conductor.proto.TaskDefPb; import com.netflix.conductor.proto.TaskExecLogPb; import com.netflix.conductor.proto.TaskPb; 
import com.netflix.conductor.proto.TaskResultPb; import com.netflix.conductor.proto.TaskSummaryPb; import com.netflix.conductor.proto.WorkflowDefPb; import com.netflix.conductor.proto.WorkflowDefSummaryPb; import com.netflix.conductor.proto.WorkflowPb; import com.netflix.conductor.proto.WorkflowSummaryPb; import com.netflix.conductor.proto.WorkflowTaskPb; import java.lang.IllegalArgumentException; import java.lang.Object; import java.lang.String; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.stream.Collectors; import javax.annotation.Generated; @Generated("com.netflix.conductor.annotationsprocessor.protogen") public abstract class AbstractProtoMapper { public DynamicForkJoinTaskPb.DynamicForkJoinTask toProto(DynamicForkJoinTask from) { DynamicForkJoinTaskPb.DynamicForkJoinTask.Builder to = DynamicForkJoinTaskPb.DynamicForkJoinTask.newBuilder(); if (from.getTaskName() != null) { to.setTaskName( from.getTaskName() ); } if (from.getWorkflowName() != null) { to.setWorkflowName( from.getWorkflowName() ); } if (from.getReferenceName() != null) { to.setReferenceName( from.getReferenceName() ); } for (Map.Entry<String, Object> pair : from.getInput().entrySet()) { to.putInput( pair.getKey(), toProto( pair.getValue() ) ); } if (from.getType() != null) { to.setType( from.getType() ); } return to.build(); } public DynamicForkJoinTask fromProto(DynamicForkJoinTaskPb.DynamicForkJoinTask from) { DynamicForkJoinTask to = new DynamicForkJoinTask(); to.setTaskName( from.getTaskName() ); to.setWorkflowName( from.getWorkflowName() ); to.setReferenceName( from.getReferenceName() ); Map<String, Object> inputMap = new HashMap<String, Object>(); for (Map.Entry<String, Value> pair : from.getInputMap().entrySet()) { inputMap.put( pair.getKey(), fromProto( pair.getValue() ) ); } to.setInput(inputMap); to.setType( from.getType() ); return to; } public 
DynamicForkJoinTaskListPb.DynamicForkJoinTaskList toProto(DynamicForkJoinTaskList from) { DynamicForkJoinTaskListPb.DynamicForkJoinTaskList.Builder to = DynamicForkJoinTaskListPb.DynamicForkJoinTaskList.newBuilder(); for (DynamicForkJoinTask elem : from.getDynamicTasks()) { to.addDynamicTasks( toProto(elem) ); } return to.build(); } public DynamicForkJoinTaskList fromProto( DynamicForkJoinTaskListPb.DynamicForkJoinTaskList from) { DynamicForkJoinTaskList to = new DynamicForkJoinTaskList(); to.setDynamicTasks( from.getDynamicTasksList().stream().map(this::fromProto).collect(Collectors.toCollection(ArrayList::new)) ); return to; } public EventExecutionPb.EventExecution toProto(EventExecution from) { EventExecutionPb.EventExecution.Builder to = EventExecutionPb.EventExecution.newBuilder(); if (from.getId() != null) { to.setId( from.getId() ); } if (from.getMessageId() != null) { to.setMessageId( from.getMessageId() ); } if (from.getName() != null) { to.setName( from.getName() ); } if (from.getEvent() != null) { to.setEvent( from.getEvent() ); } to.setCreated( from.getCreated() ); if (from.getStatus() != null) { to.setStatus( toProto( from.getStatus() ) ); } if (from.getAction() != null) { to.setAction( toProto( from.getAction() ) ); } for (Map.Entry<String, Object> pair : from.getOutput().entrySet()) { to.putOutput( pair.getKey(), toProto( pair.getValue() ) ); } return to.build(); } public EventExecution fromProto(EventExecutionPb.EventExecution from) { EventExecution to = new EventExecution(); to.setId( from.getId() ); to.setMessageId( from.getMessageId() ); to.setName( from.getName() ); to.setEvent( from.getEvent() ); to.setCreated( from.getCreated() ); to.setStatus( fromProto( from.getStatus() ) ); to.setAction( fromProto( from.getAction() ) ); Map<String, Object> outputMap = new HashMap<String, Object>(); for (Map.Entry<String, Value> pair : from.getOutputMap().entrySet()) { outputMap.put( pair.getKey(), fromProto( pair.getValue() ) ); } to.setOutput(outputMap); 
return to; } public EventExecutionPb.EventExecution.Status toProto(EventExecution.Status from) { EventExecutionPb.EventExecution.Status to; switch (from) { case IN_PROGRESS: to = EventExecutionPb.EventExecution.Status.IN_PROGRESS; break; case COMPLETED: to = EventExecutionPb.EventExecution.Status.COMPLETED; break; case FAILED: to = EventExecutionPb.EventExecution.Status.FAILED; break; case SKIPPED: to = EventExecutionPb.EventExecution.Status.SKIPPED; break; default: throw new IllegalArgumentException("Unexpected enum constant: " + from); } return to; } public EventExecution.Status fromProto(EventExecutionPb.EventExecution.Status from) { EventExecution.Status to; switch (from) { case IN_PROGRESS: to = EventExecution.Status.IN_PROGRESS; break; case COMPLETED: to = EventExecution.Status.COMPLETED; break; case FAILED: to = EventExecution.Status.FAILED; break; case SKIPPED: to = EventExecution.Status.SKIPPED; break; default: throw new IllegalArgumentException("Unexpected enum constant: " + from); } return to; } public EventHandlerPb.EventHandler toProto(EventHandler from) { EventHandlerPb.EventHandler.Builder to = EventHandlerPb.EventHandler.newBuilder(); if (from.getName() != null) { to.setName( from.getName() ); } if (from.getEvent() != null) { to.setEvent( from.getEvent() ); } if (from.getCondition() != null) { to.setCondition( from.getCondition() ); } for (EventHandler.Action elem : from.getActions()) { to.addActions( toProto(elem) ); } to.setActive( from.isActive() ); if (from.getEvaluatorType() != null) { to.setEvaluatorType( from.getEvaluatorType() ); } return to.build(); } public EventHandler fromProto(EventHandlerPb.EventHandler from) { EventHandler to = new EventHandler(); to.setName( from.getName() ); to.setEvent( from.getEvent() ); to.setCondition( from.getCondition() ); to.setActions( from.getActionsList().stream().map(this::fromProto).collect(Collectors.toCollection(ArrayList::new)) ); to.setActive( from.getActive() ); to.setEvaluatorType( 
from.getEvaluatorType() ); return to; } public EventHandlerPb.EventHandler.StartWorkflow toProto(EventHandler.StartWorkflow from) { EventHandlerPb.EventHandler.StartWorkflow.Builder to = EventHandlerPb.EventHandler.StartWorkflow.newBuilder(); if (from.getName() != null) { to.setName( from.getName() ); } if (from.getVersion() != null) { to.setVersion( from.getVersion() ); } if (from.getCorrelationId() != null) { to.setCorrelationId( from.getCorrelationId() ); } for (Map.Entry<String, Object> pair : from.getInput().entrySet()) { to.putInput( pair.getKey(), toProto( pair.getValue() ) ); } if (from.getInputMessage() != null) { to.setInputMessage( toProto( from.getInputMessage() ) ); } to.putAllTaskToDomain( from.getTaskToDomain() ); return to.build(); } public EventHandler.StartWorkflow fromProto(EventHandlerPb.EventHandler.StartWorkflow from) { EventHandler.StartWorkflow to = new EventHandler.StartWorkflow(); to.setName( from.getName() ); to.setVersion( from.getVersion() ); to.setCorrelationId( from.getCorrelationId() ); Map<String, Object> inputMap = new HashMap<String, Object>(); for (Map.Entry<String, Value> pair : from.getInputMap().entrySet()) { inputMap.put( pair.getKey(), fromProto( pair.getValue() ) ); } to.setInput(inputMap); if (from.hasInputMessage()) { to.setInputMessage( fromProto( from.getInputMessage() ) ); } to.setTaskToDomain( from.getTaskToDomainMap() ); return to; } public EventHandlerPb.EventHandler.TaskDetails toProto(EventHandler.TaskDetails from) { EventHandlerPb.EventHandler.TaskDetails.Builder to = EventHandlerPb.EventHandler.TaskDetails.newBuilder(); if (from.getWorkflowId() != null) { to.setWorkflowId( from.getWorkflowId() ); } if (from.getTaskRefName() != null) { to.setTaskRefName( from.getTaskRefName() ); } for (Map.Entry<String, Object> pair : from.getOutput().entrySet()) { to.putOutput( pair.getKey(), toProto( pair.getValue() ) ); } if (from.getOutputMessage() != null) { to.setOutputMessage( toProto( from.getOutputMessage() ) ); } if 
(from.getTaskId() != null) { to.setTaskId( from.getTaskId() ); } return to.build(); } public EventHandler.TaskDetails fromProto(EventHandlerPb.EventHandler.TaskDetails from) { EventHandler.TaskDetails to = new EventHandler.TaskDetails(); to.setWorkflowId( from.getWorkflowId() ); to.setTaskRefName( from.getTaskRefName() ); Map<String, Object> outputMap = new HashMap<String, Object>(); for (Map.Entry<String, Value> pair : from.getOutputMap().entrySet()) { outputMap.put( pair.getKey(), fromProto( pair.getValue() ) ); } to.setOutput(outputMap); if (from.hasOutputMessage()) { to.setOutputMessage( fromProto( from.getOutputMessage() ) ); } to.setTaskId( from.getTaskId() ); return to; } public EventHandlerPb.EventHandler.Action toProto(EventHandler.Action from) { EventHandlerPb.EventHandler.Action.Builder to = EventHandlerPb.EventHandler.Action.newBuilder(); if (from.getAction() != null) { to.setAction( toProto( from.getAction() ) ); } if (from.getStart_workflow() != null) { to.setStartWorkflow( toProto( from.getStart_workflow() ) ); } if (from.getComplete_task() != null) { to.setCompleteTask( toProto( from.getComplete_task() ) ); } if (from.getFail_task() != null) { to.setFailTask( toProto( from.getFail_task() ) ); } to.setExpandInlineJson( from.isExpandInlineJSON() ); return to.build(); } public EventHandler.Action fromProto(EventHandlerPb.EventHandler.Action from) { EventHandler.Action to = new EventHandler.Action(); to.setAction( fromProto( from.getAction() ) ); if (from.hasStartWorkflow()) { to.setStart_workflow( fromProto( from.getStartWorkflow() ) ); } if (from.hasCompleteTask()) { to.setComplete_task( fromProto( from.getCompleteTask() ) ); } if (from.hasFailTask()) { to.setFail_task( fromProto( from.getFailTask() ) ); } to.setExpandInlineJSON( from.getExpandInlineJson() ); return to; } public EventHandlerPb.EventHandler.Action.Type toProto(EventHandler.Action.Type from) { EventHandlerPb.EventHandler.Action.Type to; switch (from) { case start_workflow: to = 
EventHandlerPb.EventHandler.Action.Type.START_WORKFLOW; break; case complete_task: to = EventHandlerPb.EventHandler.Action.Type.COMPLETE_TASK; break; case fail_task: to = EventHandlerPb.EventHandler.Action.Type.FAIL_TASK; break; default: throw new IllegalArgumentException("Unexpected enum constant: " + from); } return to; } public EventHandler.Action.Type fromProto(EventHandlerPb.EventHandler.Action.Type from) { EventHandler.Action.Type to; switch (from) { case START_WORKFLOW: to = EventHandler.Action.Type.start_workflow; break; case COMPLETE_TASK: to = EventHandler.Action.Type.complete_task; break; case FAIL_TASK: to = EventHandler.Action.Type.fail_task; break; default: throw new IllegalArgumentException("Unexpected enum constant: " + from); } return to; } public PollDataPb.PollData toProto(PollData from) { PollDataPb.PollData.Builder to = PollDataPb.PollData.newBuilder(); if (from.getQueueName() != null) { to.setQueueName( from.getQueueName() ); } if (from.getDomain() != null) { to.setDomain( from.getDomain() ); } if (from.getWorkerId() != null) { to.setWorkerId( from.getWorkerId() ); } to.setLastPollTime( from.getLastPollTime() ); return to.build(); } public PollData fromProto(PollDataPb.PollData from) { PollData to = new PollData(); to.setQueueName( from.getQueueName() ); to.setDomain( from.getDomain() ); to.setWorkerId( from.getWorkerId() ); to.setLastPollTime( from.getLastPollTime() ); return to; } public RerunWorkflowRequestPb.RerunWorkflowRequest toProto(RerunWorkflowRequest from) { RerunWorkflowRequestPb.RerunWorkflowRequest.Builder to = RerunWorkflowRequestPb.RerunWorkflowRequest.newBuilder(); if (from.getReRunFromWorkflowId() != null) { to.setReRunFromWorkflowId( from.getReRunFromWorkflowId() ); } for (Map.Entry<String, Object> pair : from.getWorkflowInput().entrySet()) { to.putWorkflowInput( pair.getKey(), toProto( pair.getValue() ) ); } if (from.getReRunFromTaskId() != null) { to.setReRunFromTaskId( from.getReRunFromTaskId() ); } for (Map.Entry<String, 
Object> pair : from.getTaskInput().entrySet()) { to.putTaskInput( pair.getKey(), toProto( pair.getValue() ) ); } if (from.getCorrelationId() != null) { to.setCorrelationId( from.getCorrelationId() ); } return to.build(); } public RerunWorkflowRequest fromProto(RerunWorkflowRequestPb.RerunWorkflowRequest from) { RerunWorkflowRequest to = new RerunWorkflowRequest(); to.setReRunFromWorkflowId( from.getReRunFromWorkflowId() ); Map<String, Object> workflowInputMap = new HashMap<String, Object>(); for (Map.Entry<String, Value> pair : from.getWorkflowInputMap().entrySet()) { workflowInputMap.put( pair.getKey(), fromProto( pair.getValue() ) ); } to.setWorkflowInput(workflowInputMap); to.setReRunFromTaskId( from.getReRunFromTaskId() ); Map<String, Object> taskInputMap = new HashMap<String, Object>(); for (Map.Entry<String, Value> pair : from.getTaskInputMap().entrySet()) { taskInputMap.put( pair.getKey(), fromProto( pair.getValue() ) ); } to.setTaskInput(taskInputMap); to.setCorrelationId( from.getCorrelationId() ); return to; } public SkipTaskRequest fromProto(SkipTaskRequestPb.SkipTaskRequest from) { SkipTaskRequest to = new SkipTaskRequest(); Map<String, Object> taskInputMap = new HashMap<String, Object>(); for (Map.Entry<String, Value> pair : from.getTaskInputMap().entrySet()) { taskInputMap.put( pair.getKey(), fromProto( pair.getValue() ) ); } to.setTaskInput(taskInputMap); Map<String, Object> taskOutputMap = new HashMap<String, Object>(); for (Map.Entry<String, Value> pair : from.getTaskOutputMap().entrySet()) { taskOutputMap.put( pair.getKey(), fromProto( pair.getValue() ) ); } to.setTaskOutput(taskOutputMap); if (from.hasTaskInputMessage()) { to.setTaskInputMessage( fromProto( from.getTaskInputMessage() ) ); } if (from.hasTaskOutputMessage()) { to.setTaskOutputMessage( fromProto( from.getTaskOutputMessage() ) ); } return to; } public StartWorkflowRequestPb.StartWorkflowRequest toProto(StartWorkflowRequest from) { StartWorkflowRequestPb.StartWorkflowRequest.Builder 
to = StartWorkflowRequestPb.StartWorkflowRequest.newBuilder(); if (from.getName() != null) { to.setName( from.getName() ); } if (from.getVersion() != null) { to.setVersion( from.getVersion() ); } if (from.getCorrelationId() != null) { to.setCorrelationId( from.getCorrelationId() ); } for (Map.Entry<String, Object> pair : from.getInput().entrySet()) { to.putInput( pair.getKey(), toProto( pair.getValue() ) ); } to.putAllTaskToDomain( from.getTaskToDomain() ); if (from.getWorkflowDef() != null) { to.setWorkflowDef( toProto( from.getWorkflowDef() ) ); } if (from.getExternalInputPayloadStoragePath() != null) { to.setExternalInputPayloadStoragePath( from.getExternalInputPayloadStoragePath() ); } if (from.getPriority() != null) { to.setPriority( from.getPriority() ); } return to.build(); } public StartWorkflowRequest fromProto(StartWorkflowRequestPb.StartWorkflowRequest from) { StartWorkflowRequest to = new StartWorkflowRequest(); to.setName( from.getName() ); to.setVersion( from.getVersion() ); to.setCorrelationId( from.getCorrelationId() ); Map<String, Object> inputMap = new HashMap<String, Object>(); for (Map.Entry<String, Value> pair : from.getInputMap().entrySet()) { inputMap.put( pair.getKey(), fromProto( pair.getValue() ) ); } to.setInput(inputMap); to.setTaskToDomain( from.getTaskToDomainMap() ); if (from.hasWorkflowDef()) { to.setWorkflowDef( fromProto( from.getWorkflowDef() ) ); } to.setExternalInputPayloadStoragePath( from.getExternalInputPayloadStoragePath() ); to.setPriority( from.getPriority() ); return to; } public SubWorkflowParamsPb.SubWorkflowParams toProto(SubWorkflowParams from) { SubWorkflowParamsPb.SubWorkflowParams.Builder to = SubWorkflowParamsPb.SubWorkflowParams.newBuilder(); if (from.getName() != null) { to.setName( from.getName() ); } if (from.getVersion() != null) { to.setVersion( from.getVersion() ); } to.putAllTaskToDomain( from.getTaskToDomain() ); if (from.getWorkflowDefinition() != null) { to.setWorkflowDefinition( toProto( 
from.getWorkflowDefinition() ) ); } return to.build(); } public SubWorkflowParams fromProto(SubWorkflowParamsPb.SubWorkflowParams from) { SubWorkflowParams to = new SubWorkflowParams(); to.setName( from.getName() ); to.setVersion( from.getVersion() ); to.setTaskToDomain( from.getTaskToDomainMap() ); if (from.hasWorkflowDefinition()) { to.setWorkflowDefinition( fromProto( from.getWorkflowDefinition() ) ); } return to; } public TaskPb.Task toProto(Task from) { TaskPb.Task.Builder to = TaskPb.Task.newBuilder(); if (from.getTaskType() != null) { to.setTaskType( from.getTaskType() ); } if (from.getStatus() != null) { to.setStatus( toProto( from.getStatus() ) ); } for (Map.Entry<String, Object> pair : from.getInputData().entrySet()) { to.putInputData( pair.getKey(), toProto( pair.getValue() ) ); } if (from.getReferenceTaskName() != null) { to.setReferenceTaskName( from.getReferenceTaskName() ); } to.setRetryCount( from.getRetryCount() ); to.setSeq( from.getSeq() ); if (from.getCorrelationId() != null) { to.setCorrelationId( from.getCorrelationId() ); } to.setPollCount( from.getPollCount() ); if (from.getTaskDefName() != null) { to.setTaskDefName( from.getTaskDefName() ); } to.setScheduledTime( from.getScheduledTime() ); to.setStartTime( from.getStartTime() ); to.setEndTime( from.getEndTime() ); to.setUpdateTime( from.getUpdateTime() ); to.setStartDelayInSeconds( from.getStartDelayInSeconds() ); if (from.getRetriedTaskId() != null) { to.setRetriedTaskId( from.getRetriedTaskId() ); } to.setRetried( from.isRetried() ); to.setExecuted( from.isExecuted() ); to.setCallbackFromWorker( from.isCallbackFromWorker() ); to.setResponseTimeoutSeconds( from.getResponseTimeoutSeconds() ); if (from.getWorkflowInstanceId() != null) { to.setWorkflowInstanceId( from.getWorkflowInstanceId() ); } if (from.getWorkflowType() != null) { to.setWorkflowType( from.getWorkflowType() ); } if (from.getTaskId() != null) { to.setTaskId( from.getTaskId() ); } if (from.getReasonForIncompletion() != null) 
{ to.setReasonForIncompletion( from.getReasonForIncompletion() ); } to.setCallbackAfterSeconds( from.getCallbackAfterSeconds() ); if (from.getWorkerId() != null) { to.setWorkerId( from.getWorkerId() ); } for (Map.Entry<String, Object> pair : from.getOutputData().entrySet()) { to.putOutputData( pair.getKey(), toProto( pair.getValue() ) ); } if (from.getWorkflowTask() != null) { to.setWorkflowTask( toProto( from.getWorkflowTask() ) ); } if (from.getDomain() != null) { to.setDomain( from.getDomain() ); } if (from.getInputMessage() != null) { to.setInputMessage( toProto( from.getInputMessage() ) ); } if (from.getOutputMessage() != null) { to.setOutputMessage( toProto( from.getOutputMessage() ) ); } to.setRateLimitPerFrequency( from.getRateLimitPerFrequency() ); to.setRateLimitFrequencyInSeconds( from.getRateLimitFrequencyInSeconds() ); if (from.getExternalInputPayloadStoragePath() != null) { to.setExternalInputPayloadStoragePath( from.getExternalInputPayloadStoragePath() ); } if (from.getExternalOutputPayloadStoragePath() != null) { to.setExternalOutputPayloadStoragePath( from.getExternalOutputPayloadStoragePath() ); } to.setWorkflowPriority( from.getWorkflowPriority() ); if (from.getExecutionNameSpace() != null) { to.setExecutionNameSpace( from.getExecutionNameSpace() ); } if (from.getIsolationGroupId() != null) { to.setIsolationGroupId( from.getIsolationGroupId() ); } to.setIteration( from.getIteration() ); if (from.getSubWorkflowId() != null) { to.setSubWorkflowId( from.getSubWorkflowId() ); } to.setSubworkflowChanged( from.isSubworkflowChanged() ); return to.build(); } public Task fromProto(TaskPb.Task from) { Task to = new Task(); to.setTaskType( from.getTaskType() ); to.setStatus( fromProto( from.getStatus() ) ); Map<String, Object> inputDataMap = new HashMap<String, Object>(); for (Map.Entry<String, Value> pair : from.getInputDataMap().entrySet()) { inputDataMap.put( pair.getKey(), fromProto( pair.getValue() ) ); } to.setInputData(inputDataMap); 
to.setReferenceTaskName( from.getReferenceTaskName() ); to.setRetryCount( from.getRetryCount() ); to.setSeq( from.getSeq() ); to.setCorrelationId( from.getCorrelationId() ); to.setPollCount( from.getPollCount() ); to.setTaskDefName( from.getTaskDefName() ); to.setScheduledTime( from.getScheduledTime() ); to.setStartTime( from.getStartTime() ); to.setEndTime( from.getEndTime() ); to.setUpdateTime( from.getUpdateTime() ); to.setStartDelayInSeconds( from.getStartDelayInSeconds() ); to.setRetriedTaskId( from.getRetriedTaskId() ); to.setRetried( from.getRetried() ); to.setExecuted( from.getExecuted() ); to.setCallbackFromWorker( from.getCallbackFromWorker() ); to.setResponseTimeoutSeconds( from.getResponseTimeoutSeconds() ); to.setWorkflowInstanceId( from.getWorkflowInstanceId() ); to.setWorkflowType( from.getWorkflowType() ); to.setTaskId( from.getTaskId() ); to.setReasonForIncompletion( from.getReasonForIncompletion() ); to.setCallbackAfterSeconds( from.getCallbackAfterSeconds() ); to.setWorkerId( from.getWorkerId() ); Map<String, Object> outputDataMap = new HashMap<String, Object>(); for (Map.Entry<String, Value> pair : from.getOutputDataMap().entrySet()) { outputDataMap.put( pair.getKey(), fromProto( pair.getValue() ) ); } to.setOutputData(outputDataMap); if (from.hasWorkflowTask()) { to.setWorkflowTask( fromProto( from.getWorkflowTask() ) ); } to.setDomain( from.getDomain() ); if (from.hasInputMessage()) { to.setInputMessage( fromProto( from.getInputMessage() ) ); } if (from.hasOutputMessage()) { to.setOutputMessage( fromProto( from.getOutputMessage() ) ); } to.setRateLimitPerFrequency( from.getRateLimitPerFrequency() ); to.setRateLimitFrequencyInSeconds( from.getRateLimitFrequencyInSeconds() ); to.setExternalInputPayloadStoragePath( from.getExternalInputPayloadStoragePath() ); to.setExternalOutputPayloadStoragePath( from.getExternalOutputPayloadStoragePath() ); to.setWorkflowPriority( from.getWorkflowPriority() ); to.setExecutionNameSpace( 
from.getExecutionNameSpace() ); to.setIsolationGroupId( from.getIsolationGroupId() ); to.setIteration( from.getIteration() ); to.setSubWorkflowId( from.getSubWorkflowId() ); to.setSubworkflowChanged( from.getSubworkflowChanged() ); return to; } public TaskPb.Task.Status toProto(Task.Status from) { TaskPb.Task.Status to; switch (from) { case IN_PROGRESS: to = TaskPb.Task.Status.IN_PROGRESS; break; case CANCELED: to = TaskPb.Task.Status.CANCELED; break; case FAILED: to = TaskPb.Task.Status.FAILED; break; case FAILED_WITH_TERMINAL_ERROR: to = TaskPb.Task.Status.FAILED_WITH_TERMINAL_ERROR; break; case COMPLETED: to = TaskPb.Task.Status.COMPLETED; break; case COMPLETED_WITH_ERRORS: to = TaskPb.Task.Status.COMPLETED_WITH_ERRORS; break; case SCHEDULED: to = TaskPb.Task.Status.SCHEDULED; break; case TIMED_OUT: to = TaskPb.Task.Status.TIMED_OUT; break; case SKIPPED: to = TaskPb.Task.Status.SKIPPED; break; default: throw new IllegalArgumentException("Unexpected enum constant: " + from); } return to; } public Task.Status fromProto(TaskPb.Task.Status from) { Task.Status to; switch (from) { case IN_PROGRESS: to = Task.Status.IN_PROGRESS; break; case CANCELED: to = Task.Status.CANCELED; break; case FAILED: to = Task.Status.FAILED; break; case FAILED_WITH_TERMINAL_ERROR: to = Task.Status.FAILED_WITH_TERMINAL_ERROR; break; case COMPLETED: to = Task.Status.COMPLETED; break; case COMPLETED_WITH_ERRORS: to = Task.Status.COMPLETED_WITH_ERRORS; break; case SCHEDULED: to = Task.Status.SCHEDULED; break; case TIMED_OUT: to = Task.Status.TIMED_OUT; break; case SKIPPED: to = Task.Status.SKIPPED; break; default: throw new IllegalArgumentException("Unexpected enum constant: " + from); } return to; } public TaskDefPb.TaskDef toProto(TaskDef from) { TaskDefPb.TaskDef.Builder to = TaskDefPb.TaskDef.newBuilder(); if (from.getName() != null) { to.setName( from.getName() ); } if (from.getDescription() != null) { to.setDescription( from.getDescription() ); } to.setRetryCount( from.getRetryCount() ); 
to.setTimeoutSeconds( from.getTimeoutSeconds() ); to.addAllInputKeys( from.getInputKeys() ); to.addAllOutputKeys( from.getOutputKeys() ); if (from.getTimeoutPolicy() != null) { to.setTimeoutPolicy( toProto( from.getTimeoutPolicy() ) ); } if (from.getRetryLogic() != null) { to.setRetryLogic( toProto( from.getRetryLogic() ) ); } to.setRetryDelaySeconds( from.getRetryDelaySeconds() ); to.setResponseTimeoutSeconds( from.getResponseTimeoutSeconds() ); if (from.getConcurrentExecLimit() != null) { to.setConcurrentExecLimit( from.getConcurrentExecLimit() ); } for (Map.Entry<String, Object> pair : from.getInputTemplate().entrySet()) { to.putInputTemplate( pair.getKey(), toProto( pair.getValue() ) ); } if (from.getRateLimitPerFrequency() != null) { to.setRateLimitPerFrequency( from.getRateLimitPerFrequency() ); } if (from.getRateLimitFrequencyInSeconds() != null) { to.setRateLimitFrequencyInSeconds( from.getRateLimitFrequencyInSeconds() ); } if (from.getIsolationGroupId() != null) { to.setIsolationGroupId( from.getIsolationGroupId() ); } if (from.getExecutionNameSpace() != null) { to.setExecutionNameSpace( from.getExecutionNameSpace() ); } if (from.getOwnerEmail() != null) { to.setOwnerEmail( from.getOwnerEmail() ); } if (from.getPollTimeoutSeconds() != null) { to.setPollTimeoutSeconds( from.getPollTimeoutSeconds() ); } if (from.getBackoffScaleFactor() != null) { to.setBackoffScaleFactor( from.getBackoffScaleFactor() ); } return to.build(); } public TaskDef fromProto(TaskDefPb.TaskDef from) { TaskDef to = new TaskDef(); to.setName( from.getName() ); to.setDescription( from.getDescription() ); to.setRetryCount( from.getRetryCount() ); to.setTimeoutSeconds( from.getTimeoutSeconds() ); to.setInputKeys( from.getInputKeysList().stream().collect(Collectors.toCollection(ArrayList::new)) ); to.setOutputKeys( from.getOutputKeysList().stream().collect(Collectors.toCollection(ArrayList::new)) ); to.setTimeoutPolicy( fromProto( from.getTimeoutPolicy() ) ); to.setRetryLogic( fromProto( 
from.getRetryLogic() ) ); to.setRetryDelaySeconds( from.getRetryDelaySeconds() ); to.setResponseTimeoutSeconds( from.getResponseTimeoutSeconds() ); to.setConcurrentExecLimit( from.getConcurrentExecLimit() ); Map<String, Object> inputTemplateMap = new HashMap<String, Object>(); for (Map.Entry<String, Value> pair : from.getInputTemplateMap().entrySet()) { inputTemplateMap.put( pair.getKey(), fromProto( pair.getValue() ) ); } to.setInputTemplate(inputTemplateMap); to.setRateLimitPerFrequency( from.getRateLimitPerFrequency() ); to.setRateLimitFrequencyInSeconds( from.getRateLimitFrequencyInSeconds() ); to.setIsolationGroupId( from.getIsolationGroupId() ); to.setExecutionNameSpace( from.getExecutionNameSpace() ); to.setOwnerEmail( from.getOwnerEmail() ); to.setPollTimeoutSeconds( from.getPollTimeoutSeconds() ); to.setBackoffScaleFactor( from.getBackoffScaleFactor() ); return to; } public TaskDefPb.TaskDef.TimeoutPolicy toProto(TaskDef.TimeoutPolicy from) { TaskDefPb.TaskDef.TimeoutPolicy to; switch (from) { case RETRY: to = TaskDefPb.TaskDef.TimeoutPolicy.RETRY; break; case TIME_OUT_WF: to = TaskDefPb.TaskDef.TimeoutPolicy.TIME_OUT_WF; break; case ALERT_ONLY: to = TaskDefPb.TaskDef.TimeoutPolicy.ALERT_ONLY; break; default: throw new IllegalArgumentException("Unexpected enum constant: " + from); } return to; } public TaskDef.TimeoutPolicy fromProto(TaskDefPb.TaskDef.TimeoutPolicy from) { TaskDef.TimeoutPolicy to; switch (from) { case RETRY: to = TaskDef.TimeoutPolicy.RETRY; break; case TIME_OUT_WF: to = TaskDef.TimeoutPolicy.TIME_OUT_WF; break; case ALERT_ONLY: to = TaskDef.TimeoutPolicy.ALERT_ONLY; break; default: throw new IllegalArgumentException("Unexpected enum constant: " + from); } return to; } public TaskDefPb.TaskDef.RetryLogic toProto(TaskDef.RetryLogic from) { TaskDefPb.TaskDef.RetryLogic to; switch (from) { case FIXED: to = TaskDefPb.TaskDef.RetryLogic.FIXED; break; case EXPONENTIAL_BACKOFF: to = TaskDefPb.TaskDef.RetryLogic.EXPONENTIAL_BACKOFF; break; case 
LINEAR_BACKOFF: to = TaskDefPb.TaskDef.RetryLogic.LINEAR_BACKOFF; break; default: throw new IllegalArgumentException("Unexpected enum constant: " + from); } return to; } public TaskDef.RetryLogic fromProto(TaskDefPb.TaskDef.RetryLogic from) { TaskDef.RetryLogic to; switch (from) { case FIXED: to = TaskDef.RetryLogic.FIXED; break; case EXPONENTIAL_BACKOFF: to = TaskDef.RetryLogic.EXPONENTIAL_BACKOFF; break; case LINEAR_BACKOFF: to = TaskDef.RetryLogic.LINEAR_BACKOFF; break; default: throw new IllegalArgumentException("Unexpected enum constant: " + from); } return to; } public TaskExecLogPb.TaskExecLog toProto(TaskExecLog from) { TaskExecLogPb.TaskExecLog.Builder to = TaskExecLogPb.TaskExecLog.newBuilder(); if (from.getLog() != null) { to.setLog( from.getLog() ); } if (from.getTaskId() != null) { to.setTaskId( from.getTaskId() ); } to.setCreatedTime( from.getCreatedTime() ); return to.build(); } public TaskExecLog fromProto(TaskExecLogPb.TaskExecLog from) { TaskExecLog to = new TaskExecLog(); to.setLog( from.getLog() ); to.setTaskId( from.getTaskId() ); to.setCreatedTime( from.getCreatedTime() ); return to; } public TaskResultPb.TaskResult toProto(TaskResult from) { TaskResultPb.TaskResult.Builder to = TaskResultPb.TaskResult.newBuilder(); if (from.getWorkflowInstanceId() != null) { to.setWorkflowInstanceId( from.getWorkflowInstanceId() ); } if (from.getTaskId() != null) { to.setTaskId( from.getTaskId() ); } if (from.getReasonForIncompletion() != null) { to.setReasonForIncompletion( from.getReasonForIncompletion() ); } to.setCallbackAfterSeconds( from.getCallbackAfterSeconds() ); if (from.getWorkerId() != null) { to.setWorkerId( from.getWorkerId() ); } if (from.getStatus() != null) { to.setStatus( toProto( from.getStatus() ) ); } for (Map.Entry<String, Object> pair : from.getOutputData().entrySet()) { to.putOutputData( pair.getKey(), toProto( pair.getValue() ) ); } if (from.getOutputMessage() != null) { to.setOutputMessage( toProto( from.getOutputMessage() ) ); } 
return to.build(); } public TaskResult fromProto(TaskResultPb.TaskResult from) { TaskResult to = new TaskResult(); to.setWorkflowInstanceId( from.getWorkflowInstanceId() ); to.setTaskId( from.getTaskId() ); to.setReasonForIncompletion( from.getReasonForIncompletion() ); to.setCallbackAfterSeconds( from.getCallbackAfterSeconds() ); to.setWorkerId( from.getWorkerId() ); to.setStatus( fromProto( from.getStatus() ) ); Map<String, Object> outputDataMap = new HashMap<String, Object>(); for (Map.Entry<String, Value> pair : from.getOutputDataMap().entrySet()) { outputDataMap.put( pair.getKey(), fromProto( pair.getValue() ) ); } to.setOutputData(outputDataMap); if (from.hasOutputMessage()) { to.setOutputMessage( fromProto( from.getOutputMessage() ) ); } return to; } public TaskResultPb.TaskResult.Status toProto(TaskResult.Status from) { TaskResultPb.TaskResult.Status to; switch (from) { case IN_PROGRESS: to = TaskResultPb.TaskResult.Status.IN_PROGRESS; break; case FAILED: to = TaskResultPb.TaskResult.Status.FAILED; break; case FAILED_WITH_TERMINAL_ERROR: to = TaskResultPb.TaskResult.Status.FAILED_WITH_TERMINAL_ERROR; break; case COMPLETED: to = TaskResultPb.TaskResult.Status.COMPLETED; break; default: throw new IllegalArgumentException("Unexpected enum constant: " + from); } return to; } public TaskResult.Status fromProto(TaskResultPb.TaskResult.Status from) { TaskResult.Status to; switch (from) { case IN_PROGRESS: to = TaskResult.Status.IN_PROGRESS; break; case FAILED: to = TaskResult.Status.FAILED; break; case FAILED_WITH_TERMINAL_ERROR: to = TaskResult.Status.FAILED_WITH_TERMINAL_ERROR; break; case COMPLETED: to = TaskResult.Status.COMPLETED; break; default: throw new IllegalArgumentException("Unexpected enum constant: " + from); } return to; } public TaskSummaryPb.TaskSummary toProto(TaskSummary from) { TaskSummaryPb.TaskSummary.Builder to = TaskSummaryPb.TaskSummary.newBuilder(); if (from.getWorkflowId() != null) { to.setWorkflowId( from.getWorkflowId() ); } if 
(from.getWorkflowType() != null) { to.setWorkflowType( from.getWorkflowType() ); } if (from.getCorrelationId() != null) { to.setCorrelationId( from.getCorrelationId() ); } if (from.getScheduledTime() != null) { to.setScheduledTime( from.getScheduledTime() ); } if (from.getStartTime() != null) { to.setStartTime( from.getStartTime() ); } if (from.getUpdateTime() != null) { to.setUpdateTime( from.getUpdateTime() ); } if (from.getEndTime() != null) { to.setEndTime( from.getEndTime() ); } if (from.getStatus() != null) { to.setStatus( toProto( from.getStatus() ) ); } if (from.getReasonForIncompletion() != null) { to.setReasonForIncompletion( from.getReasonForIncompletion() ); } to.setExecutionTime( from.getExecutionTime() ); to.setQueueWaitTime( from.getQueueWaitTime() ); if (from.getTaskDefName() != null) { to.setTaskDefName( from.getTaskDefName() ); } if (from.getTaskType() != null) { to.setTaskType( from.getTaskType() ); } if (from.getInput() != null) { to.setInput( from.getInput() ); } if (from.getOutput() != null) { to.setOutput( from.getOutput() ); } if (from.getTaskId() != null) { to.setTaskId( from.getTaskId() ); } if (from.getExternalInputPayloadStoragePath() != null) { to.setExternalInputPayloadStoragePath( from.getExternalInputPayloadStoragePath() ); } if (from.getExternalOutputPayloadStoragePath() != null) { to.setExternalOutputPayloadStoragePath( from.getExternalOutputPayloadStoragePath() ); } to.setWorkflowPriority( from.getWorkflowPriority() ); if (from.getDomain() != null) { to.setDomain( from.getDomain() ); } return to.build(); } public TaskSummary fromProto(TaskSummaryPb.TaskSummary from) { TaskSummary to = new TaskSummary(); to.setWorkflowId( from.getWorkflowId() ); to.setWorkflowType( from.getWorkflowType() ); to.setCorrelationId( from.getCorrelationId() ); to.setScheduledTime( from.getScheduledTime() ); to.setStartTime( from.getStartTime() ); to.setUpdateTime( from.getUpdateTime() ); to.setEndTime( from.getEndTime() ); to.setStatus( fromProto( 
from.getStatus() ) ); to.setReasonForIncompletion( from.getReasonForIncompletion() ); to.setExecutionTime( from.getExecutionTime() ); to.setQueueWaitTime( from.getQueueWaitTime() ); to.setTaskDefName( from.getTaskDefName() ); to.setTaskType( from.getTaskType() ); to.setInput( from.getInput() ); to.setOutput( from.getOutput() ); to.setTaskId( from.getTaskId() ); to.setExternalInputPayloadStoragePath( from.getExternalInputPayloadStoragePath() ); to.setExternalOutputPayloadStoragePath( from.getExternalOutputPayloadStoragePath() ); to.setWorkflowPriority( from.getWorkflowPriority() ); to.setDomain( from.getDomain() ); return to; } public WorkflowPb.Workflow toProto(Workflow from) { WorkflowPb.Workflow.Builder to = WorkflowPb.Workflow.newBuilder(); if (from.getStatus() != null) { to.setStatus( toProto( from.getStatus() ) ); } to.setEndTime( from.getEndTime() ); if (from.getWorkflowId() != null) { to.setWorkflowId( from.getWorkflowId() ); } if (from.getParentWorkflowId() != null) { to.setParentWorkflowId( from.getParentWorkflowId() ); } if (from.getParentWorkflowTaskId() != null) { to.setParentWorkflowTaskId( from.getParentWorkflowTaskId() ); } for (Task elem : from.getTasks()) { to.addTasks( toProto(elem) ); } for (Map.Entry<String, Object> pair : from.getInput().entrySet()) { to.putInput( pair.getKey(), toProto( pair.getValue() ) ); } for (Map.Entry<String, Object> pair : from.getOutput().entrySet()) { to.putOutput( pair.getKey(), toProto( pair.getValue() ) ); } if (from.getCorrelationId() != null) { to.setCorrelationId( from.getCorrelationId() ); } if (from.getReRunFromWorkflowId() != null) { to.setReRunFromWorkflowId( from.getReRunFromWorkflowId() ); } if (from.getReasonForIncompletion() != null) { to.setReasonForIncompletion( from.getReasonForIncompletion() ); } if (from.getEvent() != null) { to.setEvent( from.getEvent() ); } to.putAllTaskToDomain( from.getTaskToDomain() ); to.addAllFailedReferenceTaskNames( from.getFailedReferenceTaskNames() ); if 
(from.getWorkflowDefinition() != null) { to.setWorkflowDefinition( toProto( from.getWorkflowDefinition() ) ); } if (from.getExternalInputPayloadStoragePath() != null) { to.setExternalInputPayloadStoragePath( from.getExternalInputPayloadStoragePath() ); } if (from.getExternalOutputPayloadStoragePath() != null) { to.setExternalOutputPayloadStoragePath( from.getExternalOutputPayloadStoragePath() ); } to.setPriority( from.getPriority() ); for (Map.Entry<String, Object> pair : from.getVariables().entrySet()) { to.putVariables( pair.getKey(), toProto( pair.getValue() ) ); } to.setLastRetriedTime( from.getLastRetriedTime() ); to.addAllFailedTaskNames( from.getFailedTaskNames() ); return to.build(); } public Workflow fromProto(WorkflowPb.Workflow from) { Workflow to = new Workflow(); to.setStatus( fromProto( from.getStatus() ) ); to.setEndTime( from.getEndTime() ); to.setWorkflowId( from.getWorkflowId() ); to.setParentWorkflowId( from.getParentWorkflowId() ); to.setParentWorkflowTaskId( from.getParentWorkflowTaskId() ); to.setTasks( from.getTasksList().stream().map(this::fromProto).collect(Collectors.toCollection(ArrayList::new)) ); Map<String, Object> inputMap = new HashMap<String, Object>(); for (Map.Entry<String, Value> pair : from.getInputMap().entrySet()) { inputMap.put( pair.getKey(), fromProto( pair.getValue() ) ); } to.setInput(inputMap); Map<String, Object> outputMap = new HashMap<String, Object>(); for (Map.Entry<String, Value> pair : from.getOutputMap().entrySet()) { outputMap.put( pair.getKey(), fromProto( pair.getValue() ) ); } to.setOutput(outputMap); to.setCorrelationId( from.getCorrelationId() ); to.setReRunFromWorkflowId( from.getReRunFromWorkflowId() ); to.setReasonForIncompletion( from.getReasonForIncompletion() ); to.setEvent( from.getEvent() ); to.setTaskToDomain( from.getTaskToDomainMap() ); to.setFailedReferenceTaskNames( from.getFailedReferenceTaskNamesList().stream().collect(Collectors.toCollection(HashSet::new)) ); if 
(from.hasWorkflowDefinition()) { to.setWorkflowDefinition( fromProto( from.getWorkflowDefinition() ) ); } to.setExternalInputPayloadStoragePath( from.getExternalInputPayloadStoragePath() ); to.setExternalOutputPayloadStoragePath( from.getExternalOutputPayloadStoragePath() ); to.setPriority( from.getPriority() ); Map<String, Object> variablesMap = new HashMap<String, Object>(); for (Map.Entry<String, Value> pair : from.getVariablesMap().entrySet()) { variablesMap.put( pair.getKey(), fromProto( pair.getValue() ) ); } to.setVariables(variablesMap); to.setLastRetriedTime( from.getLastRetriedTime() ); to.setFailedTaskNames( from.getFailedTaskNamesList().stream().collect(Collectors.toCollection(HashSet::new)) ); return to; } public WorkflowPb.Workflow.WorkflowStatus toProto(Workflow.WorkflowStatus from) { WorkflowPb.Workflow.WorkflowStatus to; switch (from) { case RUNNING: to = WorkflowPb.Workflow.WorkflowStatus.RUNNING; break; case COMPLETED: to = WorkflowPb.Workflow.WorkflowStatus.COMPLETED; break; case FAILED: to = WorkflowPb.Workflow.WorkflowStatus.FAILED; break; case TIMED_OUT: to = WorkflowPb.Workflow.WorkflowStatus.TIMED_OUT; break; case TERMINATED: to = WorkflowPb.Workflow.WorkflowStatus.TERMINATED; break; case PAUSED: to = WorkflowPb.Workflow.WorkflowStatus.PAUSED; break; default: throw new IllegalArgumentException("Unexpected enum constant: " + from); } return to; } public Workflow.WorkflowStatus fromProto(WorkflowPb.Workflow.WorkflowStatus from) { Workflow.WorkflowStatus to; switch (from) { case RUNNING: to = Workflow.WorkflowStatus.RUNNING; break; case COMPLETED: to = Workflow.WorkflowStatus.COMPLETED; break; case FAILED: to = Workflow.WorkflowStatus.FAILED; break; case TIMED_OUT: to = Workflow.WorkflowStatus.TIMED_OUT; break; case TERMINATED: to = Workflow.WorkflowStatus.TERMINATED; break; case PAUSED: to = Workflow.WorkflowStatus.PAUSED; break; default: throw new IllegalArgumentException("Unexpected enum constant: " + from); } return to; } public 
WorkflowDefPb.WorkflowDef toProto(WorkflowDef from) { WorkflowDefPb.WorkflowDef.Builder to = WorkflowDefPb.WorkflowDef.newBuilder(); if (from.getName() != null) { to.setName( from.getName() ); } if (from.getDescription() != null) { to.setDescription( from.getDescription() ); } to.setVersion( from.getVersion() ); for (WorkflowTask elem : from.getTasks()) { to.addTasks( toProto(elem) ); } to.addAllInputParameters( from.getInputParameters() ); for (Map.Entry<String, Object> pair : from.getOutputParameters().entrySet()) { to.putOutputParameters( pair.getKey(), toProto( pair.getValue() ) ); } if (from.getFailureWorkflow() != null) { to.setFailureWorkflow( from.getFailureWorkflow() ); } to.setSchemaVersion( from.getSchemaVersion() ); to.setRestartable( from.isRestartable() ); to.setWorkflowStatusListenerEnabled( from.isWorkflowStatusListenerEnabled() ); if (from.getOwnerEmail() != null) { to.setOwnerEmail( from.getOwnerEmail() ); } if (from.getTimeoutPolicy() != null) { to.setTimeoutPolicy( toProto( from.getTimeoutPolicy() ) ); } to.setTimeoutSeconds( from.getTimeoutSeconds() ); for (Map.Entry<String, Object> pair : from.getVariables().entrySet()) { to.putVariables( pair.getKey(), toProto( pair.getValue() ) ); } for (Map.Entry<String, Object> pair : from.getInputTemplate().entrySet()) { to.putInputTemplate( pair.getKey(), toProto( pair.getValue() ) ); } return to.build(); } public WorkflowDef fromProto(WorkflowDefPb.WorkflowDef from) { WorkflowDef to = new WorkflowDef(); to.setName( from.getName() ); to.setDescription( from.getDescription() ); to.setVersion( from.getVersion() ); to.setTasks( from.getTasksList().stream().map(this::fromProto).collect(Collectors.toCollection(ArrayList::new)) ); to.setInputParameters( from.getInputParametersList().stream().collect(Collectors.toCollection(ArrayList::new)) ); Map<String, Object> outputParametersMap = new HashMap<String, Object>(); for (Map.Entry<String, Value> pair : from.getOutputParametersMap().entrySet()) { 
outputParametersMap.put( pair.getKey(), fromProto( pair.getValue() ) ); } to.setOutputParameters(outputParametersMap); to.setFailureWorkflow( from.getFailureWorkflow() ); to.setSchemaVersion( from.getSchemaVersion() ); to.setRestartable( from.getRestartable() ); to.setWorkflowStatusListenerEnabled( from.getWorkflowStatusListenerEnabled() ); to.setOwnerEmail( from.getOwnerEmail() ); to.setTimeoutPolicy( fromProto( from.getTimeoutPolicy() ) ); to.setTimeoutSeconds( from.getTimeoutSeconds() ); Map<String, Object> variablesMap = new HashMap<String, Object>(); for (Map.Entry<String, Value> pair : from.getVariablesMap().entrySet()) { variablesMap.put( pair.getKey(), fromProto( pair.getValue() ) ); } to.setVariables(variablesMap); Map<String, Object> inputTemplateMap = new HashMap<String, Object>(); for (Map.Entry<String, Value> pair : from.getInputTemplateMap().entrySet()) { inputTemplateMap.put( pair.getKey(), fromProto( pair.getValue() ) ); } to.setInputTemplate(inputTemplateMap); return to; } public WorkflowDefPb.WorkflowDef.TimeoutPolicy toProto(WorkflowDef.TimeoutPolicy from) { WorkflowDefPb.WorkflowDef.TimeoutPolicy to; switch (from) { case TIME_OUT_WF: to = WorkflowDefPb.WorkflowDef.TimeoutPolicy.TIME_OUT_WF; break; case ALERT_ONLY: to = WorkflowDefPb.WorkflowDef.TimeoutPolicy.ALERT_ONLY; break; default: throw new IllegalArgumentException("Unexpected enum constant: " + from); } return to; } public WorkflowDef.TimeoutPolicy fromProto(WorkflowDefPb.WorkflowDef.TimeoutPolicy from) { WorkflowDef.TimeoutPolicy to; switch (from) { case TIME_OUT_WF: to = WorkflowDef.TimeoutPolicy.TIME_OUT_WF; break; case ALERT_ONLY: to = WorkflowDef.TimeoutPolicy.ALERT_ONLY; break; default: throw new IllegalArgumentException("Unexpected enum constant: " + from); } return to; } public WorkflowDefSummaryPb.WorkflowDefSummary toProto(WorkflowDefSummary from) { WorkflowDefSummaryPb.WorkflowDefSummary.Builder to = WorkflowDefSummaryPb.WorkflowDefSummary.newBuilder(); if (from.getName() != 
null) { to.setName( from.getName() ); } to.setVersion( from.getVersion() ); if (from.getCreateTime() != null) { to.setCreateTime( from.getCreateTime() ); } return to.build(); } public WorkflowDefSummary fromProto(WorkflowDefSummaryPb.WorkflowDefSummary from) { WorkflowDefSummary to = new WorkflowDefSummary(); to.setName( from.getName() ); to.setVersion( from.getVersion() ); to.setCreateTime( from.getCreateTime() ); return to; } public WorkflowSummaryPb.WorkflowSummary toProto(WorkflowSummary from) { WorkflowSummaryPb.WorkflowSummary.Builder to = WorkflowSummaryPb.WorkflowSummary.newBuilder(); if (from.getWorkflowType() != null) { to.setWorkflowType( from.getWorkflowType() ); } to.setVersion( from.getVersion() ); if (from.getWorkflowId() != null) { to.setWorkflowId( from.getWorkflowId() ); } if (from.getCorrelationId() != null) { to.setCorrelationId( from.getCorrelationId() ); } if (from.getStartTime() != null) { to.setStartTime( from.getStartTime() ); } if (from.getUpdateTime() != null) { to.setUpdateTime( from.getUpdateTime() ); } if (from.getEndTime() != null) { to.setEndTime( from.getEndTime() ); } if (from.getStatus() != null) { to.setStatus( toProto( from.getStatus() ) ); } if (from.getInput() != null) { to.setInput( from.getInput() ); } if (from.getOutput() != null) { to.setOutput( from.getOutput() ); } if (from.getReasonForIncompletion() != null) { to.setReasonForIncompletion( from.getReasonForIncompletion() ); } to.setExecutionTime( from.getExecutionTime() ); if (from.getEvent() != null) { to.setEvent( from.getEvent() ); } if (from.getFailedReferenceTaskNames() != null) { to.setFailedReferenceTaskNames( from.getFailedReferenceTaskNames() ); } if (from.getExternalInputPayloadStoragePath() != null) { to.setExternalInputPayloadStoragePath( from.getExternalInputPayloadStoragePath() ); } if (from.getExternalOutputPayloadStoragePath() != null) { to.setExternalOutputPayloadStoragePath( from.getExternalOutputPayloadStoragePath() ); } to.setPriority( 
from.getPriority() ); to.addAllFailedTaskNames( from.getFailedTaskNames() ); return to.build(); } public WorkflowSummary fromProto(WorkflowSummaryPb.WorkflowSummary from) { WorkflowSummary to = new WorkflowSummary(); to.setWorkflowType( from.getWorkflowType() ); to.setVersion( from.getVersion() ); to.setWorkflowId( from.getWorkflowId() ); to.setCorrelationId( from.getCorrelationId() ); to.setStartTime( from.getStartTime() ); to.setUpdateTime( from.getUpdateTime() ); to.setEndTime( from.getEndTime() ); to.setStatus( fromProto( from.getStatus() ) ); to.setInput( from.getInput() ); to.setOutput( from.getOutput() ); to.setReasonForIncompletion( from.getReasonForIncompletion() ); to.setExecutionTime( from.getExecutionTime() ); to.setEvent( from.getEvent() ); to.setFailedReferenceTaskNames( from.getFailedReferenceTaskNames() ); to.setExternalInputPayloadStoragePath( from.getExternalInputPayloadStoragePath() ); to.setExternalOutputPayloadStoragePath( from.getExternalOutputPayloadStoragePath() ); to.setPriority( from.getPriority() ); to.setFailedTaskNames( from.getFailedTaskNamesList().stream().collect(Collectors.toCollection(HashSet::new)) ); return to; } public WorkflowTaskPb.WorkflowTask toProto(WorkflowTask from) { WorkflowTaskPb.WorkflowTask.Builder to = WorkflowTaskPb.WorkflowTask.newBuilder(); if (from.getName() != null) { to.setName( from.getName() ); } if (from.getTaskReferenceName() != null) { to.setTaskReferenceName( from.getTaskReferenceName() ); } if (from.getDescription() != null) { to.setDescription( from.getDescription() ); } for (Map.Entry<String, Object> pair : from.getInputParameters().entrySet()) { to.putInputParameters( pair.getKey(), toProto( pair.getValue() ) ); } if (from.getType() != null) { to.setType( from.getType() ); } if (from.getDynamicTaskNameParam() != null) { to.setDynamicTaskNameParam( from.getDynamicTaskNameParam() ); } if (from.getCaseValueParam() != null) { to.setCaseValueParam( from.getCaseValueParam() ); } if 
(from.getCaseExpression() != null) { to.setCaseExpression( from.getCaseExpression() ); } if (from.getScriptExpression() != null) { to.setScriptExpression( from.getScriptExpression() ); } for (Map.Entry<String, List<WorkflowTask>> pair : from.getDecisionCases().entrySet()) { to.putDecisionCases( pair.getKey(), toProto( pair.getValue() ) ); } if (from.getDynamicForkTasksParam() != null) { to.setDynamicForkTasksParam( from.getDynamicForkTasksParam() ); } if (from.getDynamicForkTasksInputParamName() != null) { to.setDynamicForkTasksInputParamName( from.getDynamicForkTasksInputParamName() ); } for (WorkflowTask elem : from.getDefaultCase()) { to.addDefaultCase( toProto(elem) ); } for (List<WorkflowTask> elem : from.getForkTasks()) { to.addForkTasks( toProto(elem) ); } to.setStartDelay( from.getStartDelay() ); if (from.getSubWorkflowParam() != null) { to.setSubWorkflowParam( toProto( from.getSubWorkflowParam() ) ); } to.addAllJoinOn( from.getJoinOn() ); if (from.getSink() != null) { to.setSink( from.getSink() ); } to.setOptional( from.isOptional() ); if (from.getTaskDefinition() != null) { to.setTaskDefinition( toProto( from.getTaskDefinition() ) ); } if (from.isRateLimited() != null) { to.setRateLimited( from.isRateLimited() ); } to.addAllDefaultExclusiveJoinTask( from.getDefaultExclusiveJoinTask() ); if (from.isAsyncComplete() != null) { to.setAsyncComplete( from.isAsyncComplete() ); } if (from.getLoopCondition() != null) { to.setLoopCondition( from.getLoopCondition() ); } for (WorkflowTask elem : from.getLoopOver()) { to.addLoopOver( toProto(elem) ); } if (from.getRetryCount() != null) { to.setRetryCount( from.getRetryCount() ); } if (from.getEvaluatorType() != null) { to.setEvaluatorType( from.getEvaluatorType() ); } if (from.getExpression() != null) { to.setExpression( from.getExpression() ); } return to.build(); } public WorkflowTask fromProto(WorkflowTaskPb.WorkflowTask from) { WorkflowTask to = new WorkflowTask(); to.setName( from.getName() ); 
to.setTaskReferenceName( from.getTaskReferenceName() ); to.setDescription( from.getDescription() ); Map<String, Object> inputParametersMap = new HashMap<String, Object>(); for (Map.Entry<String, Value> pair : from.getInputParametersMap().entrySet()) { inputParametersMap.put( pair.getKey(), fromProto( pair.getValue() ) ); } to.setInputParameters(inputParametersMap); to.setType( from.getType() ); to.setDynamicTaskNameParam( from.getDynamicTaskNameParam() ); to.setCaseValueParam( from.getCaseValueParam() ); to.setCaseExpression( from.getCaseExpression() ); to.setScriptExpression( from.getScriptExpression() ); Map<String, List<WorkflowTask>> decisionCasesMap = new HashMap<String, List<WorkflowTask>>(); for (Map.Entry<String, WorkflowTaskPb.WorkflowTask.WorkflowTaskList> pair : from.getDecisionCasesMap().entrySet()) { decisionCasesMap.put( pair.getKey(), fromProto( pair.getValue() ) ); } to.setDecisionCases(decisionCasesMap); to.setDynamicForkTasksParam( from.getDynamicForkTasksParam() ); to.setDynamicForkTasksInputParamName( from.getDynamicForkTasksInputParamName() ); to.setDefaultCase( from.getDefaultCaseList().stream().map(this::fromProto).collect(Collectors.toCollection(ArrayList::new)) ); to.setForkTasks( from.getForkTasksList().stream().map(this::fromProto).collect(Collectors.toCollection(ArrayList::new)) ); to.setStartDelay( from.getStartDelay() ); if (from.hasSubWorkflowParam()) { to.setSubWorkflowParam( fromProto( from.getSubWorkflowParam() ) ); } to.setJoinOn( from.getJoinOnList().stream().collect(Collectors.toCollection(ArrayList::new)) ); to.setSink( from.getSink() ); to.setOptional( from.getOptional() ); if (from.hasTaskDefinition()) { to.setTaskDefinition( fromProto( from.getTaskDefinition() ) ); } to.setRateLimited( from.getRateLimited() ); to.setDefaultExclusiveJoinTask( from.getDefaultExclusiveJoinTaskList().stream().collect(Collectors.toCollection(ArrayList::new)) ); to.setAsyncComplete( from.getAsyncComplete() ); to.setLoopCondition( 
from.getLoopCondition() ); to.setLoopOver( from.getLoopOverList().stream().map(this::fromProto).collect(Collectors.toCollection(ArrayList::new)) ); to.setRetryCount( from.getRetryCount() ); to.setEvaluatorType( from.getEvaluatorType() ); to.setExpression( from.getExpression() ); return to; } public abstract WorkflowTaskPb.WorkflowTask.WorkflowTaskList toProto(List<WorkflowTask> in); public abstract List<WorkflowTask> fromProto(WorkflowTaskPb.WorkflowTask.WorkflowTaskList in); public abstract Value toProto(Object in); public abstract Object fromProto(Value in); public abstract Any toProto(Any in); public abstract Any fromProto(Any in); }
6,798
0
Create_ds/conductor/grpc/src/main/java/com/netflix/conductor
Create_ds/conductor/grpc/src/main/java/com/netflix/conductor/grpc/ProtoMapper.java
/*
 * Copyright 2020 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.grpc;

import com.google.protobuf.Any;
import com.google.protobuf.ListValue;
import com.google.protobuf.NullValue;
import com.google.protobuf.Struct;
import com.google.protobuf.Value;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.proto.WorkflowTaskPb;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

/**
 * ProtoMapper implements conversion code between the internal models
 * used by Conductor (POJOs) and their corresponding equivalents in
 * the exposed Protocol Buffers interface.
 *
 * The vast majority of the mapping logic is implemented in the autogenerated
 * {@link AbstractProtoMapper} class. This class only implements the custom
 * logic for objects that need to be special cased in the API.
 */
public final class ProtoMapper extends AbstractProtoMapper {
    public static final ProtoMapper INSTANCE = new ProtoMapper();

    /**
     * Sentinel serialized on the wire in place of a {@code null} retry count,
     * because proto3 scalar fields cannot represent absence.
     */
    private static final int NO_RETRY_VALUE = -1;

    private ProtoMapper() {}

    /**
     * Convert an {@link Object} instance into its equivalent {@link Value}
     * ProtoBuf object.
     *
     * The {@link Value} ProtoBuf message is a variant type that can define any
     * value representable as a native JSON type. Consequently, this method expects
     * the given {@link Object} instance to be a Java object instance of a JSON-native
     * value, namely: null, {@link Boolean}, {@link Number}, {@link String},
     * {@link Map}, {@link List}.
     *
     * Any other values will cause a {@link ClassCastException} to be thrown.
     * See {@link ProtoMapper#fromProto(Value)} for the reverse mapping.
     *
     * @param val a Java object that can be represented natively in JSON
     * @return an instance of a {@link Value} ProtoBuf message
     * @throws ClassCastException if {@code val} is not a JSON-representable type
     */
    @Override
    public Value toProto(Object val) {
        Value.Builder builder = Value.newBuilder();
        if (val == null) {
            builder.setNullValue(NullValue.NULL_VALUE);
        } else if (val instanceof Boolean) {
            builder.setBoolValue((Boolean) val);
        } else if (val instanceof Number) {
            // Accept any Number, not just Double: JSON deserializers (e.g. Jackson) commonly
            // produce Integer/Long for whole numbers, which previously threw ClassCastException.
            // protobuf's Value stores every number as a double, matching JSON number semantics.
            builder.setNumberValue(((Number) val).doubleValue());
        } else if (val instanceof String) {
            builder.setStringValue((String) val);
        } else if (val instanceof Map) {
            // Unchecked by necessity: the runtime Map's type parameters are erased. The keys
            // are expected to be Strings per the JSON object model — TODO confirm at call sites.
            @SuppressWarnings("unchecked")
            Map<String, Object> map = (Map<String, Object>) val;
            Struct.Builder struct = Struct.newBuilder();
            for (Map.Entry<String, Object> pair : map.entrySet()) {
                struct.putFields(pair.getKey(), toProto(pair.getValue()));
            }
            builder.setStructValue(struct.build());
        } else if (val instanceof List) {
            ListValue.Builder list = ListValue.newBuilder();
            for (Object obj : (List<?>) val) {
                list.addValues(toProto(obj));
            }
            builder.setListValue(list.build());
        } else {
            throw new ClassCastException("cannot map to Value type: " + val);
        }
        return builder.build();
    }

    /**
     * Convert a ProtoBuf {@link Value} message into its native Java object
     * equivalent.
     *
     * See {@link ProtoMapper#toProto(Object)} for the reverse mapping and the
     * possible values that can be returned from this method. Numbers always come
     * back as {@link Double}, since protobuf {@link Value} stores a single
     * double-precision number kind.
     *
     * @param any an instance of a ProtoBuf {@link Value} message
     * @return a native Java object representing the value
     * @throws ClassCastException if the {@link Value} has no kind set
     */
    @Override
    public Object fromProto(Value any) {
        switch (any.getKindCase()) {
            case NULL_VALUE:
                return null;
            case BOOL_VALUE:
                return any.getBoolValue();
            case NUMBER_VALUE:
                return any.getNumberValue();
            case STRING_VALUE:
                return any.getStringValue();
            case STRUCT_VALUE:
                Struct struct = any.getStructValue();
                Map<String, Object> map = new HashMap<>();
                for (Map.Entry<String, Value> pair : struct.getFieldsMap().entrySet()) {
                    map.put(pair.getKey(), fromProto(pair.getValue()));
                }
                return map;
            case LIST_VALUE:
                List<Object> list = new ArrayList<>();
                for (Value val : any.getListValue().getValuesList()) {
                    list.add(fromProto(val));
                }
                return list;
            default:
                throw new ClassCastException("unset Value element: " + any);
        }
    }

    /**
     * Convert a WorkflowTaskList message wrapper into a {@link List} instance
     * with its contents.
     *
     * @param list an instance of a ProtoBuf message
     * @return a list with the contents of the message
     */
    @Override
    public List<WorkflowTask> fromProto(WorkflowTaskPb.WorkflowTask.WorkflowTaskList list) {
        return list.getTasksList().stream().map(this::fromProto).collect(Collectors.toList());
    }

    /**
     * Convert a {@link WorkflowTask} POJO into its ProtoBuf message, encoding a
     * {@code null} retry count as {@link #NO_RETRY_VALUE} (proto3 scalars cannot
     * be absent).
     *
     * @param from the POJO to convert
     * @return the equivalent ProtoBuf message
     */
    @Override
    public WorkflowTaskPb.WorkflowTask toProto(final WorkflowTask from) {
        final WorkflowTaskPb.WorkflowTask.Builder to =
                WorkflowTaskPb.WorkflowTask.newBuilder(super.toProto(from));
        if (from.getRetryCount() == null) {
            to.setRetryCount(NO_RETRY_VALUE);
        }
        return to.build();
    }

    /**
     * Convert a ProtoBuf WorkflowTask message into its POJO, decoding the
     * {@link #NO_RETRY_VALUE} sentinel back to a {@code null} retry count.
     *
     * @param from the ProtoBuf message to convert
     * @return the equivalent POJO
     */
    @Override
    public WorkflowTask fromProto(final WorkflowTaskPb.WorkflowTask from) {
        final WorkflowTask workflowTask = super.fromProto(from);
        if (from.getRetryCount() == NO_RETRY_VALUE) {
            workflowTask.setRetryCount(null);
        }
        return workflowTask;
    }

    /**
     * Convert a list of {@link WorkflowTask} instances into a ProtoBuf wrapper object.
     *
     * @param list a list of {@link WorkflowTask} instances
     * @return a ProtoBuf message wrapping the contents of the list
     */
    @Override
    public WorkflowTaskPb.WorkflowTask.WorkflowTaskList toProto(List<WorkflowTask> list) {
        return WorkflowTaskPb.WorkflowTask.WorkflowTaskList.newBuilder()
                .addAllTasks(list.stream().map(this::toProto)::iterator)
                .build();
    }

    /** {@link Any} payloads are passed through opaquely in both directions. */
    @Override
    public Any toProto(Any in) {
        return in;
    }

    /** {@link Any} payloads are passed through opaquely in both directions. */
    @Override
    public Any fromProto(Any in) {
        return in;
    }
}
6,799