index
int64
0
0
repo_id
stringlengths
26
205
file_path
stringlengths
51
246
content
stringlengths
8
433k
__index_level_0__
int64
0
10k
0
Create_ds/conductor/common/src/main/java/com/netflix/conductor/common
Create_ds/conductor/common/src/main/java/com/netflix/conductor/common/run/SearchResult.java
/*
 * Copyright 2020 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.common.run;

import java.util.List;

/**
 * Container for one page of a search response: the total number of documents
 * matching the query, together with the page of results actually returned.
 *
 * @param <T> type of each result entry
 */
public class SearchResult<T> {

    private long totalHits;
    private List<T> results;

    /** No-arg constructor for serialization frameworks. */
    public SearchResult() {}

    /**
     * @param totalHits total number of documents matching the query
     * @param results the page of results returned
     */
    public SearchResult(long totalHits, List<T> results) {
        this.totalHits = totalHits;
        this.results = results;
    }

    /**
     * @return the total number of matching documents
     */
    public long getTotalHits() {
        return totalHits;
    }

    /**
     * @param totalHits the total hit count to set
     */
    public void setTotalHits(long totalHits) {
        this.totalHits = totalHits;
    }

    /**
     * @return the returned page of results
     */
    public List<T> getResults() {
        return results;
    }

    /**
     * @param results the page of results to set
     */
    public void setResults(List<T> results) {
        this.results = results;
    }
}
6,900
0
Create_ds/conductor/common/src/main/java/com/netflix/conductor/common
Create_ds/conductor/common/src/main/java/com/netflix/conductor/common/run/WorkflowTestRequest.java
/*
 * Copyright 2023 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.common.run;

import java.util.HashMap;
import java.util.List;
import java.util.Map;

import com.netflix.conductor.common.metadata.tasks.TaskResult;
import com.netflix.conductor.common.metadata.workflow.StartWorkflowRequest;

/**
 * A {@link StartWorkflowRequest} augmented with mock data so a workflow can be
 * unit-tested without executing its real tasks or sub-workflows.
 */
public class WorkflowTestRequest extends StartWorkflowRequest {

    /** Task reference name -&gt; ordered list of mock outputs for that task. */
    private Map<String, List<TaskMock>> taskRefToMockOutput = new HashMap<>();

    /** Task reference name -&gt; test request for the sub-workflow started by that task. */
    private Map<String, WorkflowTestRequest> subWorkflowTestRequest = new HashMap<>();

    public Map<String, List<TaskMock>> getTaskRefToMockOutput() {
        return taskRefToMockOutput;
    }

    public void setTaskRefToMockOutput(Map<String, List<TaskMock>> taskRefToMockOutput) {
        this.taskRefToMockOutput = taskRefToMockOutput;
    }

    public Map<String, WorkflowTestRequest> getSubWorkflowTestRequest() {
        return subWorkflowTestRequest;
    }

    public void setSubWorkflowTestRequest(Map<String, WorkflowTestRequest> subWorkflowTestRequest) {
        this.subWorkflowTestRequest = subWorkflowTestRequest;
    }

    /** Mocked result of a single task execution. */
    public static class TaskMock {

        // Status reported for the mocked execution; defaults to COMPLETED.
        private TaskResult.Status status = TaskResult.Status.COMPLETED;

        // Output payload reported for the mocked execution.
        private Map<String, Object> output;

        // Simulated execution time in millis — useful for simulating timeout conditions.
        private long executionTime;

        // Simulated wait time in the queue, in millis.
        private long queueWaitTime;

        public TaskMock() {}

        public TaskMock(TaskResult.Status status, Map<String, Object> output) {
            this.status = status;
            this.output = output;
        }

        public TaskResult.Status getStatus() {
            return status;
        }

        public void setStatus(TaskResult.Status status) {
            this.status = status;
        }

        public Map<String, Object> getOutput() {
            return output;
        }

        public void setOutput(Map<String, Object> output) {
            this.output = output;
        }

        public long getExecutionTime() {
            return executionTime;
        }

        public void setExecutionTime(long executionTime) {
            this.executionTime = executionTime;
        }

        public long getQueueWaitTime() {
            return queueWaitTime;
        }

        public void setQueueWaitTime(long queueWaitTime) {
            this.queueWaitTime = queueWaitTime;
        }
    }
}
6,901
0
Create_ds/conductor/common/src/main/java/com/netflix/conductor/common
Create_ds/conductor/common/src/main/java/com/netflix/conductor/common/run/TaskSummary.java
/*
 * Copyright 2020 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.common.run;

import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Objects;
import java.util.TimeZone;

import org.apache.commons.lang3.StringUtils;

import com.netflix.conductor.annotations.protogen.ProtoField;
import com.netflix.conductor.annotations.protogen.ProtoMessage;

import com.netflix.conductor.common.metadata.tasks.Task;
import com.netflix.conductor.common.metadata.tasks.Task.Status;
import com.netflix.conductor.common.utils.SummaryUtil;

/**
 * Flattened summary of a {@link Task}, with timestamps rendered as GMT strings
 * and input/output serialized to strings, suitable for indexing/search.
 */
@ProtoMessage
public class TaskSummary {

    /** The time should be stored as GMT */
    private static final TimeZone GMT = TimeZone.getTimeZone("GMT");

    @ProtoField(id = 1)
    private String workflowId;

    @ProtoField(id = 2)
    private String workflowType;

    @ProtoField(id = 3)
    private String correlationId;

    @ProtoField(id = 4)
    private String scheduledTime;

    @ProtoField(id = 5)
    private String startTime;

    @ProtoField(id = 6)
    private String updateTime;

    @ProtoField(id = 7)
    private String endTime;

    @ProtoField(id = 8)
    private Task.Status status;

    @ProtoField(id = 9)
    private String reasonForIncompletion;

    @ProtoField(id = 10)
    private long executionTime;

    @ProtoField(id = 11)
    private long queueWaitTime;

    @ProtoField(id = 12)
    private String taskDefName;

    @ProtoField(id = 13)
    private String taskType;

    @ProtoField(id = 14)
    private String input;

    @ProtoField(id = 15)
    private String output;

    @ProtoField(id = 16)
    private String taskId;

    @ProtoField(id = 17)
    private String externalInputPayloadStoragePath;

    @ProtoField(id = 18)
    private String externalOutputPayloadStoragePath;

    @ProtoField(id = 19)
    private int workflowPriority;

    @ProtoField(id = 20)
    private String domain;

    /** No-arg constructor for serialization frameworks. */
    public TaskSummary() {}

    /**
     * Builds a summary from a task, formatting all timestamps in GMT.
     *
     * @param task the task to summarize
     */
    public TaskSummary(Task task) {
        // SimpleDateFormat is not thread-safe; a fresh instance per call is intentional.
        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'");
        sdf.setTimeZone(GMT);

        this.taskId = task.getTaskId();
        this.taskDefName = task.getTaskDefName();
        this.taskType = task.getTaskType();
        this.workflowId = task.getWorkflowInstanceId();
        this.workflowType = task.getWorkflowType();
        this.workflowPriority = task.getWorkflowPriority();
        this.correlationId = task.getCorrelationId();
        this.scheduledTime = sdf.format(new Date(task.getScheduledTime()));
        this.startTime = sdf.format(new Date(task.getStartTime()));
        this.updateTime = sdf.format(new Date(task.getUpdateTime()));
        this.endTime = sdf.format(new Date(task.getEndTime()));
        this.status = task.getStatus();
        this.reasonForIncompletion = task.getReasonForIncompletion();
        this.queueWaitTime = task.getQueueWaitTime();
        this.domain = task.getDomain();
        if (task.getInputData() != null) {
            this.input = SummaryUtil.serializeInputOutput(task.getInputData());
        }
        if (task.getOutputData() != null) {
            this.output = SummaryUtil.serializeInputOutput(task.getOutputData());
        }
        // Execution time is only meaningful once the task has ended.
        if (task.getEndTime() > 0) {
            this.executionTime = task.getEndTime() - task.getStartTime();
        }
        if (StringUtils.isNotBlank(task.getExternalInputPayloadStoragePath())) {
            this.externalInputPayloadStoragePath = task.getExternalInputPayloadStoragePath();
        }
        if (StringUtils.isNotBlank(task.getExternalOutputPayloadStoragePath())) {
            this.externalOutputPayloadStoragePath = task.getExternalOutputPayloadStoragePath();
        }
    }

    /**
     * @return the workflowId
     */
    public String getWorkflowId() {
        return workflowId;
    }

    /**
     * @param workflowId the workflowId to set
     */
    public void setWorkflowId(String workflowId) {
        this.workflowId = workflowId;
    }

    /**
     * @return the workflowType
     */
    public String getWorkflowType() {
        return workflowType;
    }

    /**
     * @param workflowType the workflowType to set
     */
    public void setWorkflowType(String workflowType) {
        this.workflowType = workflowType;
    }

    /**
     * @return the correlationId
     */
    public String getCorrelationId() {
        return correlationId;
    }

    /**
     * @param correlationId the correlationId to set
     */
    public void setCorrelationId(String correlationId) {
        this.correlationId = correlationId;
    }

    /**
     * @return the scheduledTime
     */
    public String getScheduledTime() {
        return scheduledTime;
    }

    /**
     * @param scheduledTime the scheduledTime to set
     */
    public void setScheduledTime(String scheduledTime) {
        this.scheduledTime = scheduledTime;
    }

    /**
     * @return the startTime
     */
    public String getStartTime() {
        return startTime;
    }

    /**
     * @param startTime the startTime to set
     */
    public void setStartTime(String startTime) {
        this.startTime = startTime;
    }

    /**
     * @return the updateTime
     */
    public String getUpdateTime() {
        return updateTime;
    }

    /**
     * @param updateTime the updateTime to set
     */
    public void setUpdateTime(String updateTime) {
        this.updateTime = updateTime;
    }

    /**
     * @return the endTime
     */
    public String getEndTime() {
        return endTime;
    }

    /**
     * @param endTime the endTime to set
     */
    public void setEndTime(String endTime) {
        this.endTime = endTime;
    }

    /**
     * @return the status
     */
    public Status getStatus() {
        return status;
    }

    /**
     * @param status the status to set
     */
    public void setStatus(Status status) {
        this.status = status;
    }

    /**
     * @return the reasonForIncompletion
     */
    public String getReasonForIncompletion() {
        return reasonForIncompletion;
    }

    /**
     * @param reasonForIncompletion the reasonForIncompletion to set
     */
    public void setReasonForIncompletion(String reasonForIncompletion) {
        this.reasonForIncompletion = reasonForIncompletion;
    }

    /**
     * @return the executionTime
     */
    public long getExecutionTime() {
        return executionTime;
    }

    /**
     * @param executionTime the executionTime to set
     */
    public void setExecutionTime(long executionTime) {
        this.executionTime = executionTime;
    }

    /**
     * @return the queueWaitTime
     */
    public long getQueueWaitTime() {
        return queueWaitTime;
    }

    /**
     * @param queueWaitTime the queueWaitTime to set
     */
    public void setQueueWaitTime(long queueWaitTime) {
        this.queueWaitTime = queueWaitTime;
    }

    /**
     * @return the taskDefName
     */
    public String getTaskDefName() {
        return taskDefName;
    }

    /**
     * @param taskDefName the taskDefName to set
     */
    public void setTaskDefName(String taskDefName) {
        this.taskDefName = taskDefName;
    }

    /**
     * @return the taskType
     */
    public String getTaskType() {
        return taskType;
    }

    /**
     * @param taskType the taskType to set
     */
    public void setTaskType(String taskType) {
        this.taskType = taskType;
    }

    /**
     * @return input to the task
     */
    public String getInput() {
        return input;
    }

    /**
     * @param input input to the task
     */
    public void setInput(String input) {
        this.input = input;
    }

    /**
     * @return output of the task
     */
    public String getOutput() {
        return output;
    }

    /**
     * @param output Task output
     */
    public void setOutput(String output) {
        this.output = output;
    }

    /**
     * @return the taskId
     */
    public String getTaskId() {
        return taskId;
    }

    /**
     * @param taskId the taskId to set
     */
    public void setTaskId(String taskId) {
        this.taskId = taskId;
    }

    /**
     * @return the external storage path for the task input payload
     */
    public String getExternalInputPayloadStoragePath() {
        return externalInputPayloadStoragePath;
    }

    /**
     * @param externalInputPayloadStoragePath the external storage path where the task input payload
     *     is stored
     */
    public void setExternalInputPayloadStoragePath(String externalInputPayloadStoragePath) {
        this.externalInputPayloadStoragePath = externalInputPayloadStoragePath;
    }

    /**
     * @return the external storage path for the task output payload
     */
    public String getExternalOutputPayloadStoragePath() {
        return externalOutputPayloadStoragePath;
    }

    /**
     * @param externalOutputPayloadStoragePath the external storage path where the task output
     *     payload is stored
     */
    public void setExternalOutputPayloadStoragePath(String externalOutputPayloadStoragePath) {
        this.externalOutputPayloadStoragePath = externalOutputPayloadStoragePath;
    }

    /**
     * @return the priority defined on workflow
     */
    public int getWorkflowPriority() {
        return workflowPriority;
    }

    /**
     * @param workflowPriority Priority defined for workflow
     */
    public void setWorkflowPriority(int workflowPriority) {
        this.workflowPriority = workflowPriority;
    }

    /**
     * @return the domain that the task was scheduled in
     */
    public String getDomain() {
        return domain;
    }

    /**
     * @param domain The domain that the task was scheduled in
     */
    public void setDomain(String domain) {
        this.domain = domain;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        TaskSummary that = (TaskSummary) o;
        // All reference fields are compared with Objects.equals: the previous direct
        // .equals() calls on workflowId/workflowType/scheduledTime/taskType/taskId
        // threw NullPointerException when those fields were null (e.g. on a
        // default-constructed instance), violating the equals contract.
        return getExecutionTime() == that.getExecutionTime()
                && getQueueWaitTime() == that.getQueueWaitTime()
                && getWorkflowPriority() == that.getWorkflowPriority()
                && Objects.equals(getWorkflowId(), that.getWorkflowId())
                && Objects.equals(getWorkflowType(), that.getWorkflowType())
                && Objects.equals(getCorrelationId(), that.getCorrelationId())
                && Objects.equals(getScheduledTime(), that.getScheduledTime())
                && Objects.equals(getStartTime(), that.getStartTime())
                && Objects.equals(getUpdateTime(), that.getUpdateTime())
                && Objects.equals(getEndTime(), that.getEndTime())
                && getStatus() == that.getStatus()
                && Objects.equals(getReasonForIncompletion(), that.getReasonForIncompletion())
                && Objects.equals(getTaskDefName(), that.getTaskDefName())
                && Objects.equals(getTaskType(), that.getTaskType())
                && Objects.equals(getTaskId(), that.getTaskId())
                && Objects.equals(getDomain(), that.getDomain());
    }

    @Override
    public int hashCode() {
        return Objects.hash(
                getWorkflowId(),
                getWorkflowType(),
                getCorrelationId(),
                getScheduledTime(),
                getStartTime(),
                getUpdateTime(),
                getEndTime(),
                getStatus(),
                getReasonForIncompletion(),
                getExecutionTime(),
                getQueueWaitTime(),
                getTaskDefName(),
                getTaskType(),
                getTaskId(),
                getWorkflowPriority(),
                getDomain());
    }
}
6,902
0
Create_ds/conductor/common/src/main/java/com/netflix/conductor/common
Create_ds/conductor/common/src/main/java/com/netflix/conductor/common/run/ExternalStorageLocation.java
/* * Copyright 2020 Netflix, Inc. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.common.run; /** * Describes the location where the JSON payload is stored in external storage. * * <p>The location is described using the following fields: * * <ul> * <li>uri: The uri of the json file in external storage. * <li>path: The relative path of the file in external storage. * </ul> */ public class ExternalStorageLocation { private String uri; private String path; public String getUri() { return uri; } public void setUri(String uri) { this.uri = uri; } public String getPath() { return path; } public void setPath(String path) { this.path = path; } @Override public String toString() { return "ExternalStorageLocation{" + "uri='" + uri + '\'' + ", path='" + path + '\'' + '}'; } }
6,903
0
Create_ds/conductor/common/src/main/java/com/netflix/conductor/common
Create_ds/conductor/common/src/main/java/com/netflix/conductor/common/run/WorkflowSummary.java
/*
 * Copyright 2020 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.common.run;

import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.HashSet;
import java.util.Objects;
import java.util.Set;
import java.util.TimeZone;
import java.util.stream.Collectors;

import org.apache.commons.lang3.StringUtils;

import com.netflix.conductor.annotations.protogen.ProtoField;
import com.netflix.conductor.annotations.protogen.ProtoMessage;

import com.netflix.conductor.common.run.Workflow.WorkflowStatus;
import com.netflix.conductor.common.utils.SummaryUtil;

/** Captures workflow summary info to be indexed in Elastic Search. */
@ProtoMessage
public class WorkflowSummary {

    /** The time should be stored as GMT */
    private static final TimeZone GMT = TimeZone.getTimeZone("GMT");

    @ProtoField(id = 1)
    private String workflowType;

    @ProtoField(id = 2)
    private int version;

    @ProtoField(id = 3)
    private String workflowId;

    @ProtoField(id = 4)
    private String correlationId;

    @ProtoField(id = 5)
    private String startTime;

    @ProtoField(id = 6)
    private String updateTime;

    @ProtoField(id = 7)
    private String endTime;

    @ProtoField(id = 8)
    private Workflow.WorkflowStatus status;

    @ProtoField(id = 9)
    private String input;

    @ProtoField(id = 10)
    private String output;

    @ProtoField(id = 11)
    private String reasonForIncompletion;

    @ProtoField(id = 12)
    private long executionTime;

    @ProtoField(id = 13)
    private String event;

    @ProtoField(id = 14)
    private String failedReferenceTaskNames = "";

    @ProtoField(id = 15)
    private String externalInputPayloadStoragePath;

    @ProtoField(id = 16)
    private String externalOutputPayloadStoragePath;

    @ProtoField(id = 17)
    private int priority;

    @ProtoField(id = 18)
    private Set<String> failedTaskNames = new HashSet<>();

    /** No-arg constructor for serialization frameworks. */
    public WorkflowSummary() {}

    /**
     * Builds a summary from a workflow, formatting all timestamps in GMT.
     *
     * @param workflow the workflow to summarize
     */
    public WorkflowSummary(Workflow workflow) {
        // SimpleDateFormat is not thread-safe; a fresh instance per call is intentional.
        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'");
        sdf.setTimeZone(GMT);

        this.workflowType = workflow.getWorkflowName();
        this.version = workflow.getWorkflowVersion();
        this.workflowId = workflow.getWorkflowId();
        this.priority = workflow.getPriority();
        this.correlationId = workflow.getCorrelationId();
        if (workflow.getCreateTime() != null) {
            this.startTime = sdf.format(new Date(workflow.getCreateTime()));
        }
        if (workflow.getEndTime() > 0) {
            this.endTime = sdf.format(new Date(workflow.getEndTime()));
        }
        if (workflow.getUpdateTime() != null) {
            this.updateTime = sdf.format(new Date(workflow.getUpdateTime()));
        }
        this.status = workflow.getStatus();
        if (workflow.getInput() != null) {
            this.input = SummaryUtil.serializeInputOutput(workflow.getInput());
        }
        if (workflow.getOutput() != null) {
            this.output = SummaryUtil.serializeInputOutput(workflow.getOutput());
        }
        this.reasonForIncompletion = workflow.getReasonForIncompletion();
        // Execution time is only meaningful once the workflow has ended.
        if (workflow.getEndTime() > 0) {
            this.executionTime = workflow.getEndTime() - workflow.getStartTime();
        }
        this.event = workflow.getEvent();
        this.failedReferenceTaskNames =
                workflow.getFailedReferenceTaskNames().stream().collect(Collectors.joining(","));
        this.failedTaskNames = workflow.getFailedTaskNames();
        if (StringUtils.isNotBlank(workflow.getExternalInputPayloadStoragePath())) {
            this.externalInputPayloadStoragePath = workflow.getExternalInputPayloadStoragePath();
        }
        if (StringUtils.isNotBlank(workflow.getExternalOutputPayloadStoragePath())) {
            this.externalOutputPayloadStoragePath = workflow.getExternalOutputPayloadStoragePath();
        }
    }

    /**
     * @return the workflowType
     */
    public String getWorkflowType() {
        return workflowType;
    }

    /**
     * @return the version
     */
    public int getVersion() {
        return version;
    }

    /**
     * @return the workflowId
     */
    public String getWorkflowId() {
        return workflowId;
    }

    /**
     * @return the correlationId
     */
    public String getCorrelationId() {
        return correlationId;
    }

    /**
     * @return the startTime
     */
    public String getStartTime() {
        return startTime;
    }

    /**
     * @return the endTime
     */
    public String getEndTime() {
        return endTime;
    }

    /**
     * @return the status
     */
    public WorkflowStatus getStatus() {
        return status;
    }

    /**
     * @return the input
     */
    public String getInput() {
        return input;
    }

    /** @return length of the serialized input, or 0 when there is no input */
    public long getInputSize() {
        return input != null ? input.length() : 0;
    }

    /**
     * @return the output
     */
    public String getOutput() {
        return output;
    }

    /** @return length of the serialized output, or 0 when there is no output */
    public long getOutputSize() {
        return output != null ? output.length() : 0;
    }

    /**
     * @return the reasonForIncompletion
     */
    public String getReasonForIncompletion() {
        return reasonForIncompletion;
    }

    /**
     * @return the executionTime
     */
    public long getExecutionTime() {
        return executionTime;
    }

    /**
     * @return the updateTime
     */
    public String getUpdateTime() {
        return updateTime;
    }

    /**
     * @return The event
     */
    public String getEvent() {
        return event;
    }

    /**
     * @param event The event
     */
    public void setEvent(String event) {
        this.event = event;
    }

    public String getFailedReferenceTaskNames() {
        return failedReferenceTaskNames;
    }

    public void setFailedReferenceTaskNames(String failedReferenceTaskNames) {
        this.failedReferenceTaskNames = failedReferenceTaskNames;
    }

    public Set<String> getFailedTaskNames() {
        return failedTaskNames;
    }

    public void setFailedTaskNames(Set<String> failedTaskNames) {
        this.failedTaskNames = failedTaskNames;
    }

    public void setWorkflowType(String workflowType) {
        this.workflowType = workflowType;
    }

    public void setVersion(int version) {
        this.version = version;
    }

    public void setWorkflowId(String workflowId) {
        this.workflowId = workflowId;
    }

    public void setCorrelationId(String correlationId) {
        this.correlationId = correlationId;
    }

    public void setStartTime(String startTime) {
        this.startTime = startTime;
    }

    public void setUpdateTime(String updateTime) {
        this.updateTime = updateTime;
    }

    public void setEndTime(String endTime) {
        this.endTime = endTime;
    }

    public void setStatus(WorkflowStatus status) {
        this.status = status;
    }

    public void setInput(String input) {
        this.input = input;
    }

    public void setOutput(String output) {
        this.output = output;
    }

    public void setReasonForIncompletion(String reasonForIncompletion) {
        this.reasonForIncompletion = reasonForIncompletion;
    }

    public void setExecutionTime(long executionTime) {
        this.executionTime = executionTime;
    }

    /**
     * @return the external storage path of the workflow input payload
     */
    public String getExternalInputPayloadStoragePath() {
        return externalInputPayloadStoragePath;
    }

    /**
     * @param externalInputPayloadStoragePath the external storage path where the workflow input
     *     payload is stored
     */
    public void setExternalInputPayloadStoragePath(String externalInputPayloadStoragePath) {
        this.externalInputPayloadStoragePath = externalInputPayloadStoragePath;
    }

    /**
     * @return the external storage path of the workflow output payload
     */
    public String getExternalOutputPayloadStoragePath() {
        return externalOutputPayloadStoragePath;
    }

    /**
     * @param externalOutputPayloadStoragePath the external storage path where the workflow output
     *     payload is stored
     */
    public void setExternalOutputPayloadStoragePath(String externalOutputPayloadStoragePath) {
        this.externalOutputPayloadStoragePath = externalOutputPayloadStoragePath;
    }

    /**
     * @return the priority to define on tasks
     */
    public int getPriority() {
        return priority;
    }

    /**
     * @param priority priority of tasks (between 0 and 99)
     */
    public void setPriority(int priority) {
        this.priority = priority;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        WorkflowSummary that = (WorkflowSummary) o;
        // workflowType/workflowId are compared with Objects.equals: the previous
        // direct .equals() calls threw NullPointerException when those fields were
        // null (e.g. on a default-constructed instance), violating the equals
        // contract. All other reference fields were already null-safe.
        return getVersion() == that.getVersion()
                && getExecutionTime() == that.getExecutionTime()
                && getPriority() == that.getPriority()
                && Objects.equals(getWorkflowType(), that.getWorkflowType())
                && Objects.equals(getWorkflowId(), that.getWorkflowId())
                && Objects.equals(getCorrelationId(), that.getCorrelationId())
                && StringUtils.equals(getStartTime(), that.getStartTime())
                && StringUtils.equals(getUpdateTime(), that.getUpdateTime())
                && StringUtils.equals(getEndTime(), that.getEndTime())
                && getStatus() == that.getStatus()
                && Objects.equals(getReasonForIncompletion(), that.getReasonForIncompletion())
                && Objects.equals(getEvent(), that.getEvent());
    }

    @Override
    public int hashCode() {
        return Objects.hash(
                getWorkflowType(),
                getVersion(),
                getWorkflowId(),
                getCorrelationId(),
                getStartTime(),
                getUpdateTime(),
                getEndTime(),
                getStatus(),
                getReasonForIncompletion(),
                getExecutionTime(),
                getEvent(),
                getPriority());
    }
}
6,904
0
Create_ds/conductor/common/src/main/java/com/netflix/conductor/common
Create_ds/conductor/common/src/main/java/com/netflix/conductor/common/run/Workflow.java
/* * Copyright 2022 Netflix, Inc. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.common.run; import java.util.*; import java.util.stream.Collectors; import javax.validation.constraints.Max; import javax.validation.constraints.Min; import org.apache.commons.lang3.StringUtils; import com.netflix.conductor.annotations.protogen.ProtoEnum; import com.netflix.conductor.annotations.protogen.ProtoField; import com.netflix.conductor.annotations.protogen.ProtoMessage; import com.netflix.conductor.common.metadata.Auditable; import com.netflix.conductor.common.metadata.tasks.Task; import com.netflix.conductor.common.metadata.workflow.WorkflowDef; @ProtoMessage public class Workflow extends Auditable { @ProtoEnum public enum WorkflowStatus { RUNNING(false, false), COMPLETED(true, true), FAILED(true, false), TIMED_OUT(true, false), TERMINATED(true, false), PAUSED(false, true); private final boolean terminal; private final boolean successful; WorkflowStatus(boolean terminal, boolean successful) { this.terminal = terminal; this.successful = successful; } public boolean isTerminal() { return terminal; } public boolean isSuccessful() { return successful; } } @ProtoField(id = 1) private WorkflowStatus status = WorkflowStatus.RUNNING; @ProtoField(id = 2) private long endTime; @ProtoField(id = 3) private String workflowId; @ProtoField(id = 4) private String parentWorkflowId; @ProtoField(id = 5) private String parentWorkflowTaskId; @ProtoField(id = 6) private 
List<Task> tasks = new LinkedList<>(); @ProtoField(id = 8) private Map<String, Object> input = new HashMap<>(); @ProtoField(id = 9) private Map<String, Object> output = new HashMap<>(); // ids 10,11 are reserved @ProtoField(id = 12) private String correlationId; @ProtoField(id = 13) private String reRunFromWorkflowId; @ProtoField(id = 14) private String reasonForIncompletion; // id 15 is reserved @ProtoField(id = 16) private String event; @ProtoField(id = 17) private Map<String, String> taskToDomain = new HashMap<>(); @ProtoField(id = 18) private Set<String> failedReferenceTaskNames = new HashSet<>(); @ProtoField(id = 19) private WorkflowDef workflowDefinition; @ProtoField(id = 20) private String externalInputPayloadStoragePath; @ProtoField(id = 21) private String externalOutputPayloadStoragePath; @ProtoField(id = 22) @Min(value = 0, message = "workflow priority: ${validatedValue} should be minimum {value}") @Max(value = 99, message = "workflow priority: ${validatedValue} should be maximum {value}") private int priority; @ProtoField(id = 23) private Map<String, Object> variables = new HashMap<>(); @ProtoField(id = 24) private long lastRetriedTime; @ProtoField(id = 25) private Set<String> failedTaskNames = new HashSet<>(); public Workflow() {} /** * @return the status */ public WorkflowStatus getStatus() { return status; } /** * @param status the status to set */ public void setStatus(WorkflowStatus status) { this.status = status; } /** * @return the startTime */ public long getStartTime() { return getCreateTime(); } /** * @param startTime the startTime to set */ public void setStartTime(long startTime) { this.setCreateTime(startTime); } /** * @return the endTime */ public long getEndTime() { return endTime; } /** * @param endTime the endTime to set */ public void setEndTime(long endTime) { this.endTime = endTime; } /** * @return the workflowId */ public String getWorkflowId() { return workflowId; } /** * @param workflowId the workflowId to set */ public void 
setWorkflowId(String workflowId) { this.workflowId = workflowId; } /** * @return the tasks which are scheduled, in progress or completed. */ public List<Task> getTasks() { return tasks; } /** * @param tasks the tasks to set */ public void setTasks(List<Task> tasks) { this.tasks = tasks; } /** * @return the input */ public Map<String, Object> getInput() { return input; } /** * @param input the input to set */ public void setInput(Map<String, Object> input) { if (input == null) { input = new HashMap<>(); } this.input = input; } /** * @return the task to domain map */ public Map<String, String> getTaskToDomain() { return taskToDomain; } /** * @param taskToDomain the task to domain map */ public void setTaskToDomain(Map<String, String> taskToDomain) { this.taskToDomain = taskToDomain; } /** * @return the output */ public Map<String, Object> getOutput() { return output; } /** * @param output the output to set */ public void setOutput(Map<String, Object> output) { if (output == null) { output = new HashMap<>(); } this.output = output; } /** * @return The correlation id used when starting the workflow */ public String getCorrelationId() { return correlationId; } /** * @param correlationId the correlation id */ public void setCorrelationId(String correlationId) { this.correlationId = correlationId; } public String getReRunFromWorkflowId() { return reRunFromWorkflowId; } public void setReRunFromWorkflowId(String reRunFromWorkflowId) { this.reRunFromWorkflowId = reRunFromWorkflowId; } public String getReasonForIncompletion() { return reasonForIncompletion; } public void setReasonForIncompletion(String reasonForIncompletion) { this.reasonForIncompletion = reasonForIncompletion; } /** * @return the parentWorkflowId */ public String getParentWorkflowId() { return parentWorkflowId; } /** * @param parentWorkflowId the parentWorkflowId to set */ public void setParentWorkflowId(String parentWorkflowId) { this.parentWorkflowId = parentWorkflowId; } /** * @return the 
parentWorkflowTaskId */ public String getParentWorkflowTaskId() { return parentWorkflowTaskId; } /** * @param parentWorkflowTaskId the parentWorkflowTaskId to set */ public void setParentWorkflowTaskId(String parentWorkflowTaskId) { this.parentWorkflowTaskId = parentWorkflowTaskId; } /** * @return Name of the event that started the workflow */ public String getEvent() { return event; } /** * @param event Name of the event that started the workflow */ public void setEvent(String event) { this.event = event; } public Set<String> getFailedReferenceTaskNames() { return failedReferenceTaskNames; } public void setFailedReferenceTaskNames(Set<String> failedReferenceTaskNames) { this.failedReferenceTaskNames = failedReferenceTaskNames; } public Set<String> getFailedTaskNames() { return failedTaskNames; } public void setFailedTaskNames(Set<String> failedTaskNames) { this.failedTaskNames = failedTaskNames; } public WorkflowDef getWorkflowDefinition() { return workflowDefinition; } public void setWorkflowDefinition(WorkflowDef workflowDefinition) { this.workflowDefinition = workflowDefinition; } /** * @return the external storage path of the workflow input payload */ public String getExternalInputPayloadStoragePath() { return externalInputPayloadStoragePath; } /** * @param externalInputPayloadStoragePath the external storage path where the workflow input * payload is stored */ public void setExternalInputPayloadStoragePath(String externalInputPayloadStoragePath) { this.externalInputPayloadStoragePath = externalInputPayloadStoragePath; } /** * @return the external storage path of the workflow output payload */ public String getExternalOutputPayloadStoragePath() { return externalOutputPayloadStoragePath; } /** * @return the priority to define on tasks */ public int getPriority() { return priority; } /** * @param priority priority of tasks (between 0 and 99) */ public void setPriority(int priority) { if (priority < 0 || priority > 99) { throw new 
IllegalArgumentException("priority MUST be between 0 and 99 (inclusive)"); } this.priority = priority; } /** * Convenience method for accessing the workflow definition name. * * @return the workflow definition name. */ public String getWorkflowName() { if (workflowDefinition == null) { throw new NullPointerException("Workflow definition is null"); } return workflowDefinition.getName(); } /** * Convenience method for accessing the workflow definition version. * * @return the workflow definition version. */ public int getWorkflowVersion() { if (workflowDefinition == null) { throw new NullPointerException("Workflow definition is null"); } return workflowDefinition.getVersion(); } /** * @param externalOutputPayloadStoragePath the external storage path where the workflow output * payload is stored */ public void setExternalOutputPayloadStoragePath(String externalOutputPayloadStoragePath) { this.externalOutputPayloadStoragePath = externalOutputPayloadStoragePath; } /** * @return the global workflow variables */ public Map<String, Object> getVariables() { return variables; } /** * @param variables the set of global workflow variables to set */ public void setVariables(Map<String, Object> variables) { this.variables = variables; } /** * Captures the last time the workflow was retried * * @return the last retried time of the workflow */ public long getLastRetriedTime() { return lastRetriedTime; } /** * @param lastRetriedTime time in milliseconds when the workflow is retried */ public void setLastRetriedTime(long lastRetriedTime) { this.lastRetriedTime = lastRetriedTime; } public boolean hasParent() { return StringUtils.isNotEmpty(parentWorkflowId); } public Task getTaskByRefName(String refName) { if (refName == null) { throw new RuntimeException( "refName passed is null. Check the workflow execution. 
For dynamic tasks, make sure referenceTaskName is set to a not null value"); } LinkedList<Task> found = new LinkedList<>(); for (Task t : tasks) { if (t.getReferenceTaskName() == null) { throw new RuntimeException( "Task " + t.getTaskDefName() + ", seq=" + t.getSeq() + " does not have reference name specified."); } if (t.getReferenceTaskName().equals(refName)) { found.add(t); } } if (found.isEmpty()) { return null; } return found.getLast(); } /** * @return a deep copy of the workflow instance */ public Workflow copy() { Workflow copy = new Workflow(); copy.setInput(input); copy.setOutput(output); copy.setStatus(status); copy.setWorkflowId(workflowId); copy.setParentWorkflowId(parentWorkflowId); copy.setParentWorkflowTaskId(parentWorkflowTaskId); copy.setReRunFromWorkflowId(reRunFromWorkflowId); copy.setCorrelationId(correlationId); copy.setEvent(event); copy.setReasonForIncompletion(reasonForIncompletion); copy.setWorkflowDefinition(workflowDefinition); copy.setPriority(priority); copy.setTasks(tasks.stream().map(Task::deepCopy).collect(Collectors.toList())); copy.setVariables(variables); copy.setEndTime(endTime); copy.setLastRetriedTime(lastRetriedTime); copy.setTaskToDomain(taskToDomain); copy.setFailedReferenceTaskNames(failedReferenceTaskNames); copy.setFailedTaskNames(failedTaskNames); copy.setExternalInputPayloadStoragePath(externalInputPayloadStoragePath); copy.setExternalOutputPayloadStoragePath(externalOutputPayloadStoragePath); return copy; } @Override public String toString() { String name = workflowDefinition != null ? workflowDefinition.getName() : null; Integer version = workflowDefinition != null ? workflowDefinition.getVersion() : null; return String.format("%s.%s/%s.%s", name, version, workflowId, status); } /** * A string representation of all relevant fields that identify this workflow. Intended for use * in log and other system generated messages. */ public String toShortString() { String name = workflowDefinition != null ? 
workflowDefinition.getName() : null; Integer version = workflowDefinition != null ? workflowDefinition.getVersion() : null; return String.format("%s.%s/%s", name, version, workflowId); } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } Workflow workflow = (Workflow) o; return getEndTime() == workflow.getEndTime() && getWorkflowVersion() == workflow.getWorkflowVersion() && getStatus() == workflow.getStatus() && Objects.equals(getWorkflowId(), workflow.getWorkflowId()) && Objects.equals(getParentWorkflowId(), workflow.getParentWorkflowId()) && Objects.equals(getParentWorkflowTaskId(), workflow.getParentWorkflowTaskId()) && Objects.equals(getTasks(), workflow.getTasks()) && Objects.equals(getInput(), workflow.getInput()) && Objects.equals(getOutput(), workflow.getOutput()) && Objects.equals(getWorkflowName(), workflow.getWorkflowName()) && Objects.equals(getCorrelationId(), workflow.getCorrelationId()) && Objects.equals(getReRunFromWorkflowId(), workflow.getReRunFromWorkflowId()) && Objects.equals(getReasonForIncompletion(), workflow.getReasonForIncompletion()) && Objects.equals(getEvent(), workflow.getEvent()) && Objects.equals(getTaskToDomain(), workflow.getTaskToDomain()) && Objects.equals( getFailedReferenceTaskNames(), workflow.getFailedReferenceTaskNames()) && Objects.equals(getFailedTaskNames(), workflow.getFailedTaskNames()) && Objects.equals( getExternalInputPayloadStoragePath(), workflow.getExternalInputPayloadStoragePath()) && Objects.equals( getExternalOutputPayloadStoragePath(), workflow.getExternalOutputPayloadStoragePath()) && Objects.equals(getPriority(), workflow.getPriority()) && Objects.equals(getWorkflowDefinition(), workflow.getWorkflowDefinition()) && Objects.equals(getVariables(), workflow.getVariables()) && Objects.equals(getLastRetriedTime(), workflow.getLastRetriedTime()); } @Override public int hashCode() { return Objects.hash( getStatus(), 
getEndTime(), getWorkflowId(), getParentWorkflowId(), getParentWorkflowTaskId(), getTasks(), getInput(), getOutput(), getWorkflowName(), getWorkflowVersion(), getCorrelationId(), getReRunFromWorkflowId(), getReasonForIncompletion(), getEvent(), getTaskToDomain(), getFailedReferenceTaskNames(), getFailedTaskNames(), getWorkflowDefinition(), getExternalInputPayloadStoragePath(), getExternalOutputPayloadStoragePath(), getPriority(), getVariables(), getLastRetriedTime()); } }
6,905
0
Create_ds/conductor/common/src/main/java/com/netflix/conductor/common
Create_ds/conductor/common/src/main/java/com/netflix/conductor/common/metadata/Auditable.java
/* * Copyright 2020 Netflix, Inc. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.common.metadata; public abstract class Auditable { private String ownerApp; private Long createTime; private Long updateTime; private String createdBy; private String updatedBy; /** * @return the ownerApp */ public String getOwnerApp() { return ownerApp; } /** * @param ownerApp the ownerApp to set */ public void setOwnerApp(String ownerApp) { this.ownerApp = ownerApp; } /** * @return the createTime */ public Long getCreateTime() { return createTime; } /** * @param createTime the createTime to set */ public void setCreateTime(Long createTime) { this.createTime = createTime; } /** * @return the updateTime */ public Long getUpdateTime() { return updateTime; } /** * @param updateTime the updateTime to set */ public void setUpdateTime(Long updateTime) { this.updateTime = updateTime; } /** * @return the createdBy */ public String getCreatedBy() { return createdBy; } /** * @param createdBy the createdBy to set */ public void setCreatedBy(String createdBy) { this.createdBy = createdBy; } /** * @return the updatedBy */ public String getUpdatedBy() { return updatedBy; } /** * @param updatedBy the updatedBy to set */ public void setUpdatedBy(String updatedBy) { this.updatedBy = updatedBy; } }
6,906
0
Create_ds/conductor/common/src/main/java/com/netflix/conductor/common
Create_ds/conductor/common/src/main/java/com/netflix/conductor/common/metadata/BaseDef.java
/* * Copyright 2022 Netflix, Inc. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.common.metadata; import java.util.Collections; import java.util.EnumMap; import java.util.Map; import com.netflix.conductor.common.metadata.acl.Permission; /** * A base class for {@link com.netflix.conductor.common.metadata.workflow.WorkflowDef} and {@link * com.netflix.conductor.common.metadata.tasks.TaskDef}. */ public abstract class BaseDef extends Auditable { private final Map<Permission, String> accessPolicy = new EnumMap<>(Permission.class); public void addPermission(Permission permission, String allowedAuthority) { this.accessPolicy.put(permission, allowedAuthority); } public void addPermissionIfAbsent(Permission permission, String allowedAuthority) { this.accessPolicy.putIfAbsent(permission, allowedAuthority); } public void removePermission(Permission permission) { this.accessPolicy.remove(permission); } public String getAllowedAuthority(Permission permission) { return this.accessPolicy.get(permission); } public void clearAccessPolicy() { this.accessPolicy.clear(); } public Map<Permission, String> getAccessPolicy() { return Collections.unmodifiableMap(this.accessPolicy); } public void setAccessPolicy(Map<Permission, String> accessPolicy) { this.accessPolicy.putAll(accessPolicy); } }
6,907
0
Create_ds/conductor/common/src/main/java/com/netflix/conductor/common/metadata
Create_ds/conductor/common/src/main/java/com/netflix/conductor/common/metadata/tasks/TaskDef.java
/* * Copyright 2021 Netflix, Inc. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.common.metadata.tasks; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Objects; import javax.validation.Valid; import javax.validation.constraints.Email; import javax.validation.constraints.Min; import javax.validation.constraints.NotEmpty; import javax.validation.constraints.NotNull; import com.netflix.conductor.annotations.protogen.ProtoEnum; import com.netflix.conductor.annotations.protogen.ProtoField; import com.netflix.conductor.annotations.protogen.ProtoMessage; import com.netflix.conductor.common.constraints.OwnerEmailMandatoryConstraint; import com.netflix.conductor.common.constraints.TaskTimeoutConstraint; import com.netflix.conductor.common.metadata.BaseDef; @ProtoMessage @TaskTimeoutConstraint @Valid public class TaskDef extends BaseDef { @ProtoEnum public enum TimeoutPolicy { RETRY, TIME_OUT_WF, ALERT_ONLY } @ProtoEnum public enum RetryLogic { FIXED, EXPONENTIAL_BACKOFF, LINEAR_BACKOFF } public static final int ONE_HOUR = 60 * 60; /** Unique name identifying the task. 
The name is unique across */ @NotEmpty(message = "TaskDef name cannot be null or empty") @ProtoField(id = 1) private String name; @ProtoField(id = 2) private String description; @ProtoField(id = 3) @Min(value = 0, message = "TaskDef retryCount: {value} must be >= 0") private int retryCount = 3; // Default @ProtoField(id = 4) @NotNull private long timeoutSeconds; @ProtoField(id = 5) private List<String> inputKeys = new ArrayList<>(); @ProtoField(id = 6) private List<String> outputKeys = new ArrayList<>(); @ProtoField(id = 7) private TimeoutPolicy timeoutPolicy = TimeoutPolicy.TIME_OUT_WF; @ProtoField(id = 8) private RetryLogic retryLogic = RetryLogic.FIXED; @ProtoField(id = 9) private int retryDelaySeconds = 60; @ProtoField(id = 10) @Min( value = 1, message = "TaskDef responseTimeoutSeconds: ${validatedValue} should be minimum {value} second") private long responseTimeoutSeconds = ONE_HOUR; @ProtoField(id = 11) private Integer concurrentExecLimit; @ProtoField(id = 12) private Map<String, Object> inputTemplate = new HashMap<>(); // This field is deprecated, do not use id 13. // @ProtoField(id = 13) // private Integer rateLimitPerSecond; @ProtoField(id = 14) private Integer rateLimitPerFrequency; @ProtoField(id = 15) private Integer rateLimitFrequencyInSeconds; @ProtoField(id = 16) private String isolationGroupId; @ProtoField(id = 17) private String executionNameSpace; @ProtoField(id = 18) @OwnerEmailMandatoryConstraint @Email(message = "ownerEmail should be valid email address") private String ownerEmail; @ProtoField(id = 19) @Min(value = 0, message = "TaskDef pollTimeoutSeconds: {value} must be >= 0") private Integer pollTimeoutSeconds; @ProtoField(id = 20) @Min(value = 1, message = "Backoff scale factor. 
Applicable for LINEAR_BACKOFF") private Integer backoffScaleFactor = 1; public TaskDef() {} public TaskDef(String name) { this.name = name; } public TaskDef(String name, String description) { this.name = name; this.description = description; } public TaskDef(String name, String description, int retryCount, long timeoutSeconds) { this.name = name; this.description = description; this.retryCount = retryCount; this.timeoutSeconds = timeoutSeconds; } public TaskDef( String name, String description, String ownerEmail, int retryCount, long timeoutSeconds, long responseTimeoutSeconds) { this.name = name; this.description = description; this.ownerEmail = ownerEmail; this.retryCount = retryCount; this.timeoutSeconds = timeoutSeconds; this.responseTimeoutSeconds = responseTimeoutSeconds; } /** * @return the name */ public String getName() { return name; } /** * @param name the name to set */ public void setName(String name) { this.name = name; } /** * @return the description */ public String getDescription() { return description; } /** * @param description the description to set */ public void setDescription(String description) { this.description = description; } /** * @return the retryCount */ public int getRetryCount() { return retryCount; } /** * @param retryCount the retryCount to set */ public void setRetryCount(int retryCount) { this.retryCount = retryCount; } /** * @return the timeoutSeconds */ public long getTimeoutSeconds() { return timeoutSeconds; } /** * @param timeoutSeconds the timeoutSeconds to set */ public void setTimeoutSeconds(long timeoutSeconds) { this.timeoutSeconds = timeoutSeconds; } /** * @return Returns the input keys */ public List<String> getInputKeys() { return inputKeys; } /** * @param inputKeys Set of keys that the task accepts in the input map */ public void setInputKeys(List<String> inputKeys) { this.inputKeys = inputKeys; } /** * @return Returns the output keys for the task when executed */ public List<String> getOutputKeys() { return 
outputKeys; } /** * @param outputKeys Sets the output keys */ public void setOutputKeys(List<String> outputKeys) { this.outputKeys = outputKeys; } /** * @return the timeoutPolicy */ public TimeoutPolicy getTimeoutPolicy() { return timeoutPolicy; } /** * @param timeoutPolicy the timeoutPolicy to set */ public void setTimeoutPolicy(TimeoutPolicy timeoutPolicy) { this.timeoutPolicy = timeoutPolicy; } /** * @return the retryLogic */ public RetryLogic getRetryLogic() { return retryLogic; } /** * @param retryLogic the retryLogic to set */ public void setRetryLogic(RetryLogic retryLogic) { this.retryLogic = retryLogic; } /** * @return the retryDelaySeconds */ public int getRetryDelaySeconds() { return retryDelaySeconds; } /** * @return the timeout for task to send response. After this timeout, the task will be re-queued */ public long getResponseTimeoutSeconds() { return responseTimeoutSeconds; } /** * @param responseTimeoutSeconds - timeout for task to send response. After this timeout, the * task will be re-queued */ public void setResponseTimeoutSeconds(long responseTimeoutSeconds) { this.responseTimeoutSeconds = responseTimeoutSeconds; } /** * @param retryDelaySeconds the retryDelaySeconds to set */ public void setRetryDelaySeconds(int retryDelaySeconds) { this.retryDelaySeconds = retryDelaySeconds; } /** * @return the inputTemplate */ public Map<String, Object> getInputTemplate() { return inputTemplate; } /** * @return rateLimitPerFrequency The max number of tasks that will be allowed to be executed per * rateLimitFrequencyInSeconds. */ public Integer getRateLimitPerFrequency() { return rateLimitPerFrequency == null ? 0 : rateLimitPerFrequency; } /** * @param rateLimitPerFrequency The max number of tasks that will be allowed to be executed per * rateLimitFrequencyInSeconds. 
Setting the value to 0 removes the rate limit */ public void setRateLimitPerFrequency(Integer rateLimitPerFrequency) { this.rateLimitPerFrequency = rateLimitPerFrequency; } /** * @return rateLimitFrequencyInSeconds: The time bucket that is used to rate limit tasks based * on {@link #getRateLimitPerFrequency()} If null or not set, then defaults to 1 second */ public Integer getRateLimitFrequencyInSeconds() { return rateLimitFrequencyInSeconds == null ? 1 : rateLimitFrequencyInSeconds; } /** * @param rateLimitFrequencyInSeconds: The time window/bucket for which the rate limit needs to * be applied. This will only have affect if {@link #getRateLimitPerFrequency()} is greater * than zero */ public void setRateLimitFrequencyInSeconds(Integer rateLimitFrequencyInSeconds) { this.rateLimitFrequencyInSeconds = rateLimitFrequencyInSeconds; } /** * @param concurrentExecLimit Limit of number of concurrent task that can be IN_PROGRESS at a * given time. Seting the value to 0 removes the limit. */ public void setConcurrentExecLimit(Integer concurrentExecLimit) { this.concurrentExecLimit = concurrentExecLimit; } /** * @return Limit of number of concurrent task that can be IN_PROGRESS at a given time */ public Integer getConcurrentExecLimit() { return concurrentExecLimit; } /** * @return concurrency limit */ public int concurrencyLimit() { return concurrentExecLimit == null ? 
0 : concurrentExecLimit; } /** * @param inputTemplate the inputTemplate to set */ public void setInputTemplate(Map<String, Object> inputTemplate) { this.inputTemplate = inputTemplate; } public String getIsolationGroupId() { return isolationGroupId; } public void setIsolationGroupId(String isolationGroupId) { this.isolationGroupId = isolationGroupId; } public String getExecutionNameSpace() { return executionNameSpace; } public void setExecutionNameSpace(String executionNameSpace) { this.executionNameSpace = executionNameSpace; } /** * @return the email of the owner of this task definition */ public String getOwnerEmail() { return ownerEmail; } /** * @param ownerEmail the owner email to set */ public void setOwnerEmail(String ownerEmail) { this.ownerEmail = ownerEmail; } /** * @param pollTimeoutSeconds the poll timeout to set */ public void setPollTimeoutSeconds(Integer pollTimeoutSeconds) { this.pollTimeoutSeconds = pollTimeoutSeconds; } /** * @return the poll timeout of this task definition */ public Integer getPollTimeoutSeconds() { return pollTimeoutSeconds; } /** * @param backoffScaleFactor the backoff rate to set */ public void setBackoffScaleFactor(Integer backoffScaleFactor) { this.backoffScaleFactor = backoffScaleFactor; } /** * @return the backoff rate of this task definition */ public Integer getBackoffScaleFactor() { return backoffScaleFactor; } @Override public String toString() { return name; } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } TaskDef taskDef = (TaskDef) o; return getRetryCount() == taskDef.getRetryCount() && getTimeoutSeconds() == taskDef.getTimeoutSeconds() && getRetryDelaySeconds() == taskDef.getRetryDelaySeconds() && getBackoffScaleFactor() == taskDef.getBackoffScaleFactor() && getResponseTimeoutSeconds() == taskDef.getResponseTimeoutSeconds() && Objects.equals(getName(), taskDef.getName()) && Objects.equals(getDescription(), 
taskDef.getDescription()) && Objects.equals(getInputKeys(), taskDef.getInputKeys()) && Objects.equals(getOutputKeys(), taskDef.getOutputKeys()) && getTimeoutPolicy() == taskDef.getTimeoutPolicy() && getRetryLogic() == taskDef.getRetryLogic() && Objects.equals(getConcurrentExecLimit(), taskDef.getConcurrentExecLimit()) && Objects.equals(getRateLimitPerFrequency(), taskDef.getRateLimitPerFrequency()) && Objects.equals(getInputTemplate(), taskDef.getInputTemplate()) && Objects.equals(getIsolationGroupId(), taskDef.getIsolationGroupId()) && Objects.equals(getExecutionNameSpace(), taskDef.getExecutionNameSpace()) && Objects.equals(getOwnerEmail(), taskDef.getOwnerEmail()); } @Override public int hashCode() { return Objects.hash( getName(), getDescription(), getRetryCount(), getTimeoutSeconds(), getInputKeys(), getOutputKeys(), getTimeoutPolicy(), getRetryLogic(), getRetryDelaySeconds(), getBackoffScaleFactor(), getResponseTimeoutSeconds(), getConcurrentExecLimit(), getRateLimitPerFrequency(), getInputTemplate(), getIsolationGroupId(), getExecutionNameSpace(), getOwnerEmail()); } }
6,908
0
Create_ds/conductor/common/src/main/java/com/netflix/conductor/common/metadata
Create_ds/conductor/common/src/main/java/com/netflix/conductor/common/metadata/tasks/PollData.java
/* * Copyright 2020 Netflix, Inc. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.common.metadata.tasks; import java.util.Objects; import com.netflix.conductor.annotations.protogen.ProtoField; import com.netflix.conductor.annotations.protogen.ProtoMessage; @ProtoMessage public class PollData { @ProtoField(id = 1) private String queueName; @ProtoField(id = 2) private String domain; @ProtoField(id = 3) private String workerId; @ProtoField(id = 4) private long lastPollTime; public PollData() { super(); } public PollData(String queueName, String domain, String workerId, long lastPollTime) { super(); this.queueName = queueName; this.domain = domain; this.workerId = workerId; this.lastPollTime = lastPollTime; } public String getQueueName() { return queueName; } public void setQueueName(String queueName) { this.queueName = queueName; } public String getDomain() { return domain; } public void setDomain(String domain) { this.domain = domain; } public String getWorkerId() { return workerId; } public void setWorkerId(String workerId) { this.workerId = workerId; } public long getLastPollTime() { return lastPollTime; } public void setLastPollTime(long lastPollTime) { this.lastPollTime = lastPollTime; } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } PollData pollData = (PollData) o; return getLastPollTime() == pollData.getLastPollTime() && 
Objects.equals(getQueueName(), pollData.getQueueName()) && Objects.equals(getDomain(), pollData.getDomain()) && Objects.equals(getWorkerId(), pollData.getWorkerId()); } @Override public int hashCode() { return Objects.hash(getQueueName(), getDomain(), getWorkerId(), getLastPollTime()); } @Override public String toString() { return "PollData{" + "queueName='" + queueName + '\'' + ", domain='" + domain + '\'' + ", workerId='" + workerId + '\'' + ", lastPollTime=" + lastPollTime + '}'; } }
6,909
0
Create_ds/conductor/common/src/main/java/com/netflix/conductor/common/metadata
Create_ds/conductor/common/src/main/java/com/netflix/conductor/common/metadata/tasks/TaskExecLog.java
/* * Copyright 2020 Netflix, Inc. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.common.metadata.tasks; import java.util.Objects; import com.netflix.conductor.annotations.protogen.ProtoField; import com.netflix.conductor.annotations.protogen.ProtoMessage; /** Model that represents the task's execution log. */ @ProtoMessage public class TaskExecLog { @ProtoField(id = 1) private String log; @ProtoField(id = 2) private String taskId; @ProtoField(id = 3) private long createdTime; public TaskExecLog() {} public TaskExecLog(String log) { this.log = log; this.createdTime = System.currentTimeMillis(); } /** * @return Task Exec Log */ public String getLog() { return log; } /** * @param log The Log */ public void setLog(String log) { this.log = log; } /** * @return the taskId */ public String getTaskId() { return taskId; } /** * @param taskId the taskId to set */ public void setTaskId(String taskId) { this.taskId = taskId; } /** * @return the createdTime */ public long getCreatedTime() { return createdTime; } /** * @param createdTime the createdTime to set */ public void setCreatedTime(long createdTime) { this.createdTime = createdTime; } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } TaskExecLog that = (TaskExecLog) o; return createdTime == that.createdTime && Objects.equals(log, that.log) && Objects.equals(taskId, that.taskId); } @Override public int 
hashCode() { return Objects.hash(log, taskId, createdTime); } }
6,910
0
Create_ds/conductor/common/src/main/java/com/netflix/conductor/common/metadata
Create_ds/conductor/common/src/main/java/com/netflix/conductor/common/metadata/tasks/TaskType.java
/* * Copyright 2021 Netflix, Inc. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.common.metadata.tasks; import java.util.HashSet; import java.util.Set; import com.netflix.conductor.annotations.protogen.ProtoEnum; @ProtoEnum public enum TaskType { SIMPLE, DYNAMIC, FORK_JOIN, FORK_JOIN_DYNAMIC, DECISION, SWITCH, JOIN, DO_WHILE, SUB_WORKFLOW, START_WORKFLOW, EVENT, WAIT, HUMAN, USER_DEFINED, HTTP, LAMBDA, INLINE, EXCLUSIVE_JOIN, TERMINATE, KAFKA_PUBLISH, JSON_JQ_TRANSFORM, SET_VARIABLE, NOOP; /** * TaskType constants representing each of the possible enumeration values. Motivation: to not * have any hardcoded/inline strings used in the code. 
*/ public static final String TASK_TYPE_DECISION = "DECISION"; public static final String TASK_TYPE_SWITCH = "SWITCH"; public static final String TASK_TYPE_DYNAMIC = "DYNAMIC"; public static final String TASK_TYPE_JOIN = "JOIN"; public static final String TASK_TYPE_DO_WHILE = "DO_WHILE"; public static final String TASK_TYPE_FORK_JOIN_DYNAMIC = "FORK_JOIN_DYNAMIC"; public static final String TASK_TYPE_EVENT = "EVENT"; public static final String TASK_TYPE_WAIT = "WAIT"; public static final String TASK_TYPE_HUMAN = "HUMAN"; public static final String TASK_TYPE_SUB_WORKFLOW = "SUB_WORKFLOW"; public static final String TASK_TYPE_START_WORKFLOW = "START_WORKFLOW"; public static final String TASK_TYPE_FORK_JOIN = "FORK_JOIN"; public static final String TASK_TYPE_SIMPLE = "SIMPLE"; public static final String TASK_TYPE_HTTP = "HTTP"; public static final String TASK_TYPE_LAMBDA = "LAMBDA"; public static final String TASK_TYPE_INLINE = "INLINE"; public static final String TASK_TYPE_EXCLUSIVE_JOIN = "EXCLUSIVE_JOIN"; public static final String TASK_TYPE_TERMINATE = "TERMINATE"; public static final String TASK_TYPE_KAFKA_PUBLISH = "KAFKA_PUBLISH"; public static final String TASK_TYPE_JSON_JQ_TRANSFORM = "JSON_JQ_TRANSFORM"; public static final String TASK_TYPE_SET_VARIABLE = "SET_VARIABLE"; public static final String TASK_TYPE_FORK = "FORK"; public static final String TASK_TYPE_NOOP = "NOOP"; private static final Set<String> BUILT_IN_TASKS = new HashSet<>(); static { BUILT_IN_TASKS.add(TASK_TYPE_DECISION); BUILT_IN_TASKS.add(TASK_TYPE_SWITCH); BUILT_IN_TASKS.add(TASK_TYPE_FORK); BUILT_IN_TASKS.add(TASK_TYPE_JOIN); BUILT_IN_TASKS.add(TASK_TYPE_EXCLUSIVE_JOIN); BUILT_IN_TASKS.add(TASK_TYPE_DO_WHILE); } /** * Converts a task type string to {@link TaskType}. For an unknown string, the value is * defaulted to {@link TaskType#USER_DEFINED}. * * <p>NOTE: Use {@link Enum#valueOf(Class, String)} if the default of USER_DEFINED is not * necessary. * * @param taskType The task type string. 
* @return The {@link TaskType} enum. */ public static TaskType of(String taskType) { try { return TaskType.valueOf(taskType); } catch (IllegalArgumentException iae) { return TaskType.USER_DEFINED; } } public static boolean isBuiltIn(String taskType) { return BUILT_IN_TASKS.contains(taskType); } }
6,911
0
Create_ds/conductor/common/src/main/java/com/netflix/conductor/common/metadata
Create_ds/conductor/common/src/main/java/com/netflix/conductor/common/metadata/tasks/TaskResult.java
/*
 * Copyright 2022 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.common.metadata.tasks;

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CopyOnWriteArrayList;

import javax.validation.constraints.NotEmpty;

import org.apache.commons.lang3.StringUtils;

import com.netflix.conductor.annotations.protogen.ProtoEnum;
import com.netflix.conductor.annotations.protogen.ProtoField;
import com.netflix.conductor.annotations.protogen.ProtoMessage;

import com.google.protobuf.Any;
import io.swagger.v3.oas.annotations.Hidden;

/** Result of the task execution, reported by a worker back to the server. */
@ProtoMessage
public class TaskResult {

    @ProtoEnum
    public enum Status {
        IN_PROGRESS,
        FAILED,
        FAILED_WITH_TERMINAL_ERROR,
        COMPLETED
    }

    @NotEmpty(message = "Workflow Id cannot be null or empty")
    @ProtoField(id = 1)
    private String workflowInstanceId;

    @NotEmpty(message = "Task ID cannot be null or empty")
    @ProtoField(id = 2)
    private String taskId;

    @ProtoField(id = 3)
    private String reasonForIncompletion;

    @ProtoField(id = 4)
    private long callbackAfterSeconds;

    @ProtoField(id = 5)
    private String workerId;

    @ProtoField(id = 6)
    private Status status;

    @ProtoField(id = 7)
    private Map<String, Object> outputData = new HashMap<>();

    @ProtoField(id = 8)
    @Hidden
    private Any outputMessage;

    // CopyOnWriteArrayList so log() can be called safely while logs are iterated elsewhere
    private List<TaskExecLog> logs = new CopyOnWriteArrayList<>();

    private String externalOutputPayloadStoragePath;

    private String subWorkflowId;

    private boolean extendLease;

    /**
     * Builds a result pre-populated from an in-flight {@link Task}. Non-terminal/unsupported task
     * statuses are mapped onto the smaller {@link Status} enum: CANCELED, COMPLETED_WITH_ERRORS,
     * TIMED_OUT and SKIPPED become FAILED, and SCHEDULED becomes IN_PROGRESS; the remaining
     * statuses map by name.
     *
     * @param task the task whose state seeds this result
     */
    public TaskResult(Task task) {
        this.workflowInstanceId = task.getWorkflowInstanceId();
        this.taskId = task.getTaskId();
        this.reasonForIncompletion = task.getReasonForIncompletion();
        this.callbackAfterSeconds = task.getCallbackAfterSeconds();
        this.workerId = task.getWorkerId();
        this.outputData = task.getOutputData();
        this.externalOutputPayloadStoragePath = task.getExternalOutputPayloadStoragePath();
        this.subWorkflowId = task.getSubWorkflowId();
        switch (task.getStatus()) {
            case CANCELED:
            case COMPLETED_WITH_ERRORS:
            case TIMED_OUT:
            case SKIPPED:
                this.status = Status.FAILED;
                break;
            case SCHEDULED:
                this.status = Status.IN_PROGRESS;
                break;
            default:
                this.status = Status.valueOf(task.getStatus().name());
                break;
        }
    }

    public TaskResult() {}

    /**
     * @return Workflow instance id for which the task result is produced
     */
    public String getWorkflowInstanceId() {
        return workflowInstanceId;
    }

    public void setWorkflowInstanceId(String workflowInstanceId) {
        this.workflowInstanceId = workflowInstanceId;
    }

    public String getTaskId() {
        return taskId;
    }

    public void setTaskId(String taskId) {
        this.taskId = taskId;
    }

    public String getReasonForIncompletion() {
        return reasonForIncompletion;
    }

    /**
     * @param reasonForIncompletion the reason for task failure/incompletion; silently truncated to
     *     500 characters
     */
    public void setReasonForIncompletion(String reasonForIncompletion) {
        this.reasonForIncompletion = StringUtils.substring(reasonForIncompletion, 0, 500);
    }

    public long getCallbackAfterSeconds() {
        return callbackAfterSeconds;
    }

    /**
     * When set to non-zero values, the task remains in the queue for the specified seconds before
     * sent back to the worker when polled. Useful for the long running task, where the task is
     * updated as IN_PROGRESS and should not be polled out of the queue for a specified amount of
     * time. (delayed queue implementation)
     *
     * @param callbackAfterSeconds Amount of time in seconds the task should be held in the queue
     *     before giving it to a polling worker.
     */
    public void setCallbackAfterSeconds(long callbackAfterSeconds) {
        this.callbackAfterSeconds = callbackAfterSeconds;
    }

    public String getWorkerId() {
        return workerId;
    }

    /**
     * @param workerId a free form string identifying the worker host. Could be hostname, IP Address
     *     or any other meaningful identifier that can help identify the host/process which executed
     *     the task, in case of troubleshooting.
     */
    public void setWorkerId(String workerId) {
        this.workerId = workerId;
    }

    /**
     * @return the status
     */
    public Status getStatus() {
        return status;
    }

    /**
     * @param status Status of the task
     *     <p><b>IN_PROGRESS</b>: Use this for long running tasks, indicating the task is still in
     *     progress and should be checked again at a later time. e.g. the worker checks the status
     *     of the job in the DB, while the job is being executed by another process.
     *     <p><b>FAILED, FAILED_WITH_TERMINAL_ERROR, COMPLETED</b>: Terminal statuses for the task.
     *     Use FAILED_WITH_TERMINAL_ERROR when you do not want the task to be retried.
     * @see #setCallbackAfterSeconds(long)
     */
    public void setStatus(Status status) {
        this.status = status;
    }

    public Map<String, Object> getOutputData() {
        return outputData;
    }

    /**
     * @param outputData output data to be set for the task execution result. A {@code null} value
     *     is normalized to an empty map (consistent with {@code Task#setOutputData}) so that
     *     {@link #addOutputData(String, Object)} never throws a NullPointerException.
     */
    public void setOutputData(Map<String, Object> outputData) {
        if (outputData == null) {
            outputData = new HashMap<>();
        }
        this.outputData = outputData;
    }

    /**
     * Adds output
     *
     * @param key output field
     * @param value value
     * @return current instance
     */
    public TaskResult addOutputData(String key, Object value) {
        this.outputData.put(key, value);
        return this;
    }

    public Any getOutputMessage() {
        return outputMessage;
    }

    public void setOutputMessage(Any outputMessage) {
        this.outputMessage = outputMessage;
    }

    /**
     * @return Task execution logs
     */
    public List<TaskExecLog> getLogs() {
        return logs;
    }

    /**
     * @param logs Task execution logs. A {@code null} value is normalized to an empty
     *     thread-safe list so that {@link #log(String)} never throws a NullPointerException.
     */
    public void setLogs(List<TaskExecLog> logs) {
        if (logs == null) {
            logs = new CopyOnWriteArrayList<>();
        }
        this.logs = logs;
    }

    /**
     * @param log Log line to be added
     * @return Instance of TaskResult
     */
    public TaskResult log(String log) {
        this.logs.add(new TaskExecLog(log));
        return this;
    }

    /**
     * @return the path where the task output is stored in external storage
     */
    public String getExternalOutputPayloadStoragePath() {
        return externalOutputPayloadStoragePath;
    }

    /**
     * @param externalOutputPayloadStoragePath path in the external storage where the task output is
     *     stored
     */
    public void setExternalOutputPayloadStoragePath(String externalOutputPayloadStoragePath) {
        this.externalOutputPayloadStoragePath = externalOutputPayloadStoragePath;
    }

    public String getSubWorkflowId() {
        return subWorkflowId;
    }

    public void setSubWorkflowId(String subWorkflowId) {
        this.subWorkflowId = subWorkflowId;
    }

    public boolean isExtendLease() {
        return extendLease;
    }

    public void setExtendLease(boolean extendLease) {
        this.extendLease = extendLease;
    }

    @Override
    public String toString() {
        return "TaskResult{"
                + "workflowInstanceId='"
                + workflowInstanceId
                + '\''
                + ", taskId='"
                + taskId
                + '\''
                + ", reasonForIncompletion='"
                + reasonForIncompletion
                + '\''
                + ", callbackAfterSeconds="
                + callbackAfterSeconds
                + ", workerId='"
                + workerId
                + '\''
                + ", status="
                + status
                + ", outputData="
                + outputData
                + ", outputMessage="
                + outputMessage
                + ", logs="
                + logs
                + ", externalOutputPayloadStoragePath='"
                + externalOutputPayloadStoragePath
                + '\''
                + ", subWorkflowId='"
                + subWorkflowId
                + '\''
                + ", extendLease='"
                + extendLease
                + '\''
                + '}';
    }

    /** @return a new result with status COMPLETED */
    public static TaskResult complete() {
        return newTaskResult(Status.COMPLETED);
    }

    /** @return a new result with status FAILED */
    public static TaskResult failed() {
        return newTaskResult(Status.FAILED);
    }

    /**
     * @param failureReason reason recorded on the result (truncated to 500 chars)
     * @return a new result with status FAILED
     */
    public static TaskResult failed(String failureReason) {
        TaskResult result = newTaskResult(Status.FAILED);
        result.setReasonForIncompletion(failureReason);
        return result;
    }

    /** @return a new result with status IN_PROGRESS */
    public static TaskResult inProgress() {
        return newTaskResult(Status.IN_PROGRESS);
    }

    /**
     * @param status status to set on the new result
     * @return a new, otherwise-empty result with the given status
     */
    public static TaskResult newTaskResult(Status status) {
        TaskResult result = new TaskResult();
        result.setStatus(status);
        return result;
    }
}
6,912
0
Create_ds/conductor/common/src/main/java/com/netflix/conductor/common/metadata
Create_ds/conductor/common/src/main/java/com/netflix/conductor/common/metadata/tasks/Task.java
/*
 * Copyright 2022 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.common.metadata.tasks;

import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;

import org.apache.commons.lang3.StringUtils;

import com.netflix.conductor.annotations.protogen.ProtoEnum;
import com.netflix.conductor.annotations.protogen.ProtoField;
import com.netflix.conductor.annotations.protogen.ProtoMessage;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;

import com.google.protobuf.Any;
import io.swagger.v3.oas.annotations.Hidden;

/**
 * A single task execution instance within a workflow run: its status, timing, input/output
 * payloads, retry/poll bookkeeping, and a reference to the {@link WorkflowTask} definition it was
 * scheduled from.
 */
@ProtoMessage
public class Task {

    /**
     * Lifecycle status of a task. Each constant carries three flags: whether the status is
     * terminal, whether it counts as successful, and whether the task may be retried from it.
     */
    @ProtoEnum
    public enum Status {
        IN_PROGRESS(false, true, true),
        CANCELED(true, false, false),
        FAILED(true, false, true),
        FAILED_WITH_TERMINAL_ERROR(
                true,
                false,
                false), // No retries even if retries are configured, the task and the related
        // workflow should be terminated
        COMPLETED(true, true, true),
        COMPLETED_WITH_ERRORS(true, true, true),
        SCHEDULED(false, true, true),
        TIMED_OUT(true, false, true),
        SKIPPED(true, true, false);

        private final boolean terminal;

        private final boolean successful;

        private final boolean retriable;

        Status(boolean terminal, boolean successful, boolean retriable) {
            this.terminal = terminal;
            this.successful = successful;
            this.retriable = retriable;
        }

        public boolean isTerminal() {
            return terminal;
        }

        public boolean isSuccessful() {
            return successful;
        }

        public boolean isRetriable() {
            return retriable;
        }
    }

    @ProtoField(id = 1)
    private String taskType;

    @ProtoField(id = 2)
    private Status status;

    @ProtoField(id = 3)
    private Map<String, Object> inputData = new HashMap<>();

    @ProtoField(id = 4)
    private String referenceTaskName;

    @ProtoField(id = 5)
    private int retryCount;

    @ProtoField(id = 6)
    private int seq;

    @ProtoField(id = 7)
    private String correlationId;

    @ProtoField(id = 8)
    private int pollCount;

    @ProtoField(id = 9)
    private String taskDefName;

    /** Time when the task was scheduled */
    @ProtoField(id = 10)
    private long scheduledTime;

    /** Time when the task was first polled */
    @ProtoField(id = 11)
    private long startTime;

    /** Time when the task completed executing */
    @ProtoField(id = 12)
    private long endTime;

    /** Time when the task was last updated */
    @ProtoField(id = 13)
    private long updateTime;

    @ProtoField(id = 14)
    private int startDelayInSeconds;

    @ProtoField(id = 15)
    private String retriedTaskId;

    @ProtoField(id = 16)
    private boolean retried;

    @ProtoField(id = 17)
    private boolean executed;

    @ProtoField(id = 18)
    private boolean callbackFromWorker = true;

    @ProtoField(id = 19)
    private long responseTimeoutSeconds;

    @ProtoField(id = 20)
    private String workflowInstanceId;

    @ProtoField(id = 21)
    private String workflowType;

    @ProtoField(id = 22)
    private String taskId;

    @ProtoField(id = 23)
    private String reasonForIncompletion;

    @ProtoField(id = 24)
    private long callbackAfterSeconds;

    @ProtoField(id = 25)
    private String workerId;

    @ProtoField(id = 26)
    private Map<String, Object> outputData = new HashMap<>();

    @ProtoField(id = 27)
    private WorkflowTask workflowTask;

    @ProtoField(id = 28)
    private String domain;

    @ProtoField(id = 29)
    @Hidden
    private Any inputMessage;

    @ProtoField(id = 30)
    @Hidden
    private Any outputMessage;

    // id 31 is reserved

    @ProtoField(id = 32)
    private int rateLimitPerFrequency;

    @ProtoField(id = 33)
    private int rateLimitFrequencyInSeconds;

    @ProtoField(id = 34)
    private String externalInputPayloadStoragePath;

    @ProtoField(id = 35)
    private String externalOutputPayloadStoragePath;

    @ProtoField(id = 36)
    private int workflowPriority;

    @ProtoField(id = 37)
    private String executionNameSpace;

    @ProtoField(id = 38)
    private String isolationGroupId;

    @ProtoField(id = 40)
    private int iteration;

    @ProtoField(id = 41)
    private String subWorkflowId;

    /**
     * Use to note that a sub workflow associated with SUB_WORKFLOW task has an action performed on
     * it directly.
     */
    @ProtoField(id = 42)
    private boolean subworkflowChanged;

    public Task() {}

    /**
     * @return Type of the task
     * @see TaskType
     */
    public String getTaskType() {
        return taskType;
    }

    public void setTaskType(String taskType) {
        this.taskType = taskType;
    }

    /**
     * @return Status of the task
     */
    public Status getStatus() {
        return status;
    }

    /**
     * @param status Status of the task
     */
    public void setStatus(Status status) {
        this.status = status;
    }

    public Map<String, Object> getInputData() {
        return inputData;
    }

    // null is normalized to an empty map so callers of getInputData never see null
    public void setInputData(Map<String, Object> inputData) {
        if (inputData == null) {
            inputData = new HashMap<>();
        }
        this.inputData = inputData;
    }

    /**
     * @return the referenceTaskName
     */
    public String getReferenceTaskName() {
        return referenceTaskName;
    }

    /**
     * @param referenceTaskName the referenceTaskName to set
     */
    public void setReferenceTaskName(String referenceTaskName) {
        this.referenceTaskName = referenceTaskName;
    }

    /**
     * @return the correlationId
     */
    public String getCorrelationId() {
        return correlationId;
    }

    /**
     * @param correlationId the correlationId to set
     */
    public void setCorrelationId(String correlationId) {
        this.correlationId = correlationId;
    }

    /**
     * @return the retryCount
     */
    public int getRetryCount() {
        return retryCount;
    }

    /**
     * @param retryCount the retryCount to set
     */
    public void setRetryCount(int retryCount) {
        this.retryCount = retryCount;
    }

    /**
     * @return the scheduledTime
     */
    public long getScheduledTime() {
        return scheduledTime;
    }

    /**
     * @param scheduledTime the scheduledTime to set
     */
    public void setScheduledTime(long scheduledTime) {
        this.scheduledTime = scheduledTime;
    }

    /**
     * @return the startTime
     */
    public long getStartTime() {
        return startTime;
    }

    /**
     * @param startTime the startTime to set
     */
    public void setStartTime(long startTime) {
        this.startTime = startTime;
    }

    /**
     * @return the endTime
     */
    public long getEndTime() {
        return endTime;
    }

    /**
     * @param endTime the endTime to set
     */
    public void setEndTime(long endTime) {
        this.endTime = endTime;
    }

    /**
     * @return the startDelayInSeconds
     */
    public int getStartDelayInSeconds() {
        return startDelayInSeconds;
    }

    /**
     * @param startDelayInSeconds the startDelayInSeconds to set
     */
    public void setStartDelayInSeconds(int startDelayInSeconds) {
        this.startDelayInSeconds = startDelayInSeconds;
    }

    /**
     * @return the retriedTaskId
     */
    public String getRetriedTaskId() {
        return retriedTaskId;
    }

    /**
     * @param retriedTaskId the retriedTaskId to set
     */
    public void setRetriedTaskId(String retriedTaskId) {
        this.retriedTaskId = retriedTaskId;
    }

    /**
     * @return the seq
     */
    public int getSeq() {
        return seq;
    }

    /**
     * @param seq the seq to set
     */
    public void setSeq(int seq) {
        this.seq = seq;
    }

    /**
     * @return the updateTime
     */
    public long getUpdateTime() {
        return updateTime;
    }

    /**
     * @param updateTime the updateTime to set
     */
    public void setUpdateTime(long updateTime) {
        this.updateTime = updateTime;
    }

    /**
     * @return the queueWaitTime. For tasks using a post-update callback, the wait is measured from
     *     the moment the callback delay elapsed (updateTime + callbackAfterSeconds); otherwise it
     *     is simply startTime - scheduledTime. Returns 0 when the task has not both been scheduled
     *     and started, or when the computed wait would be negative.
     */
    public long getQueueWaitTime() {
        if (this.startTime > 0 && this.scheduledTime > 0) {
            if (this.updateTime > 0 && getCallbackAfterSeconds() > 0) {
                long waitTime =
                        System.currentTimeMillis()
                                - (this.updateTime + (getCallbackAfterSeconds() * 1000));
                return waitTime > 0 ? waitTime : 0;
            } else {
                return this.startTime - this.scheduledTime;
            }
        }
        return 0L;
    }

    /**
     * @return True if the task has been retried after failure
     */
    public boolean isRetried() {
        return retried;
    }

    /**
     * @param retried the retried to set
     */
    public void setRetried(boolean retried) {
        this.retried = retried;
    }

    /**
     * @return True if the task has completed its lifecycle within conductor (from start to
     *     completion to being updated in the datastore)
     */
    public boolean isExecuted() {
        return executed;
    }

    /**
     * @param executed the executed value to set
     */
    public void setExecuted(boolean executed) {
        this.executed = executed;
    }

    /**
     * @return No. of times task has been polled
     */
    public int getPollCount() {
        return pollCount;
    }

    public void setPollCount(int pollCount) {
        this.pollCount = pollCount;
    }

    public void incrementPollCount() {
        ++this.pollCount;
    }

    public boolean isCallbackFromWorker() {
        return callbackFromWorker;
    }

    public void setCallbackFromWorker(boolean callbackFromWorker) {
        this.callbackFromWorker = callbackFromWorker;
    }

    /**
     * @return Name of the task definition; falls back to (and caches) the task type when unset
     */
    public String getTaskDefName() {
        if (taskDefName == null || "".equals(taskDefName)) {
            taskDefName = taskType;
        }
        return taskDefName;
    }

    /**
     * @param taskDefName Name of the task definition
     */
    public void setTaskDefName(String taskDefName) {
        this.taskDefName = taskDefName;
    }

    /**
     * @return the timeout for task to send response. After this timeout, the task will be re-queued
     */
    public long getResponseTimeoutSeconds() {
        return responseTimeoutSeconds;
    }

    /**
     * @param responseTimeoutSeconds - timeout for task to send response. After this timeout, the
     *     task will be re-queued
     */
    public void setResponseTimeoutSeconds(long responseTimeoutSeconds) {
        this.responseTimeoutSeconds = responseTimeoutSeconds;
    }

    /**
     * @return the workflowInstanceId
     */
    public String getWorkflowInstanceId() {
        return workflowInstanceId;
    }

    /**
     * @param workflowInstanceId the workflowInstanceId to set
     */
    public void setWorkflowInstanceId(String workflowInstanceId) {
        this.workflowInstanceId = workflowInstanceId;
    }

    public String getWorkflowType() {
        return workflowType;
    }

    /**
     * @param workflowType the name of the workflow
     * @return the task object with the workflow type set
     */
    public com.netflix.conductor.common.metadata.tasks.Task setWorkflowType(String workflowType) {
        this.workflowType = workflowType;
        return this;
    }

    /**
     * @return the taskId
     */
    public String getTaskId() {
        return taskId;
    }

    /**
     * @param taskId the taskId to set
     */
    public void setTaskId(String taskId) {
        this.taskId = taskId;
    }

    /**
     * @return the reasonForIncompletion
     */
    public String getReasonForIncompletion() {
        return reasonForIncompletion;
    }

    /**
     * @param reasonForIncompletion the reasonForIncompletion to set; truncated to 500 characters
     */
    public void setReasonForIncompletion(String reasonForIncompletion) {
        this.reasonForIncompletion = StringUtils.substring(reasonForIncompletion, 0, 500);
    }

    /**
     * @return the callbackAfterSeconds
     */
    public long getCallbackAfterSeconds() {
        return callbackAfterSeconds;
    }

    /**
     * @param callbackAfterSeconds the callbackAfterSeconds to set
     */
    public void setCallbackAfterSeconds(long callbackAfterSeconds) {
        this.callbackAfterSeconds = callbackAfterSeconds;
    }

    /**
     * @return the workerId
     */
    public String getWorkerId() {
        return workerId;
    }

    /**
     * @param workerId the workerId to set
     */
    public void setWorkerId(String workerId) {
        this.workerId = workerId;
    }

    /**
     * @return the outputData
     */
    public Map<String, Object> getOutputData() {
        return outputData;
    }

    /**
     * @param outputData the outputData to set; null is normalized to an empty map
     */
    public void setOutputData(Map<String, Object> outputData) {
        if (outputData == null) {
            outputData = new HashMap<>();
        }
        this.outputData = outputData;
    }

    /**
     * @return Workflow Task definition
     */
    public WorkflowTask getWorkflowTask() {
        return workflowTask;
    }

    /**
     * @param workflowTask Task definition
     */
    public void setWorkflowTask(WorkflowTask workflowTask) {
        this.workflowTask = workflowTask;
    }

    /**
     * @return the domain
     */
    public String getDomain() {
        return domain;
    }

    /**
     * @param domain the Domain
     */
    public void setDomain(String domain) {
        this.domain = domain;
    }

    public Any getInputMessage() {
        return inputMessage;
    }

    public void setInputMessage(Any inputMessage) {
        this.inputMessage = inputMessage;
    }

    public Any getOutputMessage() {
        return outputMessage;
    }

    public void setOutputMessage(Any outputMessage) {
        this.outputMessage = outputMessage;
    }

    /**
     * @return {@link Optional} containing the task definition if available
     */
    public Optional<TaskDef> getTaskDefinition() {
        return Optional.ofNullable(this.getWorkflowTask()).map(WorkflowTask::getTaskDefinition);
    }

    public int getRateLimitPerFrequency() {
        return rateLimitPerFrequency;
    }

    public void setRateLimitPerFrequency(int rateLimitPerFrequency) {
        this.rateLimitPerFrequency = rateLimitPerFrequency;
    }

    public int getRateLimitFrequencyInSeconds() {
        return rateLimitFrequencyInSeconds;
    }

    public void setRateLimitFrequencyInSeconds(int rateLimitFrequencyInSeconds) {
        this.rateLimitFrequencyInSeconds = rateLimitFrequencyInSeconds;
    }

    /**
     * @return the external storage path for the task input payload
     */
    public String getExternalInputPayloadStoragePath() {
        return externalInputPayloadStoragePath;
    }

    /**
     * @param externalInputPayloadStoragePath the external storage path where the task input payload
     *     is stored
     */
    public void setExternalInputPayloadStoragePath(String externalInputPayloadStoragePath) {
        this.externalInputPayloadStoragePath = externalInputPayloadStoragePath;
    }

    /**
     * @return the external storage path for the task output payload
     */
    public String getExternalOutputPayloadStoragePath() {
        return externalOutputPayloadStoragePath;
    }

    /**
     * @param externalOutputPayloadStoragePath the external storage path where the task output
     *     payload is stored
     */
    public void setExternalOutputPayloadStoragePath(String externalOutputPayloadStoragePath) {
        this.externalOutputPayloadStoragePath = externalOutputPayloadStoragePath;
    }

    public void setIsolationGroupId(String isolationGroupId) {
        this.isolationGroupId = isolationGroupId;
    }

    public String getIsolationGroupId() {
        return isolationGroupId;
    }

    public String getExecutionNameSpace() {
        return executionNameSpace;
    }

    public void setExecutionNameSpace(String executionNameSpace) {
        this.executionNameSpace = executionNameSpace;
    }

    /**
     * @return the iteration
     */
    public int getIteration() {
        return iteration;
    }

    /**
     * @param iteration iteration
     */
    public void setIteration(int iteration) {
        this.iteration = iteration;
    }

    // a task inside a loop (DO_WHILE) carries a positive iteration counter
    public boolean isLoopOverTask() {
        return iteration > 0;
    }

    /**
     * @return the priority defined on workflow
     */
    public int getWorkflowPriority() {
        return workflowPriority;
    }

    /**
     * @param workflowPriority Priority defined for workflow
     */
    public void setWorkflowPriority(int workflowPriority) {
        this.workflowPriority = workflowPriority;
    }

    public boolean isSubworkflowChanged() {
        return subworkflowChanged;
    }

    public void setSubworkflowChanged(boolean subworkflowChanged) {
        this.subworkflowChanged = subworkflowChanged;
    }

    /**
     * @return the sub-workflow id; when the dedicated field is blank, falls back to the legacy
     *     "subWorkflowId" entry in outputData, then inputData (backwards compatibility)
     */
    public String getSubWorkflowId() {
        // For backwards compatibility
        if (StringUtils.isNotBlank(subWorkflowId)) {
            return subWorkflowId;
        } else {
            return this.getOutputData() != null && this.getOutputData().get("subWorkflowId") != null
                    ? (String) this.getOutputData().get("subWorkflowId")
                    : this.getInputData() != null
                            ? (String) this.getInputData().get("subWorkflowId")
                            : null;
        }
    }

    public void setSubWorkflowId(String subWorkflowId) {
        this.subWorkflowId = subWorkflowId;
        // For backwards compatibility
        if (this.getOutputData() != null && this.getOutputData().containsKey("subWorkflowId")) {
            this.getOutputData().put("subWorkflowId", subWorkflowId);
        }
    }

    /**
     * @return a shallow copy of this task covering definition/state fields; timing fields
     *     (scheduledTime, startTime, endTime), workerId, reasonForIncompletion and seq are NOT
     *     copied — see {@link #deepCopy()} for those
     */
    public Task copy() {
        Task copy = new Task();
        copy.setCallbackAfterSeconds(callbackAfterSeconds);
        copy.setCallbackFromWorker(callbackFromWorker);
        copy.setCorrelationId(correlationId);
        copy.setInputData(inputData);
        copy.setOutputData(outputData);
        copy.setReferenceTaskName(referenceTaskName);
        copy.setStartDelayInSeconds(startDelayInSeconds);
        copy.setTaskDefName(taskDefName);
        copy.setTaskType(taskType);
        copy.setWorkflowInstanceId(workflowInstanceId);
        copy.setWorkflowType(workflowType);
        copy.setResponseTimeoutSeconds(responseTimeoutSeconds);
        copy.setStatus(status);
        copy.setRetryCount(retryCount);
        copy.setPollCount(pollCount);
        copy.setTaskId(taskId);
        copy.setWorkflowTask(workflowTask);
        copy.setDomain(domain);
        copy.setInputMessage(inputMessage);
        copy.setOutputMessage(outputMessage);
        copy.setRateLimitPerFrequency(rateLimitPerFrequency);
        copy.setRateLimitFrequencyInSeconds(rateLimitFrequencyInSeconds);
        copy.setExternalInputPayloadStoragePath(externalInputPayloadStoragePath);
        copy.setExternalOutputPayloadStoragePath(externalOutputPayloadStoragePath);
        copy.setWorkflowPriority(workflowPriority);
        copy.setIteration(iteration);
        copy.setExecutionNameSpace(executionNameSpace);
        copy.setIsolationGroupId(isolationGroupId);
        copy.setSubWorkflowId(getSubWorkflowId());
        copy.setSubworkflowChanged(subworkflowChanged);
        return copy;
    }

    /**
     * @return a deep copy of the task instance To be used inside copy Workflow method to provide a
     *     valid deep copied object. Note: This does not copy the following fields:
     *     <ul>
     *       <li>retried
     *       <li>updateTime
     *       <li>retriedTaskId
     *     </ul>
     */
    public Task deepCopy() {
        Task deepCopy = copy();
        deepCopy.setStartTime(startTime);
        deepCopy.setScheduledTime(scheduledTime);
        deepCopy.setEndTime(endTime);
        deepCopy.setWorkerId(workerId);
        deepCopy.setReasonForIncompletion(reasonForIncompletion);
        deepCopy.setSeq(seq);
        return deepCopy;
    }

    @Override
    public String toString() {
        return "Task{"
                + "taskType='"
                + taskType
                + '\''
                + ", status="
                + status
                + ", inputData="
                + inputData
                + ", referenceTaskName='"
                + referenceTaskName
                + '\''
                + ", retryCount="
                + retryCount
                + ", seq="
                + seq
                + ", correlationId='"
                + correlationId
                + '\''
                + ", pollCount="
                + pollCount
                + ", taskDefName='"
                + taskDefName
                + '\''
                + ", scheduledTime="
                + scheduledTime
                + ", startTime="
                + startTime
                + ", endTime="
                + endTime
                + ", updateTime="
                + updateTime
                + ", startDelayInSeconds="
                + startDelayInSeconds
                + ", retriedTaskId='"
                + retriedTaskId
                + '\''
                + ", retried="
                + retried
                + ", executed="
                + executed
                + ", callbackFromWorker="
                + callbackFromWorker
                + ", responseTimeoutSeconds="
                + responseTimeoutSeconds
                + ", workflowInstanceId='"
                + workflowInstanceId
                + '\''
                + ", workflowType='"
                + workflowType
                + '\''
                + ", taskId='"
                + taskId
                + '\''
                + ", reasonForIncompletion='"
                + reasonForIncompletion
                + '\''
                + ", callbackAfterSeconds="
                + callbackAfterSeconds
                + ", workerId='"
                + workerId
                + '\''
                + ", outputData="
                + outputData
                + ", workflowTask="
                + workflowTask
                + ", domain='"
                + domain
                + '\''
                + ", inputMessage='"
                + inputMessage
                + '\''
                + ", outputMessage='"
                + outputMessage
                + '\''
                + ", rateLimitPerFrequency="
                + rateLimitPerFrequency
                + ", rateLimitFrequencyInSeconds="
                + rateLimitFrequencyInSeconds
                + ", workflowPriority="
                + workflowPriority
                + ", externalInputPayloadStoragePath='"
                + externalInputPayloadStoragePath
                + '\''
                + ", externalOutputPayloadStoragePath='"
                + externalOutputPayloadStoragePath
                + '\''
                + ", isolationGroupId='"
                + isolationGroupId
                + '\''
                + ", executionNameSpace='"
                + executionNameSpace
                + '\''
                + ", subworkflowChanged='"
                + subworkflowChanged
                + '\''
                + '}';
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        Task task = (Task) o;
        return getRetryCount() == task.getRetryCount()
                && getSeq() == task.getSeq()
                && getPollCount() == task.getPollCount()
                && getScheduledTime() == task.getScheduledTime()
                && getStartTime() == task.getStartTime()
                && getEndTime() == task.getEndTime()
                && getUpdateTime() == task.getUpdateTime()
                && getStartDelayInSeconds() == task.getStartDelayInSeconds()
                && isRetried() == task.isRetried()
                && isExecuted() == task.isExecuted()
                && isCallbackFromWorker() == task.isCallbackFromWorker()
                && getResponseTimeoutSeconds() == task.getResponseTimeoutSeconds()
                && getCallbackAfterSeconds() == task.getCallbackAfterSeconds()
                && getRateLimitPerFrequency() == task.getRateLimitPerFrequency()
                && getRateLimitFrequencyInSeconds() == task.getRateLimitFrequencyInSeconds()
                && Objects.equals(getTaskType(), task.getTaskType())
                && getStatus() == task.getStatus()
                && getIteration() == task.getIteration()
                && getWorkflowPriority() == task.getWorkflowPriority()
                && Objects.equals(getInputData(), task.getInputData())
                && Objects.equals(getReferenceTaskName(), task.getReferenceTaskName())
                && Objects.equals(getCorrelationId(), task.getCorrelationId())
                && Objects.equals(getTaskDefName(), task.getTaskDefName())
                && Objects.equals(getRetriedTaskId(), task.getRetriedTaskId())
                && Objects.equals(getWorkflowInstanceId(), task.getWorkflowInstanceId())
                && Objects.equals(getWorkflowType(), task.getWorkflowType())
                && Objects.equals(getTaskId(), task.getTaskId())
                && Objects.equals(getReasonForIncompletion(), task.getReasonForIncompletion())
                && Objects.equals(getWorkerId(), task.getWorkerId())
                && Objects.equals(getOutputData(), task.getOutputData())
                && Objects.equals(getWorkflowTask(), task.getWorkflowTask())
                && Objects.equals(getDomain(), task.getDomain())
                && Objects.equals(getInputMessage(), task.getInputMessage())
                && Objects.equals(getOutputMessage(), task.getOutputMessage())
                && Objects.equals(
                        getExternalInputPayloadStoragePath(),
                        task.getExternalInputPayloadStoragePath())
                && Objects.equals(
                        getExternalOutputPayloadStoragePath(),
                        task.getExternalOutputPayloadStoragePath())
                && Objects.equals(getIsolationGroupId(), task.getIsolationGroupId())
                && Objects.equals(getExecutionNameSpace(), task.getExecutionNameSpace());
    }

    @Override
    public int hashCode() {
        return Objects.hash(
                getTaskType(),
                getStatus(),
                getInputData(),
                getReferenceTaskName(),
                getWorkflowPriority(),
                getRetryCount(),
                getSeq(),
                getCorrelationId(),
                getPollCount(),
                getTaskDefName(),
                getScheduledTime(),
                getStartTime(),
                getEndTime(),
                getUpdateTime(),
                getStartDelayInSeconds(),
                getRetriedTaskId(),
                isRetried(),
                isExecuted(),
                isCallbackFromWorker(),
                getResponseTimeoutSeconds(),
                getWorkflowInstanceId(),
                getWorkflowType(),
                getTaskId(),
                getReasonForIncompletion(),
                getCallbackAfterSeconds(),
                getWorkerId(),
                getOutputData(),
                getWorkflowTask(),
                getDomain(),
                getInputMessage(),
                getOutputMessage(),
                getRateLimitPerFrequency(),
                getRateLimitFrequencyInSeconds(),
                getExternalInputPayloadStoragePath(),
                getExternalOutputPayloadStoragePath(),
                getIsolationGroupId(),
                getExecutionNameSpace());
    }
}
6,913
0
Create_ds/conductor/common/src/main/java/com/netflix/conductor/common/metadata
Create_ds/conductor/common/src/main/java/com/netflix/conductor/common/metadata/workflow/DynamicForkJoinTask.java
/*
 * Copyright 2021 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.common.metadata.workflow;

import java.util.HashMap;
import java.util.Map;

import com.netflix.conductor.annotations.protogen.ProtoField;
import com.netflix.conductor.annotations.protogen.ProtoMessage;
import com.netflix.conductor.common.metadata.tasks.TaskType;

/**
 * Describes one branch scheduled by a dynamic fork/join: the task (or workflow) to run, the
 * reference name identifying the branch, its input map, and the task type (SIMPLE by default).
 */
@ProtoMessage
public class DynamicForkJoinTask {

    @ProtoField(id = 1)
    private String taskName;

    @ProtoField(id = 2)
    private String workflowName;

    @ProtoField(id = 3)
    private String referenceName;

    @ProtoField(id = 4)
    private Map<String, Object> input = new HashMap<>();

    @ProtoField(id = 5)
    private String type = TaskType.SIMPLE.name();

    public DynamicForkJoinTask() {}

    /**
     * Creates a SIMPLE-typed branch.
     *
     * @param taskName name of the task to schedule
     * @param workflowName name of the workflow (when the branch runs a workflow)
     * @param referenceName unique reference name for this branch
     * @param input input map handed to the scheduled task
     */
    public DynamicForkJoinTask(
            String taskName, String workflowName, String referenceName, Map<String, Object> input) {
        // Delegate to the full constructor with the default type.
        this(taskName, workflowName, referenceName, TaskType.SIMPLE.name(), input);
    }

    /**
     * Creates a branch with an explicit task type.
     *
     * @param taskName name of the task to schedule
     * @param workflowName name of the workflow (when the branch runs a workflow)
     * @param referenceName unique reference name for this branch
     * @param type task type for the branch
     * @param input input map handed to the scheduled task
     */
    public DynamicForkJoinTask(
            String taskName,
            String workflowName,
            String referenceName,
            String type,
            Map<String, Object> input) {
        this.taskName = taskName;
        this.workflowName = workflowName;
        this.referenceName = referenceName;
        this.input = input;
        this.type = type;
    }

    public String getTaskName() {
        return taskName;
    }

    public void setTaskName(String taskName) {
        this.taskName = taskName;
    }

    public String getWorkflowName() {
        return workflowName;
    }

    public void setWorkflowName(String workflowName) {
        this.workflowName = workflowName;
    }

    public String getReferenceName() {
        return referenceName;
    }

    public void setReferenceName(String referenceName) {
        this.referenceName = referenceName;
    }

    public Map<String, Object> getInput() {
        return input;
    }

    public void setInput(Map<String, Object> input) {
        this.input = input;
    }

    public String getType() {
        return type;
    }

    public void setType(String type) {
        this.type = type;
    }
}
6,914
0
Create_ds/conductor/common/src/main/java/com/netflix/conductor/common/metadata
Create_ds/conductor/common/src/main/java/com/netflix/conductor/common/metadata/workflow/SkipTaskRequest.java
/*
 * Copyright 2020 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.common.metadata.workflow;

import java.util.Map;

import com.netflix.conductor.annotations.protogen.ProtoField;
import com.netflix.conductor.annotations.protogen.ProtoMessage;

import com.google.protobuf.Any;
import io.swagger.v3.oas.annotations.Hidden;

/**
 * Payload for skipping a task: the input/output to record for the skipped task, either as plain
 * maps or as protobuf {@link Any} messages.
 */
@ProtoMessage(toProto = false)
public class SkipTaskRequest {

    @ProtoField(id = 1)
    private Map<String, Object> taskInput;

    @ProtoField(id = 2)
    private Map<String, Object> taskOutput;

    @ProtoField(id = 3)
    @Hidden
    private Any taskInputMessage;

    @ProtoField(id = 4)
    @Hidden
    private Any taskOutputMessage;

    /** @return input map recorded for the skipped task */
    public Map<String, Object> getTaskInput() {
        return taskInput;
    }

    public void setTaskInput(Map<String, Object> taskInput) {
        this.taskInput = taskInput;
    }

    /** @return input recorded for the skipped task, as a protobuf message */
    public Any getTaskInputMessage() {
        return taskInputMessage;
    }

    public void setTaskInputMessage(Any taskInputMessage) {
        this.taskInputMessage = taskInputMessage;
    }

    /** @return output map recorded for the skipped task */
    public Map<String, Object> getTaskOutput() {
        return taskOutput;
    }

    public void setTaskOutput(Map<String, Object> taskOutput) {
        this.taskOutput = taskOutput;
    }

    /** @return output recorded for the skipped task, as a protobuf message */
    public Any getTaskOutputMessage() {
        return taskOutputMessage;
    }

    public void setTaskOutputMessage(Any taskOutputMessage) {
        this.taskOutputMessage = taskOutputMessage;
    }
}
6,915
0
Create_ds/conductor/common/src/main/java/com/netflix/conductor/common/metadata
Create_ds/conductor/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowDef.java
/*
 * Copyright 2020 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.common.metadata.workflow;

import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Objects;

import javax.validation.Valid;
import javax.validation.constraints.Email;
import javax.validation.constraints.Max;
import javax.validation.constraints.Min;
import javax.validation.constraints.NotEmpty;
import javax.validation.constraints.NotNull;

import com.netflix.conductor.annotations.protogen.ProtoEnum;
import com.netflix.conductor.annotations.protogen.ProtoField;
import com.netflix.conductor.annotations.protogen.ProtoMessage;
import com.netflix.conductor.common.constraints.NoSemiColonConstraint;
import com.netflix.conductor.common.constraints.OwnerEmailMandatoryConstraint;
import com.netflix.conductor.common.constraints.TaskReferenceNameUniqueConstraint;
import com.netflix.conductor.common.metadata.BaseDef;
import com.netflix.conductor.common.metadata.tasks.TaskType;

/**
 * Definition of a workflow: a named, versioned list of {@link WorkflowTask}s together with
 * input/output parameter declarations, timeout configuration and ownership metadata. Instances are
 * validated via the javax.validation annotations on the fields and serialized to protobuf via the
 * {@code @Proto*} annotations.
 */
@ProtoMessage
@TaskReferenceNameUniqueConstraint
public class WorkflowDef extends BaseDef {

    /** Action taken when a workflow exceeds {@link #timeoutSeconds}. */
    @ProtoEnum
    public enum TimeoutPolicy {
        TIME_OUT_WF,
        ALERT_ONLY
    }

    @NotEmpty(message = "WorkflowDef name cannot be null or empty")
    @ProtoField(id = 1)
    @NoSemiColonConstraint(
            message = "Workflow name cannot contain the following set of characters: ':'")
    private String name;

    @ProtoField(id = 2)
    private String description;

    @ProtoField(id = 3)
    private int version = 1;

    @ProtoField(id = 4)
    @NotNull
    @NotEmpty(message = "WorkflowTask list cannot be empty")
    private List<@Valid WorkflowTask> tasks = new LinkedList<>();

    @ProtoField(id = 5)
    private List<String> inputParameters = new LinkedList<>();

    @ProtoField(id = 6)
    private Map<String, Object> outputParameters = new HashMap<>();

    @ProtoField(id = 7)
    private String failureWorkflow;

    // Only schema version 2 is accepted; the Min/Max pair pins it to exactly 2.
    @ProtoField(id = 8)
    @Min(value = 2, message = "workflowDef schemaVersion: {value} is only supported")
    @Max(value = 2, message = "workflowDef schemaVersion: {value} is only supported")
    private int schemaVersion = 2;

    // By default, a workflow is restartable
    @ProtoField(id = 9)
    private boolean restartable = true;

    @ProtoField(id = 10)
    private boolean workflowStatusListenerEnabled = false;

    @ProtoField(id = 11)
    @OwnerEmailMandatoryConstraint
    @Email(message = "ownerEmail should be valid email address")
    private String ownerEmail;

    @ProtoField(id = 12)
    private TimeoutPolicy timeoutPolicy = TimeoutPolicy.ALERT_ONLY;

    @ProtoField(id = 13)
    @NotNull
    private long timeoutSeconds;

    @ProtoField(id = 14)
    private Map<String, Object> variables = new HashMap<>();

    @ProtoField(id = 15)
    private Map<String, Object> inputTemplate = new HashMap<>();

    /**
     * @return the name
     */
    public String getName() {
        return name;
    }

    /**
     * @param name the name to set
     */
    public void setName(String name) {
        this.name = name;
    }

    /**
     * @return the description
     */
    public String getDescription() {
        return description;
    }

    /**
     * @param description the description to set
     */
    public void setDescription(String description) {
        this.description = description;
    }

    /**
     * @return the tasks
     */
    public List<WorkflowTask> getTasks() {
        return tasks;
    }

    /**
     * @param tasks the tasks to set
     */
    public void setTasks(List<@Valid WorkflowTask> tasks) {
        this.tasks = tasks;
    }

    /**
     * @return the inputParameters
     */
    public List<String> getInputParameters() {
        return inputParameters;
    }

    /**
     * @param inputParameters the inputParameters to set
     */
    public void setInputParameters(List<String> inputParameters) {
        this.inputParameters = inputParameters;
    }

    /**
     * @return the outputParameters
     */
    public Map<String, Object> getOutputParameters() {
        return outputParameters;
    }

    /**
     * @param outputParameters the outputParameters to set
     */
    public void setOutputParameters(Map<String, Object> outputParameters) {
        this.outputParameters = outputParameters;
    }

    /**
     * @return the version
     */
    public int getVersion() {
        return version;
    }

    /**
     * @return the failureWorkflow
     */
    public String getFailureWorkflow() {
        return failureWorkflow;
    }

    /**
     * @param failureWorkflow the failureWorkflow to set
     */
    public void setFailureWorkflow(String failureWorkflow) {
        this.failureWorkflow = failureWorkflow;
    }

    /**
     * @param version the version to set
     */
    public void setVersion(int version) {
        this.version = version;
    }

    /**
     * This method determines if the workflow is restartable or not
     *
     * @return true: if the workflow is restartable false: if the workflow is non restartable
     */
    public boolean isRestartable() {
        return restartable;
    }

    /**
     * This method is called only when the workflow definition is created
     *
     * @param restartable true: if the workflow is restartable false: if the workflow is non
     *     restartable
     */
    public void setRestartable(boolean restartable) {
        this.restartable = restartable;
    }

    /**
     * @return the schemaVersion
     */
    public int getSchemaVersion() {
        return schemaVersion;
    }

    /**
     * @param schemaVersion the schemaVersion to set
     */
    public void setSchemaVersion(int schemaVersion) {
        this.schemaVersion = schemaVersion;
    }

    /**
     * @return true is workflow listener will be invoked when workflow gets into a terminal state
     */
    public boolean isWorkflowStatusListenerEnabled() {
        return workflowStatusListenerEnabled;
    }

    /**
     * Specify if workflow listener is enabled to invoke a callback for completed or terminated
     * workflows
     *
     * @param workflowStatusListenerEnabled
     */
    public void setWorkflowStatusListenerEnabled(boolean workflowStatusListenerEnabled) {
        this.workflowStatusListenerEnabled = workflowStatusListenerEnabled;
    }

    /**
     * @return the email of the owner of this workflow definition
     */
    public String getOwnerEmail() {
        return ownerEmail;
    }

    /**
     * @param ownerEmail the owner email to set
     */
    public void setOwnerEmail(String ownerEmail) {
        this.ownerEmail = ownerEmail;
    }

    /**
     * @return the timeoutPolicy
     */
    public TimeoutPolicy getTimeoutPolicy() {
        return timeoutPolicy;
    }

    /**
     * @param timeoutPolicy the timeoutPolicy to set
     */
    public void setTimeoutPolicy(TimeoutPolicy timeoutPolicy) {
        this.timeoutPolicy = timeoutPolicy;
    }

    /**
     * @return the time after which a workflow is deemed to have timed out
     */
    public long getTimeoutSeconds() {
        return timeoutSeconds;
    }

    /**
     * @param timeoutSeconds the timeout in seconds to set
     */
    public void setTimeoutSeconds(long timeoutSeconds) {
        this.timeoutSeconds = timeoutSeconds;
    }

    /**
     * @return the global workflow variables
     */
    public Map<String, Object> getVariables() {
        return variables;
    }

    /**
     * @param variables the set of global workflow variables to set
     */
    public void setVariables(Map<String, Object> variables) {
        this.variables = variables;
    }

    public Map<String, Object> getInputTemplate() {
        return inputTemplate;
    }

    public void setInputTemplate(Map<String, Object> inputTemplate) {
        this.inputTemplate = inputTemplate;
    }

    /**
     * @return the unique "name.version" key for this definition
     */
    public String key() {
        return getKey(name, version);
    }

    /**
     * @param name workflow name
     * @param version workflow version
     * @return the "name.version" key used to identify a specific definition revision
     */
    public static String getKey(String name, int version) {
        return name + "." + version;
    }

    /**
     * @param taskType a {@link TaskType} name
     * @return true if any task (including nested ones) in this workflow is of the given type
     */
    public boolean containsType(String taskType) {
        return collectTasks().stream().anyMatch(t -> t.getType().equals(taskType));
    }

    /**
     * Finds the task that should execute after the task identified by {@code taskReferenceName}.
     * Returns null when the referenced task is a TERMINATE task, is the last task in the workflow,
     * or is the last task inside a DO_WHILE loop body.
     *
     * @param taskReferenceName reference name of the task that just finished
     * @return the next {@link WorkflowTask} to schedule, or null if there is none
     */
    public WorkflowTask getNextTask(String taskReferenceName) {
        WorkflowTask workflowTask = getTaskByRefName(taskReferenceName);
        // A TERMINATE task ends the workflow; nothing follows it.
        if (workflowTask != null && TaskType.TERMINATE.name().equals(workflowTask.getType())) {
            return null;
        }

        Iterator<WorkflowTask> iterator = tasks.iterator();
        while (iterator.hasNext()) {
            WorkflowTask task = iterator.next();
            if (task.getTaskReferenceName().equals(taskReferenceName)) {
                // If taskReferenceName matches, break out
                break;
            }
            // Delegate to the task (e.g. DECISION/FORK containers) to resolve the successor
            // within its own children.
            WorkflowTask nextTask = task.next(taskReferenceName, null);
            if (nextTask != null) {
                return nextTask;
            } else if (TaskType.DO_WHILE.name().equals(task.getType())
                    && !task.getTaskReferenceName().equals(taskReferenceName)
                    && task.has(taskReferenceName)) {
                // If the task is child of Loop Task and at last position, return null.
                return null;
            }

            if (task.has(taskReferenceName)) {
                break;
            }
        }
        // The loop broke at the referenced task's position; the next top-level task (if any)
        // is the successor.
        if (iterator.hasNext()) {
            return iterator.next();
        }
        return null;
    }

    /**
     * @param taskReferenceName reference name to look up
     * @return the matching {@link WorkflowTask} anywhere in the workflow (including nested tasks),
     *     or null if not found
     */
    public WorkflowTask getTaskByRefName(String taskReferenceName) {
        return collectTasks().stream()
                .filter(
                        workflowTask ->
                                workflowTask.getTaskReferenceName().equals(taskReferenceName))
                .findFirst()
                .orElse(null);
    }

    /**
     * @return a flattened list of all tasks in this workflow, including tasks nested inside
     *     container tasks (forks, decisions, loops)
     */
    public List<WorkflowTask> collectTasks() {
        List<WorkflowTask> tasks = new LinkedList<>();
        for (WorkflowTask workflowTask : this.tasks) {
            tasks.addAll(workflowTask.collectTasks());
        }
        return tasks;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        WorkflowDef that = (WorkflowDef) o;
        return getVersion() == that.getVersion()
                && getSchemaVersion() == that.getSchemaVersion()
                && Objects.equals(getName(), that.getName())
                && Objects.equals(getDescription(), that.getDescription())
                && Objects.equals(getTasks(), that.getTasks())
                && Objects.equals(getInputParameters(), that.getInputParameters())
                && Objects.equals(getOutputParameters(), that.getOutputParameters())
                && Objects.equals(getFailureWorkflow(), that.getFailureWorkflow())
                && Objects.equals(getOwnerEmail(), that.getOwnerEmail())
                // compare the primitive longs directly instead of autoboxing them through
                // Objects.equals — same result, no allocation
                && getTimeoutSeconds() == that.getTimeoutSeconds();
    }

    @Override
    public int hashCode() {
        return Objects.hash(
                getName(),
                getDescription(),
                getVersion(),
                getTasks(),
                getInputParameters(),
                getOutputParameters(),
                getFailureWorkflow(),
                getSchemaVersion(),
                getOwnerEmail(),
                getTimeoutSeconds());
    }

    @Override
    public String toString() {
        return "WorkflowDef{"
                + "name='"
                + name
                + '\''
                + ", description='"
                + description
                + '\''
                + ", version="
                + version
                + ", tasks="
                + tasks
                + ", inputParameters="
                + inputParameters
                + ", outputParameters="
                + outputParameters
                + ", failureWorkflow='"
                + failureWorkflow
                + '\''
                + ", schemaVersion="
                + schemaVersion
                + ", restartable="
                + restartable
                + ", workflowStatusListenerEnabled="
                + workflowStatusListenerEnabled
                + ", timeoutSeconds="
                + timeoutSeconds
                + '}';
    }
}
6,916
0
Create_ds/conductor/common/src/main/java/com/netflix/conductor/common/metadata
Create_ds/conductor/common/src/main/java/com/netflix/conductor/common/metadata/workflow/DynamicForkJoinTaskList.java
/*
 * Copyright 2020 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.common.metadata.workflow;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;

import com.netflix.conductor.annotations.protogen.ProtoField;
import com.netflix.conductor.annotations.protogen.ProtoMessage;

/**
 * Mutable container that accumulates the {@link DynamicForkJoinTask}s to be spawned by a dynamic
 * fork-join.
 */
@ProtoMessage
public class DynamicForkJoinTaskList {

    @ProtoField(id = 1)
    private List<DynamicForkJoinTask> dynamicTasks = new ArrayList<>();

    /**
     * Builds a {@link DynamicForkJoinTask} from the given pieces and appends it to the list.
     *
     * @param taskName name of the task to fork
     * @param workflowName name of the workflow the task belongs to
     * @param referenceName unique reference name for the forked task
     * @param input input map passed to the forked task
     */
    public void add(
            String taskName, String workflowName, String referenceName, Map<String, Object> input) {
        // Delegate to the single-argument overload so there is exactly one append path.
        add(new DynamicForkJoinTask(taskName, workflowName, referenceName, input));
    }

    /**
     * Appends an already-constructed task to the list.
     *
     * @param dtask the task to append
     */
    public void add(DynamicForkJoinTask dtask) {
        this.dynamicTasks.add(dtask);
    }

    /**
     * @return the accumulated list of dynamic fork-join tasks
     */
    public List<DynamicForkJoinTask> getDynamicTasks() {
        return this.dynamicTasks;
    }

    /**
     * @param dynamicTasks the list of dynamic fork-join tasks to set
     */
    public void setDynamicTasks(List<DynamicForkJoinTask> dynamicTasks) {
        this.dynamicTasks = dynamicTasks;
    }
}
6,917
0
Create_ds/conductor/common/src/main/java/com/netflix/conductor/common/metadata
Create_ds/conductor/common/src/main/java/com/netflix/conductor/common/metadata/workflow/StartWorkflowRequest.java
/*
 * Copyright 2020 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.common.metadata.workflow;

import java.util.HashMap;
import java.util.Map;

import javax.validation.Valid;
import javax.validation.constraints.Max;
import javax.validation.constraints.Min;
import javax.validation.constraints.NotNull;

import com.netflix.conductor.annotations.protogen.ProtoField;
import com.netflix.conductor.annotations.protogen.ProtoMessage;

/**
 * Request payload used to start a new workflow execution. Besides the plain getters/setters, every
 * property also has a fluent {@code with*} variant so requests can be built in a chained style.
 */
@ProtoMessage
public class StartWorkflowRequest {

    @ProtoField(id = 1)
    @NotNull(message = "Workflow name cannot be null or empty")
    private String name;

    @ProtoField(id = 2)
    private Integer version;

    @ProtoField(id = 3)
    private String correlationId;

    @ProtoField(id = 4)
    private Map<String, Object> input = new HashMap<>();

    @ProtoField(id = 5)
    private Map<String, String> taskToDomain = new HashMap<>();

    // Optional inline definition; when present it is used instead of the registered one.
    @ProtoField(id = 6)
    @Valid
    private WorkflowDef workflowDef;

    @ProtoField(id = 7)
    private String externalInputPayloadStoragePath;

    @ProtoField(id = 8)
    @Min(value = 0, message = "priority: ${validatedValue} should be minimum {value}")
    @Max(value = 99, message = "priority: ${validatedValue} should be maximum {value}")
    private Integer priority = 0;

    /** @return the workflow name */
    public String getName() {
        return name;
    }

    /** @param name the workflow name to set */
    public void setName(String name) {
        this.name = name;
    }

    /** Fluent variant of {@link #setName(String)}. */
    public StartWorkflowRequest withName(String name) {
        setName(name);
        return this;
    }

    /** @return the workflow version, or null for the latest registered version */
    public Integer getVersion() {
        return version;
    }

    /** @param version the workflow version to set */
    public void setVersion(Integer version) {
        this.version = version;
    }

    /** Fluent variant of {@link #setVersion(Integer)}. */
    public StartWorkflowRequest withVersion(Integer version) {
        setVersion(version);
        return this;
    }

    /** @return the caller-supplied correlation id */
    public String getCorrelationId() {
        return correlationId;
    }

    /** @param correlationId the correlation id to set */
    public void setCorrelationId(String correlationId) {
        this.correlationId = correlationId;
    }

    /** Fluent variant of {@link #setCorrelationId(String)}. */
    public StartWorkflowRequest withCorrelationId(String correlationId) {
        setCorrelationId(correlationId);
        return this;
    }

    /** @return the external storage path holding the workflow input payload, if any */
    public String getExternalInputPayloadStoragePath() {
        return externalInputPayloadStoragePath;
    }

    /** @param externalInputPayloadStoragePath the external input payload path to set */
    public void setExternalInputPayloadStoragePath(String externalInputPayloadStoragePath) {
        this.externalInputPayloadStoragePath = externalInputPayloadStoragePath;
    }

    /** Fluent variant of {@link #setExternalInputPayloadStoragePath(String)}. */
    public StartWorkflowRequest withExternalInputPayloadStoragePath(
            String externalInputPayloadStoragePath) {
        setExternalInputPayloadStoragePath(externalInputPayloadStoragePath);
        return this;
    }

    /** @return the execution priority (0-99) */
    public Integer getPriority() {
        return priority;
    }

    /** @param priority the execution priority to set */
    public void setPriority(Integer priority) {
        this.priority = priority;
    }

    /** Fluent variant of {@link #setPriority(Integer)}. */
    public StartWorkflowRequest withPriority(Integer priority) {
        setPriority(priority);
        return this;
    }

    /** @return the workflow input map */
    public Map<String, Object> getInput() {
        return input;
    }

    /** @param input the workflow input map to set */
    public void setInput(Map<String, Object> input) {
        this.input = input;
    }

    /** Fluent variant of {@link #setInput(Map)}. */
    public StartWorkflowRequest withInput(Map<String, Object> input) {
        setInput(input);
        return this;
    }

    /** @return the task-to-domain routing map */
    public Map<String, String> getTaskToDomain() {
        return taskToDomain;
    }

    /** @param taskToDomain the task-to-domain routing map to set */
    public void setTaskToDomain(Map<String, String> taskToDomain) {
        this.taskToDomain = taskToDomain;
    }

    /** Fluent variant of {@link #setTaskToDomain(Map)}. */
    public StartWorkflowRequest withTaskToDomain(Map<String, String> taskToDomain) {
        setTaskToDomain(taskToDomain);
        return this;
    }

    /** @return the inline workflow definition, or null to use the registered one */
    public WorkflowDef getWorkflowDef() {
        return workflowDef;
    }

    /** @param workflowDef the inline workflow definition to set */
    public void setWorkflowDef(WorkflowDef workflowDef) {
        this.workflowDef = workflowDef;
    }

    /** Fluent variant of {@link #setWorkflowDef(WorkflowDef)}. */
    public StartWorkflowRequest withWorkflowDef(WorkflowDef workflowDef) {
        setWorkflowDef(workflowDef);
        return this;
    }
}
6,918
0
Create_ds/conductor/common/src/main/java/com/netflix/conductor/common/metadata
Create_ds/conductor/common/src/main/java/com/netflix/conductor/common/metadata/workflow/SubWorkflowParams.java
/*
 * Copyright 2020 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.common.metadata.workflow;

import java.util.Map;
import java.util.Objects;

import javax.validation.constraints.NotEmpty;
import javax.validation.constraints.NotNull;

import com.netflix.conductor.annotations.protogen.ProtoField;
import com.netflix.conductor.annotations.protogen.ProtoMessage;

import com.fasterxml.jackson.annotation.JsonGetter;
import com.fasterxml.jackson.annotation.JsonSetter;

/**
 * Parameters of a SUB_WORKFLOW task: either a (name, version) reference to a registered workflow,
 * or an inline {@link WorkflowDef}. When an inline definition is present, its name/version take
 * precedence over the locally stored ones.
 */
@ProtoMessage
public class SubWorkflowParams {

    @ProtoField(id = 1)
    @NotNull(message = "SubWorkflowParams name cannot be null")
    @NotEmpty(message = "SubWorkflowParams name cannot be empty")
    private String name;

    @ProtoField(id = 2)
    private Integer version;

    @ProtoField(id = 3)
    private Map<String, String> taskToDomain;

    // workaround as WorkflowDef cannot directly be used due to cyclic dependency issue in protobuf
    // imports
    @ProtoField(id = 4)
    private Object workflowDefinition;

    /**
     * @return the name
     */
    public String getName() {
        // The inline definition, when present, is authoritative for the name.
        if (workflowDefinition != null) {
            return getWorkflowDef().getName();
        } else {
            return name;
        }
    }

    /**
     * @param name the name to set
     */
    public void setName(String name) {
        this.name = name;
    }

    /**
     * @return the version
     */
    public Integer getVersion() {
        // The inline definition, when present, is authoritative for the version.
        if (workflowDefinition != null) {
            return getWorkflowDef().getVersion();
        } else {
            return version;
        }
    }

    /**
     * @param version the version to set
     */
    public void setVersion(Integer version) {
        this.version = version;
    }

    /**
     * @return the taskToDomain
     */
    public Map<String, String> getTaskToDomain() {
        return taskToDomain;
    }

    /**
     * @param taskToDomain the taskToDomain to set
     */
    public void setTaskToDomain(Map<String, String> taskToDomain) {
        this.taskToDomain = taskToDomain;
    }

    /**
     * @return the workflowDefinition as an Object
     */
    public Object getWorkflowDefinition() {
        return workflowDefinition;
    }

    /**
     * @return the workflowDefinition as a WorkflowDef
     */
    @JsonGetter("workflowDefinition")
    public WorkflowDef getWorkflowDef() {
        return (WorkflowDef) workflowDefinition;
    }

    /**
     * @param workflowDef the workflowDefinition to set
     * @throws IllegalArgumentException if the argument is neither null nor a {@link WorkflowDef}
     */
    public void setWorkflowDefinition(Object workflowDef) {
        if (!(workflowDef == null || workflowDef instanceof WorkflowDef)) {
            throw new IllegalArgumentException(
                    "workflowDefinition must be either null or WorkflowDef");
        }
        this.workflowDefinition = workflowDef;
    }

    /**
     * @param workflowDef the workflowDefinition to set
     */
    @JsonSetter("workflowDefinition")
    public void setWorkflowDef(WorkflowDef workflowDef) {
        this.workflowDefinition = workflowDef;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        SubWorkflowParams that = (SubWorkflowParams) o;
        return Objects.equals(getName(), that.getName())
                && Objects.equals(getVersion(), that.getVersion())
                && Objects.equals(getTaskToDomain(), that.getTaskToDomain())
                && Objects.equals(getWorkflowDefinition(), that.getWorkflowDefinition());
    }

    // hashCode added to honor the Object contract: equals() was overridden without it, so equal
    // instances could hash differently. Built from the same accessors equals() compares.
    @Override
    public int hashCode() {
        return Objects.hash(getName(), getVersion(), getTaskToDomain(), getWorkflowDefinition());
    }
}
6,919
0
Create_ds/conductor/common/src/main/java/com/netflix/conductor/common/metadata
Create_ds/conductor/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowTask.java
/* * Copyright 2021 Netflix, Inc. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.common.metadata.workflow; import java.util.Collection; import java.util.HashMap; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Objects; import javax.validation.Valid; import javax.validation.constraints.NotEmpty; import javax.validation.constraints.PositiveOrZero; import com.netflix.conductor.annotations.protogen.ProtoField; import com.netflix.conductor.annotations.protogen.ProtoMessage; import com.netflix.conductor.common.metadata.tasks.TaskDef; import com.netflix.conductor.common.metadata.tasks.TaskType; import com.fasterxml.jackson.annotation.JsonInclude; /** * This is the task definition definied as part of the {@link WorkflowDef}. 
The tasks definied in * the Workflow definition are saved as part of {@link WorkflowDef#getTasks} */ @ProtoMessage public class WorkflowTask { @ProtoField(id = 1) @NotEmpty(message = "WorkflowTask name cannot be empty or null") private String name; @ProtoField(id = 2) @NotEmpty(message = "WorkflowTask taskReferenceName name cannot be empty or null") private String taskReferenceName; @ProtoField(id = 3) private String description; @ProtoField(id = 4) private Map<String, Object> inputParameters = new HashMap<>(); @ProtoField(id = 5) private String type = TaskType.SIMPLE.name(); @ProtoField(id = 6) private String dynamicTaskNameParam; @Deprecated @ProtoField(id = 7) private String caseValueParam; @Deprecated @ProtoField(id = 8) private String caseExpression; @ProtoField(id = 22) private String scriptExpression; @ProtoMessage(wrapper = true) public static class WorkflowTaskList { public List<WorkflowTask> getTasks() { return tasks; } public void setTasks(List<WorkflowTask> tasks) { this.tasks = tasks; } @ProtoField(id = 1) private List<WorkflowTask> tasks; } // Populates for the tasks of the decision type @ProtoField(id = 9) @JsonInclude(JsonInclude.Include.NON_EMPTY) private Map<String, @Valid List<@Valid WorkflowTask>> decisionCases = new LinkedHashMap<>(); @Deprecated private String dynamicForkJoinTasksParam; @ProtoField(id = 10) private String dynamicForkTasksParam; @ProtoField(id = 11) private String dynamicForkTasksInputParamName; @ProtoField(id = 12) @JsonInclude(JsonInclude.Include.NON_EMPTY) private List<@Valid WorkflowTask> defaultCase = new LinkedList<>(); @ProtoField(id = 13) @JsonInclude(JsonInclude.Include.NON_EMPTY) private List<@Valid List<@Valid WorkflowTask>> forkTasks = new LinkedList<>(); @ProtoField(id = 14) @PositiveOrZero private int startDelay; // No. of seconds (at-least) to wait before starting a task. 
@ProtoField(id = 15) @Valid private SubWorkflowParams subWorkflowParam; @ProtoField(id = 16) @JsonInclude(JsonInclude.Include.NON_EMPTY) private List<String> joinOn = new LinkedList<>(); @ProtoField(id = 17) private String sink; @ProtoField(id = 18) private boolean optional = false; @ProtoField(id = 19) private TaskDef taskDefinition; @ProtoField(id = 20) private Boolean rateLimited; @ProtoField(id = 21) @JsonInclude(JsonInclude.Include.NON_EMPTY) private List<String> defaultExclusiveJoinTask = new LinkedList<>(); @ProtoField(id = 23) private Boolean asyncComplete = false; @ProtoField(id = 24) private String loopCondition; @ProtoField(id = 25) @JsonInclude(JsonInclude.Include.NON_EMPTY) private List<WorkflowTask> loopOver = new LinkedList<>(); @ProtoField(id = 26) private Integer retryCount; @ProtoField(id = 27) private String evaluatorType; @ProtoField(id = 28) private String expression; /** * @return the name */ public String getName() { return name; } /** * @param name the name to set */ public void setName(String name) { this.name = name; } /** * @return the taskReferenceName */ public String getTaskReferenceName() { return taskReferenceName; } /** * @param taskReferenceName the taskReferenceName to set */ public void setTaskReferenceName(String taskReferenceName) { this.taskReferenceName = taskReferenceName; } /** * @return the description */ public String getDescription() { return description; } /** * @param description the description to set */ public void setDescription(String description) { this.description = description; } /** * @return the inputParameters */ public Map<String, Object> getInputParameters() { return inputParameters; } /** * @param inputParameters the inputParameters to set */ public void setInputParameters(Map<String, Object> inputParameters) { this.inputParameters = inputParameters; } /** * @return the type */ public String getType() { return type; } public void setWorkflowTaskType(TaskType type) { this.type = type.name(); } /** * @param 
type the type to set */ public void setType(@NotEmpty(message = "WorkTask type cannot be null or empty") String type) { this.type = type; } /** * @return the decisionCases */ public Map<String, List<WorkflowTask>> getDecisionCases() { return decisionCases; } /** * @param decisionCases the decisionCases to set */ public void setDecisionCases(Map<String, List<WorkflowTask>> decisionCases) { this.decisionCases = decisionCases; } /** * @return the defaultCase */ public List<WorkflowTask> getDefaultCase() { return defaultCase; } /** * @param defaultCase the defaultCase to set */ public void setDefaultCase(List<WorkflowTask> defaultCase) { this.defaultCase = defaultCase; } /** * @return the forkTasks */ public List<List<WorkflowTask>> getForkTasks() { return forkTasks; } /** * @param forkTasks the forkTasks to set */ public void setForkTasks(List<List<WorkflowTask>> forkTasks) { this.forkTasks = forkTasks; } /** * @return the startDelay in seconds */ public int getStartDelay() { return startDelay; } /** * @param startDelay the startDelay to set */ public void setStartDelay(int startDelay) { this.startDelay = startDelay; } /** * @return the retryCount */ public Integer getRetryCount() { return retryCount; } /** * @param retryCount the retryCount to set */ public void setRetryCount(final Integer retryCount) { this.retryCount = retryCount; } /** * @return the dynamicTaskNameParam */ public String getDynamicTaskNameParam() { return dynamicTaskNameParam; } /** * @param dynamicTaskNameParam the dynamicTaskNameParam to set to be used by DYNAMIC tasks */ public void setDynamicTaskNameParam(String dynamicTaskNameParam) { this.dynamicTaskNameParam = dynamicTaskNameParam; } /** * @deprecated Use {@link WorkflowTask#getEvaluatorType()} and {@link * WorkflowTask#getExpression()} combination. 
* @return the caseValueParam */ @Deprecated public String getCaseValueParam() { return caseValueParam; } @Deprecated public String getDynamicForkJoinTasksParam() { return dynamicForkJoinTasksParam; } @Deprecated public void setDynamicForkJoinTasksParam(String dynamicForkJoinTasksParam) { this.dynamicForkJoinTasksParam = dynamicForkJoinTasksParam; } public String getDynamicForkTasksParam() { return dynamicForkTasksParam; } public void setDynamicForkTasksParam(String dynamicForkTasksParam) { this.dynamicForkTasksParam = dynamicForkTasksParam; } public String getDynamicForkTasksInputParamName() { return dynamicForkTasksInputParamName; } public void setDynamicForkTasksInputParamName(String dynamicForkTasksInputParamName) { this.dynamicForkTasksInputParamName = dynamicForkTasksInputParamName; } /** * @param caseValueParam the caseValueParam to set * @deprecated Use {@link WorkflowTask#getEvaluatorType()} and {@link * WorkflowTask#getExpression()} combination. */ @Deprecated public void setCaseValueParam(String caseValueParam) { this.caseValueParam = caseValueParam; } /** * @return A javascript expression for decision cases. The result should be a scalar value that * is used to decide the case branches. * @see #getDecisionCases() * @deprecated Use {@link WorkflowTask#getEvaluatorType()} and {@link * WorkflowTask#getExpression()} combination. */ @Deprecated public String getCaseExpression() { return caseExpression; } /** * @param caseExpression A javascript expression for decision cases. The result should be a * scalar value that is used to decide the case branches. * @deprecated Use {@link WorkflowTask#getEvaluatorType()} and {@link * WorkflowTask#getExpression()} combination. 
*/ @Deprecated public void setCaseExpression(String caseExpression) { this.caseExpression = caseExpression; } public String getScriptExpression() { return scriptExpression; } public void setScriptExpression(String expression) { this.scriptExpression = expression; } /** * @return the subWorkflow */ public SubWorkflowParams getSubWorkflowParam() { return subWorkflowParam; } /** * @param subWorkflow the subWorkflowParam to set */ public void setSubWorkflowParam(SubWorkflowParams subWorkflow) { this.subWorkflowParam = subWorkflow; } /** * @return the joinOn */ public List<String> getJoinOn() { return joinOn; } /** * @param joinOn the joinOn to set */ public void setJoinOn(List<String> joinOn) { this.joinOn = joinOn; } /** * @return the loopCondition */ public String getLoopCondition() { return loopCondition; } /** * @param loopCondition the expression to set */ public void setLoopCondition(String loopCondition) { this.loopCondition = loopCondition; } /** * @return the loopOver */ public List<WorkflowTask> getLoopOver() { return loopOver; } /** * @param loopOver the loopOver to set */ public void setLoopOver(List<WorkflowTask> loopOver) { this.loopOver = loopOver; } /** * @return Sink value for the EVENT type of task */ public String getSink() { return sink; } /** * @param sink Name of the sink */ public void setSink(String sink) { this.sink = sink; } /** * @return whether wait for an external event to complete the task, for EVENT and HTTP tasks */ public Boolean isAsyncComplete() { return asyncComplete; } public void setAsyncComplete(Boolean asyncComplete) { this.asyncComplete = asyncComplete; } /** * @return If the task is optional. When set to true, the workflow execution continues even when * the task is in failed status. 
*/ public boolean isOptional() { return optional; } /** * @return Task definition associated to the Workflow Task */ public TaskDef getTaskDefinition() { return taskDefinition; } /** * @param taskDefinition Task definition */ public void setTaskDefinition(TaskDef taskDefinition) { this.taskDefinition = taskDefinition; } /** * @param optional when set to true, the task is marked as optional */ public void setOptional(boolean optional) { this.optional = optional; } public Boolean getRateLimited() { return rateLimited; } public void setRateLimited(Boolean rateLimited) { this.rateLimited = rateLimited; } public Boolean isRateLimited() { return rateLimited != null && rateLimited; } public List<String> getDefaultExclusiveJoinTask() { return defaultExclusiveJoinTask; } public void setDefaultExclusiveJoinTask(List<String> defaultExclusiveJoinTask) { this.defaultExclusiveJoinTask = defaultExclusiveJoinTask; } /** * @return the evaluatorType */ public String getEvaluatorType() { return evaluatorType; } /** * @param evaluatorType the evaluatorType to set */ public void setEvaluatorType(String evaluatorType) { this.evaluatorType = evaluatorType; } /** * @return An evaluation expression for switch cases evaluated by corresponding evaluator. The * result should be a scalar value that is used to decide the case branches. 
* @see #getDecisionCases() */ public String getExpression() { return expression; } /** * @param expression the expression to set */ public void setExpression(String expression) { this.expression = expression; } private Collection<List<WorkflowTask>> children() { Collection<List<WorkflowTask>> workflowTaskLists = new LinkedList<>(); switch (TaskType.of(type)) { case DECISION: case SWITCH: workflowTaskLists.addAll(decisionCases.values()); workflowTaskLists.add(defaultCase); break; case FORK_JOIN: workflowTaskLists.addAll(forkTasks); break; case DO_WHILE: workflowTaskLists.add(loopOver); break; default: break; } return workflowTaskLists; } public List<WorkflowTask> collectTasks() { List<WorkflowTask> tasks = new LinkedList<>(); tasks.add(this); for (List<WorkflowTask> workflowTaskList : children()) { for (WorkflowTask workflowTask : workflowTaskList) { tasks.addAll(workflowTask.collectTasks()); } } return tasks; } public WorkflowTask next(String taskReferenceName, WorkflowTask parent) { TaskType taskType = TaskType.of(type); switch (taskType) { case DO_WHILE: case DECISION: case SWITCH: for (List<WorkflowTask> workflowTasks : children()) { Iterator<WorkflowTask> iterator = workflowTasks.iterator(); while (iterator.hasNext()) { WorkflowTask task = iterator.next(); if (task.getTaskReferenceName().equals(taskReferenceName)) { break; } WorkflowTask nextTask = task.next(taskReferenceName, this); if (nextTask != null) { return nextTask; } if (task.has(taskReferenceName)) { break; } } if (iterator.hasNext()) { return iterator.next(); } } if (taskType == TaskType.DO_WHILE && this.has(taskReferenceName)) { // come here means this is DO_WHILE task and `taskReferenceName` is the last // task in // this DO_WHILE task, because DO_WHILE task need to be executed to decide // whether to // schedule next iteration, so we just return the DO_WHILE task, and then ignore // generating this task again in deciderService.getNextTask() return this; } break; case FORK_JOIN: boolean found = 
false; for (List<WorkflowTask> workflowTasks : children()) { Iterator<WorkflowTask> iterator = workflowTasks.iterator(); while (iterator.hasNext()) { WorkflowTask task = iterator.next(); if (task.getTaskReferenceName().equals(taskReferenceName)) { found = true; break; } WorkflowTask nextTask = task.next(taskReferenceName, this); if (nextTask != null) { return nextTask; } if (task.has(taskReferenceName)) { break; } } if (iterator.hasNext()) { return iterator.next(); } if (found && parent != null) { return parent.next( this.taskReferenceName, parent); // we need to return join task... -- get my sibling from my // parent.. } } break; case DYNAMIC: case TERMINATE: case SIMPLE: return null; default: break; } return null; } public boolean has(String taskReferenceName) { if (this.getTaskReferenceName().equals(taskReferenceName)) { return true; } switch (TaskType.of(type)) { case DECISION: case SWITCH: case DO_WHILE: case FORK_JOIN: for (List<WorkflowTask> childx : children()) { for (WorkflowTask child : childx) { if (child.has(taskReferenceName)) { return true; } } } break; default: break; } return false; } public WorkflowTask get(String taskReferenceName) { if (this.getTaskReferenceName().equals(taskReferenceName)) { return this; } for (List<WorkflowTask> childx : children()) { for (WorkflowTask child : childx) { WorkflowTask found = child.get(taskReferenceName); if (found != null) { return found; } } } return null; } @Override public String toString() { return name + "/" + taskReferenceName; } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } WorkflowTask that = (WorkflowTask) o; return getStartDelay() == that.getStartDelay() && isOptional() == that.isOptional() && Objects.equals(getName(), that.getName()) && Objects.equals(getTaskReferenceName(), that.getTaskReferenceName()) && Objects.equals(getDescription(), that.getDescription()) && Objects.equals(getInputParameters(), 
that.getInputParameters()) && Objects.equals(getType(), that.getType()) && Objects.equals(getDynamicTaskNameParam(), that.getDynamicTaskNameParam()) && Objects.equals(getCaseValueParam(), that.getCaseValueParam()) && Objects.equals(getEvaluatorType(), that.getEvaluatorType()) && Objects.equals(getExpression(), that.getExpression()) && Objects.equals(getCaseExpression(), that.getCaseExpression()) && Objects.equals(getDecisionCases(), that.getDecisionCases()) && Objects.equals( getDynamicForkJoinTasksParam(), that.getDynamicForkJoinTasksParam()) && Objects.equals(getDynamicForkTasksParam(), that.getDynamicForkTasksParam()) && Objects.equals( getDynamicForkTasksInputParamName(), that.getDynamicForkTasksInputParamName()) && Objects.equals(getDefaultCase(), that.getDefaultCase()) && Objects.equals(getForkTasks(), that.getForkTasks()) && Objects.equals(getSubWorkflowParam(), that.getSubWorkflowParam()) && Objects.equals(getJoinOn(), that.getJoinOn()) && Objects.equals(getSink(), that.getSink()) && Objects.equals(isAsyncComplete(), that.isAsyncComplete()) && Objects.equals(getDefaultExclusiveJoinTask(), that.getDefaultExclusiveJoinTask()) && Objects.equals(getRetryCount(), that.getRetryCount()); } @Override public int hashCode() { return Objects.hash( getName(), getTaskReferenceName(), getDescription(), getInputParameters(), getType(), getDynamicTaskNameParam(), getCaseValueParam(), getCaseExpression(), getEvaluatorType(), getExpression(), getDecisionCases(), getDynamicForkJoinTasksParam(), getDynamicForkTasksParam(), getDynamicForkTasksInputParamName(), getDefaultCase(), getForkTasks(), getStartDelay(), getSubWorkflowParam(), getJoinOn(), getSink(), isAsyncComplete(), isOptional(), getDefaultExclusiveJoinTask(), getRetryCount()); } }
6,920
0
Create_ds/conductor/common/src/main/java/com/netflix/conductor/common/metadata
Create_ds/conductor/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowDefSummary.java
/*
 * Copyright 2020 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.common.metadata.workflow;

import java.util.Objects;

import javax.validation.constraints.NotEmpty;

import com.netflix.conductor.annotations.protogen.ProtoField;
import com.netflix.conductor.annotations.protogen.ProtoMessage;
import com.netflix.conductor.common.constraints.NoSemiColonConstraint;

/**
 * Lightweight summary of a workflow definition: its name, version and creation time.
 *
 * <p>Two summaries are equal when their name and version match; {@code createTime} is
 * deliberately excluded from {@link #equals(Object)} and {@link #hashCode()}. The natural
 * ordering sorts by name first, then by version.
 */
@ProtoMessage
public class WorkflowDefSummary implements Comparable<WorkflowDefSummary> {

    @NotEmpty(message = "WorkflowDef name cannot be null or empty")
    @ProtoField(id = 1)
    @NoSemiColonConstraint(
            message = "Workflow name cannot contain the following set of characters: ':'")
    private String name;

    @ProtoField(id = 2)
    private int version = 1;

    @ProtoField(id = 3)
    private Long createTime;

    /**
     * @return the version
     */
    public int getVersion() {
        return version;
    }

    /**
     * @return the workflow name
     */
    public String getName() {
        return name;
    }

    /**
     * @return the createTime
     */
    public Long getCreateTime() {
        return createTime;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        WorkflowDefSummary that = (WorkflowDefSummary) o;
        // createTime intentionally ignored: identity is (name, version), matching hashCode()
        return getVersion() == that.getVersion() && Objects.equals(getName(), that.getName());
    }

    public void setName(String name) {
        this.name = name;
    }

    public void setVersion(int version) {
        this.version = version;
    }

    public void setCreateTime(Long createTime) {
        this.createTime = createTime;
    }

    @Override
    public int hashCode() {
        return Objects.hash(getName(), getVersion());
    }

    @Override
    public String toString() {
        // Fix: the original omitted the closing quote after the name
        // ("WorkflowDef{name='" + name + ", version=..."), producing unbalanced output.
        return "WorkflowDef{name='" + name + "', version=" + version + "}";
    }

    @Override
    public int compareTo(WorkflowDefSummary o) {
        // Order by name, then by version.
        int res = this.name.compareTo(o.name);
        if (res != 0) {
            return res;
        }
        return Integer.compare(this.version, o.version);
    }
}
6,921
0
Create_ds/conductor/common/src/main/java/com/netflix/conductor/common/metadata
Create_ds/conductor/common/src/main/java/com/netflix/conductor/common/metadata/workflow/RerunWorkflowRequest.java
/*
 * Copyright 2020 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.common.metadata.workflow;

import java.util.Map;

import com.netflix.conductor.annotations.protogen.ProtoField;
import com.netflix.conductor.annotations.protogen.ProtoMessage;

/**
 * Request payload for re-running a workflow, optionally from a specific task, with
 * replacement workflow/task inputs and an optional correlation id.
 */
@ProtoMessage
public class RerunWorkflowRequest {

    @ProtoField(id = 1)
    private String reRunFromWorkflowId;

    @ProtoField(id = 2)
    private Map<String, Object> workflowInput;

    @ProtoField(id = 3)
    private String reRunFromTaskId;

    @ProtoField(id = 4)
    private Map<String, Object> taskInput;

    @ProtoField(id = 5)
    private String correlationId;

    /**
     * @return id of the workflow execution to re-run from
     */
    public String getReRunFromWorkflowId() {
        return reRunFromWorkflowId;
    }

    /**
     * @param reRunFromWorkflowId id of the workflow execution to re-run from
     */
    public void setReRunFromWorkflowId(String reRunFromWorkflowId) {
        this.reRunFromWorkflowId = reRunFromWorkflowId;
    }

    /**
     * @return replacement input for the workflow, if any
     */
    public Map<String, Object> getWorkflowInput() {
        return workflowInput;
    }

    /**
     * @param workflowInput replacement input for the workflow
     */
    public void setWorkflowInput(Map<String, Object> workflowInput) {
        this.workflowInput = workflowInput;
    }

    /**
     * @return id of the task to restart from, if re-running from a specific task
     */
    public String getReRunFromTaskId() {
        return reRunFromTaskId;
    }

    /**
     * @param reRunFromTaskId id of the task to restart from
     */
    public void setReRunFromTaskId(String reRunFromTaskId) {
        this.reRunFromTaskId = reRunFromTaskId;
    }

    /**
     * @return replacement input for the task being re-run, if any
     */
    public Map<String, Object> getTaskInput() {
        return taskInput;
    }

    /**
     * @param taskInput replacement input for the task being re-run
     */
    public void setTaskInput(Map<String, Object> taskInput) {
        this.taskInput = taskInput;
    }

    /**
     * @return the correlation id to associate with the re-run
     */
    public String getCorrelationId() {
        return correlationId;
    }

    /**
     * @param correlationId correlation id to associate with the re-run
     */
    public void setCorrelationId(String correlationId) {
        this.correlationId = correlationId;
    }
}
6,922
0
Create_ds/conductor/common/src/main/java/com/netflix/conductor/common/metadata
Create_ds/conductor/common/src/main/java/com/netflix/conductor/common/metadata/events/EventExecution.java
/*
 * Copyright 2020 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.common.metadata.events;

import java.util.HashMap;
import java.util.Map;
import java.util.Objects;

import com.netflix.conductor.annotations.protogen.ProtoEnum;
import com.netflix.conductor.annotations.protogen.ProtoField;
import com.netflix.conductor.annotations.protogen.ProtoMessage;
import com.netflix.conductor.common.metadata.events.EventHandler.Action;

/**
 * Record of a single execution of an event handler action: which event and handler fired,
 * when, the action taken, its status and any output produced.
 */
@ProtoMessage
public class EventExecution {

    @ProtoEnum
    public enum Status {
        IN_PROGRESS,
        COMPLETED,
        FAILED,
        SKIPPED
    }

    @ProtoField(id = 1)
    private String id;

    @ProtoField(id = 2)
    private String messageId;

    @ProtoField(id = 3)
    private String name;

    @ProtoField(id = 4)
    private String event;

    @ProtoField(id = 5)
    private long created;

    @ProtoField(id = 6)
    private Status status;

    @ProtoField(id = 7)
    private Action.Type action;

    @ProtoField(id = 8)
    private Map<String, Object> output = new HashMap<>();

    public EventExecution() {}

    public EventExecution(String id, String messageId) {
        this.id = id;
        this.messageId = messageId;
    }

    /**
     * @return the id
     */
    public String getId() {
        return id;
    }

    /**
     * @param id the id to set
     */
    public void setId(String id) {
        this.id = id;
    }

    /**
     * @return the messageId
     */
    public String getMessageId() {
        return messageId;
    }

    /**
     * @param messageId the messageId to set
     */
    public void setMessageId(String messageId) {
        this.messageId = messageId;
    }

    /**
     * @return the name
     */
    public String getName() {
        return name;
    }

    /**
     * @param name the name to set
     */
    public void setName(String name) {
        this.name = name;
    }

    /**
     * @return the event
     */
    public String getEvent() {
        return event;
    }

    /**
     * @param event the event to set
     */
    public void setEvent(String event) {
        this.event = event;
    }

    /**
     * @return the created
     */
    public long getCreated() {
        return created;
    }

    /**
     * @param created the created to set
     */
    public void setCreated(long created) {
        this.created = created;
    }

    /**
     * @return the status
     */
    public Status getStatus() {
        return status;
    }

    /**
     * @param status the status to set
     */
    public void setStatus(Status status) {
        this.status = status;
    }

    /**
     * @return the action
     */
    public Action.Type getAction() {
        return action;
    }

    /**
     * @param action the action to set
     */
    public void setAction(Action.Type action) {
        this.action = action;
    }

    /**
     * @return the output
     */
    public Map<String, Object> getOutput() {
        return output;
    }

    /**
     * @param output the output to set
     */
    public void setOutput(Map<String, Object> output) {
        this.output = output;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        EventExecution other = (EventExecution) o;
        return created == other.created
                && Objects.equals(id, other.id)
                && Objects.equals(messageId, other.messageId)
                && Objects.equals(name, other.name)
                && Objects.equals(event, other.event)
                && status == other.status
                && action == other.action
                && Objects.equals(output, other.output);
    }

    @Override
    public int hashCode() {
        return Objects.hash(id, messageId, name, event, created, status, action, output);
    }
}
6,923
0
Create_ds/conductor/common/src/main/java/com/netflix/conductor/common/metadata
Create_ds/conductor/common/src/main/java/com/netflix/conductor/common/metadata/events/EventHandler.java
/*
 * Copyright 2020 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.common.metadata.events;

import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;

import javax.validation.Valid;
import javax.validation.constraints.NotEmpty;
import javax.validation.constraints.NotNull;

import com.netflix.conductor.annotations.protogen.ProtoEnum;
import com.netflix.conductor.annotations.protogen.ProtoField;
import com.netflix.conductor.annotations.protogen.ProtoMessage;

import com.google.protobuf.Any;

import io.swagger.v3.oas.annotations.Hidden;

/** Defines an event handler */
@ProtoMessage
public class EventHandler {

    @ProtoField(id = 1)
    @NotEmpty(message = "Missing event handler name")
    private String name;

    @ProtoField(id = 2)
    @NotEmpty(message = "Missing event location")
    private String event;

    @ProtoField(id = 3)
    private String condition;

    @ProtoField(id = 4)
    @NotNull
    @NotEmpty(message = "No actions specified. Please specify at-least one action")
    private List<@Valid Action> actions = new LinkedList<>();

    @ProtoField(id = 5)
    private boolean active;

    @ProtoField(id = 6)
    private String evaluatorType;

    public EventHandler() {}

    /**
     * @return the name MUST be unique within a conductor instance
     */
    public String getName() {
        return name;
    }

    /**
     * @param name the name to set
     */
    public void setName(String name) {
        this.name = name;
    }

    /**
     * @return the event
     */
    public String getEvent() {
        return event;
    }

    /**
     * @param event the event to set
     */
    public void setEvent(String event) {
        this.event = event;
    }

    /**
     * @return the condition
     */
    public String getCondition() {
        return condition;
    }

    /**
     * @param condition the condition to set
     */
    public void setCondition(String condition) {
        this.condition = condition;
    }

    /**
     * @return the actions
     */
    public List<Action> getActions() {
        return actions;
    }

    /**
     * @param actions the actions to set
     */
    public void setActions(List<Action> actions) {
        this.actions = actions;
    }

    /**
     * @return the active
     */
    public boolean isActive() {
        return active;
    }

    /**
     * @param active if set to false, the event handler is deactivated
     */
    public void setActive(boolean active) {
        this.active = active;
    }

    /**
     * @return the evaluator type
     */
    public String getEvaluatorType() {
        return evaluatorType;
    }

    /**
     * @param evaluatorType the evaluatorType to set
     */
    public void setEvaluatorType(String evaluatorType) {
        this.evaluatorType = evaluatorType;
    }

    /** One action taken when the handler fires: start a workflow, or complete/fail a task. */
    @ProtoMessage
    public static class Action {

        @ProtoEnum
        public enum Type {
            start_workflow,
            complete_task,
            fail_task
        }

        @ProtoField(id = 1)
        private Type action;

        @ProtoField(id = 2)
        private StartWorkflow start_workflow;

        @ProtoField(id = 3)
        private TaskDetails complete_task;

        @ProtoField(id = 4)
        private TaskDetails fail_task;

        @ProtoField(id = 5)
        private boolean expandInlineJSON;

        /**
         * @return the action
         */
        public Type getAction() {
            return action;
        }

        /**
         * @param action the action to set
         */
        public void setAction(Type action) {
            this.action = action;
        }

        /**
         * @return the start_workflow
         */
        public StartWorkflow getStart_workflow() {
            return start_workflow;
        }

        /**
         * @param start_workflow the start_workflow to set
         */
        public void setStart_workflow(StartWorkflow start_workflow) {
            this.start_workflow = start_workflow;
        }

        /**
         * @return the complete_task
         */
        public TaskDetails getComplete_task() {
            return complete_task;
        }

        /**
         * @param complete_task the complete_task to set
         */
        public void setComplete_task(TaskDetails complete_task) {
            this.complete_task = complete_task;
        }

        /**
         * @return the fail_task
         */
        public TaskDetails getFail_task() {
            return fail_task;
        }

        /**
         * @param fail_task the fail_task to set
         */
        public void setFail_task(TaskDetails fail_task) {
            this.fail_task = fail_task;
        }

        /**
         * @param expandInlineJSON when set to true, the in-lined JSON strings are expanded to a
         *     full json document
         */
        public void setExpandInlineJSON(boolean expandInlineJSON) {
            this.expandInlineJSON = expandInlineJSON;
        }

        /**
         * @return true if the json strings within the payload should be expanded.
         */
        public boolean isExpandInlineJSON() {
            return expandInlineJSON;
        }
    }

    /** Target task details used by the complete_task / fail_task actions. */
    @ProtoMessage
    public static class TaskDetails {

        @ProtoField(id = 1)
        private String workflowId;

        @ProtoField(id = 2)
        private String taskRefName;

        @ProtoField(id = 3)
        private Map<String, Object> output = new HashMap<>();

        @ProtoField(id = 4)
        @Hidden
        private Any outputMessage;

        @ProtoField(id = 5)
        private String taskId;

        /**
         * @return the workflowId
         */
        public String getWorkflowId() {
            return workflowId;
        }

        /**
         * @param workflowId the workflowId to set
         */
        public void setWorkflowId(String workflowId) {
            this.workflowId = workflowId;
        }

        /**
         * @return the taskRefName
         */
        public String getTaskRefName() {
            return taskRefName;
        }

        /**
         * @param taskRefName the taskRefName to set
         */
        public void setTaskRefName(String taskRefName) {
            this.taskRefName = taskRefName;
        }

        /**
         * @return the output
         */
        public Map<String, Object> getOutput() {
            return output;
        }

        /**
         * @param output the output to set
         */
        public void setOutput(Map<String, Object> output) {
            this.output = output;
        }

        public Any getOutputMessage() {
            return outputMessage;
        }

        public void setOutputMessage(Any outputMessage) {
            this.outputMessage = outputMessage;
        }

        /**
         * @return the taskId
         */
        public String getTaskId() {
            return taskId;
        }

        /**
         * @param taskId the taskId to set
         */
        public void setTaskId(String taskId) {
            this.taskId = taskId;
        }
    }

    /** Parameters for the start_workflow action. */
    @ProtoMessage
    public static class StartWorkflow {

        @ProtoField(id = 1)
        private String name;

        @ProtoField(id = 2)
        private Integer version;

        @ProtoField(id = 3)
        private String correlationId;

        @ProtoField(id = 4)
        private Map<String, Object> input = new HashMap<>();

        @ProtoField(id = 5)
        @Hidden
        private Any inputMessage;

        @ProtoField(id = 6)
        private Map<String, String> taskToDomain;

        /**
         * @return the name
         */
        public String getName() {
            return name;
        }

        /**
         * @param name the name to set
         */
        public void setName(String name) {
            this.name = name;
        }

        /**
         * @return the version
         */
        public Integer getVersion() {
            return version;
        }

        /**
         * @param version the version to set
         */
        public void setVersion(Integer version) {
            this.version = version;
        }

        /**
         * @return the correlationId
         */
        public String getCorrelationId() {
            return correlationId;
        }

        /**
         * @param correlationId the correlationId to set
         */
        public void setCorrelationId(String correlationId) {
            this.correlationId = correlationId;
        }

        /**
         * @return the input
         */
        public Map<String, Object> getInput() {
            return input;
        }

        /**
         * @param input the input to set
         */
        public void setInput(Map<String, Object> input) {
            this.input = input;
        }

        public Any getInputMessage() {
            return inputMessage;
        }

        public void setInputMessage(Any inputMessage) {
            this.inputMessage = inputMessage;
        }

        public Map<String, String> getTaskToDomain() {
            return taskToDomain;
        }

        public void setTaskToDomain(Map<String, String> taskToDomain) {
            this.taskToDomain = taskToDomain;
        }
    }
}
6,924
0
Create_ds/conductor/common/src/main/java/com/netflix/conductor/common/metadata
Create_ds/conductor/common/src/main/java/com/netflix/conductor/common/metadata/acl/Permission.java
/*
 * Copyright 2022 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.common.metadata.acl;

import com.netflix.conductor.annotations.protogen.ProtoEnum;

/** Access-control permission levels. */
@ProtoEnum
public enum Permission {
    OWNER,
    OPERATOR
}
6,925
0
Create_ds/conductor/common/src/main/java/com/netflix/conductor/common
Create_ds/conductor/common/src/main/java/com/netflix/conductor/common/validation/ValidationError.java
/*
 * Copyright 2020 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.common.validation;

import java.util.StringJoiner;

/** Captures a validation error that can be returned in {@link ErrorResponse}. */
public class ValidationError {

    private String path;
    private String message;
    private String invalidValue;

    public ValidationError() {}

    public ValidationError(String path, String message, String invalidValue) {
        this.path = path;
        this.message = message;
        this.invalidValue = invalidValue;
    }

    /**
     * @return path of the field that failed validation
     */
    public String getPath() {
        return path;
    }

    /**
     * @return human-readable validation message
     */
    public String getMessage() {
        return message;
    }

    /**
     * @return the offending value, rendered as a string
     */
    public String getInvalidValue() {
        return invalidValue;
    }

    public void setPath(String path) {
        this.path = path;
    }

    public void setMessage(String message) {
        this.message = message;
    }

    public void setInvalidValue(String invalidValue) {
        this.invalidValue = invalidValue;
    }

    @Override
    public String toString() {
        StringJoiner joiner =
                new StringJoiner(", ", ValidationError.class.getSimpleName() + "[", "]");
        joiner.add("path='" + path + "'");
        joiner.add("message='" + message + "'");
        joiner.add("invalidValue='" + invalidValue + "'");
        return joiner.toString();
    }
}
6,926
0
Create_ds/conductor/common/src/main/java/com/netflix/conductor/common
Create_ds/conductor/common/src/main/java/com/netflix/conductor/common/validation/ErrorResponse.java
/*
 * Copyright 2020 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.common.validation;

import java.util.List;

/**
 * Error payload returned to API clients: an HTTP-style status, an application error code,
 * a message, the request instance, a retryable flag and any per-field validation errors.
 */
public class ErrorResponse {

    private int status;
    private String code;
    private String message;
    private String instance;
    private boolean retryable;
    private List<ValidationError> validationErrors;

    /**
     * @return the numeric status of the error
     */
    public int getStatus() {
        return status;
    }

    public void setStatus(int status) {
        this.status = status;
    }

    /**
     * @return the application-specific error code
     */
    public String getCode() {
        return code;
    }

    public void setCode(String code) {
        this.code = code;
    }

    /**
     * @return a human-readable description of the error
     */
    public String getMessage() {
        return message;
    }

    public void setMessage(String message) {
        this.message = message;
    }

    /**
     * @return the request instance the error relates to
     */
    public String getInstance() {
        return instance;
    }

    public void setInstance(String instance) {
        this.instance = instance;
    }

    /**
     * @return true when the failed operation may be safely retried
     */
    public boolean isRetryable() {
        return retryable;
    }

    public void setRetryable(boolean retryable) {
        this.retryable = retryable;
    }

    /**
     * @return per-field validation failures, if any
     */
    public List<ValidationError> getValidationErrors() {
        return validationErrors;
    }

    public void setValidationErrors(List<ValidationError> validationErrors) {
        this.validationErrors = validationErrors;
    }
}
6,927
0
Create_ds/conductor/common/src/main/java/com/netflix/conductor/common
Create_ds/conductor/common/src/main/java/com/netflix/conductor/common/jackson/JsonProtoModule.java
/*
 * Copyright 2021 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.common.jackson;

import java.io.IOException;

import org.springframework.stereotype.Component;

import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.databind.DeserializationContext;
import com.fasterxml.jackson.databind.JsonDeserializer;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.JsonSerializer;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializerProvider;
import com.fasterxml.jackson.databind.module.SimpleModule;
import com.google.protobuf.Any;
import com.google.protobuf.ByteString;
import com.google.protobuf.Message;

/**
 * JsonProtoModule can be registered into an {@link ObjectMapper} to enable the serialization and
 * deserialization of ProtoBuf objects from/to JSON.
 *
 * <p>Right now this module only provides (de)serialization for the {@link Any} ProtoBuf type, as
 * this is the only ProtoBuf object which we're currently exposing through the REST API.
 *
 * <p>Annotated as {@link Component} so Spring can register it with {@link ObjectMapper}
 *
 * @see AnySerializer
 * @see AnyDeserializer
 * @see org.springframework.boot.autoconfigure.jackson.JacksonAutoConfiguration
 */
@Component(JsonProtoModule.NAME)
public class JsonProtoModule extends SimpleModule {

    public static final String NAME = "ConductorJsonProtoModule";

    // JSON field names of the custom Any encoding described on AnySerializer below.
    private static final String JSON_TYPE = "@type";
    private static final String JSON_VALUE = "@value";

    /**
     * AnySerializer converts a ProtoBuf {@link Any} object into its JSON representation.
     *
     * <p>This is <b>not</b> a canonical ProtoBuf JSON representation. Let us explain what we're
     * trying to accomplish here:
     *
     * <p>The {@link Any} ProtoBuf message is a type in the PB standard library that can store any
     * other arbitrary ProtoBuf message in a type-safe way, even when the server has no knowledge of
     * the schema of the stored message.
     *
     * <p>It accomplishes this by storing a tuple of information: an URL-like type declaration for
     * the stored message, and the serialized binary encoding of the stored message itself. Language
     * specific implementations of ProtoBuf provide helper methods to encode and decode arbitrary
     * messages into an {@link Any} object ({@link Any#pack(Message)} in Java).
     *
     * <p>We want to expose these {@link Any} objects in the REST API because they've been
     * introduced as part of the new GRPC interface to Conductor, but unfortunately we cannot encode
     * them using their canonical ProtoBuf JSON encoding. According to the docs:
     *
     * <p>The JSON representation of an `Any` value uses the regular representation of the
     * deserialized, embedded message, with an additional field `@type` which contains the type URL.
     * Example:
     *
     * <p>package google.profile; message Person { string first_name = 1; string last_name = 2; } {
     * "@type": "type.googleapis.com/google.profile.Person", "firstName": <string>, "lastName":
     * <string> }
     *
     * <p>In order to accomplish this representation, the PB-JSON encoder needs to have knowledge of
     * all the ProtoBuf messages that could be serialized inside the {@link Any} message. This is
     * not possible to accomplish inside the Conductor server, which is simply passing through
     * arbitrary payloads from/to clients.
     *
     * <p>Consequently, to actually expose the Message through the REST API, we must create a custom
     * encoding that contains the raw data of the serialized message, as we are not able to
     * deserialize it on the server. We simply return a dictionary with '@type' and '@value' keys,
     * where '@type' is identical to the canonical representation, but '@value' contains a base64
     * encoded string with the binary data of the serialized message.
     *
     * <p>Since all the provided Conductor clients are required to know this encoding, it's always
     * possible to re-build the original {@link Any} message regardless of the client's language.
     *
     * <p>{@see AnyDeserializer}
     */
    @SuppressWarnings("InnerClassMayBeStatic")
    protected class AnySerializer extends JsonSerializer<Any> {

        @Override
        public void serialize(Any value, JsonGenerator jgen, SerializerProvider provider)
                throws IOException {
            // Emits {"@type": <type URL>, "@value": <base64 of the raw serialized message>}.
            // writeBinaryField base64-encodes the byte array.
            jgen.writeStartObject();
            jgen.writeStringField(JSON_TYPE, value.getTypeUrl());
            jgen.writeBinaryField(JSON_VALUE, value.getValue().toByteArray());
            jgen.writeEndObject();
        }
    }

    /**
     * AnyDeserializer converts the custom JSON representation of an {@link Any} value into its
     * original form.
     *
     * <p>{@see AnySerializer} for details on this representation.
     */
    @SuppressWarnings("InnerClassMayBeStatic")
    protected class AnyDeserializer extends JsonDeserializer<Any> {

        @Override
        public Any deserialize(JsonParser p, DeserializationContext ctxt) throws IOException {
            JsonNode root = p.getCodec().readTree(p);
            JsonNode type = root.get(JSON_TYPE);
            JsonNode value = root.get(JSON_VALUE);

            // NOTE(review): the code below relies on reportMappingException always throwing;
            // if it ever returned normally, type/value could be dereferenced while null or
            // non-textual — confirm against the Jackson version in use.
            if (type == null || !type.isTextual()) {
                ctxt.reportMappingException(
                        "invalid '@type' field when deserializing ProtoBuf Any object");
            }

            if (value == null || !value.isTextual()) {
                ctxt.reportMappingException(
                        "invalid '@value' field when deserializing ProtoBuf Any object");
            }

            // binaryValue() base64-decodes the textual '@value' node back into raw bytes.
            return Any.newBuilder()
                    .setTypeUrl(type.textValue())
                    .setValue(ByteString.copyFrom(value.binaryValue()))
                    .build();
        }
    }

    public JsonProtoModule() {
        super(NAME);
        addSerializer(Any.class, new AnySerializer());
        addDeserializer(Any.class, new AnyDeserializer());
    }
}
6,928
0
Create_ds/conductor/es6-persistence/src/test/java/com/netflix/conductor/es6/dao
Create_ds/conductor/es6-persistence/src/test/java/com/netflix/conductor/es6/dao/index/TestElasticSearchRestDAOV6.java
/*
 * Copyright 2022 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.es6.dao.index;

import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import java.util.Set;
import java.util.TimeZone;
import java.util.UUID;
import java.util.function.Supplier;

import org.junit.Test;

import com.netflix.conductor.common.metadata.events.EventExecution;
import com.netflix.conductor.common.metadata.events.EventHandler;
import com.netflix.conductor.common.metadata.tasks.TaskExecLog;
import com.netflix.conductor.common.run.TaskSummary;
import com.netflix.conductor.common.run.Workflow.WorkflowStatus;
import com.netflix.conductor.common.run.WorkflowSummary;
import com.netflix.conductor.core.events.queue.Message;
import com.netflix.conductor.es6.utils.TestUtils;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.google.common.collect.ImmutableMap;

import static org.junit.Assert.*;

/**
 * Integration tests for the ES6 REST-client index DAO. The base class starts an Elasticsearch
 * container and provides {@code indexDAO} and {@code objectMapper}. Because indexing is
 * eventually consistent, most tests poll via {@code tryFindResults}/{@code tryGetCount} (up to
 * ~2s) instead of asserting immediately.
 */
public class TestElasticSearchRestDAOV6 extends ElasticSearchRestDaoBaseTest {

    // NOTE(review): SimpleDateFormat is not thread-safe; shared statically here, which is only
    // safe because these tests run single-threaded. "yyyyMMWW" matches the DAO's dated-index
    // naming — TODO confirm against the DAO's own formatter.
    private static final SimpleDateFormat SIMPLE_DATE_FORMAT = new SimpleDateFormat("yyyyMMWW");

    // Index-name fragments, combined as <prefix>_<docType>[_<date>] below.
    private static final String INDEX_PREFIX = "conductor";
    private static final String WORKFLOW_DOC_TYPE = "workflow";
    private static final String TASK_DOC_TYPE = "task";
    private static final String MSG_DOC_TYPE = "message";
    private static final String EVENT_DOC_TYPE = "event";
    private static final String LOG_INDEX_PREFIX = "task_log";

    // True when the given index exists (HEAD request on the index resource).
    private boolean indexExists(final String index) throws IOException {
        return indexDAO.doesResourceExist("/" + index);
    }

    // True when the given mapping type exists under the index.
    private boolean doesMappingExist(final String index, final String mappingName)
            throws IOException {
        return indexDAO.doesResourceExist("/" + index + "/_mapping/" + mappingName);
    }

    /** Verifies that DAO setup created all expected indices and mappings. */
    @Test
    public void assertInitialSetup() throws IOException {
        // Dated index names are generated in GMT by the DAO, so format in GMT here too.
        SIMPLE_DATE_FORMAT.setTimeZone(TimeZone.getTimeZone("GMT"));

        String workflowIndex = INDEX_PREFIX + "_" + WORKFLOW_DOC_TYPE;
        String taskIndex = INDEX_PREFIX + "_" + TASK_DOC_TYPE;

        String taskLogIndex =
                INDEX_PREFIX + "_" + LOG_INDEX_PREFIX + "_" + SIMPLE_DATE_FORMAT.format(new Date());
        String messageIndex =
                INDEX_PREFIX + "_" + MSG_DOC_TYPE + "_" + SIMPLE_DATE_FORMAT.format(new Date());
        String eventIndex =
                INDEX_PREFIX + "_" + EVENT_DOC_TYPE + "_" + SIMPLE_DATE_FORMAT.format(new Date());

        assertTrue("Index 'conductor_workflow' should exist", indexExists("conductor_workflow"));
        assertTrue("Index 'conductor_task' should exist", indexExists("conductor_task"));

        assertTrue("Index '" + taskLogIndex + "' should exist", indexExists(taskLogIndex));
        assertTrue("Index '" + messageIndex + "' should exist", indexExists(messageIndex));
        assertTrue("Index '" + eventIndex + "' should exist", indexExists(eventIndex));

        assertTrue(
                "Mapping 'workflow' for index 'conductor' should exist",
                doesMappingExist(workflowIndex, WORKFLOW_DOC_TYPE));
        assertTrue(
                "Mapping 'task' for index 'conductor' should exist",
                doesMappingExist(taskIndex, TASK_DOC_TYPE));
    }

    /** Synchronous indexing: every summary field must round-trip into the index. */
    @Test
    public void shouldIndexWorkflow() throws JsonProcessingException {
        WorkflowSummary workflowSummary =
                TestUtils.loadWorkflowSnapshot(objectMapper, "workflow_summary");
        indexDAO.indexWorkflow(workflowSummary);

        assertWorkflowSummary(workflowSummary.getWorkflowId(), workflowSummary);
    }

    /** Asynchronous indexing: same round-trip, via the async API (blocking on the future). */
    @Test
    public void shouldIndexWorkflowAsync() throws Exception {
        WorkflowSummary workflowSummary =
                TestUtils.loadWorkflowSnapshot(objectMapper, "workflow_summary");
        indexDAO.asyncIndexWorkflow(workflowSummary).get();

        assertWorkflowSummary(workflowSummary.getWorkflowId(), workflowSummary);
    }

    /** Indexes then removes a workflow; search must stop returning it. */
    @Test
    public void shouldRemoveWorkflow() {
        WorkflowSummary workflowSummary =
                TestUtils.loadWorkflowSnapshot(objectMapper, "workflow_summary");
        indexDAO.indexWorkflow(workflowSummary);

        // wait for workflow to be indexed
        List<String> workflows =
                tryFindResults(() -> searchWorkflows(workflowSummary.getWorkflowId()), 1);
        assertEquals(1, workflows.size());

        indexDAO.removeWorkflow(workflowSummary.getWorkflowId());

        workflows = tryFindResults(() -> searchWorkflows(workflowSummary.getWorkflowId()), 0);

        assertTrue("Workflow was not removed.", workflows.isEmpty());
    }

    /** Same as {@link #shouldRemoveWorkflow()} but through the async removal API. */
    @Test
    public void shouldAsyncRemoveWorkflow() throws Exception {
        WorkflowSummary workflowSummary =
                TestUtils.loadWorkflowSnapshot(objectMapper, "workflow_summary");
        indexDAO.indexWorkflow(workflowSummary);

        // wait for workflow to be indexed
        List<String> workflows =
                tryFindResults(() -> searchWorkflows(workflowSummary.getWorkflowId()), 1);
        assertEquals(1, workflows.size());

        indexDAO.asyncRemoveWorkflow(workflowSummary.getWorkflowId()).get();

        workflows = tryFindResults(() -> searchWorkflows(workflowSummary.getWorkflowId()), 0);

        assertTrue("Workflow was not removed.", workflows.isEmpty());
    }

    /** Partial update of a single field ("status") must be reflected in the document. */
    @Test
    public void shouldUpdateWorkflow() throws JsonProcessingException {
        WorkflowSummary workflowSummary =
                TestUtils.loadWorkflowSnapshot(objectMapper, "workflow_summary");
        indexDAO.indexWorkflow(workflowSummary);

        indexDAO.updateWorkflow(
                workflowSummary.getWorkflowId(),
                new String[] {"status"},
                new Object[] {WorkflowStatus.COMPLETED});

        workflowSummary.setStatus(WorkflowStatus.COMPLETED);
        assertWorkflowSummary(workflowSummary.getWorkflowId(), workflowSummary);
    }

    /** Same as {@link #shouldUpdateWorkflow()} but async, updating status to FAILED. */
    @Test
    public void shouldAsyncUpdateWorkflow() throws Exception {
        WorkflowSummary workflowSummary =
                TestUtils.loadWorkflowSnapshot(objectMapper, "workflow_summary");
        indexDAO.indexWorkflow(workflowSummary);

        indexDAO.asyncUpdateWorkflow(
                        workflowSummary.getWorkflowId(),
                        new String[] {"status"},
                        new Object[] {WorkflowStatus.FAILED})
                .get();

        workflowSummary.setStatus(WorkflowStatus.FAILED);
        assertWorkflowSummary(workflowSummary.getWorkflowId(), workflowSummary);
    }

    /** Indexed task must be findable by its workflow id. */
    @Test
    public void shouldIndexTask() {
        TaskSummary taskSummary = TestUtils.loadTaskSnapshot(objectMapper, "task_summary");
        indexDAO.indexTask(taskSummary);

        List<String> tasks = tryFindResults(() -> searchTasks(taskSummary));

        assertEquals(taskSummary.getTaskId(), tasks.get(0));
    }

    /** Same as {@link #shouldIndexTask()} via the async API. */
    @Test
    public void shouldIndexTaskAsync() throws Exception {
        TaskSummary taskSummary = TestUtils.loadTaskSnapshot(objectMapper, "task_summary");
        indexDAO.asyncIndexTask(taskSummary).get();

        List<String> tasks = tryFindResults(() -> searchTasks(taskSummary));

        assertEquals(taskSummary.getTaskId(), tasks.get(0));
    }

    /** A task tied to an existing workflow can be removed. */
    @Test
    public void shouldRemoveTask() {
        WorkflowSummary workflowSummary =
                TestUtils.loadWorkflowSnapshot(objectMapper, "workflow_summary");
        indexDAO.indexWorkflow(workflowSummary);

        // wait for workflow to be indexed
        tryFindResults(() -> searchWorkflows(workflowSummary.getWorkflowId()), 1);

        TaskSummary taskSummary =
                TestUtils.loadTaskSnapshot(
                        objectMapper, "task_summary", workflowSummary.getWorkflowId());
        indexDAO.indexTask(taskSummary);

        // Wait for the task to be indexed
        List<String> tasks = tryFindResults(() -> searchTasks(taskSummary), 1);

        indexDAO.removeTask(workflowSummary.getWorkflowId(), taskSummary.getTaskId());

        tasks = tryFindResults(() -> searchTasks(taskSummary), 0);

        assertTrue("Task was not removed.", tasks.isEmpty());
    }

    /** Same as {@link #shouldRemoveTask()} via the async API. */
    @Test
    public void shouldAsyncRemoveTask() throws Exception {
        WorkflowSummary workflowSummary =
                TestUtils.loadWorkflowSnapshot(objectMapper, "workflow_summary");
        indexDAO.indexWorkflow(workflowSummary);

        // wait for workflow to be indexed
        tryFindResults(() -> searchWorkflows(workflowSummary.getWorkflowId()), 1);

        TaskSummary taskSummary =
                TestUtils.loadTaskSnapshot(
                        objectMapper, "task_summary", workflowSummary.getWorkflowId());
        indexDAO.indexTask(taskSummary);

        // Wait for the task to be indexed
        List<String> tasks = tryFindResults(() -> searchTasks(taskSummary), 1);

        indexDAO.asyncRemoveTask(workflowSummary.getWorkflowId(), taskSummary.getTaskId()).get();

        tasks = tryFindResults(() -> searchTasks(taskSummary), 0);

        assertTrue("Task was not removed.", tasks.isEmpty());
    }

    /** Removal with a non-matching workflow id must be a no-op: the task stays indexed. */
    @Test
    public void shouldNotRemoveTaskWhenNotAssociatedWithWorkflow() {
        TaskSummary taskSummary = TestUtils.loadTaskSnapshot(objectMapper, "task_summary");
        indexDAO.indexTask(taskSummary);

        // Wait for the task to be indexed
        List<String> tasks = tryFindResults(() -> searchTasks(taskSummary), 1);

        indexDAO.removeTask("InvalidWorkflow", taskSummary.getTaskId());

        tasks = tryFindResults(() -> searchTasks(taskSummary), 0);

        assertFalse("Task was removed.", tasks.isEmpty());
    }

    /** Async variant of the no-op removal test above. */
    @Test
    public void shouldNotAsyncRemoveTaskWhenNotAssociatedWithWorkflow() throws Exception {
        TaskSummary taskSummary = TestUtils.loadTaskSnapshot(objectMapper, "task_summary");
        indexDAO.indexTask(taskSummary);

        // Wait for the task to be indexed
        List<String> tasks = tryFindResults(() -> searchTasks(taskSummary), 1);

        indexDAO.asyncRemoveTask("InvalidWorkflow", taskSummary.getTaskId()).get();

        tasks = tryFindResults(() -> searchTasks(taskSummary), 0);

        assertFalse("Task was removed.", tasks.isEmpty());
    }

    /** All execution logs added for a task must be retrievable. */
    @Test
    public void shouldAddTaskExecutionLogs() {
        List<TaskExecLog> logs = new ArrayList<>();
        String taskId = uuid();
        logs.add(createLog(taskId, "log1"));
        logs.add(createLog(taskId, "log2"));
        logs.add(createLog(taskId, "log3"));

        indexDAO.addTaskExecutionLogs(logs);

        List<TaskExecLog> indexedLogs =
                tryFindResults(() -> indexDAO.getTaskExecutionLogs(taskId), 3);

        assertEquals(3, indexedLogs.size());

        assertTrue("Not all logs was indexed", indexedLogs.containsAll(logs));
    }

    /** Same as {@link #shouldAddTaskExecutionLogs()} via the async API. */
    @Test
    public void shouldAddTaskExecutionLogsAsync() throws Exception {
        List<TaskExecLog> logs = new ArrayList<>();
        String taskId = uuid();
        logs.add(createLog(taskId, "log1"));
        logs.add(createLog(taskId, "log2"));
        logs.add(createLog(taskId, "log3"));

        indexDAO.asyncAddTaskExecutionLogs(logs).get();

        List<TaskExecLog> indexedLogs =
                tryFindResults(() -> indexDAO.getTaskExecutionLogs(taskId), 3);

        assertEquals(3, indexedLogs.size());

        assertTrue("Not all logs was indexed", indexedLogs.containsAll(logs));
    }

    /** Messages added to a queue must be retrievable from it. */
    @Test
    public void shouldAddMessage() {
        String queue = "queue";
        Message message1 = new Message(uuid(), "payload1", null);
        Message message2 = new Message(uuid(), "payload2", null);

        indexDAO.addMessage(queue, message1);
        indexDAO.addMessage(queue, message2);

        List<Message> indexedMessages = tryFindResults(() -> indexDAO.getMessages(queue), 2);

        assertEquals(2, indexedMessages.size());

        assertTrue(
                "Not all messages was indexed",
                indexedMessages.containsAll(Arrays.asList(message1, message2)));
    }

    /** Event executions recorded for an event must be retrievable. */
    @Test
    public void shouldAddEventExecution() {
        String event = "event";
        EventExecution execution1 = createEventExecution(event);
        EventExecution execution2 = createEventExecution(event);

        indexDAO.addEventExecution(execution1);
        indexDAO.addEventExecution(execution2);

        List<EventExecution> indexedExecutions =
                tryFindResults(() -> indexDAO.getEventExecutions(event), 2);

        assertEquals(2, indexedExecutions.size());

        assertTrue(
                "Not all event executions was indexed",
                indexedExecutions.containsAll(Arrays.asList(execution1, execution2)));
    }

    /** Same as {@link #shouldAddEventExecution()} via the async API (distinct event name). */
    @Test
    public void shouldAsyncAddEventExecution() throws Exception {
        String event = "event2";
        EventExecution execution1 = createEventExecution(event);
        EventExecution execution2 = createEventExecution(event);

        indexDAO.asyncAddEventExecution(execution1).get();
        indexDAO.asyncAddEventExecution(execution2).get();

        List<EventExecution> indexedExecutions =
                tryFindResults(() -> indexDAO.getEventExecutions(event), 2);

        assertEquals(2, indexedExecutions.size());

        assertTrue(
                "Not all event executions was indexed",
                indexedExecutions.containsAll(Arrays.asList(execution1, execution2)));
    }

    /** The loaded index template must match the expected (prefix-substituted) resource. */
    @Test
    public void shouldAddIndexPrefixToIndexTemplate() throws Exception {
        String json = TestUtils.loadJsonResource("expected_template_task_log");

        String content = indexDAO.loadTypeMappingSource("/template_task_log.json");

        assertEquals(json, content);
    }

    /** Count query must reflect all indexed workflows (count > ES default page of 10k-safe). */
    @Test
    public void shouldCountWorkflows() {
        int counts = 1100;
        for (int i = 0; i < counts; i++) {
            WorkflowSummary workflowSummary =
                    TestUtils.loadWorkflowSnapshot(objectMapper, "workflow_summary");
            indexDAO.indexWorkflow(workflowSummary);
        }

        // wait for workflow to be indexed
        long result = tryGetCount(() -> getWorkflowCount("template_workflow", "RUNNING"), counts);
        assertEquals(counts, result);
    }

    /** Full summary objects returned by searchWorkflowSummary must equal what was indexed. */
    @Test
    public void shouldFindWorkflow() {
        WorkflowSummary workflowSummary =
                TestUtils.loadWorkflowSnapshot(objectMapper, "workflow_summary");
        indexDAO.indexWorkflow(workflowSummary);

        // wait for workflow to be indexed
        List<WorkflowSummary> workflows =
                tryFindResults(() -> searchWorkflowSummary(workflowSummary.getWorkflowId()), 1);

        assertEquals(1, workflows.size());
        assertEquals(workflowSummary, workflows.get(0));
    }

    /** Full task summaries returned by searchTaskSummary must equal what was indexed. */
    @Test
    public void shouldFindTask() {
        TaskSummary taskSummary = TestUtils.loadTaskSnapshot(objectMapper, "task_summary");
        indexDAO.indexTask(taskSummary);

        List<TaskSummary> tasks = tryFindResults(() -> searchTaskSummary(taskSummary));

        assertEquals(1, tasks.size());
        assertEquals(taskSummary, tasks.get(0));
    }

    // Polls countFunction up to 20 times (100ms apart) until it returns resultsCount;
    // returns the last observed value either way.
    private long tryGetCount(Supplier<Long> countFunction, int resultsCount) {
        long result = 0;
        for (int i = 0; i < 20; i++) {
            result = countFunction.get();
            if (result == resultsCount) {
                return result;
            }
            try {
                Thread.sleep(100);
            } catch (InterruptedException e) {
                throw new RuntimeException(e.getMessage(), e);
            }
        }
        return result;
    }

    // Get total workflow counts given the name and status
    private long getWorkflowCount(String workflowName, String status) {
        return indexDAO.getWorkflowCount(
                "status=\"" + status + "\" AND workflowType=\"" + workflowName + "\"", "*");
    }

    // Asserts, field by field, that the indexed document matches the given summary.
    // All DAO reads come back as strings, hence the String.valueOf conversions.
    private void assertWorkflowSummary(String workflowId, WorkflowSummary summary)
            throws JsonProcessingException {
        assertEquals(summary.getWorkflowType(), indexDAO.get(workflowId, "workflowType"));
        assertEquals(String.valueOf(summary.getVersion()), indexDAO.get(workflowId, "version"));
        assertEquals(summary.getWorkflowId(), indexDAO.get(workflowId, "workflowId"));
        assertEquals(summary.getCorrelationId(), indexDAO.get(workflowId, "correlationId"));
        assertEquals(summary.getStartTime(), indexDAO.get(workflowId, "startTime"));
        assertEquals(summary.getUpdateTime(), indexDAO.get(workflowId, "updateTime"));
        assertEquals(summary.getEndTime(), indexDAO.get(workflowId, "endTime"));
        assertEquals(summary.getStatus().name(), indexDAO.get(workflowId, "status"));
        assertEquals(summary.getInput(), indexDAO.get(workflowId, "input"));
        assertEquals(summary.getOutput(), indexDAO.get(workflowId, "output"));
        assertEquals(
                summary.getReasonForIncompletion(),
                indexDAO.get(workflowId, "reasonForIncompletion"));
        assertEquals(
                String.valueOf(summary.getExecutionTime()),
                indexDAO.get(workflowId, "executionTime"));
        assertEquals(summary.getEvent(), indexDAO.get(workflowId, "event"));
        assertEquals(
                summary.getFailedReferenceTaskNames(),
                indexDAO.get(workflowId, "failedReferenceTaskNames"));
        assertEquals(
                summary.getFailedTaskNames(),
                objectMapper.readValue(indexDAO.get(workflowId, "failedTaskNames"), Set.class));
    }

    // Convenience overload: poll until exactly one result is available.
    private <T> List<T> tryFindResults(Supplier<List<T>> searchFunction) {
        return tryFindResults(searchFunction, 1);
    }

    // Polls searchFunction up to 20 times (100ms apart) until it yields resultsCount results;
    // returns the last result list either way (callers assert on it).
    private <T> List<T> tryFindResults(Supplier<List<T>> searchFunction, int resultsCount) {
        List<T> result = Collections.emptyList();
        for (int i = 0; i < 20; i++) {
            result = searchFunction.get();
            if (result.size() == resultsCount) {
                return result;
            }
            try {
                Thread.sleep(100);
            } catch (InterruptedException e) {
                throw new RuntimeException(e.getMessage(), e);
            }
        }
        return result;
    }

    // Workflow ids matching the given workflow id (free-text query on workflowId).
    private List<String> searchWorkflows(String workflowId) {
        return indexDAO.searchWorkflows(
                        "", "workflowId:\"" + workflowId + "\"", 0, 100, Collections.emptyList())
                .getResults();
    }

    // Full workflow summaries matching the given workflow id.
    private List<WorkflowSummary> searchWorkflowSummary(String workflowId) {
        return indexDAO.searchWorkflowSummary(
                        "", "workflowId:\"" + workflowId + "\"", 0, 100, Collections.emptyList())
                .getResults();
    }

    // NOTE(review): currently unused; kept for parity with the transport-client test class.
    private List<String> searchWorkflows(String workflowName, String status) {
        List<String> sortOptions = new ArrayList<>();
        sortOptions.add("startTime:DESC");
        return indexDAO.searchWorkflows(
                        "status=\"" + status + "\" AND workflowType=\"" + workflowName + "\"",
                        "*",
                        0,
                        1000,
                        sortOptions)
                .getResults();
    }

    // Task ids belonging to the summary's workflow.
    private List<String> searchTasks(TaskSummary taskSummary) {
        return indexDAO.searchTasks(
                        "",
                        "workflowId:\"" + taskSummary.getWorkflowId() + "\"",
                        0,
                        100,
                        Collections.emptyList())
                .getResults();
    }

    // Full task summaries belonging to the summary's workflow.
    private List<TaskSummary> searchTaskSummary(TaskSummary taskSummary) {
        return indexDAO.searchTaskSummary(
                        "",
                        "workflowId:\"" + taskSummary.getWorkflowId() + "\"",
                        0,
                        100,
                        Collections.emptyList())
                .getResults();
    }

    // Builds an execution log line for the given task.
    private TaskExecLog createLog(String taskId, String log) {
        TaskExecLog taskExecLog = new TaskExecLog(log);
        taskExecLog.setTaskId(taskId);
        return taskExecLog;
    }

    // Builds a COMPLETED event execution with random ids and a fixed output payload.
    private EventExecution createEventExecution(String event) {
        EventExecution execution = new EventExecution(uuid(), uuid());
        execution.setName("name");
        execution.setEvent(event);
        execution.setCreated(System.currentTimeMillis());
        execution.setStatus(EventExecution.Status.COMPLETED);
        execution.setAction(EventHandler.Action.Type.start_workflow);
        execution.setOutput(ImmutableMap.of("a", 1, "b", 2, "c", 3));
        return execution;
    }

    private String uuid() {
        return UUID.randomUUID().toString();
    }
}
6,929
0
Create_ds/conductor/es6-persistence/src/test/java/com/netflix/conductor/es6/dao
Create_ds/conductor/es6-persistence/src/test/java/com/netflix/conductor/es6/dao/index/ElasticSearchDaoBaseTest.java
/*
 * Copyright 2022 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.es6.dao.index;

import java.net.InetAddress;
import java.util.concurrent.ExecutionException;

import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest;
import org.elasticsearch.client.transport.TransportClient;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.transport.client.PreBuiltTransportClient;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;

import org.springframework.retry.support.RetryTemplate;

/**
 * Base fixture for tests exercising {@link ElasticSearchDAOV6} through the ES transport client.
 * Builds a transport client against the container started by {@code ElasticSearchTest}, wires up
 * the DAO before each test, and wipes all indices afterwards.
 */
abstract class ElasticSearchDaoBaseTest extends ElasticSearchTest {

    protected TransportClient elasticSearchClient;
    protected ElasticSearchDAOV6 indexDAO;

    @Before
    public void setup() throws Exception {
        // Transport protocol port (9300), not the HTTP port.
        int mappedPort = container.getMappedPort(9300);
        properties.setUrl("tcp://localhost:" + mappedPort);

        // The container's cluster name is unknown here, so don't validate it.
        Settings settings =
                Settings.builder().put("client.transport.ignore_cluster_name", true).build();

        elasticSearchClient =
                new PreBuiltTransportClient(settings)
                        .addTransportAddress(
                                new TransportAddress(
                                        InetAddress.getByName("localhost"), mappedPort));
        indexDAO =
                new ElasticSearchDAOV6(
                        elasticSearchClient, new RetryTemplate(), properties, objectMapper);
        // Creates the indices/mappings the DAO expects.
        indexDAO.setup();
    }

    // NOTE(review): despite the name, this stops the shared ES container (the client itself is
    // closed per-test in tearDown below).
    @AfterClass
    public static void closeClient() {
        container.stop();
    }

    @After
    public void tearDown() {
        deleteAllIndices();

        if (elasticSearchClient != null) {
            elasticSearchClient.close();
        }
    }

    // Deletes every index in the cluster so each test starts from a clean slate.
    private void deleteAllIndices() {
        ImmutableOpenMap<String, IndexMetaData> indices =
                elasticSearchClient
                        .admin()
                        .cluster()
                        .prepareState()
                        .get()
                        .getState()
                        .getMetaData()
                        .getIndices();
        indices.forEach(
                cursor -> {
                    try {
                        elasticSearchClient
                                .admin()
                                .indices()
                                .delete(new DeleteIndexRequest(cursor.value.getIndex().getName()))
                                .get();
                    // NOTE(review): wraps InterruptedException without re-interrupting the
                    // thread; acceptable in test teardown, but worth confirming.
                    } catch (InterruptedException | ExecutionException e) {
                        throw new RuntimeException(e);
                    }
                });
    }
}
6,930
0
Create_ds/conductor/es6-persistence/src/test/java/com/netflix/conductor/es6/dao
Create_ds/conductor/es6-persistence/src/test/java/com/netflix/conductor/es6/dao/index/TestElasticSearchRestDAOV6Batch.java
/*
 * Copyright 2022 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.es6.dao.index;

import java.util.Collections;
import java.util.concurrent.TimeUnit;

import org.junit.Test;
import org.springframework.test.context.TestPropertySource;

import com.netflix.conductor.common.metadata.tasks.Task.Status;
import com.netflix.conductor.common.run.SearchResult;
import com.netflix.conductor.common.run.TaskSummary;

import com.fasterxml.jackson.core.JsonProcessingException;

import static org.awaitility.Awaitility.await;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

/**
 * Verifies batched indexing through the REST-client DAO: with
 * {@code conductor.elasticsearch.indexBatchSize=2}, documents are only flushed once two index
 * requests have been buffered.
 */
@TestPropertySource(properties = "conductor.elasticsearch.indexBatchSize=2")
public class TestElasticSearchRestDAOV6Batch extends ElasticSearchRestDaoBaseTest {

    /**
     * Indexes the same task twice so the batch of two is flushed, then polls the search API
     * (up to 5s) until the document becomes visible.
     */
    @Test
    public void indexTaskWithBatchSizeTwo() {
        String correlationId = "some-correlation-id";

        TaskSummary taskSummary = new TaskSummary();
        taskSummary.setTaskId("some-task-id");
        taskSummary.setWorkflowId("some-workflow-instance-id");
        taskSummary.setTaskType("some-task-type");
        taskSummary.setStatus(Status.FAILED);
        try {
            // Collections.singletonMap replaces the previous double-brace HashMap initializer,
            // which created a needless anonymous inner class; the serialized JSON is identical.
            taskSummary.setInput(
                    objectMapper.writeValueAsString(
                            Collections.singletonMap("input_key", "input_value")));
        } catch (JsonProcessingException e) {
            throw new RuntimeException(e);
        }
        taskSummary.setCorrelationId(correlationId);
        taskSummary.setTaskDefName("some-task-def-name");
        taskSummary.setReasonForIncompletion("some-failure-reason");

        // Two index calls -> batch of two -> flush.
        indexDAO.indexTask(taskSummary);
        indexDAO.indexTask(taskSummary);

        await().atMost(5, TimeUnit.SECONDS)
                .untilAsserted(
                        () -> {
                            SearchResult<String> result =
                                    indexDAO.searchTasks(
                                            "correlationId='" + correlationId + "'",
                                            "*",
                                            0,
                                            10000,
                                            null);
                            assertTrue(
                                    "should return 1 or more search results",
                                    result.getResults().size() > 0);
                            assertEquals(
                                    "taskId should match the indexed task",
                                    "some-task-id",
                                    result.getResults().get(0));
                        });
    }
}
6,931
0
Create_ds/conductor/es6-persistence/src/test/java/com/netflix/conductor/es6/dao
Create_ds/conductor/es6-persistence/src/test/java/com/netflix/conductor/es6/dao/index/TestElasticSearchDAOV6Batch.java
/*
 * Copyright 2022 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.es6.dao.index;

import java.util.Collections;
import java.util.concurrent.TimeUnit;

import org.junit.Test;
import org.springframework.test.context.TestPropertySource;

import com.netflix.conductor.common.metadata.tasks.Task.Status;
import com.netflix.conductor.common.run.SearchResult;
import com.netflix.conductor.common.run.TaskSummary;

import com.fasterxml.jackson.core.JsonProcessingException;

import static org.awaitility.Awaitility.await;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

/**
 * Verifies batched indexing through the transport-client DAO: with
 * {@code conductor.elasticsearch.indexBatchSize=2}, documents are only flushed once two index
 * requests have been buffered.
 */
@TestPropertySource(properties = "conductor.elasticsearch.indexBatchSize=2")
public class TestElasticSearchDAOV6Batch extends ElasticSearchDaoBaseTest {

    /**
     * Indexes the same task twice so the batch of two is flushed, then polls the search API
     * (up to 5s) until the document becomes visible.
     */
    @Test
    public void indexTaskWithBatchSizeTwo() {
        String correlationId = "some-correlation-id";

        TaskSummary taskSummary = new TaskSummary();
        taskSummary.setTaskId("some-task-id");
        taskSummary.setWorkflowId("some-workflow-instance-id");
        taskSummary.setTaskType("some-task-type");
        taskSummary.setStatus(Status.FAILED);
        try {
            // Collections.singletonMap replaces the previous double-brace HashMap initializer,
            // which created a needless anonymous inner class; the serialized JSON is identical.
            taskSummary.setInput(
                    objectMapper.writeValueAsString(
                            Collections.singletonMap("input_key", "input_value")));
        } catch (JsonProcessingException e) {
            throw new RuntimeException(e);
        }
        taskSummary.setCorrelationId(correlationId);
        taskSummary.setTaskDefName("some-task-def-name");
        taskSummary.setReasonForIncompletion("some-failure-reason");

        // Two index calls -> batch of two -> flush.
        indexDAO.indexTask(taskSummary);
        indexDAO.indexTask(taskSummary);

        await().atMost(5, TimeUnit.SECONDS)
                .untilAsserted(
                        () -> {
                            SearchResult<String> result =
                                    indexDAO.searchTasks(
                                            "correlationId='" + correlationId + "'",
                                            "*",
                                            0,
                                            10000,
                                            null);
                            assertTrue(
                                    "should return 1 or more search results",
                                    result.getResults().size() > 0);
                            assertEquals(
                                    "taskId should match the indexed task",
                                    "some-task-id",
                                    result.getResults().get(0));
                        });
    }
}
6,932
0
Create_ds/conductor/es6-persistence/src/test/java/com/netflix/conductor/es6/dao
Create_ds/conductor/es6-persistence/src/test/java/com/netflix/conductor/es6/dao/index/ElasticSearchRestDaoBaseTest.java
/*
 * Copyright 2020 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.es6.dao.index;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.Reader;

import org.apache.http.HttpHost;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestClientBuilder;
import org.junit.After;
import org.junit.Before;
import org.springframework.retry.support.RetryTemplate;

/**
 * Base class for REST-client-based DAO tests: builds a low-level REST client pointed at the
 * test container, wires up {@link ElasticSearchRestDAOV6}, and wipes all indices after each
 * test so tests stay independent.
 */
abstract class ElasticSearchRestDaoBaseTest extends ElasticSearchTest {

    protected RestClient restClient;
    protected ElasticSearchRestDAOV6 indexDAO;

    @Before
    public void setup() throws Exception {
        // Container address is "host:port"; split it to build the HttpHost.
        String httpHostAddress = container.getHttpHostAddress();
        String host = httpHostAddress.split(":")[0];
        int port = Integer.parseInt(httpHostAddress.split(":")[1]);

        properties.setUrl("http://" + httpHostAddress);

        RestClientBuilder restClientBuilder = RestClient.builder(new HttpHost(host, port, "http"));
        restClient = restClientBuilder.build();

        indexDAO =
                new ElasticSearchRestDAOV6(
                        restClientBuilder, new RetryTemplate(), properties, objectMapper);
        indexDAO.setup();
    }

    @After
    public void tearDown() throws Exception {
        deleteAllIndices();

        if (restClient != null) {
            restClient.close();
        }
    }

    /**
     * Deletes every index reported by the {@code _cat/indices} API. The index name is the
     * third whitespace-separated column of each response line.
     *
     * @throws IOException if listing or deleting an index fails
     */
    private void deleteAllIndices() throws IOException {
        Response beforeResponse = restClient.performRequest("GET", "/_cat/indices");

        // try-with-resources: the original code never closed these readers (resource leak).
        try (Reader streamReader =
                        new InputStreamReader(beforeResponse.getEntity().getContent());
                BufferedReader bufferedReader = new BufferedReader(streamReader)) {
            String line;
            while ((line = bufferedReader.readLine()) != null) {
                String[] fields = line.split("\\s");
                String endpoint = String.format("/%s", fields[2]);
                restClient.performRequest("DELETE", endpoint);
            }
        }
    }
}
6,933
0
Create_ds/conductor/es6-persistence/src/test/java/com/netflix/conductor/es6/dao
Create_ds/conductor/es6-persistence/src/test/java/com/netflix/conductor/es6/dao/index/TestElasticSearchDAOV6.java
/*
 * Copyright 2022 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.es6.dao.index;

import java.text.SimpleDateFormat;
import java.util.*;
import java.util.concurrent.ExecutionException;
import java.util.function.Supplier;

import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsRequest;
import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest;
import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse;
import org.junit.Test;

import com.netflix.conductor.common.metadata.events.EventExecution;
import com.netflix.conductor.common.metadata.events.EventHandler;
import com.netflix.conductor.common.metadata.tasks.TaskExecLog;
import com.netflix.conductor.common.run.TaskSummary;
import com.netflix.conductor.common.run.Workflow.WorkflowStatus;
import com.netflix.conductor.common.run.WorkflowSummary;
import com.netflix.conductor.core.events.queue.Message;
import com.netflix.conductor.es6.utils.TestUtils;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.google.common.collect.ImmutableMap;

import static org.junit.Assert.*;

/**
 * Integration tests for the transport-client DAO (ElasticSearchDAOV6) against a real
 * Elasticsearch container: index/remove/update of workflows and tasks, execution logs,
 * messages, event executions, counts and searches.
 */
public class TestElasticSearchDAOV6 extends ElasticSearchDaoBaseTest {

    // Weekly date pattern used to suffix the rolling task-log/message/event index names.
    private static final SimpleDateFormat SIMPLE_DATE_FORMAT = new SimpleDateFormat("yyyyMMWW");

    static {
        // Configure the shared formatter exactly once. SimpleDateFormat is mutable and not
        // thread-safe; it was previously mutated inside a test method (assertInitialSetup),
        // which made the computed index names order-dependent across tests.
        SIMPLE_DATE_FORMAT.setTimeZone(TimeZone.getTimeZone("GMT"));
    }

    private static final String INDEX_PREFIX = "conductor";
    private static final String WORKFLOW_DOC_TYPE = "workflow";
    private static final String TASK_DOC_TYPE = "task";
    private static final String MSG_DOC_TYPE = "message";
    private static final String EVENT_DOC_TYPE = "event";
    private static final String LOG_INDEX_PREFIX = "task_log";

    /** All expected indices and mappings must exist after DAO setup. */
    @Test
    public void assertInitialSetup() {
        String workflowIndex = INDEX_PREFIX + "_" + WORKFLOW_DOC_TYPE;
        String taskIndex = INDEX_PREFIX + "_" + TASK_DOC_TYPE;

        String taskLogIndex =
                INDEX_PREFIX
                        + "_"
                        + LOG_INDEX_PREFIX
                        + "_"
                        + SIMPLE_DATE_FORMAT.format(new Date());
        String messageIndex =
                INDEX_PREFIX + "_" + MSG_DOC_TYPE + "_" + SIMPLE_DATE_FORMAT.format(new Date());
        String eventIndex =
                INDEX_PREFIX + "_" + EVENT_DOC_TYPE + "_" + SIMPLE_DATE_FORMAT.format(new Date());

        assertTrue("Index 'conductor_workflow' should exist", indexExists("conductor_workflow"));
        assertTrue("Index 'conductor_task' should exist", indexExists("conductor_task"));

        assertTrue("Index '" + taskLogIndex + "' should exist", indexExists(taskLogIndex));
        assertTrue("Index '" + messageIndex + "' should exist", indexExists(messageIndex));
        assertTrue("Index '" + eventIndex + "' should exist", indexExists(eventIndex));

        assertTrue(
                "Mapping 'workflow' for index 'conductor' should exist",
                doesMappingExist(workflowIndex, WORKFLOW_DOC_TYPE));
        assertTrue(
                "Mapping 'task' for index 'conductor' should exist",
                doesMappingExist(taskIndex, TASK_DOC_TYPE));
    }

    /** True if the given index exists, via the admin indices API. */
    private boolean indexExists(final String index) {
        IndicesExistsRequest request = new IndicesExistsRequest(index);
        try {
            return elasticSearchClient.admin().indices().exists(request).get().isExists();
        } catch (InterruptedException | ExecutionException e) {
            throw new RuntimeException(e);
        }
    }

    /** True if the given index defines a mapping with the given name. */
    private boolean doesMappingExist(final String index, final String mappingName) {
        GetMappingsRequest request = new GetMappingsRequest().indices(index);
        try {
            GetMappingsResponse response =
                    elasticSearchClient.admin().indices().getMappings(request).get();
            return response.getMappings().get(index).containsKey(mappingName);
        } catch (InterruptedException | ExecutionException e) {
            throw new RuntimeException(e);
        }
    }

    @Test
    public void shouldIndexWorkflow() throws JsonProcessingException {
        WorkflowSummary workflow = TestUtils.loadWorkflowSnapshot(objectMapper, "workflow_summary");
        indexDAO.indexWorkflow(workflow);

        assertWorkflowSummary(workflow.getWorkflowId(), workflow);
    }

    @Test
    public void shouldIndexWorkflowAsync() throws Exception {
        WorkflowSummary workflow = TestUtils.loadWorkflowSnapshot(objectMapper, "workflow_summary");
        indexDAO.asyncIndexWorkflow(workflow).get();

        assertWorkflowSummary(workflow.getWorkflowId(), workflow);
    }

    @Test
    public void shouldRemoveWorkflow() {
        WorkflowSummary workflow = TestUtils.loadWorkflowSnapshot(objectMapper, "workflow_summary");
        indexDAO.indexWorkflow(workflow);

        // wait for workflow to be indexed
        List<String> workflows = tryFindResults(() -> searchWorkflows(workflow.getWorkflowId()), 1);
        assertEquals(1, workflows.size());

        indexDAO.removeWorkflow(workflow.getWorkflowId());

        workflows = tryFindResults(() -> searchWorkflows(workflow.getWorkflowId()), 0);

        assertTrue("Workflow was not removed.", workflows.isEmpty());
    }

    @Test
    public void shouldAsyncRemoveWorkflow() throws Exception {
        WorkflowSummary workflow = TestUtils.loadWorkflowSnapshot(objectMapper, "workflow_summary");
        indexDAO.indexWorkflow(workflow);

        // wait for workflow to be indexed
        List<String> workflows = tryFindResults(() -> searchWorkflows(workflow.getWorkflowId()), 1);
        assertEquals(1, workflows.size());

        indexDAO.asyncRemoveWorkflow(workflow.getWorkflowId()).get();

        workflows = tryFindResults(() -> searchWorkflows(workflow.getWorkflowId()), 0);

        assertTrue("Workflow was not removed.", workflows.isEmpty());
    }

    @Test
    public void shouldUpdateWorkflow() throws JsonProcessingException {
        WorkflowSummary workflow = TestUtils.loadWorkflowSnapshot(objectMapper, "workflow_summary");
        indexDAO.indexWorkflow(workflow);

        indexDAO.updateWorkflow(
                workflow.getWorkflowId(),
                new String[] {"status"},
                new Object[] {WorkflowStatus.COMPLETED});

        workflow.setStatus(WorkflowStatus.COMPLETED);
        assertWorkflowSummary(workflow.getWorkflowId(), workflow);
    }

    @Test
    public void shouldAsyncUpdateWorkflow() throws Exception {
        WorkflowSummary workflow = TestUtils.loadWorkflowSnapshot(objectMapper, "workflow_summary");
        indexDAO.indexWorkflow(workflow);

        indexDAO.asyncUpdateWorkflow(
                        workflow.getWorkflowId(),
                        new String[] {"status"},
                        new Object[] {WorkflowStatus.FAILED})
                .get();

        workflow.setStatus(WorkflowStatus.FAILED);
        assertWorkflowSummary(workflow.getWorkflowId(), workflow);
    }

    @Test
    public void shouldIndexTask() {
        TaskSummary taskSummary = TestUtils.loadTaskSnapshot(objectMapper, "task_summary");
        indexDAO.indexTask(taskSummary);

        List<String> tasks = tryFindResults(() -> searchTasks(taskSummary));

        assertEquals(taskSummary.getTaskId(), tasks.get(0));
    }

    @Test
    public void shouldIndexTaskAsync() throws Exception {
        TaskSummary taskSummary = TestUtils.loadTaskSnapshot(objectMapper, "task_summary");
        indexDAO.asyncIndexTask(taskSummary).get();

        List<String> tasks = tryFindResults(() -> searchTasks(taskSummary));

        assertEquals(taskSummary.getTaskId(), tasks.get(0));
    }

    @Test
    public void shouldRemoveTask() {
        WorkflowSummary workflowSummary =
                TestUtils.loadWorkflowSnapshot(objectMapper, "workflow_summary");
        indexDAO.indexWorkflow(workflowSummary);

        // wait for workflow to be indexed
        tryFindResults(() -> searchWorkflows(workflowSummary.getWorkflowId()), 1);

        TaskSummary taskSummary =
                TestUtils.loadTaskSnapshot(
                        objectMapper, "task_summary", workflowSummary.getWorkflowId());
        indexDAO.indexTask(taskSummary);

        // Wait for the task to be indexed
        List<String> tasks = tryFindResults(() -> searchTasks(taskSummary), 1);

        indexDAO.removeTask(workflowSummary.getWorkflowId(), taskSummary.getTaskId());

        tasks = tryFindResults(() -> searchTasks(taskSummary), 0);

        assertTrue("Task was not removed.", tasks.isEmpty());
    }

    @Test
    public void shouldAsyncRemoveTask() throws Exception {
        WorkflowSummary workflowSummary =
                TestUtils.loadWorkflowSnapshot(objectMapper, "workflow_summary");
        indexDAO.indexWorkflow(workflowSummary);

        // wait for workflow to be indexed
        tryFindResults(() -> searchWorkflows(workflowSummary.getWorkflowId()), 1);

        TaskSummary taskSummary =
                TestUtils.loadTaskSnapshot(
                        objectMapper, "task_summary", workflowSummary.getWorkflowId());
        indexDAO.indexTask(taskSummary);

        // Wait for the task to be indexed
        List<String> tasks = tryFindResults(() -> searchTasks(taskSummary), 1);

        indexDAO.asyncRemoveTask(workflowSummary.getWorkflowId(), taskSummary.getTaskId()).get();

        tasks = tryFindResults(() -> searchTasks(taskSummary), 0);

        assertTrue("Task was not removed.", tasks.isEmpty());
    }

    /** Removal must be a no-op when the task does not belong to the given workflow. */
    @Test
    public void shouldNotRemoveTaskWhenNotAssociatedWithWorkflow() {
        TaskSummary taskSummary = TestUtils.loadTaskSnapshot(objectMapper, "task_summary");
        indexDAO.indexTask(taskSummary);

        // Wait for the task to be indexed
        List<String> tasks = tryFindResults(() -> searchTasks(taskSummary), 1);

        indexDAO.removeTask("InvalidWorkflow", taskSummary.getTaskId());

        tasks = tryFindResults(() -> searchTasks(taskSummary), 0);

        assertFalse("Task was removed.", tasks.isEmpty());
    }

    /** Async removal must also be a no-op when the workflow id does not match. */
    @Test
    public void shouldNotAsyncRemoveTaskWhenNotAssociatedWithWorkflow() throws Exception {
        TaskSummary taskSummary = TestUtils.loadTaskSnapshot(objectMapper, "task_summary");
        indexDAO.indexTask(taskSummary);

        // Wait for the task to be indexed
        List<String> tasks = tryFindResults(() -> searchTasks(taskSummary), 1);

        indexDAO.asyncRemoveTask("InvalidWorkflow", taskSummary.getTaskId()).get();

        tasks = tryFindResults(() -> searchTasks(taskSummary), 0);

        assertFalse("Task was removed.", tasks.isEmpty());
    }

    @Test
    public void shouldAddTaskExecutionLogs() {
        List<TaskExecLog> logs = new ArrayList<>();
        String taskId = uuid();
        logs.add(createLog(taskId, "log1"));
        logs.add(createLog(taskId, "log2"));
        logs.add(createLog(taskId, "log3"));

        indexDAO.addTaskExecutionLogs(logs);

        List<TaskExecLog> indexedLogs =
                tryFindResults(() -> indexDAO.getTaskExecutionLogs(taskId), 3);

        assertEquals(3, indexedLogs.size());

        assertTrue("Not all logs was indexed", indexedLogs.containsAll(logs));
    }

    @Test
    public void shouldAddTaskExecutionLogsAsync() throws Exception {
        List<TaskExecLog> logs = new ArrayList<>();
        String taskId = uuid();
        logs.add(createLog(taskId, "log1"));
        logs.add(createLog(taskId, "log2"));
        logs.add(createLog(taskId, "log3"));

        indexDAO.asyncAddTaskExecutionLogs(logs).get();

        List<TaskExecLog> indexedLogs =
                tryFindResults(() -> indexDAO.getTaskExecutionLogs(taskId), 3);

        assertEquals(3, indexedLogs.size());

        assertTrue("Not all logs was indexed", indexedLogs.containsAll(logs));
    }

    @Test
    public void shouldAddMessage() {
        String queue = "queue";
        Message message1 = new Message(uuid(), "payload1", null);
        Message message2 = new Message(uuid(), "payload2", null);

        indexDAO.addMessage(queue, message1);
        indexDAO.addMessage(queue, message2);

        List<Message> indexedMessages = tryFindResults(() -> indexDAO.getMessages(queue), 2);

        assertEquals(2, indexedMessages.size());

        assertTrue(
                "Not all messages was indexed",
                indexedMessages.containsAll(Arrays.asList(message1, message2)));
    }

    @Test
    public void shouldAddEventExecution() {
        String event = "event";
        EventExecution execution1 = createEventExecution(event);
        EventExecution execution2 = createEventExecution(event);

        indexDAO.addEventExecution(execution1);
        indexDAO.addEventExecution(execution2);

        List<EventExecution> indexedExecutions =
                tryFindResults(() -> indexDAO.getEventExecutions(event), 2);

        assertEquals(2, indexedExecutions.size());

        assertTrue(
                "Not all event executions was indexed",
                indexedExecutions.containsAll(Arrays.asList(execution1, execution2)));
    }

    @Test
    public void shouldAsyncAddEventExecution() throws Exception {
        String event = "event2";
        EventExecution execution1 = createEventExecution(event);
        EventExecution execution2 = createEventExecution(event);

        indexDAO.asyncAddEventExecution(execution1).get();
        indexDAO.asyncAddEventExecution(execution2).get();

        List<EventExecution> indexedExecutions =
                tryFindResults(() -> indexDAO.getEventExecutions(event), 2);

        assertEquals(2, indexedExecutions.size());

        assertTrue(
                "Not all event executions was indexed",
                indexedExecutions.containsAll(Arrays.asList(execution1, execution2)));
    }

    /** The template loaded from the classpath must match the expected JSON resource. */
    @Test
    public void shouldAddIndexPrefixToIndexTemplate() throws Exception {
        String json = TestUtils.loadJsonResource("expected_template_task_log");

        String content = indexDAO.loadTypeMappingSource("/template_task_log.json");

        assertEquals(json, content);
    }

    /** Counts above 10k verify the DAO uses a count API rather than capped search hits. */
    @Test
    public void shouldCountWorkflows() {
        int counts = 1100;
        for (int i = 0; i < counts; i++) {
            WorkflowSummary workflow =
                    TestUtils.loadWorkflowSnapshot(objectMapper, "workflow_summary");
            indexDAO.indexWorkflow(workflow);
        }

        // wait for workflow to be indexed
        long result = tryGetCount(() -> getWorkflowCount("template_workflow", "RUNNING"), counts);
        assertEquals(counts, result);
    }

    @Test
    public void shouldFindWorkflow() {
        WorkflowSummary workflowSummary =
                TestUtils.loadWorkflowSnapshot(objectMapper, "workflow_summary");
        indexDAO.indexWorkflow(workflowSummary);

        // wait for workflow to be indexed
        List<WorkflowSummary> workflows =
                tryFindResults(() -> searchWorkflowSummary(workflowSummary.getWorkflowId()), 1);

        assertEquals(1, workflows.size());
        assertEquals(workflowSummary, workflows.get(0));
    }

    @Test
    public void shouldFindTask() {
        TaskSummary taskSummary = TestUtils.loadTaskSnapshot(objectMapper, "task_summary");
        indexDAO.indexTask(taskSummary);

        List<TaskSummary> tasks = tryFindResults(() -> searchTaskSummary(taskSummary));

        assertEquals(1, tasks.size());
        assertEquals(taskSummary, tasks.get(0));
    }

    /** Polls the count supplier up to 20 times (100 ms apart) for the expected value. */
    private long tryGetCount(Supplier<Long> countFunction, int resultsCount) {
        long result = 0;
        for (int i = 0; i < 20; i++) {
            result = countFunction.get();
            if (result == resultsCount) {
                return result;
            }
            try {
                Thread.sleep(100);
            } catch (InterruptedException e) {
                throw new RuntimeException(e.getMessage(), e);
            }
        }
        return result;
    }

    // Get total workflow counts given the name and status
    private long getWorkflowCount(String workflowName, String status) {
        return indexDAO.getWorkflowCount(
                "status=\"" + status + "\" AND workflowType=\"" + workflowName + "\"", "*");
    }

    /** Field-by-field comparison of the indexed document against the expected summary. */
    private void assertWorkflowSummary(String workflowId, WorkflowSummary summary)
            throws JsonProcessingException {
        assertEquals(summary.getWorkflowType(), indexDAO.get(workflowId, "workflowType"));
        assertEquals(String.valueOf(summary.getVersion()), indexDAO.get(workflowId, "version"));
        assertEquals(summary.getWorkflowId(), indexDAO.get(workflowId, "workflowId"));
        assertEquals(summary.getCorrelationId(), indexDAO.get(workflowId, "correlationId"));
        assertEquals(summary.getStartTime(), indexDAO.get(workflowId, "startTime"));
        assertEquals(summary.getUpdateTime(), indexDAO.get(workflowId, "updateTime"));
        assertEquals(summary.getEndTime(), indexDAO.get(workflowId, "endTime"));
        assertEquals(summary.getStatus().name(), indexDAO.get(workflowId, "status"));
        assertEquals(summary.getInput(), indexDAO.get(workflowId, "input"));
        assertEquals(summary.getOutput(), indexDAO.get(workflowId, "output"));
        assertEquals(
                summary.getReasonForIncompletion(),
                indexDAO.get(workflowId, "reasonForIncompletion"));
        assertEquals(
                String.valueOf(summary.getExecutionTime()),
                indexDAO.get(workflowId, "executionTime"));
        assertEquals(summary.getEvent(), indexDAO.get(workflowId, "event"));
        assertEquals(
                summary.getFailedReferenceTaskNames(),
                indexDAO.get(workflowId, "failedReferenceTaskNames"));
        assertEquals(
                summary.getFailedTaskNames(),
                objectMapper.readValue(indexDAO.get(workflowId, "failedTaskNames"), Set.class));
    }

    private <T> List<T> tryFindResults(Supplier<List<T>> searchFunction) {
        return tryFindResults(searchFunction, 1);
    }

    /** Polls the search supplier up to 20 times (100 ms apart) for the expected size. */
    private <T> List<T> tryFindResults(Supplier<List<T>> searchFunction, int resultsCount) {
        List<T> result = Collections.emptyList();
        for (int i = 0; i < 20; i++) {
            result = searchFunction.get();
            if (result.size() == resultsCount) {
                return result;
            }
            try {
                Thread.sleep(100);
            } catch (InterruptedException e) {
                throw new RuntimeException(e.getMessage(), e);
            }
        }
        return result;
    }

    private List<String> searchWorkflows(String workflowId) {
        return indexDAO.searchWorkflows(
                        "", "workflowId:\"" + workflowId + "\"", 0, 100, Collections.emptyList())
                .getResults();
    }

    private List<WorkflowSummary> searchWorkflowSummary(String workflowId) {
        return indexDAO.searchWorkflowSummary(
                        "", "workflowId:\"" + workflowId + "\"", 0, 100, Collections.emptyList())
                .getResults();
    }

    private List<String> searchTasks(TaskSummary taskSummary) {
        return indexDAO.searchTasks(
                        "",
                        "workflowId:\"" + taskSummary.getWorkflowId() + "\"",
                        0,
                        100,
                        Collections.emptyList())
                .getResults();
    }

    private List<TaskSummary> searchTaskSummary(TaskSummary taskSummary) {
        return indexDAO.searchTaskSummary(
                        "",
                        "workflowId:\"" + taskSummary.getWorkflowId() + "\"",
                        0,
                        100,
                        Collections.emptyList())
                .getResults();
    }

    private TaskExecLog createLog(String taskId, String log) {
        TaskExecLog taskExecLog = new TaskExecLog(log);
        taskExecLog.setTaskId(taskId);
        return taskExecLog;
    }

    private EventExecution createEventExecution(String event) {
        EventExecution execution = new EventExecution(uuid(), uuid());
        execution.setName("name");
        execution.setEvent(event);
        execution.setCreated(System.currentTimeMillis());
        execution.setStatus(EventExecution.Status.COMPLETED);
        execution.setAction(EventHandler.Action.Type.start_workflow);
        execution.setOutput(ImmutableMap.of("a", 1, "b", 2, "c", 3));
        return execution;
    }

    private String uuid() {
        return UUID.randomUUID().toString();
    }
}
6,934
0
Create_ds/conductor/es6-persistence/src/test/java/com/netflix/conductor/es6/dao
Create_ds/conductor/es6-persistence/src/test/java/com/netflix/conductor/es6/dao/index/ElasticSearchTest.java
/*
 * Copyright 2021 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.es6.dao.index;

import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.TestPropertySource;
import org.springframework.test.context.junit4.SpringRunner;
import org.testcontainers.elasticsearch.ElasticsearchContainer;
import org.testcontainers.utility.DockerImageName;

import com.netflix.conductor.common.config.TestObjectMapperConfiguration;
import com.netflix.conductor.es6.config.ElasticSearchProperties;

import com.fasterxml.jackson.databind.ObjectMapper;

/**
 * Root base class for ES6 integration tests: boots a Spring test context with the shared
 * ObjectMapper configuration and fresh {@link ElasticSearchProperties}, and manages a single
 * static Elasticsearch 6 testcontainer shared by all tests in a subclass.
 */
@ContextConfiguration(
        classes = {TestObjectMapperConfiguration.class, ElasticSearchTest.TestConfiguration.class})
@RunWith(SpringRunner.class)
@TestPropertySource(
        properties = {"conductor.indexing.enabled=true", "conductor.elasticsearch.version=6"})
abstract class ElasticSearchTest {

    /** Minimal test configuration supplying default (empty) Elasticsearch properties. */
    @Configuration
    static class TestConfiguration {

        @Bean
        public ElasticSearchProperties elasticSearchProperties() {
            return new ElasticSearchProperties();
        }
    }

    // Static so one container instance is shared across all tests of a subclass.
    protected static final ElasticsearchContainer container =
            new ElasticsearchContainer(
                    DockerImageName.parse("docker.elastic.co/elasticsearch/elasticsearch-oss")
                            .withTag("6.8.12")); // this should match the client version

    @Autowired protected ObjectMapper objectMapper;

    @Autowired protected ElasticSearchProperties properties;

    @BeforeClass
    public static void startServer() {
        container.start();
    }

    @AfterClass
    public static void stopServer() {
        container.stop();
    }
}
6,935
0
Create_ds/conductor/es6-persistence/src/test/java/com/netflix/conductor/es6/dao/query
Create_ds/conductor/es6-persistence/src/test/java/com/netflix/conductor/es6/dao/query/parser/TestExpression.java
/*
 * Copyright 2020 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.es6.dao.query.parser;

import java.io.BufferedInputStream;
import java.io.ByteArrayInputStream;
import java.io.InputStream;

import org.junit.Test;

import com.netflix.conductor.es6.dao.query.parser.internal.ConstValue;
import com.netflix.conductor.es6.dao.query.parser.internal.TestAbstractParser;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;

/**
 * Tests the query-expression parser by walking the parse tree it produces. An Expression is
 * either a single name/op/value triple or a binary expression whose right-hand side is again
 * an Expression; parenthesized groups appear as GroupedExpression nodes.
 */
public class TestExpression extends TestAbstractParser {

    /** Parses a three-term query with a parenthesized OR group and walks the full tree. */
    @Test
    public void test() throws Exception {
        String test =
                "type='IMAGE' AND subType  ='sdp' AND (metadata.width > 50 OR metadata.height > 50)";
        InputStream inputStream = new BufferedInputStream(new ByteArrayInputStream(test.getBytes()));
        Expression expression = new Expression(inputStream);

        // Root: "type='IMAGE' AND <rest>". Note string values come back double-quoted.
        assertTrue(expression.isBinaryExpr());
        assertNull(expression.getGroupedExpression());
        assertNotNull(expression.getNameValue());

        NameValue nameValue = expression.getNameValue();
        assertEquals("type", nameValue.getName().getName());
        assertEquals("=", nameValue.getOp().getOperator());
        assertEquals("\"IMAGE\"", nameValue.getValue().getValue());

        // Second level: "subType='sdp' AND <group>".
        Expression rightHandSide = expression.getRightHandSide();
        assertNotNull(rightHandSide);
        assertTrue(rightHandSide.isBinaryExpr());

        nameValue = rightHandSide.getNameValue();
        assertNotNull(nameValue); // subType = sdp
        assertNull(rightHandSide.getGroupedExpression());
        assertEquals("subType", nameValue.getName().getName());
        assertEquals("=", nameValue.getOp().getOperator());
        assertEquals("\"sdp\"", nameValue.getValue().getValue());
        assertEquals("AND", rightHandSide.getOperator().getOperator());

        // Third level: the parenthesized group — not binary at this level.
        rightHandSide = rightHandSide.getRightHandSide();
        assertNotNull(rightHandSide);
        assertFalse(rightHandSide.isBinaryExpr());

        GroupedExpression groupedExpression = rightHandSide.getGroupedExpression();
        assertNotNull(groupedExpression);

        // Inside the group: "metadata.width > 50 OR metadata.height > 50".
        expression = groupedExpression.getExpression();
        assertNotNull(expression);
        assertTrue(expression.isBinaryExpr());

        nameValue = expression.getNameValue();
        assertNotNull(nameValue);
        assertEquals("metadata.width", nameValue.getName().getName());
        assertEquals(">", nameValue.getOp().getOperator());
        assertEquals("50", nameValue.getValue().getValue());
        assertEquals("OR", expression.getOperator().getOperator());

        // Right side of the OR.
        rightHandSide = expression.getRightHandSide();
        assertNotNull(rightHandSide);
        assertFalse(rightHandSide.isBinaryExpr());

        nameValue = rightHandSide.getNameValue();
        assertNotNull(nameValue);
        assertEquals("metadata.height", nameValue.getName().getName());
        assertEquals(">", nameValue.getOp().getOperator());
        assertEquals("50", nameValue.getValue().getValue());
    }

    /** Same walk, but the last term uses the IS null / IS not null system constants. */
    @Test
    public void testWithSysConstants() throws Exception {
        String test = "type='IMAGE' AND subType  ='sdp' AND description IS null";
        InputStream inputStream = new BufferedInputStream(new ByteArrayInputStream(test.getBytes()));
        Expression expression = new Expression(inputStream);

        // Root: "type='IMAGE' AND <rest>".
        assertTrue(expression.isBinaryExpr());
        assertNull(expression.getGroupedExpression());
        assertNotNull(expression.getNameValue());

        NameValue nameValue = expression.getNameValue();
        assertEquals("type", nameValue.getName().getName());
        assertEquals("=", nameValue.getOp().getOperator());
        assertEquals("\"IMAGE\"", nameValue.getValue().getValue());

        // Second level: "subType='sdp' AND <rest>".
        Expression rightHandSide = expression.getRightHandSide();
        assertNotNull(rightHandSide);
        assertTrue(rightHandSide.isBinaryExpr());

        nameValue = rightHandSide.getNameValue();
        assertNotNull(nameValue); // subType = sdp
        assertNull(rightHandSide.getGroupedExpression());
        assertEquals("subType", nameValue.getName().getName());
        assertEquals("=", nameValue.getOp().getOperator());
        assertEquals("\"sdp\"", nameValue.getValue().getValue());
        assertEquals("AND", rightHandSide.getOperator().getOperator());

        // Third level: "description IS null" — a plain NameValue with a system constant.
        rightHandSide = rightHandSide.getRightHandSide();
        assertNotNull(rightHandSide);
        assertFalse(rightHandSide.isBinaryExpr());

        GroupedExpression groupedExpression = rightHandSide.getGroupedExpression();
        assertNull(groupedExpression);

        nameValue = rightHandSide.getNameValue();
        assertNotNull(nameValue);
        assertEquals("description", nameValue.getName().getName());
        assertEquals("IS", nameValue.getOp().getOperator());

        ConstValue constValue = nameValue.getValue();
        assertNotNull(constValue);
        assertEquals(constValue.getSysConstant(), ConstValue.SystemConsts.NULL);

        // "IS not null" maps to the NOT_NULL system constant.
        test = "description IS not null";
        inputStream = new BufferedInputStream(new ByteArrayInputStream(test.getBytes()));
        expression = new Expression(inputStream);

        nameValue = expression.getNameValue();
        assertNotNull(nameValue);
        assertEquals("description", nameValue.getName().getName());
        assertEquals("IS", nameValue.getOp().getOperator());

        constValue = nameValue.getValue();
        assertNotNull(constValue);
        assertEquals(constValue.getSysConstant(), ConstValue.SystemConsts.NOT_NULL);
    }
}
6,936
0
Create_ds/conductor/es6-persistence/src/test/java/com/netflix/conductor/es6/dao/query/parser
Create_ds/conductor/es6-persistence/src/test/java/com/netflix/conductor/es6/dao/query/parser/internal/TestComparisonOp.java
/* * Copyright 2020 Netflix, Inc. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.es6.dao.query.parser.internal; import org.junit.Test; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; public class TestComparisonOp extends TestAbstractParser { @Test public void test() throws Exception { String[] tests = new String[] {"<", ">", "=", "!=", "IN", "BETWEEN", "STARTS_WITH"}; for (String test : tests) { ComparisonOp name = new ComparisonOp(getInputStream(test)); String nameVal = name.getOperator(); assertNotNull(nameVal); assertEquals(test, nameVal); } } @Test(expected = ParserException.class) public void testInvalidOp() throws Exception { String test = "AND"; ComparisonOp name = new ComparisonOp(getInputStream(test)); String nameVal = name.getOperator(); assertNotNull(nameVal); assertEquals(test, nameVal); } }
6,937
0
Create_ds/conductor/es6-persistence/src/test/java/com/netflix/conductor/es6/dao/query/parser
Create_ds/conductor/es6-persistence/src/test/java/com/netflix/conductor/es6/dao/query/parser/internal/TestConstValue.java
/*
 * Copyright 2020 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.es6.dao.query.parser.internal;

import java.util.List;

import org.junit.Test;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;

/**
 * Tests parsing of constant values in the query grammar: quoted strings, system constants
 * (null / not null), numbers, BETWEEN ranges, and IN lists.
 */
public class TestConstValue extends TestAbstractParser {

    /** Single- and double-quoted strings both parse to a double-quoted string value. */
    @Test
    public void testStringConst() throws Exception {
        String test = "'string value'";
        String expected =
                test.replaceAll(
                        "'", "\""); // Quotes are removed but then the result is double quoted.
        ConstValue constValue = new ConstValue(getInputStream(test));
        assertNotNull(constValue.getValue());
        assertEquals(expected, constValue.getValue());
        assertTrue(constValue.getValue() instanceof String);

        // Double-quoted input yields the same normalized value.
        test = "\"string value\"";
        constValue = new ConstValue(getInputStream(test));
        assertNotNull(constValue.getValue());
        assertEquals(expected, constValue.getValue());
        assertTrue(constValue.getValue() instanceof String);
    }

    /** "null" and "not null" map to the NULL / NOT_NULL system constants. */
    @Test
    public void testSystemConst() throws Exception {
        String test = "null";
        ConstValue constValue = new ConstValue(getInputStream(test));
        assertNotNull(constValue.getValue());
        assertTrue(constValue.getValue() instanceof String);
        assertEquals(constValue.getSysConstant(), ConstValue.SystemConsts.NULL);
        test = "not null";
        constValue = new ConstValue(getInputStream(test));
        assertNotNull(constValue.getValue());
        assertEquals(constValue.getSysConstant(), ConstValue.SystemConsts.NOT_NULL);
    }

    /** An unterminated quoted string must be rejected. */
    @Test(expected = ParserException.class)
    public void testInvalid() throws Exception {
        String test = "'string value";
        new ConstValue(getInputStream(test));
    }

    @Test
    public void testNumConst() throws Exception {
        String test = "12345.89";
        ConstValue cv = new ConstValue(getInputStream(test));
        assertNotNull(cv.getValue());
        assertTrue(
                cv.getValue()
                        instanceof
                        String); // Numeric values are stored as string as we are just passing thru
        // them to ES
        assertEquals(test, cv.getValue());
    }

    /** "low AND high" parses into a Range with both bounds as strings. */
    @Test
    public void testRange() throws Exception {
        String test = "50 AND 100";
        Range range = new Range(getInputStream(test));
        assertEquals("50", range.getLow());
        assertEquals("100", range.getHigh());
    }

    /** A range missing its upper bound must be rejected. */
    @Test(expected = ParserException.class)
    public void testBadRange() throws Exception {
        String test = "50 AND";
        new Range(getInputStream(test));
    }

    /** A parenthesized list keeps its elements verbatim, including quotes. */
    @Test
    public void testArray() throws Exception {
        String test = "(1, 3, 'name', 'value2')";
        ListConst listConst = new ListConst(getInputStream(test));
        List<Object> list = listConst.getList();
        assertEquals(4, list.size());
        assertTrue(list.contains("1"));
        assertEquals("'value2'", list.get(3)); // Values are preserved as it is...
    }
}
6,938
0
Create_ds/conductor/es6-persistence/src/test/java/com/netflix/conductor/es6/dao/query/parser
Create_ds/conductor/es6-persistence/src/test/java/com/netflix/conductor/es6/dao/query/parser/internal/TestName.java
/* * Copyright 2020 Netflix, Inc. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.es6.dao.query.parser.internal; import org.junit.Test; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; public class TestName extends TestAbstractParser { @Test public void test() throws Exception { String test = "metadata.en_US.lang "; Name name = new Name(getInputStream(test)); String nameVal = name.getName(); assertNotNull(nameVal); assertEquals(test.trim(), nameVal); } }
6,939
0
Create_ds/conductor/es6-persistence/src/test/java/com/netflix/conductor/es6/dao/query/parser
Create_ds/conductor/es6-persistence/src/test/java/com/netflix/conductor/es6/dao/query/parser/internal/TestBooleanOp.java
/* * Copyright 2020 Netflix, Inc. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.es6.dao.query.parser.internal; import org.junit.Test; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; public class TestBooleanOp extends TestAbstractParser { @Test public void test() throws Exception { String[] tests = new String[] {"AND", "OR"}; for (String test : tests) { BooleanOp name = new BooleanOp(getInputStream(test)); String nameVal = name.getOperator(); assertNotNull(nameVal); assertEquals(test, nameVal); } } @Test(expected = ParserException.class) public void testInvalid() throws Exception { String test = "<"; BooleanOp name = new BooleanOp(getInputStream(test)); String nameVal = name.getOperator(); assertNotNull(nameVal); assertEquals(test, nameVal); } }
6,940
0
Create_ds/conductor/es6-persistence/src/test/java/com/netflix/conductor/es6/dao/query/parser
Create_ds/conductor/es6-persistence/src/test/java/com/netflix/conductor/es6/dao/query/parser/internal/TestAbstractParser.java
/*
 * Copyright 2020 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.es6.dao.query.parser.internal;

import java.io.BufferedInputStream;
import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;

/**
 * Base class for query-parser unit tests. Provides a helper that wraps a query expression in
 * the {@link InputStream} form the parser classes consume.
 */
public abstract class TestAbstractParser {

    /**
     * Returns a buffered stream over the UTF-8 bytes of the given expression.
     *
     * @param expression query expression to parse
     * @return stream positioned at the start of the expression
     */
    protected InputStream getInputStream(String expression) {
        // Use an explicit charset: the no-arg String.getBytes() depends on the platform
        // default encoding (pre-Java 18), which would make these tests environment-dependent.
        return new BufferedInputStream(
                new ByteArrayInputStream(expression.getBytes(StandardCharsets.UTF_8)));
    }
}
6,941
0
Create_ds/conductor/es6-persistence/src/test/java/com/netflix/conductor/es6
Create_ds/conductor/es6-persistence/src/test/java/com/netflix/conductor/es6/utils/TestUtils.java
/*
 * Copyright 2020 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.es6.utils;

import java.nio.charset.StandardCharsets;

import org.apache.commons.io.FileUtils;
import org.springframework.util.ResourceUtils;

import com.netflix.conductor.common.run.TaskSummary;
import com.netflix.conductor.common.run.WorkflowSummary;
import com.netflix.conductor.core.utils.IDGenerator;

import com.fasterxml.jackson.databind.ObjectMapper;

/**
 * Test helpers for loading JSON snapshots of workflow and task summaries from the classpath.
 *
 * <p>Snapshot files may contain the literal placeholder {@code WORKFLOW_INSTANCE_ID}, which is
 * substituted with a concrete workflow id before deserialization.
 */
public class TestUtils {

    private static final String WORKFLOW_INSTANCE_ID_PLACEHOLDER = "WORKFLOW_INSTANCE_ID";

    /** Static utility class - not instantiable. */
    private TestUtils() {}

    /**
     * Loads a {@link WorkflowSummary} snapshot, substituting a freshly generated workflow id
     * for the placeholder.
     *
     * @param objectMapper mapper used to deserialize the JSON snapshot
     * @param resourceFileName classpath resource name without the {@code .json} extension
     * @throws RuntimeException wrapping any load or deserialization failure
     */
    public static WorkflowSummary loadWorkflowSnapshot(
            ObjectMapper objectMapper, String resourceFileName) {
        try {
            String content =
                    loadJsonResource(resourceFileName)
                            .replace(
                                    WORKFLOW_INSTANCE_ID_PLACEHOLDER,
                                    new IDGenerator().generate());
            return objectMapper.readValue(content, WorkflowSummary.class);
        } catch (Exception e) {
            throw new RuntimeException(e.getMessage(), e);
        }
    }

    /**
     * Loads a {@link TaskSummary} snapshot, substituting a freshly generated workflow id for
     * the placeholder.
     */
    public static TaskSummary loadTaskSnapshot(ObjectMapper objectMapper, String resourceFileName) {
        // Delegate to the explicit-id overload to avoid duplicating the load/replace logic.
        return loadTaskSnapshot(objectMapper, resourceFileName, new IDGenerator().generate());
    }

    /**
     * Loads a {@link TaskSummary} snapshot, substituting the supplied workflow id for the
     * placeholder.
     */
    public static TaskSummary loadTaskSnapshot(
            ObjectMapper objectMapper, String resourceFileName, String workflowId) {
        try {
            String content =
                    loadJsonResource(resourceFileName)
                            .replace(WORKFLOW_INSTANCE_ID_PLACEHOLDER, workflowId);
            return objectMapper.readValue(content, TaskSummary.class);
        } catch (Exception e) {
            throw new RuntimeException(e.getMessage(), e);
        }
    }

    /**
     * Reads {@code <resourceFileName>.json} from the classpath as a UTF-8 string.
     *
     * @throws RuntimeException wrapping any resolution or read failure
     */
    public static String loadJsonResource(String resourceFileName) {
        try {
            return FileUtils.readFileToString(
                    ResourceUtils.getFile("classpath:" + resourceFileName + ".json"),
                    StandardCharsets.UTF_8);
        } catch (Exception e) {
            throw new RuntimeException(e.getMessage(), e);
        }
    }
}
6,942
0
Create_ds/conductor/es6-persistence/src/main/java/com/netflix/conductor/es6
Create_ds/conductor/es6-persistence/src/main/java/com/netflix/conductor/es6/config/ElasticSearchConditions.java
/*
 * Copyright 2022 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.es6.config;

import org.springframework.boot.autoconfigure.condition.AllNestedConditions;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;

/** Spring {@code @Conditional} helpers for the ElasticSearch 6 persistence module. */
public class ElasticSearchConditions {

    // Utility holder - not meant to be instantiated.
    private ElasticSearchConditions() {}

    /**
     * Matches only when BOTH nested conditions hold: indexing is enabled AND the configured
     * ElasticSearch version is 6. Both properties default to matching when absent, so this
     * module is active out of the box.
     */
    public static class ElasticSearchV6Enabled extends AllNestedConditions {

        ElasticSearchV6Enabled() {
            // Evaluate while @Configuration classes are being parsed, before bean registration.
            super(ConfigurationPhase.PARSE_CONFIGURATION);
        }

        // Indexing toggle; absent property counts as enabled.
        @SuppressWarnings("unused")
        @ConditionalOnProperty(
                name = "conductor.indexing.enabled",
                havingValue = "true",
                matchIfMissing = true)
        static class enabledIndexing {}

        // ES version selector; absent property defaults to version 6.
        @SuppressWarnings("unused")
        @ConditionalOnProperty(
                name = "conductor.elasticsearch.version",
                havingValue = "6",
                matchIfMissing = true)
        static class enabledES6 {}
    }
}
6,943
0
Create_ds/conductor/es6-persistence/src/main/java/com/netflix/conductor/es6
Create_ds/conductor/es6-persistence/src/main/java/com/netflix/conductor/es6/config/ElasticSearchV6Configuration.java
/*
 * Copyright 2022 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.es6.config;

import java.net.InetAddress;
import java.net.URI;
import java.net.URL;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

import org.apache.http.HttpHost;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.client.CredentialsProvider;
import org.apache.http.impl.client.BasicCredentialsProvider;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestClientBuilder;
import org.elasticsearch.client.transport.TransportClient;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.transport.client.PreBuiltTransportClient;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Conditional;
import org.springframework.context.annotation.Configuration;
import org.springframework.retry.backoff.FixedBackOffPolicy;
import org.springframework.retry.support.RetryTemplate;

import com.netflix.conductor.dao.IndexDAO;
import com.netflix.conductor.es6.dao.index.ElasticSearchDAOV6;
import com.netflix.conductor.es6.dao.index.ElasticSearchRestDAOV6;

import com.fasterxml.jackson.databind.ObjectMapper;

/**
 * Wires the ElasticSearch 6 {@link IndexDAO} implementations. Depending on the configured URL
 * scheme, either the native TCP transport client ({@link IsTcpProtocol}) or the HTTP REST
 * client ({@link IsHttpProtocol}) is used.
 */
@Configuration(proxyBeanMethods = false)
@EnableConfigurationProperties(ElasticSearchProperties.class)
@Conditional(ElasticSearchConditions.ElasticSearchV6Enabled.class)
public class ElasticSearchV6Configuration {

    private static final Logger log = LoggerFactory.getLogger(ElasticSearchV6Configuration.class);

    // Port used when a cluster address omits one.
    // NOTE(review): the ES6 native transport conventionally listens on 9300, not 9200;
    // 9200 is kept here because it was the default written in the original code - confirm.
    private static final int DEFAULT_PORT = 9200;

    /** Native TCP transport client; only created when the cluster URL uses the tcp scheme. */
    @Bean
    @Conditional(IsTcpProtocol.class)
    public Client client(ElasticSearchProperties properties) {
        Settings settings =
                Settings.builder()
                        .put("client.transport.ignore_cluster_name", true)
                        .put("client.transport.sniff", true)
                        .build();

        TransportClient transportClient = new PreBuiltTransportClient(settings);

        List<URI> clusterAddresses = getURIs(properties);
        if (clusterAddresses.isEmpty()) {
            // NOTE(review): message references the legacy property name; the current one is
            // conductor.elasticsearch.url - kept verbatim to avoid breaking log scrapers.
            log.warn("workflow.elasticsearch.url is not set.  Indexing will remain DISABLED.");
        }
        for (URI hostAddress : clusterAddresses) {
            // URI.getPort() returns the primitive -1 (never null) when no port is present,
            // so the former Optional.ofNullable(...).orElse(...) default could never apply
            // and -1 leaked into the TransportAddress. Handle the -1 sentinel explicitly.
            int port = hostAddress.getPort() == -1 ? DEFAULT_PORT : hostAddress.getPort();
            try {
                transportClient.addTransportAddress(
                        new TransportAddress(InetAddress.getByName(hostAddress.getHost()), port));
            } catch (Exception e) {
                throw new RuntimeException("Invalid host: " + hostAddress.getHost(), e);
            }
        }
        return transportClient;
    }

    /** Low-level REST client; only created when the cluster URL uses an http(s) scheme. */
    @Bean
    @Conditional(IsHttpProtocol.class)
    public RestClient restClient(ElasticSearchProperties properties) {
        RestClientBuilder restClientBuilder =
                RestClient.builder(convertToHttpHosts(properties.toURLs()));
        if (properties.getRestClientConnectionRequestTimeout() > 0) {
            restClientBuilder.setRequestConfigCallback(
                    requestConfigBuilder ->
                            requestConfigBuilder.setConnectionRequestTimeout(
                                    properties.getRestClientConnectionRequestTimeout()));
        }
        return restClientBuilder.build();
    }

    /**
     * REST client builder used by the high-level client; configures BASIC auth when both
     * username and password are set.
     */
    @Bean
    @Conditional(IsHttpProtocol.class)
    public RestClientBuilder restClientBuilder(ElasticSearchProperties properties) {
        RestClientBuilder builder = RestClient.builder(convertToHttpHosts(properties.toURLs()));

        if (properties.getUsername() != null && properties.getPassword() != null) {
            log.info(
                    "Configure ElasticSearch with BASIC authentication. User:{}",
                    properties.getUsername());
            final CredentialsProvider credentialsProvider = new BasicCredentialsProvider();
            credentialsProvider.setCredentials(
                    AuthScope.ANY,
                    new UsernamePasswordCredentials(
                            properties.getUsername(), properties.getPassword()));
            builder.setHttpClientConfigCallback(
                    httpClientBuilder ->
                            httpClientBuilder.setDefaultCredentialsProvider(credentialsProvider));
        } else {
            log.info("Configure ElasticSearch with no authentication.");
        }
        return builder;
    }

    /** REST-based index DAO (http/https URLs). */
    @Bean
    @Conditional(IsHttpProtocol.class)
    public IndexDAO es6IndexRestDAO(
            RestClientBuilder restClientBuilder,
            ElasticSearchProperties properties,
            @Qualifier("es6RetryTemplate") RetryTemplate retryTemplate,
            ObjectMapper objectMapper) {
        return new ElasticSearchRestDAOV6(
                restClientBuilder, retryTemplate, properties, objectMapper);
    }

    /** Transport-based index DAO (tcp URLs). */
    @Bean
    @Conditional(IsTcpProtocol.class)
    public IndexDAO es6IndexDAO(
            Client client,
            @Qualifier("es6RetryTemplate") RetryTemplate retryTemplate,
            ElasticSearchProperties properties,
            ObjectMapper objectMapper) {
        return new ElasticSearchDAOV6(client, retryTemplate, properties, objectMapper);
    }

    /** Retry template with a fixed 1s back-off, shared by both DAO variants. */
    @Bean
    public RetryTemplate es6RetryTemplate() {
        RetryTemplate retryTemplate = new RetryTemplate();
        FixedBackOffPolicy fixedBackOffPolicy = new FixedBackOffPolicy();
        fixedBackOffPolicy.setBackOffPeriod(1000L);
        retryTemplate.setBackOffPolicy(fixedBackOffPolicy);
        return retryTemplate;
    }

    private HttpHost[] convertToHttpHosts(List<URL> hosts) {
        return hosts.stream()
                .map(host -> new HttpHost(host.getHost(), host.getPort(), host.getProtocol()))
                .toArray(HttpHost[]::new);
    }

    /**
     * Parses the comma-separated cluster address list into URIs, defaulting scheme-less
     * entries to {@code tcp://}.
     */
    public List<URI> getURIs(ElasticSearchProperties properties) {
        String clusterAddress = properties.getUrl();
        String[] hosts = clusterAddress.split(",");
        return Arrays.stream(hosts)
                .map(
                        host ->
                                (host.startsWith("http://")
                                                || host.startsWith("https://")
                                                || host.startsWith("tcp://"))
                                        ? URI.create(host)
                                        : URI.create("tcp://" + host))
                .collect(Collectors.toList());
    }
}
6,944
0
Create_ds/conductor/es6-persistence/src/main/java/com/netflix/conductor/es6
Create_ds/conductor/es6-persistence/src/main/java/com/netflix/conductor/es6/config/ElasticSearchProperties.java
/*
 * Copyright 2022 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.es6.config;

import java.net.MalformedURLException;
import java.net.URL;
import java.time.Duration;
import java.time.temporal.ChronoUnit;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.boot.convert.DurationUnit;

/** Configuration properties for the ElasticSearch 6 persistence module. */
@ConfigurationProperties("conductor.elasticsearch")
public class ElasticSearchProperties {

    /**
     * The comma separated list of urls for the elasticsearch cluster. Format --
     * host1:port1,host2:port2
     */
    private String url = "localhost:9300";

    /** The index prefix to be used when creating indices */
    private String indexPrefix = "conductor";

    /** The color of the elasticserach cluster to wait for to confirm healthy status */
    private String clusterHealthColor = "green";

    /** The size of the batch to be used for bulk indexing in async mode */
    private int indexBatchSize = 1;

    /** The size of the queue used for holding async indexing tasks */
    private int asyncWorkerQueueSize = 100;

    /** The maximum number of threads allowed in the async pool */
    private int asyncMaxPoolSize = 12;

    /**
     * The time in seconds after which the async buffers will be flushed (if no activity) to
     * prevent data loss
     */
    @DurationUnit(ChronoUnit.SECONDS)
    private Duration asyncBufferFlushTimeout = Duration.ofSeconds(10);

    /** The number of shards that the index will be created with */
    private int indexShardCount = 5;

    /** The number of replicas that the index will be configured to have */
    private int indexReplicasCount = 1;

    /** The number of task log results that will be returned in the response */
    private int taskLogResultLimit = 10;

    /** The timeout in milliseconds used when requesting a connection from the connection manager */
    private int restClientConnectionRequestTimeout = -1;

    /** Used to control if index management is to be enabled or will be controlled externally */
    private boolean autoIndexManagementEnabled = true;

    /**
     * Document types are deprecated in ES6 and removed from ES7. This property can be used to
     * disable the use of specific document types with an override. This property is currently
     * used in ES6 module.
     *
     * <p><em>Note that this property will only take effect if {@link
     * ElasticSearchProperties#isAutoIndexManagementEnabled} is set to false and index
     * management is handled outside of this module.</em>
     */
    private String documentTypeOverride = "";

    /** Elasticsearch basic auth username */
    private String username;

    /** Elasticsearch basic auth password */
    private String password;

    public String getUrl() {
        return url;
    }

    public void setUrl(String url) {
        this.url = url;
    }

    public String getIndexPrefix() {
        return indexPrefix;
    }

    public void setIndexPrefix(String indexPrefix) {
        this.indexPrefix = indexPrefix;
    }

    public String getClusterHealthColor() {
        return clusterHealthColor;
    }

    public void setClusterHealthColor(String clusterHealthColor) {
        this.clusterHealthColor = clusterHealthColor;
    }

    public int getIndexBatchSize() {
        return indexBatchSize;
    }

    public void setIndexBatchSize(int indexBatchSize) {
        this.indexBatchSize = indexBatchSize;
    }

    public int getAsyncWorkerQueueSize() {
        return asyncWorkerQueueSize;
    }

    public void setAsyncWorkerQueueSize(int asyncWorkerQueueSize) {
        this.asyncWorkerQueueSize = asyncWorkerQueueSize;
    }

    public int getAsyncMaxPoolSize() {
        return asyncMaxPoolSize;
    }

    public void setAsyncMaxPoolSize(int asyncMaxPoolSize) {
        this.asyncMaxPoolSize = asyncMaxPoolSize;
    }

    public Duration getAsyncBufferFlushTimeout() {
        return asyncBufferFlushTimeout;
    }

    public void setAsyncBufferFlushTimeout(Duration asyncBufferFlushTimeout) {
        this.asyncBufferFlushTimeout = asyncBufferFlushTimeout;
    }

    public int getIndexShardCount() {
        return indexShardCount;
    }

    public void setIndexShardCount(int indexShardCount) {
        this.indexShardCount = indexShardCount;
    }

    public int getIndexReplicasCount() {
        return indexReplicasCount;
    }

    public void setIndexReplicasCount(int indexReplicasCount) {
        this.indexReplicasCount = indexReplicasCount;
    }

    public int getTaskLogResultLimit() {
        return taskLogResultLimit;
    }

    public void setTaskLogResultLimit(int taskLogResultLimit) {
        this.taskLogResultLimit = taskLogResultLimit;
    }

    public int getRestClientConnectionRequestTimeout() {
        return restClientConnectionRequestTimeout;
    }

    public void setRestClientConnectionRequestTimeout(int restClientConnectionRequestTimeout) {
        this.restClientConnectionRequestTimeout = restClientConnectionRequestTimeout;
    }

    public boolean isAutoIndexManagementEnabled() {
        return autoIndexManagementEnabled;
    }

    public void setAutoIndexManagementEnabled(boolean autoIndexManagementEnabled) {
        this.autoIndexManagementEnabled = autoIndexManagementEnabled;
    }

    public String getDocumentTypeOverride() {
        return documentTypeOverride;
    }

    public void setDocumentTypeOverride(String documentTypeOverride) {
        this.documentTypeOverride = documentTypeOverride;
    }

    public String getUsername() {
        return username;
    }

    public void setUsername(String username) {
        this.username = username;
    }

    public String getPassword() {
        return password;
    }

    public void setPassword(String password) {
        this.password = password;
    }

    /**
     * Parses the comma-separated cluster address list into URLs, defaulting scheme-less
     * entries to {@code tcp://}.
     */
    public List<URL> toURLs() {
        String clusterAddress = getUrl();
        String[] hosts = clusterAddress.split(",");
        return Arrays.stream(hosts)
                .map(
                        host ->
                                (host.startsWith("http://")
                                                || host.startsWith("https://")
                                                || host.startsWith("tcp://"))
                                        ? toURL(host)
                                        : toURL("tcp://" + host))
                .collect(Collectors.toList());
    }

    private URL toURL(String url) {
        try {
            return new URL(url);
        } catch (MalformedURLException e) {
            // Fixed: previous message concatenated the url and text without a separator.
            throw new IllegalArgumentException(url + " can not be converted to java.net.URL");
        }
    }
}
6,945
0
Create_ds/conductor/es6-persistence/src/main/java/com/netflix/conductor/es6
Create_ds/conductor/es6-persistence/src/main/java/com/netflix/conductor/es6/config/IsTcpProtocol.java
/* * Copyright 2022 Netflix, Inc. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.es6.config; import org.springframework.boot.context.properties.EnableConfigurationProperties; import org.springframework.context.annotation.Condition; import org.springframework.context.annotation.ConditionContext; import org.springframework.context.annotation.Configuration; import org.springframework.core.type.AnnotatedTypeMetadata; @EnableConfigurationProperties(ElasticSearchProperties.class) @Configuration public class IsTcpProtocol implements Condition { @Override public boolean matches(ConditionContext context, AnnotatedTypeMetadata metadata) { String url = context.getEnvironment().getProperty("conductor.elasticsearch.url"); if (url.startsWith("http") || url.startsWith("https")) { return false; } return true; } }
6,946
0
Create_ds/conductor/es6-persistence/src/main/java/com/netflix/conductor/es6
Create_ds/conductor/es6-persistence/src/main/java/com/netflix/conductor/es6/config/IsHttpProtocol.java
/* * Copyright 2022 Netflix, Inc. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.es6.config; import org.springframework.boot.context.properties.EnableConfigurationProperties; import org.springframework.context.annotation.Condition; import org.springframework.context.annotation.ConditionContext; import org.springframework.context.annotation.Configuration; import org.springframework.core.type.AnnotatedTypeMetadata; @EnableConfigurationProperties(ElasticSearchProperties.class) @Configuration public class IsHttpProtocol implements Condition { @Override public boolean matches(ConditionContext context, AnnotatedTypeMetadata metadata) { String url = context.getEnvironment().getProperty("conductor.elasticsearch.url"); if (url.startsWith("http") || url.startsWith("https")) { return true; } return false; } }
6,947
0
Create_ds/conductor/es6-persistence/src/main/java/com/netflix/conductor/es6/dao
Create_ds/conductor/es6-persistence/src/main/java/com/netflix/conductor/es6/dao/index/BulkRequestWrapper.java
/*
 * Copyright 2020 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.es6.dao.index;

import java.util.Objects;

import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.update.UpdateRequest;

import org.springframework.lang.NonNull;

/** Thread-safe wrapper for {@link BulkRequest}. */
class BulkRequestWrapper {

    private final BulkRequest bulkRequest;

    BulkRequestWrapper(@NonNull BulkRequest bulkRequest) {
        // requireNonNull backs up the @NonNull annotation at runtime.
        this.bulkRequest = Objects.requireNonNull(bulkRequest);
    }

    /** Appends an update action; mutations are serialized on the wrapped request itself. */
    public void add(@NonNull UpdateRequest req) {
        synchronized (bulkRequest) {
            bulkRequest.add(Objects.requireNonNull(req));
        }
    }

    /** Appends an index action; mutations are serialized on the wrapped request itself. */
    public void add(@NonNull IndexRequest req) {
        synchronized (bulkRequest) {
            bulkRequest.add(Objects.requireNonNull(req));
        }
    }

    // NOTE(review): returns the underlying request WITHOUT synchronization - callers should
    // have stopped adding before using the returned instance (e.g. when flushing the bulk).
    BulkRequest get() {
        return bulkRequest;
    }

    /** Current number of buffered actions, read under the same lock as the mutators. */
    int numberOfActions() {
        synchronized (bulkRequest) {
            return bulkRequest.numberOfActions();
        }
    }
}
6,948
0
Create_ds/conductor/es6-persistence/src/main/java/com/netflix/conductor/es6/dao
Create_ds/conductor/es6-persistence/src/main/java/com/netflix/conductor/es6/dao/index/ElasticSearchRestDAOV6.java
/*
 * Copyright 2022 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.es6.dao.index;

import java.io.IOException;
import java.io.InputStream;
import java.text.SimpleDateFormat;
import java.time.Instant;
import java.time.LocalDate;
import java.util.*;
import java.util.concurrent.*;
import java.util.stream.Collectors;
import java.util.stream.IntStream;

import javax.annotation.PostConstruct;
import javax.annotation.PreDestroy;

import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.http.HttpEntity;
import org.apache.http.HttpStatus;
import org.apache.http.entity.ContentType;
import org.apache.http.nio.entity.NByteArrayEntity;
import org.apache.http.nio.entity.NStringEntity;
import org.apache.http.util.EntityUtils;
import org.elasticsearch.action.DocWriteResponse;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.delete.DeleteRequest;
import org.elasticsearch.action.delete.DeleteResponse;
import org.elasticsearch.action.get.GetRequest;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.client.*;
import org.elasticsearch.client.core.CountRequest;
import org.elasticsearch.client.core.CountResponse;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.sort.FieldSortBuilder;
import org.elasticsearch.search.sort.SortOrder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.retry.support.RetryTemplate;

import com.netflix.conductor.annotations.Trace;
import com.netflix.conductor.common.metadata.events.EventExecution;
import com.netflix.conductor.common.metadata.tasks.TaskExecLog;
import com.netflix.conductor.common.run.SearchResult;
import com.netflix.conductor.common.run.TaskSummary;
import com.netflix.conductor.common.run.WorkflowSummary;
import com.netflix.conductor.core.events.queue.Message;
import com.netflix.conductor.core.exception.NonTransientException;
import com.netflix.conductor.core.exception.TransientException;
import com.netflix.conductor.dao.IndexDAO;
import com.netflix.conductor.es6.config.ElasticSearchProperties;
import com.netflix.conductor.es6.dao.query.parser.internal.ParserException;
import com.netflix.conductor.metrics.Monitors;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.fasterxml.jackson.databind.type.MapType;
import com.fasterxml.jackson.databind.type.TypeFactory;

/**
 * {@link IndexDAO} implementation backed by Elasticsearch 6.x, using the high-level REST client
 * for document operations and the low-level REST client for cluster/index administration.
 *
 * <p>Indexing of tasks, messages, and event executions is buffered: documents are accumulated in
 * per-doc-type {@link BulkRequest}s and flushed either when a batch reaches {@code indexBatchSize}
 * or when a background scheduler detects the buffer is older than {@code asyncBufferFlushTimeout}.
 * Workflow documents and task execution logs bypass the buffer and are sent directly.
 */
@Trace
public class ElasticSearchRestDAOV6 extends ElasticSearchBaseDAO implements IndexDAO {

    private static final Logger LOGGER = LoggerFactory.getLogger(ElasticSearchRestDAOV6.class);

    // Core size of the general-purpose async indexing pool; max size comes from properties.
    private static final int CORE_POOL_SIZE = 6;
    private static final long KEEP_ALIVE_TIME = 1L;

    // Document types; also used as suffixes when deriving index names from the index prefix.
    private static final String WORKFLOW_DOC_TYPE = "workflow";
    private static final String TASK_DOC_TYPE = "task";
    private static final String LOG_DOC_TYPE = "task_log";
    private static final String EVENT_DOC_TYPE = "event";
    private static final String MSG_DOC_TYPE = "message";

    private static final TimeZone GMT = TimeZone.getTimeZone("GMT");
    // Year/month/week pattern used to roll log/event/message indices weekly.
    // NOTE(review): SimpleDateFormat is not thread-safe and this instance is shared statically;
    // it is formatted from a scheduled thread in updateIndexName - confirm single-threaded use.
    private static final SimpleDateFormat SIMPLE_DATE_FORMAT = new SimpleDateFormat("yyyyMMWW");

    // String constants for the HTTP verbs accepted by the low-level RestClient.
    private @interface HttpMethod {

        String GET = "GET";
        String POST = "POST";
        String PUT = "PUT";
        String HEAD = "HEAD";
    }

    private static final String className = ElasticSearchRestDAOV6.class.getSimpleName();

    private final String workflowIndexName;
    private final String taskIndexName;
    private final String eventIndexPrefix;
    private String eventIndexName;          // rolled weekly by updateIndexesNames()
    private final String messageIndexPrefix;
    private String messageIndexName;        // rolled weekly by updateIndexesNames()
    private String logIndexName;            // rolled weekly by updateIndexesNames()
    private final String logIndexPrefix;
    // When non-blank, replaces every per-call document type (single-type index deployments).
    private final String docTypeOverride;
    private final String clusterHealthColor;
    private final ObjectMapper objectMapper;
    private final RestHighLevelClient elasticSearchClient;
    private final RestClient elasticSearchAdminClient;
    private final ExecutorService executorService;     // workflow/task indexing and updates
    private final ExecutorService logExecutorService;  // task logs, events, messages
    // Buffered bulk requests keyed by document type; flushed by size or by timeout.
    private final ConcurrentHashMap<String, BulkRequests> bulkRequests;
    private final int indexBatchSize;
    private final long asyncBufferFlushTimeout;
    private final ElasticSearchProperties properties;
    private final RetryTemplate retryTemplate;

    static {
        SIMPLE_DATE_FORMAT.setTimeZone(GMT);
    }

    public ElasticSearchRestDAOV6(
            RestClientBuilder restClientBuilder,
            RetryTemplate retryTemplate,
            ElasticSearchProperties properties,
            ObjectMapper objectMapper) {

        this.objectMapper = objectMapper;
        this.elasticSearchAdminClient = restClientBuilder.build();
        this.elasticSearchClient = new RestHighLevelClient(restClientBuilder);
        this.clusterHealthColor = properties.getClusterHealthColor();
        this.bulkRequests = new ConcurrentHashMap<>();
        this.indexBatchSize = properties.getIndexBatchSize();
        this.asyncBufferFlushTimeout = properties.getAsyncBufferFlushTimeout().toMillis();
        this.properties = properties;

        this.indexPrefix = properties.getIndexPrefix();
        // The doc-type override only applies when Conductor is NOT managing indices itself.
        if (!properties.isAutoIndexManagementEnabled()
                && StringUtils.isNotBlank(properties.getDocumentTypeOverride())) {
            docTypeOverride = properties.getDocumentTypeOverride();
        } else {
            docTypeOverride = "";
        }

        this.workflowIndexName = getIndexName(WORKFLOW_DOC_TYPE);
        this.taskIndexName = getIndexName(TASK_DOC_TYPE);
        this.logIndexPrefix = this.indexPrefix + "_" + LOG_DOC_TYPE;
        this.messageIndexPrefix = this.indexPrefix + "_" + MSG_DOC_TYPE;
        this.eventIndexPrefix = this.indexPrefix + "_" + EVENT_DOC_TYPE;
        int workerQueueSize = properties.getAsyncWorkerQueueSize();
        int maximumPoolSize = properties.getAsyncMaxPoolSize();

        // Set up a workerpool for performing async operations. Rejected tasks are dropped
        // (and counted) rather than blocking the caller.
        this.executorService =
                new ThreadPoolExecutor(
                        CORE_POOL_SIZE,
                        maximumPoolSize,
                        KEEP_ALIVE_TIME,
                        TimeUnit.MINUTES,
                        new LinkedBlockingQueue<>(workerQueueSize),
                        (runnable, executor) -> {
                            LOGGER.warn(
                                    "Request {} to async dao discarded in executor {}",
                                    runnable,
                                    executor);
                            Monitors.recordDiscardedIndexingCount("indexQueue");
                        });

        // Set up a workerpool for performing async operations for task_logs, event_executions,
        // message
        int corePoolSize = 1;
        maximumPoolSize = 2;
        long keepAliveTime = 30L;
        this.logExecutorService =
                new ThreadPoolExecutor(
                        corePoolSize,
                        maximumPoolSize,
                        keepAliveTime,
                        TimeUnit.SECONDS,
                        new LinkedBlockingQueue<>(workerQueueSize),
                        (runnable, executor) -> {
                            LOGGER.warn(
                                    "Request {} to async log dao discarded in executor {}",
                                    runnable,
                                    executor);
                            Monitors.recordDiscardedIndexingCount("logQueue");
                        });

        // Periodically flush any bulk buffers that have not filled up on their own.
        // NOTE(review): this scheduled executor is never shut down.
        Executors.newSingleThreadScheduledExecutor()
                .scheduleAtFixedRate(this::flushBulkRequests, 60, 30, TimeUnit.SECONDS);
        this.retryTemplate = retryTemplate;
    }

    /** Shuts down both async worker pools, waiting up to 30s each for in-flight work. */
    @PreDestroy
    private void shutdown() {
        LOGGER.info("Gracefully shutdown executor service");
        shutdownExecutorService(logExecutorService);
        shutdownExecutorService(executorService);
    }

    /**
     * Orderly shutdown of one executor: waits up to 30 seconds, then forces shutdown.
     * Re-interrupts the current thread if the wait itself is interrupted.
     */
    private void shutdownExecutorService(ExecutorService execService) {
        try {
            execService.shutdown();
            if (execService.awaitTermination(30, TimeUnit.SECONDS)) {
                LOGGER.debug("tasks completed, shutting down");
            } else {
                LOGGER.warn("Forcing shutdown after waiting for 30 seconds");
                execService.shutdownNow();
            }
        } catch (InterruptedException ie) {
            LOGGER.warn(
                    "Shutdown interrupted, invoking shutdownNow on scheduledThreadPoolExecutor for delay queue");
            execService.shutdownNow();
            Thread.currentThread().interrupt();
        }
    }

    /**
     * Waits for the cluster to reach the configured health color and, when auto index
     * management is enabled, bootstraps templates and the workflow/task indices.
     */
    @Override
    @PostConstruct
    public void setup() throws Exception {
        waitForHealthyCluster();

        if (properties.isAutoIndexManagementEnabled()) {
            createIndexesTemplates();
            createWorkflowIndex();
            createTaskIndex();
        }
    }

    /**
     * Installs the index templates and schedules an hourly refresh of the weekly-rolled
     * index names (log/event/message).
     */
    private void createIndexesTemplates() {
        try {
            initIndexesTemplates();
            updateIndexesNames();
            // NOTE(review): this scheduled pool is never shut down.
            Executors.newScheduledThreadPool(1)
                    .scheduleAtFixedRate(this::updateIndexesNames, 0, 1, TimeUnit.HOURS);
        } catch (Exception e) {
            LOGGER.error("Error creating index templates!", e);
        }
    }

    private void initIndexesTemplates() {
        initIndexTemplate(LOG_DOC_TYPE);
        initIndexTemplate(EVENT_DOC_TYPE);
        initIndexTemplate(MSG_DOC_TYPE);
    }

    /**
     * Initializes the index with the required templates and mappings.
     *
     * <p>Loads {@code /template_<type>.json} from the classpath and PUTs it to
     * {@code /_template/template_<type>} if that template does not already exist.
     * Failures are logged and swallowed so startup can proceed.
     */
    private void initIndexTemplate(String type) {
        String template = "template_" + type;
        try {
            if (doesResourceNotExist("/_template/" + template)) {
                LOGGER.info("Creating the index template '" + template + "'");
                // NOTE(review): this InputStream is never closed - resource leak on each call.
                InputStream stream =
                        ElasticSearchDAOV6.class.getResourceAsStream("/" + template + ".json");
                byte[] templateSource = IOUtils.toByteArray(stream);

                HttpEntity entity =
                        new NByteArrayEntity(templateSource, ContentType.APPLICATION_JSON);
                elasticSearchAdminClient.performRequest(
                        HttpMethod.PUT, "/_template/" + template, Collections.emptyMap(), entity);
            }
        } catch (Exception e) {
            LOGGER.error("Failed to init " + template, e);
        }
    }

    /** Recomputes the current (weekly) index names and creates the indices if missing. */
    private void updateIndexesNames() {
        logIndexName = updateIndexName(LOG_DOC_TYPE);
        eventIndexName = updateIndexName(EVENT_DOC_TYPE);
        messageIndexName = updateIndexName(MSG_DOC_TYPE);
    }

    /**
     * Builds the dated index name {@code <prefix>_<type>_<yyyyMMWW>}, ensures the index
     * exists, and returns the name.
     *
     * @throws NonTransientException if the index cannot be created.
     */
    private String updateIndexName(String type) {
        String indexName =
                this.indexPrefix + "_" + type + "_" + SIMPLE_DATE_FORMAT.format(new Date());
        try {
            addIndex(indexName);
            return indexName;
        } catch (IOException e) {
            LOGGER.error("Failed to update log index name: {}", indexName, e);
            throw new NonTransientException("Failed to update log index name: " + indexName, e);
        }
    }

    /** Creates the workflow index and applies its mapping; failures are logged, not thrown. */
    private void createWorkflowIndex() {
        String indexName = getIndexName(WORKFLOW_DOC_TYPE);
        try {
            addIndex(indexName);
        } catch (IOException e) {
            LOGGER.error("Failed to initialize index '{}'", indexName, e);
        }
        try {
            addMappingToIndex(indexName, WORKFLOW_DOC_TYPE, "/mappings_docType_workflow.json");
        } catch (IOException e) {
            LOGGER.error("Failed to add {} mapping", WORKFLOW_DOC_TYPE);
        }
    }

    /** Creates the task index and applies its mapping; failures are logged, not thrown. */
    private void createTaskIndex() {
        String indexName = getIndexName(TASK_DOC_TYPE);
        try {
            addIndex(indexName);
        } catch (IOException e) {
            LOGGER.error("Failed to initialize index '{}'", indexName, e);
        }
        try {
            addMappingToIndex(indexName, TASK_DOC_TYPE, "/mappings_docType_task.json");
        } catch (IOException e) {
            LOGGER.error("Failed to add {} mapping", TASK_DOC_TYPE);
        }
    }

    /**
     * Waits for the ES cluster to become green.
     *
     * @throws Exception If there is an issue connecting with the ES cluster.
     */
    private void waitForHealthyCluster() throws Exception {
        Map<String, String> params = new HashMap<>();
        params.put("wait_for_status", this.clusterHealthColor);
        params.put("timeout", "30s");
        elasticSearchAdminClient.performRequest("GET", "/_cluster/health", params);
    }

    /**
     * Adds an index to elasticsearch if it does not exist.
     *
     * @param index The name of the index to create.
     * @throws IOException If an error occurred during requests to ES.
     */
    private void addIndex(final String index) throws IOException {

        LOGGER.info("Adding index '{}'...", index);

        String resourcePath = "/" + index;

        if (doesResourceNotExist(resourcePath)) {

            try {
                ObjectNode setting = objectMapper.createObjectNode();
                ObjectNode indexSetting = objectMapper.createObjectNode();

                indexSetting.put("number_of_shards", properties.getIndexShardCount());
                indexSetting.put("number_of_replicas", properties.getIndexReplicasCount());

                setting.set("index", indexSetting);

                elasticSearchAdminClient.performRequest(
                        HttpMethod.PUT,
                        resourcePath,
                        Collections.emptyMap(),
                        new NStringEntity(setting.toString(), ContentType.APPLICATION_JSON));
                LOGGER.info("Added '{}' index", index);
            } catch (ResponseException e) {

                // A concurrent creator may have won the race; treat "already exists" as success.
                boolean errorCreatingIndex = true;

                Response errorResponse = e.getResponse();
                if (errorResponse.getStatusLine().getStatusCode() == HttpStatus.SC_BAD_REQUEST) {
                    JsonNode root =
                            objectMapper.readTree(EntityUtils.toString(errorResponse.getEntity()));
                    String errorCode = root.get("error").get("type").asText();
                    if ("index_already_exists_exception".equals(errorCode)) {
                        errorCreatingIndex = false;
                    }
                }

                if (errorCreatingIndex) {
                    throw e;
                }
            }
        } else {
            LOGGER.info("Index '{}' already exists", index);
        }
    }

    /**
     * Adds a mapping type to an index if it does not exist.
     *
     * @param index The name of the index.
     * @param mappingType The name of the mapping type.
     * @param mappingFilename The name of the mapping file to use to add the mapping if it does not
     *     exist.
     * @throws IOException If an error occurred during requests to ES.
     */
    private void addMappingToIndex(
            final String index, final String mappingType, final String mappingFilename)
            throws IOException {

        LOGGER.info("Adding '{}' mapping to index '{}'...", mappingType, index);

        String resourcePath = "/" + index + "/_mapping/" + mappingType;

        if (doesResourceNotExist(resourcePath)) {
            HttpEntity entity =
                    new NByteArrayEntity(
                            loadTypeMappingSource(mappingFilename).getBytes(),
                            ContentType.APPLICATION_JSON);
            elasticSearchAdminClient.performRequest(
                    HttpMethod.PUT, resourcePath, Collections.emptyMap(), entity);
            LOGGER.info("Added '{}' mapping", mappingType);
        } else {
            LOGGER.info("Mapping '{}' already exists", mappingType);
        }
    }

    /**
     * Determines whether a resource exists in ES. This will call a GET method to a particular path
     * and return true if status 200; false otherwise.
     *
     * @param resourcePath The path of the resource to get.
     * @return True if it exists; false otherwise.
     * @throws IOException If an error occurred during requests to ES.
     */
    public boolean doesResourceExist(final String resourcePath) throws IOException {
        Response response = elasticSearchAdminClient.performRequest(HttpMethod.HEAD, resourcePath);
        return response.getStatusLine().getStatusCode() == HttpStatus.SC_OK;
    }

    /**
     * The inverse of doesResourceExist.
     *
     * @param resourcePath The path of the resource to check.
     * @return True if it does not exist; false otherwise.
     * @throws IOException If an error occurred during requests to ES.
     */
    public boolean doesResourceNotExist(final String resourcePath) throws IOException {
        return !doesResourceExist(resourcePath);
    }

    /**
     * Indexes a workflow summary synchronously (no bulk buffering). Errors are logged and
     * recorded in metrics, never thrown.
     */
    @Override
    public void indexWorkflow(WorkflowSummary workflow) {
        try {
            long startTime = Instant.now().toEpochMilli();
            String workflowId = workflow.getWorkflowId();
            byte[] docBytes = objectMapper.writeValueAsBytes(workflow);

            String docType =
                    StringUtils.isBlank(docTypeOverride) ? WORKFLOW_DOC_TYPE : docTypeOverride;
            IndexRequest request = new IndexRequest(workflowIndexName, docType, workflowId);
            request.source(docBytes, XContentType.JSON);
            elasticSearchClient.index(request, RequestOptions.DEFAULT);
            long endTime = Instant.now().toEpochMilli();
            LOGGER.debug(
                    "Time taken {} for indexing workflow: {}", endTime - startTime, workflowId);
            Monitors.recordESIndexTime("index_workflow", WORKFLOW_DOC_TYPE, endTime - startTime);
            Monitors.recordWorkerQueueSize(
                    "indexQueue", ((ThreadPoolExecutor) executorService).getQueue().size());
        } catch (Exception e) {
            Monitors.error(className, "indexWorkflow");
            LOGGER.error("Failed to index workflow: {}", workflow.getWorkflowId(), e);
        }
    }

    @Override
    public CompletableFuture<Void> asyncIndexWorkflow(WorkflowSummary workflow) {
        return CompletableFuture.runAsync(() -> indexWorkflow(workflow), executorService);
    }

    /** Indexes a task summary via the buffered bulk path; errors are logged, never thrown. */
    @Override
    public void indexTask(TaskSummary task) {
        try {
            long startTime = Instant.now().toEpochMilli();
            String taskId = task.getTaskId();

            String docType = StringUtils.isBlank(docTypeOverride) ? TASK_DOC_TYPE : docTypeOverride;
            indexObject(taskIndexName, docType, taskId, task);
            long endTime = Instant.now().toEpochMilli();
            LOGGER.debug(
                    "Time taken {} for  indexing task:{} in workflow: {}",
                    endTime - startTime,
                    taskId,
                    task.getWorkflowId());
            Monitors.recordESIndexTime("index_task", TASK_DOC_TYPE, endTime - startTime);
            Monitors.recordWorkerQueueSize(
                    "indexQueue", ((ThreadPoolExecutor) executorService).getQueue().size());
        } catch (Exception e) {
            LOGGER.error("Failed to index task: {}", task.getTaskId(), e);
        }
    }

    @Override
    public CompletableFuture<Void> asyncIndexTask(TaskSummary task) {
        return CompletableFuture.runAsync(() -> indexTask(task), executorService);
    }

    /**
     * Indexes a batch of task execution logs in a single bulk request (not buffered).
     * Logs that fail JSON serialization are skipped individually.
     */
    @Override
    public void addTaskExecutionLogs(List<TaskExecLog> taskExecLogs) {
        if (taskExecLogs.isEmpty()) {
            return;
        }

        long startTime = Instant.now().toEpochMilli();
        BulkRequest bulkRequest = new BulkRequest();
        for (TaskExecLog log : taskExecLogs) {

            byte[] docBytes;
            try {
                docBytes = objectMapper.writeValueAsBytes(log);
            } catch (JsonProcessingException e) {
                LOGGER.error("Failed to convert task log to JSON for task {}", log.getTaskId());
                continue;
            }

            String docType = StringUtils.isBlank(docTypeOverride) ? LOG_DOC_TYPE : docTypeOverride;
            IndexRequest request = new IndexRequest(logIndexName, docType);
            request.source(docBytes, XContentType.JSON);
            bulkRequest.add(request);
        }

        try {
            elasticSearchClient.bulk(bulkRequest, RequestOptions.DEFAULT);
            long endTime = Instant.now().toEpochMilli();
            LOGGER.debug("Time taken {} for indexing taskExecutionLogs", endTime - startTime);
            Monitors.recordESIndexTime(
                    "index_task_execution_logs", LOG_DOC_TYPE, endTime - startTime);
            Monitors.recordWorkerQueueSize(
                    "logQueue", ((ThreadPoolExecutor) logExecutorService).getQueue().size());
        } catch (Exception e) {
            List<String> taskIds =
                    taskExecLogs.stream().map(TaskExecLog::getTaskId).collect(Collectors.toList());
            LOGGER.error("Failed to index task execution logs for tasks: {}", taskIds, e);
        }
    }

    @Override
    public CompletableFuture<Void> asyncAddTaskExecutionLogs(List<TaskExecLog> logs) {
        return CompletableFuture.runAsync(() -> addTaskExecutionLogs(logs), logExecutorService);
    }

    /**
     * Fetches execution logs for a task, oldest first, capped at the configured result limit.
     *
     * <p>NOTE(review): returns {@code null} (not an empty list) when the search fails.
     */
    @Override
    public List<TaskExecLog> getTaskExecutionLogs(String taskId) {
        try {
            BoolQueryBuilder query = boolQueryBuilder("taskId='" + taskId + "'", "*");

            // Create the searchObjectIdsViaExpression source
            SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
            searchSourceBuilder.query(query);
            searchSourceBuilder.sort(new FieldSortBuilder("createdTime").order(SortOrder.ASC));
            searchSourceBuilder.size(properties.getTaskLogResultLimit());

            // Generate the actual request to send to ES.
            String docType = StringUtils.isBlank(docTypeOverride) ? LOG_DOC_TYPE : docTypeOverride;
            SearchRequest searchRequest = new SearchRequest(logIndexPrefix + "*");
            searchRequest.types(docType);
            searchRequest.source(searchSourceBuilder);

            SearchResponse response = elasticSearchClient.search(searchRequest);

            return mapTaskExecLogsResponse(response);
        } catch (Exception e) {
            LOGGER.error("Failed to get task execution logs for task: {}", taskId, e);
        }
        return null;
    }

    /** Deserializes each search hit into a {@link TaskExecLog}. */
    private List<TaskExecLog> mapTaskExecLogsResponse(SearchResponse response) throws IOException {
        SearchHit[] hits = response.getHits().getHits();
        List<TaskExecLog> logs = new ArrayList<>(hits.length);
        for (SearchHit hit : hits) {
            String source = hit.getSourceAsString();
            TaskExecLog tel = objectMapper.readValue(source, TaskExecLog.class);
            logs.add(tel);
        }
        return logs;
    }

    /**
     * Fetches all messages indexed for a queue, oldest first.
     *
     * <p>NOTE(review): returns {@code null} (not an empty list) when the search fails.
     */
    @Override
    public List<Message> getMessages(String queue) {
        try {
            BoolQueryBuilder query = boolQueryBuilder("queue='" + queue + "'", "*");

            // Create the searchObjectIdsViaExpression source
            SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
            searchSourceBuilder.query(query);
            searchSourceBuilder.sort(new FieldSortBuilder("created").order(SortOrder.ASC));

            // Generate the actual request to send to ES.
            String docType = StringUtils.isBlank(docTypeOverride) ? MSG_DOC_TYPE : docTypeOverride;
            SearchRequest searchRequest = new SearchRequest(messageIndexPrefix + "*");
            searchRequest.types(docType);
            searchRequest.source(searchSourceBuilder);

            SearchResponse response = elasticSearchClient.search(searchRequest);
            return mapGetMessagesResponse(response);
        } catch (Exception e) {
            LOGGER.error("Failed to get messages for queue: {}", queue, e);
        }
        return null;
    }

    /** Rebuilds {@link Message} objects from the {@code messageId}/{@code payload} fields. */
    private List<Message> mapGetMessagesResponse(SearchResponse response) throws IOException {
        SearchHit[] hits = response.getHits().getHits();
        TypeFactory factory = TypeFactory.defaultInstance();
        MapType type = factory.constructMapType(HashMap.class, String.class, String.class);
        List<Message> messages = new ArrayList<>(hits.length);
        for (SearchHit hit : hits) {
            String source = hit.getSourceAsString();
            Map<String, String> mapSource = objectMapper.readValue(source, type);
            Message msg = new Message(mapSource.get("messageId"), mapSource.get("payload"), null);
            messages.add(msg);
        }
        return messages;
    }

    /**
     * Fetches all executions recorded for an event, oldest first.
     *
     * <p>NOTE(review): returns {@code null} (not an empty list) when the search fails.
     */
    @Override
    public List<EventExecution> getEventExecutions(String event) {
        try {
            BoolQueryBuilder query = boolQueryBuilder("event='" + event + "'", "*");

            // Create the searchObjectIdsViaExpression source
            SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
            searchSourceBuilder.query(query);
            searchSourceBuilder.sort(new FieldSortBuilder("created").order(SortOrder.ASC));

            // Generate the actual request to send to ES.
            String docType =
                    StringUtils.isBlank(docTypeOverride) ? EVENT_DOC_TYPE : docTypeOverride;
            SearchRequest searchRequest = new SearchRequest(eventIndexPrefix + "*");
            searchRequest.types(docType);
            searchRequest.source(searchSourceBuilder);

            SearchResponse response = elasticSearchClient.search(searchRequest);

            return mapEventExecutionsResponse(response);
        } catch (Exception e) {
            LOGGER.error("Failed to get executions for event: {}", event, e);
        }
        return null;
    }

    /** Deserializes each search hit into an {@link EventExecution}. */
    private List<EventExecution> mapEventExecutionsResponse(SearchResponse response)
            throws IOException {
        SearchHit[] hits = response.getHits().getHits();
        List<EventExecution> executions = new ArrayList<>(hits.length);
        for (SearchHit hit : hits) {
            String source = hit.getSourceAsString();
            EventExecution tel = objectMapper.readValue(source, EventExecution.class);
            executions.add(tel);
        }
        return executions;
    }

    /**
     * Indexes a queue message via the buffered bulk path with an ES-generated document id.
     * Errors are logged, never thrown.
     */
    @Override
    public void addMessage(String queue, Message message) {
        try {
            long startTime = Instant.now().toEpochMilli();
            Map<String, Object> doc = new HashMap<>();
            doc.put("messageId", message.getId());
            doc.put("payload", message.getPayload());
            doc.put("queue", queue);
            doc.put("created", System.currentTimeMillis());

            String docType = StringUtils.isBlank(docTypeOverride) ? MSG_DOC_TYPE : docTypeOverride;
            indexObject(messageIndexName, docType, doc);
            long endTime = Instant.now().toEpochMilli();
            LOGGER.debug(
                    "Time taken {} for  indexing message: {}", endTime - startTime, message.getId());
            Monitors.recordESIndexTime("add_message", MSG_DOC_TYPE, endTime - startTime);
        } catch (Exception e) {
            LOGGER.error("Failed to index message: {}", message.getId(), e);
        }
    }

    @Override
    public CompletableFuture<Void> asyncAddMessage(String queue, Message message) {
        return CompletableFuture.runAsync(() -> addMessage(queue, message), executorService);
    }

    /**
     * Indexes an event execution via the buffered bulk path. The document id is the
     * composite {@code name.event.messageId.id}. Errors are logged, never thrown.
     */
    @Override
    public void addEventExecution(EventExecution eventExecution) {
        try {
            long startTime = Instant.now().toEpochMilli();
            String id =
                    eventExecution.getName()
                            + "."
                            + eventExecution.getEvent()
                            + "."
                            + eventExecution.getMessageId()
                            + "."
                            + eventExecution.getId();

            String docType =
                    StringUtils.isBlank(docTypeOverride) ? EVENT_DOC_TYPE : docTypeOverride;
            indexObject(eventIndexName, docType, id, eventExecution);
            long endTime = Instant.now().toEpochMilli();
            LOGGER.debug(
                    "Time taken {} for indexing event execution: {}",
                    endTime - startTime,
                    eventExecution.getId());
            Monitors.recordESIndexTime("add_event_execution", EVENT_DOC_TYPE, endTime - startTime);
            Monitors.recordWorkerQueueSize(
                    "logQueue", ((ThreadPoolExecutor) logExecutorService).getQueue().size());
        } catch (Exception e) {
            LOGGER.error("Failed to index event execution: {}", eventExecution.getId(), e);
        }
    }

    @Override
    public CompletableFuture<Void> asyncAddEventExecution(EventExecution eventExecution) {
        return CompletableFuture.runAsync(
                () -> addEventExecution(eventExecution), logExecutorService);
    }

    /** Searches workflows, returning matching workflow ids only. */
    @Override
    public SearchResult<String> searchWorkflows(
            String query, String freeText, int start, int count, List<String> sort) {
        try {
            return searchObjectsViaExpression(
                    query, start, count, sort, freeText, WORKFLOW_DOC_TYPE, true, String.class);
        } catch (Exception e) {
            throw new TransientException(e.getMessage(), e);
        }
    }

    /** Searches workflows, returning full {@link WorkflowSummary} documents. */
    @Override
    public SearchResult<WorkflowSummary> searchWorkflowSummary(
            String query, String freeText, int start, int count, List<String> sort) {
        try {
            return searchObjectsViaExpression(
                    query,
                    start,
                    count,
                    sort,
                    freeText,
                    WORKFLOW_DOC_TYPE,
                    false,
                    WorkflowSummary.class);
        } catch (Exception e) {
            throw new TransientException(e.getMessage(), e);
        }
    }

    /** Searches tasks, returning matching task ids only. */
    @Override
    public SearchResult<String> searchTasks(
            String query, String freeText, int start, int count, List<String> sort) {
        try {
            return searchObjectsViaExpression(
                    query, start, count, sort, freeText, TASK_DOC_TYPE, true, String.class);
        } catch (Exception e) {
            throw new TransientException(e.getMessage(), e);
        }
    }

    /** Searches tasks, returning full {@link TaskSummary} documents. */
    @Override
    public SearchResult<TaskSummary> searchTaskSummary(
            String query, String freeText, int start, int count, List<String> sort) {
        try {
            return searchObjectsViaExpression(
                    query, start, count, sort, freeText, TASK_DOC_TYPE, false, TaskSummary.class);
        } catch (Exception e) {
            throw new TransientException(e.getMessage(), e);
        }
    }

    /** Deletes a workflow document by id; logs (does not throw) if it was not found. */
    @Override
    public void removeWorkflow(String workflowId) {
        long startTime = Instant.now().toEpochMilli();
        String docType = StringUtils.isBlank(docTypeOverride) ? WORKFLOW_DOC_TYPE : docTypeOverride;
        DeleteRequest request = new DeleteRequest(workflowIndexName, docType, workflowId);

        try {
            DeleteResponse response = elasticSearchClient.delete(request);

            if (response.getResult() == DocWriteResponse.Result.NOT_FOUND) {
                LOGGER.error("Index removal failed - document not found by id: {}", workflowId);
            }
            long endTime = Instant.now().toEpochMilli();
            LOGGER.debug(
                    "Time taken {} for removing workflow: {}", endTime - startTime, workflowId);
            Monitors.recordESIndexTime("remove_workflow", WORKFLOW_DOC_TYPE, endTime - startTime);
            Monitors.recordWorkerQueueSize(
                    "indexQueue", ((ThreadPoolExecutor) executorService).getQueue().size());
        } catch (IOException e) {
            LOGGER.error("Failed to remove workflow {} from index", workflowId, e);
            Monitors.error(className, "remove");
        }
    }

    @Override
    public CompletableFuture<Void> asyncRemoveWorkflow(String workflowId) {
        return CompletableFuture.runAsync(() -> removeWorkflow(workflowId), executorService);
    }

    /**
     * Partially updates a workflow document with parallel key/value arrays.
     *
     * @throws IllegalArgumentException (caught and logged) if the arrays differ in length.
     */
    @Override
    public void updateWorkflow(String workflowInstanceId, String[] keys, Object[] values) {
        try {
            if (keys.length != values.length) {
                throw new IllegalArgumentException("Number of keys and values do not match");
            }

            long startTime = Instant.now().toEpochMilli();
            String docType =
                    StringUtils.isBlank(docTypeOverride) ? WORKFLOW_DOC_TYPE : docTypeOverride;
            UpdateRequest request =
                    new UpdateRequest(workflowIndexName, docType, workflowInstanceId);
            Map<String, Object> source =
                    IntStream.range(0, keys.length)
                            .boxed()
                            .collect(Collectors.toMap(i -> keys[i], i -> values[i]));
            request.doc(source);

            LOGGER.debug("Updating workflow {} with {}", workflowInstanceId, source);
            elasticSearchClient.update(request, RequestOptions.DEFAULT);
            long endTime = Instant.now().toEpochMilli();
            LOGGER.debug(
                    "Time taken {} for updating workflow: {}",
                    endTime - startTime,
                    workflowInstanceId);
            Monitors.recordESIndexTime("update_workflow", WORKFLOW_DOC_TYPE, endTime - startTime);
            Monitors.recordWorkerQueueSize(
                    "indexQueue", ((ThreadPoolExecutor) executorService).getQueue().size());
        } catch (Exception e) {
            LOGGER.error("Failed to update workflow {}", workflowInstanceId, e);
            Monitors.error(className, "update");
        }
    }

    @Override
    public CompletableFuture<Void> asyncUpdateWorkflow(
            String workflowInstanceId, String[] keys, Object[] values) {
        return CompletableFuture.runAsync(
                () -> updateWorkflow(workflowInstanceId, keys, values), executorService);
    }

    /**
     * Deletes a task document, but only after verifying (via search) that the task belongs
     * to the given workflow. Mismatches and failures are logged, never thrown.
     */
    @Override
    public void removeTask(String workflowId, String taskId) {
        long startTime = Instant.now().toEpochMilli();
        String docType = StringUtils.isBlank(docTypeOverride) ? TASK_DOC_TYPE : docTypeOverride;

        SearchResult<String> taskSearchResult =
                searchTasks(
                        String.format("(taskId='%s') AND (workflowId='%s')", taskId, workflowId),
                        "*",
                        0,
                        1,
                        null);

        if (taskSearchResult.getTotalHits() == 0) {
            LOGGER.error("Task: {} does not belong to workflow: {}", taskId, workflowId);
            Monitors.error(className, "removeTask");
            return;
        }

        DeleteRequest request = new DeleteRequest(taskIndexName, docType, taskId);

        try {
            DeleteResponse response = elasticSearchClient.delete(request);

            if (response.getResult() != DocWriteResponse.Result.DELETED) {
                LOGGER.error("Index removal failed - task not found by id: {}", workflowId);
                Monitors.error(className, "removeTask");
                return;
            }
            long endTime = Instant.now().toEpochMilli();
            LOGGER.debug(
                    "Time taken {} for removing task:{} of workflow: {}",
                    endTime - startTime,
                    taskId,
                    workflowId);
            Monitors.recordESIndexTime("remove_task", docType, endTime - startTime);
            Monitors.recordWorkerQueueSize(
                    "indexQueue", ((ThreadPoolExecutor) executorService).getQueue().size());
        } catch (IOException e) {
            LOGGER.error(
                    "Failed to remove task {} of workflow: {} from index", taskId, workflowId, e);
            Monitors.error(className, "removeTask");
        }
    }

    @Override
    public CompletableFuture<Void> asyncRemoveTask(String workflowId, String taskId) {
        return CompletableFuture.runAsync(() -> removeTask(workflowId, taskId), executorService);
    }

    /**
     * Partially updates a task document with parallel key/value arrays.
     *
     * @throws IllegalArgumentException (caught and logged) if the arrays differ in length.
     */
    @Override
    public void updateTask(String workflowId, String taskId, String[] keys, Object[] values) {
        try {
            if (keys.length != values.length) {
                throw new IllegalArgumentException("Number of keys and values do not match");
            }

            long startTime = Instant.now().toEpochMilli();
            String docType = StringUtils.isBlank(docTypeOverride) ? TASK_DOC_TYPE : docTypeOverride;
            UpdateRequest request = new UpdateRequest(taskIndexName, docType, taskId);
            Map<String, Object> source =
                    IntStream.range(0, keys.length)
                            .boxed()
                            .collect(Collectors.toMap(i -> keys[i], i -> values[i]));
            request.doc(source);

            LOGGER.debug("Updating task: {} of workflow: {} with {}", taskId, workflowId, source);
            elasticSearchClient.update(request, RequestOptions.DEFAULT);
            long endTime = Instant.now().toEpochMilli();
            LOGGER.debug(
                    "Time taken {} for updating task: {} of workflow: {}",
                    endTime - startTime,
                    taskId,
                    workflowId);
            Monitors.recordESIndexTime("update_task", docType, endTime - startTime);
            Monitors.recordWorkerQueueSize(
                    "indexQueue", ((ThreadPoolExecutor) executorService).getQueue().size());
        } catch (Exception e) {
            LOGGER.error("Failed to update task: {} of workflow: {}", taskId, workflowId, e);
            Monitors.error(className, "update");
        }
    }

    @Override
    public CompletableFuture<Void> asyncUpdateTask(
            String workflowId, String taskId, String[] keys, Object[] values) {
        return CompletableFuture.runAsync(
                () -> updateTask(workflowId, taskId, keys, values), executorService);
    }

    /**
     * Reads a single field from a workflow document; returns {@code null} when the document
     * or field is missing, or when the GET request fails.
     */
    @Override
    public String get(String workflowInstanceId, String fieldToGet) {

        String docType = StringUtils.isBlank(docTypeOverride) ? WORKFLOW_DOC_TYPE : docTypeOverride;
        GetRequest request = new GetRequest(workflowIndexName, docType, workflowInstanceId);

        GetResponse response;
        try {
            response = elasticSearchClient.get(request);
        } catch (IOException e) {
            LOGGER.error(
                    "Unable to get Workflow: {} from ElasticSearch index: {}",
                    workflowInstanceId,
                    workflowIndexName,
                    e);
            return null;
        }

        if (response.isExists()) {
            Map<String, Object> sourceAsMap = response.getSourceAsMap();
            if (sourceAsMap.get(fieldToGet) != null) {
                return sourceAsMap.get(fieldToGet).toString();
            }
        }

        LOGGER.debug(
                "Unable to find Workflow: {} in ElasticSearch index: {}.",
                workflowInstanceId,
                workflowIndexName);
        return null;
    }

    /** Parses the structured + free-text query expressions and delegates to searchObjects. */
    private <T> SearchResult<T> searchObjectsViaExpression(
            String structuredQuery,
            int start,
            int size,
            List<String> sortOptions,
            String freeTextQuery,
            String docType,
            boolean idOnly,
            Class<T> clazz)
            throws ParserException, IOException {
        QueryBuilder queryBuilder = boolQueryBuilder(structuredQuery, freeTextQuery);
        return searchObjects(
                getIndexName(docType), queryBuilder, start, size, sortOptions, docType, idOnly,
                clazz);
    }

    /** Convenience wrapper around searchObjects that returns only document ids. */
    private SearchResult<String> searchObjectIds(
            String indexName, QueryBuilder queryBuilder, int start, int size, String docType)
            throws IOException {
        return searchObjects(
                indexName, queryBuilder, start, size, null, docType, true, String.class);
    }

    /**
     * Tries to find objects for a given query in an index.
     *
     * @param indexName The name of the index.
     * @param queryBuilder The query to use for searching.
     * @param start The start to use.
     * @param size The total return size.
     * @param sortOptions A list of string options to sort in the form VALUE:ORDER; where ORDER is
     *     optional and can be either ASC OR DESC.
     * @param docType The document type to searchObjectIdsViaExpression for.
     * @return The SearchResults which includes the count and objects that were found.
     * @throws IOException If we cannot communicate with ES.
     */
    private <T> SearchResult<T> searchObjects(
            String indexName,
            QueryBuilder queryBuilder,
            int start,
            int size,
            List<String> sortOptions,
            String docType,
            boolean idOnly,
            Class<T> clazz)
            throws IOException {
        SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
        searchSourceBuilder.query(queryBuilder);
        searchSourceBuilder.from(start);
        searchSourceBuilder.size(size);
        if (idOnly) {
            // Skip fetching _source entirely when only ids are needed.
            searchSourceBuilder.fetchSource(false);
        }

        if (sortOptions != null && !sortOptions.isEmpty()) {

            for (String sortOption : sortOptions) {
                SortOrder order = SortOrder.ASC;
                String field = sortOption;
                int index = sortOption.indexOf(":");
                if (index > 0) {
                    field = sortOption.substring(0, index);
                    order = SortOrder.valueOf(sortOption.substring(index + 1));
                }
                searchSourceBuilder.sort(new FieldSortBuilder(field).order(order));
            }
        }

        // Generate the actual request to send to ES.
        docType = StringUtils.isBlank(docTypeOverride) ? docType : docTypeOverride;
        SearchRequest searchRequest = new SearchRequest(indexName);
        searchRequest.types(docType);
        searchRequest.source(searchSourceBuilder);

        SearchResponse response = elasticSearchClient.search(searchRequest);
        return mapSearchResult(response, idOnly, clazz);
    }

    /**
     * Maps a search response to a {@link SearchResult}: hit ids when {@code idOnly}, otherwise
     * each hit's source deserialized to {@code clazz} (failed hits become {@code null} entries).
     */
    private <T> SearchResult<T> mapSearchResult(
            SearchResponse response, boolean idOnly, Class<T> clazz) {
        SearchHits searchHits = response.getHits();
        long count = searchHits.getTotalHits();
        List<T> result;
        if (idOnly) {
            result =
                    Arrays.stream(searchHits.getHits())
                            .map(hit -> clazz.cast(hit.getId()))
                            .collect(Collectors.toList());
        } else {
            result =
                    Arrays.stream(searchHits.getHits())
                            .map(
                                    hit -> {
                                        try {
                                            return objectMapper.readValue(
                                                    hit.getSourceAsString(), clazz);
                                        } catch (JsonProcessingException e) {
                                            LOGGER.error(
                                                    "Failed to de-serialize elasticsearch from source: {}",
                                                    hit.getSourceAsString(),
                                                    e);
                                        }
                                        return null;
                                    })
                            .collect(Collectors.toList());
        }
        return new SearchResult<>(count, result);
    }

    /**
     * Finds up to 1000 workflows that ended within the one-day window at the TTL boundary,
     * are in a terminal status, and have not yet been marked {@code archived}.
     */
    @Override
    public List<String> searchArchivableWorkflows(String indexName, long archiveTtlDays) {
        QueryBuilder q =
                QueryBuilders.boolQuery()
                        .must(
                                QueryBuilders.rangeQuery("endTime")
                                        .lt(LocalDate.now().minusDays(archiveTtlDays).toString())
                                        .gte(
                                                LocalDate.now()
                                                        .minusDays(archiveTtlDays)
                                                        .minusDays(1)
                                                        .toString()))
                        .should(QueryBuilders.termQuery("status", "COMPLETED"))
                        .should(QueryBuilders.termQuery("status", "FAILED"))
                        .should(QueryBuilders.termQuery("status", "TIMED_OUT"))
                        .should(QueryBuilders.termQuery("status", "TERMINATED"))
                        .mustNot(QueryBuilders.existsQuery("archived"))
                        .minimumShouldMatch(1);
        SearchResult<String> workflowIds;
        try {
            workflowIds = searchObjectIds(indexName, q, 0, 1000, WORKFLOW_DOC_TYPE);
        } catch (IOException e) {
            LOGGER.error("Unable to communicate with ES to find archivable workflows", e);
            return Collections.emptyList();
        }

        return workflowIds.getResults();
    }

    /** Counts workflows matching the given structured and free-text queries. */
    @Override
    public long getWorkflowCount(String query, String freeText) {
        try {
            return getObjectCounts(query, freeText, WORKFLOW_DOC_TYPE);
        } catch (Exception e) {
            throw new TransientException(e.getMessage(), e);
        }
    }

    /** Runs a count request for the parsed query against the doc type's index. */
    private long getObjectCounts(String structuredQuery, String freeTextQuery, String docType)
            throws ParserException, IOException {
        QueryBuilder queryBuilder = boolQueryBuilder(structuredQuery, freeTextQuery);

        SearchSourceBuilder sourceBuilder = new SearchSourceBuilder();
        sourceBuilder.query(queryBuilder);

        String indexName = getIndexName(docType);
        CountRequest countRequest = new CountRequest(new String[] {indexName}, sourceBuilder);

        CountResponse countResponse =
                elasticSearchClient.count(countRequest, RequestOptions.DEFAULT);
        return countResponse.getCount();
    }

    /** Buffers a document with an ES-generated id; see the 4-arg overload. */
    private void indexObject(final String index, final String docType, final Object doc) {
        indexObject(index, docType, null, doc);
    }

    /**
     * Serializes {@code doc} and appends it to the per-doc-type bulk buffer; triggers an
     * immediate flush when the buffer reaches {@code indexBatchSize} actions.
     */
    private void indexObject(
            final String index, final String docType, final String docId, final Object doc) {

        byte[] docBytes;
        try {
            docBytes = objectMapper.writeValueAsBytes(doc);
        } catch (JsonProcessingException e) {
            LOGGER.error("Failed to convert {} '{}' to byte string", docType, docId);
            return;
        }

        IndexRequest request = new IndexRequest(index, docType, docId);
        request.source(docBytes, XContentType.JSON);

        if (bulkRequests.get(docType) == null) {
            bulkRequests.put(
                    docType, new BulkRequests(System.currentTimeMillis(), new BulkRequest()));
        }

        bulkRequests.get(docType).getBulkRequest().add(request);
        if (bulkRequests.get(docType).getBulkRequest().numberOfActions() >= this.indexBatchSize) {
            indexBulkRequest(docType);
        }
    }

    /**
     * Sends the buffered bulk request for {@code docType} (if non-empty) and resets the
     * buffer. Method-level and per-buffer synchronization guard against concurrent flushes.
     */
    private synchronized void indexBulkRequest(String docType) {
        if (bulkRequests.get(docType).getBulkRequest() != null
                && bulkRequests.get(docType).getBulkRequest().numberOfActions() > 0) {
            synchronized (bulkRequests.get(docType).getBulkRequest()) {
                indexWithRetry(
                        bulkRequests.get(docType).getBulkRequest().get(),
                        "Bulk Indexing " + docType,
                        docType);
                bulkRequests.put(
                        docType, new BulkRequests(System.currentTimeMillis(), new BulkRequest()));
            }
        }
    }

    /**
     * Performs an index operation with a retry.
     *
     * @param request The index request that we want to perform.
     * @param operationDescription The type of operation that we are performing.
     */
    private void indexWithRetry(
            final BulkRequest request, final String operationDescription, String docType) {
        try {
            long startTime = Instant.now().toEpochMilli();
            retryTemplate.execute(
                    context -> elasticSearchClient.bulk(request, RequestOptions.DEFAULT));
            long endTime = Instant.now().toEpochMilli();
            LOGGER.debug(
                    "Time taken {} for indexing object of type: {}", endTime - startTime, docType);
            Monitors.recordESIndexTime("index_object", docType, endTime - startTime);
            Monitors.recordWorkerQueueSize(
                    "indexQueue", ((ThreadPoolExecutor) executorService).getQueue().size());
            Monitors.recordWorkerQueueSize(
                    "logQueue", ((ThreadPoolExecutor) logExecutorService).getQueue().size());
        } catch (Exception e) {
            Monitors.error(className, "index");
            LOGGER.error("Failed to index {} for request type: {}", request, docType, e);
        }
    }

    /**
     * Flush the buffers if bulk requests have not been indexed for the past {@link
     * ElasticSearchProperties#getAsyncBufferFlushTimeout()} seconds. This is to prevent data loss
     * in case the instance is terminated, while the buffer still holds documents to be indexed.
     */
    private void flushBulkRequests() {
        bulkRequests.entrySet().stream()
                .filter(
                        entry ->
                                (System.currentTimeMillis() - entry.getValue().getLastFlushTime())
                                        >= asyncBufferFlushTimeout)
                .filter(
                        entry ->
                                entry.getValue().getBulkRequest() != null
                                        && entry.getValue().getBulkRequest().numberOfActions() > 0)
                .forEach(
                        entry -> {
                            LOGGER.debug(
                                    "Flushing bulk request buffer for type {}, size: {}",
                                    entry.getKey(),
                                    entry.getValue().getBulkRequest().numberOfActions());
                            indexBulkRequest(entry.getKey());
                        });
    }

    /**
     * A bulk-request buffer paired with the timestamp of its creation (i.e. the last flush),
     * used by {@link #flushBulkRequests()} to detect stale buffers.
     */
    private static class BulkRequests {

        private final long lastFlushTime;
        private final BulkRequestWrapper bulkRequest;

        long getLastFlushTime() {
            return lastFlushTime;
        }

        BulkRequestWrapper getBulkRequest() {
            return bulkRequest;
        }

        BulkRequests(long lastFlushTime, BulkRequest bulkRequest) {
            this.lastFlushTime = lastFlushTime;
            this.bulkRequest = new BulkRequestWrapper(bulkRequest);
        }
    }
}
6,949
0
Create_ds/conductor/es6-persistence/src/main/java/com/netflix/conductor/es6/dao
Create_ds/conductor/es6-persistence/src/main/java/com/netflix/conductor/es6/dao/index/BulkRequestBuilderWrapper.java
/*
 * Copyright 2020 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.es6.dao.index;

import java.util.Objects;

import org.elasticsearch.action.ActionFuture;
import org.elasticsearch.action.bulk.BulkRequestBuilder;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.update.UpdateRequest;

import org.springframework.lang.NonNull;

/**
 * Thread-safe wrapper for {@link BulkRequestBuilder}.
 *
 * <p>All operations synchronize on the wrapped builder instance itself, so concurrent adds,
 * size checks, and execution are mutually exclusive.
 */
public class BulkRequestBuilderWrapper {
    // The wrapped builder doubles as the lock object for all synchronized sections below.
    private final BulkRequestBuilder bulkRequestBuilder;

    /**
     * @param bulkRequestBuilder the builder to wrap; must not be null
     * @throws NullPointerException if {@code bulkRequestBuilder} is null
     */
    public BulkRequestBuilderWrapper(@NonNull BulkRequestBuilder bulkRequestBuilder) {
        this.bulkRequestBuilder = Objects.requireNonNull(bulkRequestBuilder);
    }

    /** Adds an update request to the bulk builder under the builder's lock. */
    public void add(@NonNull UpdateRequest req) {
        synchronized (bulkRequestBuilder) {
            bulkRequestBuilder.add(Objects.requireNonNull(req));
        }
    }

    /** Adds an index request to the bulk builder under the builder's lock. */
    public void add(@NonNull IndexRequest req) {
        synchronized (bulkRequestBuilder) {
            bulkRequestBuilder.add(Objects.requireNonNull(req));
        }
    }

    /** Returns the number of actions currently queued in the bulk builder. */
    public int numberOfActions() {
        synchronized (bulkRequestBuilder) {
            return bulkRequestBuilder.numberOfActions();
        }
    }

    /**
     * Starts executing the bulk request.
     *
     * <p>Note: only the submission happens under the lock; the returned future completes
     * asynchronously outside of it.
     */
    public ActionFuture<BulkResponse> execute() {
        synchronized (bulkRequestBuilder) {
            return bulkRequestBuilder.execute();
        }
    }
}
6,950
0
Create_ds/conductor/es6-persistence/src/main/java/com/netflix/conductor/es6/dao
Create_ds/conductor/es6-persistence/src/main/java/com/netflix/conductor/es6/dao/index/ElasticSearchBaseDAO.java
/*
 * Copyright 2020 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.es6.dao.index;

import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.util.Objects;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.QueryStringQueryBuilder;

import com.netflix.conductor.dao.IndexDAO;
import com.netflix.conductor.es6.dao.query.parser.Expression;
import com.netflix.conductor.es6.dao.query.parser.internal.ParserException;

/** Shared helpers for the ES6 index DAOs: template loading, query building, index naming. */
abstract class ElasticSearchBaseDAO implements IndexDAO {

    // Prefix applied to all index names and index templates; set by subclasses.
    String indexPrefix;

    /**
     * Loads an index-template JSON resource from the classpath and rewrites its {@code template}
     * pattern to include {@link #indexPrefix}.
     *
     * @param path classpath-absolute resource path, e.g. {@code "/template_task_log.json"}
     * @return the template JSON with the index prefix applied
     * @throws IOException if the resource cannot be read
     */
    String loadTypeMappingSource(String path) throws IOException {
        // Resolve the resource explicitly so a missing file fails with a clear message
        // instead of an anonymous NullPointerException inside IOUtils.
        InputStream stream = ElasticSearchBaseDAO.class.getResourceAsStream(path);
        Objects.requireNonNull(stream, "No type mapping resource found at " + path);
        // Read as UTF-8 explicitly; the single-argument IOUtils.toString overload is
        // deprecated and depends on the platform default charset.
        return applyIndexPrefixToTemplate(IOUtils.toString(stream, StandardCharsets.UTF_8));
    }

    /**
     * Rewrites every {@code "template": "*name*"} occurrence in {@code text} so the wildcard body
     * becomes {@code <indexPrefix>_name}.
     */
    private String applyIndexPrefixToTemplate(String text) {
        String pattern = "\"template\": \"\\*(.*)\\*\"";
        Pattern r = Pattern.compile(pattern);
        Matcher m = r.matcher(text);
        StringBuilder sb = new StringBuilder();
        while (m.find()) {
            // Quote the replacement strings so literal '$' or '\' in the prefix or template
            // name cannot be misinterpreted as regex group references.
            String prefixed =
                    m.group(0)
                            .replaceFirst(
                                    Pattern.quote(m.group(1)),
                                    Matcher.quoteReplacement(indexPrefix + "_" + m.group(1)));
            m.appendReplacement(sb, Matcher.quoteReplacement(prefixed));
        }
        m.appendTail(sb);
        return sb.toString();
    }

    /**
     * Builds a combined query from a structured query expression and a free-text query string.
     *
     * @param expression structured query (e.g. {@code "status='RUNNING'"}); empty matches all
     * @param queryString Lucene query-string syntax free-text query
     * @return a bool query requiring both the structured filter and the free-text match
     * @throws ParserException if {@code expression} cannot be parsed
     */
    BoolQueryBuilder boolQueryBuilder(String expression, String queryString)
            throws ParserException {
        QueryBuilder queryBuilder = QueryBuilders.matchAllQuery();
        if (StringUtils.isNotEmpty(expression)) {
            Expression exp = Expression.fromString(expression);
            queryBuilder = exp.getFilterBuilder();
        }
        BoolQueryBuilder filterQuery = QueryBuilders.boolQuery().must(queryBuilder);
        QueryStringQueryBuilder stringQuery = QueryBuilders.queryStringQuery(queryString);
        return QueryBuilders.boolQuery().must(stringQuery).must(filterQuery);
    }

    /** Returns the physical index name for a logical document type, e.g. {@code prefix_workflow}. */
    protected String getIndexName(String documentType) {
        return indexPrefix + "_" + documentType;
    }
}
6,951
0
Create_ds/conductor/es6-persistence/src/main/java/com/netflix/conductor/es6/dao
Create_ds/conductor/es6-persistence/src/main/java/com/netflix/conductor/es6/dao/index/ElasticSearchDAOV6.java
/* * Copyright 2022 Netflix, Inc. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.es6.dao.index; import java.io.IOException; import java.text.SimpleDateFormat; import java.time.Instant; import java.time.LocalDate; import java.util.*; import java.util.concurrent.*; import java.util.stream.Collectors; import java.util.stream.IntStream; import javax.annotation.PostConstruct; import javax.annotation.PreDestroy; import org.apache.commons.lang3.StringUtils; import org.elasticsearch.ResourceAlreadyExistsException; import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesResponse; import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.delete.DeleteResponse; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.client.Client; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import 
org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import org.elasticsearch.search.sort.SortBuilders; import org.elasticsearch.search.sort.SortOrder; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.retry.support.RetryTemplate; import com.netflix.conductor.annotations.Trace; import com.netflix.conductor.common.metadata.events.EventExecution; import com.netflix.conductor.common.metadata.tasks.TaskExecLog; import com.netflix.conductor.common.run.SearchResult; import com.netflix.conductor.common.run.TaskSummary; import com.netflix.conductor.common.run.WorkflowSummary; import com.netflix.conductor.core.events.queue.Message; import com.netflix.conductor.core.exception.TransientException; import com.netflix.conductor.dao.IndexDAO; import com.netflix.conductor.es6.config.ElasticSearchProperties; import com.netflix.conductor.es6.dao.query.parser.internal.ParserException; import com.netflix.conductor.metrics.Monitors; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.type.MapType; import com.fasterxml.jackson.databind.type.TypeFactory; @Trace public class ElasticSearchDAOV6 extends ElasticSearchBaseDAO implements IndexDAO { private static final Logger LOGGER = LoggerFactory.getLogger(ElasticSearchDAOV6.class); private static final String WORKFLOW_DOC_TYPE = "workflow"; private static final String TASK_DOC_TYPE = "task"; private static final String LOG_DOC_TYPE = "task_log"; private static final String EVENT_DOC_TYPE = "event"; private static final String MSG_DOC_TYPE = "message"; 
private static final int CORE_POOL_SIZE = 6; private static final long KEEP_ALIVE_TIME = 1L; private static final int UPDATE_REQUEST_RETRY_COUNT = 5; private static final String CLASS_NAME = ElasticSearchDAOV6.class.getSimpleName(); private final String workflowIndexName; private final String taskIndexName; private final String eventIndexPrefix; private String eventIndexName; private final String messageIndexPrefix; private String messageIndexName; private String logIndexName; private final String logIndexPrefix; private final String docTypeOverride; private final ObjectMapper objectMapper; private final Client elasticSearchClient; private static final TimeZone GMT = TimeZone.getTimeZone("GMT"); private static final SimpleDateFormat SIMPLE_DATE_FORMAT = new SimpleDateFormat("yyyyMMWW"); private final ExecutorService executorService; private final ExecutorService logExecutorService; private final ConcurrentHashMap<String, BulkRequests> bulkRequests; private final int indexBatchSize; private final long asyncBufferFlushTimeout; private final ElasticSearchProperties properties; private final RetryTemplate retryTemplate; static { SIMPLE_DATE_FORMAT.setTimeZone(GMT); } public ElasticSearchDAOV6( Client elasticSearchClient, RetryTemplate retryTemplate, ElasticSearchProperties properties, ObjectMapper objectMapper) { this.objectMapper = objectMapper; this.elasticSearchClient = elasticSearchClient; this.indexPrefix = properties.getIndexPrefix(); this.workflowIndexName = getIndexName(WORKFLOW_DOC_TYPE); this.taskIndexName = getIndexName(TASK_DOC_TYPE); this.logIndexPrefix = this.indexPrefix + "_" + LOG_DOC_TYPE; this.messageIndexPrefix = this.indexPrefix + "_" + MSG_DOC_TYPE; this.eventIndexPrefix = this.indexPrefix + "_" + EVENT_DOC_TYPE; int workerQueueSize = properties.getAsyncWorkerQueueSize(); int maximumPoolSize = properties.getAsyncMaxPoolSize(); this.bulkRequests = new ConcurrentHashMap<>(); this.indexBatchSize = properties.getIndexBatchSize(); 
this.asyncBufferFlushTimeout = properties.getAsyncBufferFlushTimeout().toMillis(); this.properties = properties; if (!properties.isAutoIndexManagementEnabled() && StringUtils.isNotBlank(properties.getDocumentTypeOverride())) { docTypeOverride = properties.getDocumentTypeOverride(); } else { docTypeOverride = ""; } this.executorService = new ThreadPoolExecutor( CORE_POOL_SIZE, maximumPoolSize, KEEP_ALIVE_TIME, TimeUnit.MINUTES, new LinkedBlockingQueue<>(workerQueueSize), (runnable, executor) -> { LOGGER.warn( "Request {} to async dao discarded in executor {}", runnable, executor); Monitors.recordDiscardedIndexingCount("indexQueue"); }); int corePoolSize = 1; maximumPoolSize = 2; long keepAliveTime = 30L; this.logExecutorService = new ThreadPoolExecutor( corePoolSize, maximumPoolSize, keepAliveTime, TimeUnit.SECONDS, new LinkedBlockingQueue<>(workerQueueSize), (runnable, executor) -> { LOGGER.warn( "Request {} to async log dao discarded in executor {}", runnable, executor); Monitors.recordDiscardedIndexingCount("logQueue"); }); Executors.newSingleThreadScheduledExecutor() .scheduleAtFixedRate(this::flushBulkRequests, 60, 30, TimeUnit.SECONDS); this.retryTemplate = retryTemplate; } @PreDestroy private void shutdown() { LOGGER.info("Starting graceful shutdown of executor service"); shutdownExecutorService(logExecutorService); shutdownExecutorService(executorService); } private void shutdownExecutorService(ExecutorService execService) { try { execService.shutdown(); if (execService.awaitTermination(30, TimeUnit.SECONDS)) { LOGGER.debug("tasks completed, shutting down"); } else { LOGGER.warn("Forcing shutdown after waiting for 30 seconds"); execService.shutdownNow(); } } catch (InterruptedException ie) { LOGGER.warn( "Shutdown interrupted, invoking shutdownNow on scheduledThreadPoolExecutor for delay queue"); execService.shutdownNow(); Thread.currentThread().interrupt(); } } @Override @PostConstruct public void setup() throws Exception { waitForHealthyCluster(); if 
(properties.isAutoIndexManagementEnabled()) { createIndexesTemplates(); createWorkflowIndex(); createTaskIndex(); } } private void waitForHealthyCluster() throws Exception { elasticSearchClient .admin() .cluster() .prepareHealth() .setWaitForGreenStatus() .execute() .get(); } /** Initializes the indexes templates task_log, message and event, and mappings. */ private void createIndexesTemplates() { try { initIndexesTemplates(); updateIndexesNames(); Executors.newScheduledThreadPool(1) .scheduleAtFixedRate(this::updateIndexesNames, 0, 1, TimeUnit.HOURS); } catch (Exception e) { LOGGER.error("Error creating index templates", e); } } private void initIndexesTemplates() { initIndexTemplate(LOG_DOC_TYPE); initIndexTemplate(EVENT_DOC_TYPE); initIndexTemplate(MSG_DOC_TYPE); } private void initIndexTemplate(String type) { String template = "template_" + type; GetIndexTemplatesResponse result = elasticSearchClient .admin() .indices() .prepareGetTemplates(template) .execute() .actionGet(); if (result.getIndexTemplates().isEmpty()) { LOGGER.info("Creating the index template '{}'", template); try { String templateSource = loadTypeMappingSource("/" + template + ".json"); elasticSearchClient .admin() .indices() .preparePutTemplate(template) .setSource(templateSource.getBytes(), XContentType.JSON) .execute() .actionGet(); } catch (Exception e) { LOGGER.error("Failed to init " + template, e); } } } private void updateIndexesNames() { logIndexName = updateIndexName(LOG_DOC_TYPE); eventIndexName = updateIndexName(EVENT_DOC_TYPE); messageIndexName = updateIndexName(MSG_DOC_TYPE); } private String updateIndexName(String type) { String indexName = this.indexPrefix + "_" + type + "_" + SIMPLE_DATE_FORMAT.format(new Date()); createIndex(indexName); return indexName; } private void createWorkflowIndex() { createIndex(workflowIndexName); addTypeMapping(workflowIndexName, WORKFLOW_DOC_TYPE, "/mappings_docType_workflow.json"); } private void createTaskIndex() { createIndex(taskIndexName); 
addTypeMapping(taskIndexName, TASK_DOC_TYPE, "/mappings_docType_task.json"); } private void createIndex(String indexName) { try { elasticSearchClient .admin() .indices() .prepareGetIndex() .addIndices(indexName) .execute() .actionGet(); } catch (IndexNotFoundException infe) { try { CreateIndexRequest createIndexRequest = new CreateIndexRequest(indexName); createIndexRequest.settings( Settings.builder() .put("index.number_of_shards", properties.getIndexShardCount()) .put( "index.number_of_replicas", properties.getIndexReplicasCount())); elasticSearchClient.admin().indices().create(createIndexRequest).actionGet(); } catch (ResourceAlreadyExistsException done) { LOGGER.error("Failed to update log index name: {}", indexName, done); } } } private void addTypeMapping(String indexName, String type, String sourcePath) { GetMappingsResponse getMappingsResponse = elasticSearchClient .admin() .indices() .prepareGetMappings(indexName) .addTypes(type) .execute() .actionGet(); if (getMappingsResponse.mappings().isEmpty()) { LOGGER.info("Adding the {} type mappings", indexName); try { String source = loadTypeMappingSource(sourcePath); elasticSearchClient .admin() .indices() .preparePutMapping(indexName) .setType(type) .setSource(source, XContentType.JSON) .execute() .actionGet(); } catch (Exception e) { LOGGER.error("Failed to init index " + indexName + " mappings", e); } } } @Override public void indexWorkflow(WorkflowSummary workflow) { try { long startTime = Instant.now().toEpochMilli(); String id = workflow.getWorkflowId(); byte[] doc = objectMapper.writeValueAsBytes(workflow); String docType = StringUtils.isBlank(docTypeOverride) ? 
WORKFLOW_DOC_TYPE : docTypeOverride; UpdateRequest req = buildUpdateRequest(id, doc, workflowIndexName, docType); elasticSearchClient.update(req).actionGet(); long endTime = Instant.now().toEpochMilli(); LOGGER.debug( "Time taken {} for indexing workflow: {}", endTime - startTime, workflow.getWorkflowId()); Monitors.recordESIndexTime("index_workflow", WORKFLOW_DOC_TYPE, endTime - startTime); Monitors.recordWorkerQueueSize( "indexQueue", ((ThreadPoolExecutor) executorService).getQueue().size()); } catch (Exception e) { Monitors.error(CLASS_NAME, "indexWorkflow"); LOGGER.error("Failed to index workflow: {}", workflow.getWorkflowId(), e); } } @Override public CompletableFuture<Void> asyncIndexWorkflow(WorkflowSummary workflow) { return CompletableFuture.runAsync(() -> indexWorkflow(workflow), executorService); } @Override public void indexTask(TaskSummary task) { try { long startTime = Instant.now().toEpochMilli(); String id = task.getTaskId(); byte[] doc = objectMapper.writeValueAsBytes(task); String docType = StringUtils.isBlank(docTypeOverride) ? 
TASK_DOC_TYPE : docTypeOverride; UpdateRequest req = new UpdateRequest(taskIndexName, docType, id); req.doc(doc, XContentType.JSON); req.upsert(doc, XContentType.JSON); indexObject(req, TASK_DOC_TYPE); long endTime = Instant.now().toEpochMilli(); LOGGER.debug( "Time taken {} for indexing task:{} in workflow: {}", endTime - startTime, task.getTaskId(), task.getWorkflowId()); Monitors.recordESIndexTime("index_task", TASK_DOC_TYPE, endTime - startTime); Monitors.recordWorkerQueueSize( "indexQueue", ((ThreadPoolExecutor) executorService).getQueue().size()); } catch (Exception e) { LOGGER.error("Failed to index task: {}", task.getTaskId(), e); } } @Override public CompletableFuture<Void> asyncIndexTask(TaskSummary task) { return CompletableFuture.runAsync(() -> indexTask(task), executorService); } private void indexObject(UpdateRequest req, String docType) { if (bulkRequests.get(docType) == null) { bulkRequests.put( docType, new BulkRequests( System.currentTimeMillis(), elasticSearchClient.prepareBulk())); } bulkRequests.get(docType).getBulkRequestBuilder().add(req); if (bulkRequests.get(docType).getBulkRequestBuilder().numberOfActions() >= this.indexBatchSize) { indexBulkRequest(docType); } } private synchronized void indexBulkRequest(String docType) { if (bulkRequests.get(docType).getBulkRequestBuilder() != null && bulkRequests.get(docType).getBulkRequestBuilder().numberOfActions() > 0) { updateWithRetry(bulkRequests.get(docType).getBulkRequestBuilder(), docType); bulkRequests.put( docType, new BulkRequests( System.currentTimeMillis(), elasticSearchClient.prepareBulk())); } } @Override public void addTaskExecutionLogs(List<TaskExecLog> taskExecLogs) { if (taskExecLogs.isEmpty()) { return; } try { long startTime = Instant.now().toEpochMilli(); BulkRequestBuilderWrapper bulkRequestBuilder = new BulkRequestBuilderWrapper(elasticSearchClient.prepareBulk()); for (TaskExecLog log : taskExecLogs) { String docType = StringUtils.isBlank(docTypeOverride) ? 
LOG_DOC_TYPE : docTypeOverride; IndexRequest request = new IndexRequest(logIndexName, docType); request.source(objectMapper.writeValueAsBytes(log), XContentType.JSON); bulkRequestBuilder.add(request); } bulkRequestBuilder.execute().actionGet(5, TimeUnit.SECONDS); long endTime = Instant.now().toEpochMilli(); LOGGER.debug("Time taken {} for indexing taskExecutionLogs", endTime - startTime); Monitors.recordESIndexTime( "index_task_execution_logs", LOG_DOC_TYPE, endTime - startTime); Monitors.recordWorkerQueueSize( "logQueue", ((ThreadPoolExecutor) logExecutorService).getQueue().size()); } catch (Exception e) { List<String> taskIds = taskExecLogs.stream().map(TaskExecLog::getTaskId).collect(Collectors.toList()); LOGGER.error("Failed to index task execution logs for tasks: {}", taskIds, e); } } @Override public CompletableFuture<Void> asyncAddTaskExecutionLogs(List<TaskExecLog> logs) { return CompletableFuture.runAsync(() -> addTaskExecutionLogs(logs), logExecutorService); } @Override public List<TaskExecLog> getTaskExecutionLogs(String taskId) { try { BoolQueryBuilder query = boolQueryBuilder("taskId='" + taskId + "'", "*"); String docType = StringUtils.isBlank(docTypeOverride) ? 
LOG_DOC_TYPE : docTypeOverride; final SearchRequestBuilder srb = elasticSearchClient .prepareSearch(logIndexPrefix + "*") .setQuery(query) .setTypes(docType) .setSize(properties.getTaskLogResultLimit()) .addSort(SortBuilders.fieldSort("createdTime").order(SortOrder.ASC)); return mapTaskExecLogsResponse(srb.execute().actionGet()); } catch (Exception e) { LOGGER.error("Failed to get task execution logs for task: {}", taskId, e); } return null; } private List<TaskExecLog> mapTaskExecLogsResponse(SearchResponse response) throws IOException { SearchHit[] hits = response.getHits().getHits(); List<TaskExecLog> logs = new ArrayList<>(hits.length); for (SearchHit hit : hits) { String source = hit.getSourceAsString(); TaskExecLog tel = objectMapper.readValue(source, TaskExecLog.class); logs.add(tel); } return logs; } @Override public void addMessage(String queue, Message message) { try { long startTime = Instant.now().toEpochMilli(); Map<String, Object> doc = new HashMap<>(); doc.put("messageId", message.getId()); doc.put("payload", message.getPayload()); doc.put("queue", queue); doc.put("created", System.currentTimeMillis()); String docType = StringUtils.isBlank(docTypeOverride) ? 
MSG_DOC_TYPE : docTypeOverride; UpdateRequest req = new UpdateRequest(messageIndexName, docType, message.getId()); req.doc(doc, XContentType.JSON); req.upsert(doc, XContentType.JSON); indexObject(req, MSG_DOC_TYPE); long endTime = Instant.now().toEpochMilli(); LOGGER.debug( "Time taken {} for indexing message: {}", endTime - startTime, message.getId()); Monitors.recordESIndexTime("add_message", MSG_DOC_TYPE, endTime - startTime); } catch (Exception e) { LOGGER.error("Failed to index message: {}", message.getId(), e); } } @Override public CompletableFuture<Void> asyncAddMessage(String queue, Message message) { return CompletableFuture.runAsync(() -> addMessage(queue, message), executorService); } @Override public List<Message> getMessages(String queue) { try { BoolQueryBuilder fq = boolQueryBuilder("queue='" + queue + "'", "*"); String docType = StringUtils.isBlank(docTypeOverride) ? MSG_DOC_TYPE : docTypeOverride; final SearchRequestBuilder srb = elasticSearchClient .prepareSearch(messageIndexPrefix + "*") .setQuery(fq) .setTypes(docType) .addSort(SortBuilders.fieldSort("created").order(SortOrder.ASC)); return mapGetMessagesResponse(srb.execute().actionGet()); } catch (Exception e) { LOGGER.error("Failed to get messages for queue: {}", queue, e); } return null; } private List<Message> mapGetMessagesResponse(SearchResponse response) throws IOException { SearchHit[] hits = response.getHits().getHits(); TypeFactory factory = TypeFactory.defaultInstance(); MapType type = factory.constructMapType(HashMap.class, String.class, String.class); List<Message> messages = new ArrayList<>(hits.length); for (SearchHit hit : hits) { String source = hit.getSourceAsString(); Map<String, String> mapSource = objectMapper.readValue(source, type); Message msg = new Message(mapSource.get("messageId"), mapSource.get("payload"), null); messages.add(msg); } return messages; } @Override public void addEventExecution(EventExecution eventExecution) { try { long startTime = 
Instant.now().toEpochMilli(); byte[] doc = objectMapper.writeValueAsBytes(eventExecution); String id = eventExecution.getName() + "." + eventExecution.getEvent() + "." + eventExecution.getMessageId() + "." + eventExecution.getId(); String docType = StringUtils.isBlank(docTypeOverride) ? EVENT_DOC_TYPE : docTypeOverride; UpdateRequest req = buildUpdateRequest(id, doc, eventIndexName, docType); indexObject(req, EVENT_DOC_TYPE); long endTime = Instant.now().toEpochMilli(); LOGGER.debug( "Time taken {} for indexing event execution: {}", endTime - startTime, eventExecution.getId()); Monitors.recordESIndexTime("add_event_execution", EVENT_DOC_TYPE, endTime - startTime); Monitors.recordWorkerQueueSize( "logQueue", ((ThreadPoolExecutor) logExecutorService).getQueue().size()); } catch (Exception e) { LOGGER.error("Failed to index event execution: {}", eventExecution.getId(), e); } } @Override public CompletableFuture<Void> asyncAddEventExecution(EventExecution eventExecution) { return CompletableFuture.runAsync( () -> addEventExecution(eventExecution), logExecutorService); } @Override public List<EventExecution> getEventExecutions(String event) { try { BoolQueryBuilder fq = boolQueryBuilder("event='" + event + "'", "*"); String docType = StringUtils.isBlank(docTypeOverride) ? 
EVENT_DOC_TYPE : docTypeOverride; final SearchRequestBuilder srb = elasticSearchClient .prepareSearch(eventIndexPrefix + "*") .setQuery(fq) .setTypes(docType) .addSort(SortBuilders.fieldSort("created").order(SortOrder.ASC)); return mapEventExecutionsResponse(srb.execute().actionGet()); } catch (Exception e) { LOGGER.error("Failed to get executions for event: {}", event, e); } return null; } private List<EventExecution> mapEventExecutionsResponse(SearchResponse response) throws IOException { SearchHit[] hits = response.getHits().getHits(); List<EventExecution> executions = new ArrayList<>(hits.length); for (SearchHit hit : hits) { String source = hit.getSourceAsString(); EventExecution tel = objectMapper.readValue(source, EventExecution.class); executions.add(tel); } return executions; } private void updateWithRetry(BulkRequestBuilderWrapper request, String docType) { try { long startTime = Instant.now().toEpochMilli(); retryTemplate.execute(context -> request.execute().actionGet(5, TimeUnit.SECONDS)); long endTime = Instant.now().toEpochMilli(); LOGGER.debug( "Time taken {} for indexing object of type: {}", endTime - startTime, docType); Monitors.recordESIndexTime("index_object", docType, endTime - startTime); } catch (Exception e) { Monitors.error(CLASS_NAME, "index"); LOGGER.error("Failed to index {} for requests", request.numberOfActions(), e); } } @Override public SearchResult<String> searchWorkflows( String query, String freeText, int start, int count, List<String> sort) { return search(query, start, count, sort, freeText, WORKFLOW_DOC_TYPE, true, String.class); } @Override public SearchResult<WorkflowSummary> searchWorkflowSummary( String query, String freeText, int start, int count, List<String> sort) { return search( query, start, count, sort, freeText, WORKFLOW_DOC_TYPE, false, WorkflowSummary.class); } @Override public long getWorkflowCount(String query, String freeText) { return count(query, freeText, WORKFLOW_DOC_TYPE); } @Override public 
SearchResult<String> searchTasks( String query, String freeText, int start, int count, List<String> sort) { return search(query, start, count, sort, freeText, TASK_DOC_TYPE, true, String.class); } @Override public SearchResult<TaskSummary> searchTaskSummary( String query, String freeText, int start, int count, List<String> sort) { return search(query, start, count, sort, freeText, TASK_DOC_TYPE, false, TaskSummary.class); } @Override public void removeWorkflow(String workflowId) { try { long startTime = Instant.now().toEpochMilli(); DeleteRequest request = new DeleteRequest(workflowIndexName, WORKFLOW_DOC_TYPE, workflowId); DeleteResponse response = elasticSearchClient.delete(request).actionGet(); if (response.getResult() == DocWriteResponse.Result.DELETED) { LOGGER.error("Index removal failed - document not found by id: {}", workflowId); } long endTime = Instant.now().toEpochMilli(); LOGGER.debug( "Time taken {} for removing workflow: {}", endTime - startTime, workflowId); Monitors.recordESIndexTime("remove_workflow", WORKFLOW_DOC_TYPE, endTime - startTime); Monitors.recordWorkerQueueSize( "indexQueue", ((ThreadPoolExecutor) executorService).getQueue().size()); } catch (Throwable e) { LOGGER.error("Failed to remove workflow {} from index", workflowId, e); Monitors.error(CLASS_NAME, "remove"); } } @Override public CompletableFuture<Void> asyncRemoveWorkflow(String workflowId) { return CompletableFuture.runAsync(() -> removeWorkflow(workflowId), executorService); } @Override public void updateWorkflow(String workflowInstanceId, String[] keys, Object[] values) { if (keys.length != values.length) { throw new IllegalArgumentException("Number of keys and values do not match"); } long startTime = Instant.now().toEpochMilli(); UpdateRequest request = new UpdateRequest(workflowIndexName, WORKFLOW_DOC_TYPE, workflowInstanceId); Map<String, Object> source = IntStream.range(0, keys.length) .boxed() .collect(Collectors.toMap(i -> keys[i], i -> values[i])); request.doc(source); 
LOGGER.debug( "Updating workflow {} in elasticsearch index: {}", workflowInstanceId, workflowIndexName); elasticSearchClient.update(request).actionGet(); long endTime = Instant.now().toEpochMilli(); LOGGER.debug( "Time taken {} for updating workflow: {}", endTime - startTime, workflowInstanceId); Monitors.recordESIndexTime("update_workflow", WORKFLOW_DOC_TYPE, endTime - startTime); Monitors.recordWorkerQueueSize( "indexQueue", ((ThreadPoolExecutor) executorService).getQueue().size()); } @Override public CompletableFuture<Void> asyncUpdateWorkflow( String workflowInstanceId, String[] keys, Object[] values) { return CompletableFuture.runAsync( () -> updateWorkflow(workflowInstanceId, keys, values), executorService); } @Override public void removeTask(String workflowId, String taskId) { try { long startTime = Instant.now().toEpochMilli(); String docType = StringUtils.isBlank(docTypeOverride) ? TASK_DOC_TYPE : docTypeOverride; SearchResult<String> taskSearchResult = searchTasks( String.format( "(taskId='%s') AND (workflowId='%s')", taskId, workflowId), "*", 0, 1, null); if (taskSearchResult.getTotalHits() == 0) { LOGGER.error("Task: {} does not belong to workflow: {}", taskId, workflowId); Monitors.error(CLASS_NAME, "removeTask"); return; } DeleteRequest request = new DeleteRequest(taskIndexName, docType, taskId); DeleteResponse response = elasticSearchClient.delete(request).actionGet(); long endTime = Instant.now().toEpochMilli(); if (response.getResult() != DocWriteResponse.Result.DELETED) { LOGGER.error( "Index removal failed - task not found by id: {} of workflow: {}", taskId, workflowId); Monitors.error(CLASS_NAME, "removeTask"); return; } LOGGER.debug( "Time taken {} for removing task:{} of workflow: {}", endTime - startTime, taskId, workflowId); Monitors.recordESIndexTime("remove_task", docType, endTime - startTime); Monitors.recordWorkerQueueSize( "indexQueue", ((ThreadPoolExecutor) executorService).getQueue().size()); } catch (Exception e) { LOGGER.error( 
"Failed to remove task: {} of workflow: {} from index", taskId, workflowId, e); Monitors.error(CLASS_NAME, "removeTask"); } } @Override public CompletableFuture<Void> asyncRemoveTask(String workflowId, String taskId) { return CompletableFuture.runAsync(() -> removeTask(workflowId, taskId), executorService); } @Override public void updateTask(String workflowId, String taskId, String[] keys, Object[] values) { if (keys.length != values.length) { throw new IllegalArgumentException("Number of keys and values do not match"); } long startTime = Instant.now().toEpochMilli(); String docType = StringUtils.isBlank(docTypeOverride) ? TASK_DOC_TYPE : docTypeOverride; UpdateRequest request = new UpdateRequest(taskIndexName, docType, taskId); Map<String, Object> source = IntStream.range(0, keys.length) .boxed() .collect(Collectors.toMap(i -> keys[i], i -> values[i])); request.doc(source); LOGGER.debug( "Updating task: {} of workflow: {} in elasticsearch index: {}", taskId, workflowId, taskIndexName); elasticSearchClient.update(request).actionGet(); long endTime = Instant.now().toEpochMilli(); LOGGER.debug( "Time taken {} for updating task: {} of workflow: {}", endTime - startTime, taskId, workflowId); Monitors.recordESIndexTime("update_task", docType, endTime - startTime); Monitors.recordWorkerQueueSize( "indexQueue", ((ThreadPoolExecutor) executorService).getQueue().size()); } @Override public CompletableFuture<Void> asyncUpdateTask( String workflowId, String taskId, String[] keys, Object[] values) { return CompletableFuture.runAsync( () -> updateTask(workflowId, taskId, keys, values), executorService); } @Override public String get(String workflowInstanceId, String fieldToGet) { String docType = StringUtils.isBlank(docTypeOverride) ? 
WORKFLOW_DOC_TYPE : docTypeOverride; GetRequest request = new GetRequest(workflowIndexName, docType, workflowInstanceId) .fetchSourceContext( new FetchSourceContext( true, new String[] {fieldToGet}, Strings.EMPTY_ARRAY)); GetResponse response = elasticSearchClient.get(request).actionGet(); if (response.isExists()) { Map<String, Object> sourceAsMap = response.getSourceAsMap(); if (sourceAsMap.get(fieldToGet) != null) { return sourceAsMap.get(fieldToGet).toString(); } } LOGGER.debug( "Unable to find Workflow: {} in ElasticSearch index: {}.", workflowInstanceId, workflowIndexName); return null; } private long count(String structuredQuery, String freeTextQuery, String docType) { try { docType = StringUtils.isBlank(docTypeOverride) ? docType : docTypeOverride; BoolQueryBuilder fq = boolQueryBuilder(structuredQuery, freeTextQuery); // The count api has been removed from the Java api, use the search api instead and set // size to 0. final SearchRequestBuilder srb = elasticSearchClient .prepareSearch(getIndexName(docType)) .setQuery(fq) .setTypes(docType) .storedFields("_id") .setSize(0); SearchResponse response = srb.get(); return response.getHits().getTotalHits(); } catch (ParserException e) { throw new TransientException(e.getMessage(), e); } } private <T> SearchResult<T> search( String structuredQuery, int start, int size, List<String> sortOptions, String freeTextQuery, String docType, boolean idOnly, Class<T> clazz) { try { docType = StringUtils.isBlank(docTypeOverride) ? 
docType : docTypeOverride; BoolQueryBuilder fq = boolQueryBuilder(structuredQuery, freeTextQuery); final SearchRequestBuilder srb = elasticSearchClient .prepareSearch(getIndexName(docType)) .setQuery(fq) .setTypes(docType) .setFrom(start) .setSize(size); if (idOnly) { srb.storedFields("_id"); } addSortOptions(srb, sortOptions); return mapSearchResult(srb.get(), idOnly, clazz); } catch (ParserException e) { throw new TransientException(e.getMessage(), e); } } private void addSortOptions(SearchRequestBuilder srb, List<String> sortOptions) { if (sortOptions != null) { sortOptions.forEach( sortOption -> { SortOrder order = SortOrder.ASC; String field = sortOption; int indx = sortOption.indexOf(':'); // Can't be 0, need the field name at-least if (indx > 0) { field = sortOption.substring(0, indx); order = SortOrder.valueOf(sortOption.substring(indx + 1)); } srb.addSort(field, order); }); } } private <T> SearchResult<T> mapSearchResult( SearchResponse response, boolean idOnly, Class<T> clazz) { SearchHits searchHits = response.getHits(); long count = searchHits.getTotalHits(); List<T> result; if (idOnly) { result = Arrays.stream(searchHits.getHits()) .map(hit -> clazz.cast(hit.getId())) .collect(Collectors.toList()); } else { result = Arrays.stream(searchHits.getHits()) .map( hit -> { try { return objectMapper.readValue( hit.getSourceAsString(), clazz); } catch (JsonProcessingException e) { LOGGER.error( "Failed to de-serialize elasticsearch from source: {}", hit.getSourceAsString(), e); } return null; }) .collect(Collectors.toList()); } return new SearchResult<>(count, result); } @Override public List<String> searchArchivableWorkflows(String indexName, long archiveTtlDays) { QueryBuilder q = QueryBuilders.boolQuery() .must( QueryBuilders.rangeQuery("endTime") .lt(LocalDate.now().minusDays(archiveTtlDays).toString()) .gte( LocalDate.now() .minusDays(archiveTtlDays) .minusDays(1) .toString())) .should(QueryBuilders.termQuery("status", "COMPLETED")) 
.should(QueryBuilders.termQuery("status", "FAILED")) .should(QueryBuilders.termQuery("status", "TIMED_OUT")) .should(QueryBuilders.termQuery("status", "TERMINATED")) .mustNot(QueryBuilders.existsQuery("archived")) .minimumShouldMatch(1); String docType = StringUtils.isBlank(docTypeOverride) ? WORKFLOW_DOC_TYPE : docTypeOverride; SearchRequestBuilder s = elasticSearchClient .prepareSearch(indexName) .setTypes(docType) .setQuery(q) .setSize(1000); return extractSearchIds(s); } private UpdateRequest buildUpdateRequest( String id, byte[] doc, String indexName, String docType) { UpdateRequest req = new UpdateRequest(indexName, docType, id); req.doc(doc, XContentType.JSON); req.upsert(doc, XContentType.JSON); req.retryOnConflict(UPDATE_REQUEST_RETRY_COUNT); return req; } private List<String> extractSearchIds(SearchRequestBuilder s) { SearchResponse response = s.execute().actionGet(); SearchHits hits = response.getHits(); List<String> ids = new LinkedList<>(); for (SearchHit hit : hits.getHits()) { ids.add(hit.getId()); } return ids; } /** * Flush the buffers if bulk requests have not been indexed for the past {@link * ElasticSearchProperties#getAsyncBufferFlushTimeout()} seconds. This is to prevent data loss * in case the instance is terminated, while the buffer still holds documents to be indexed. 
*/ private void flushBulkRequests() { bulkRequests.entrySet().stream() .filter( entry -> (System.currentTimeMillis() - entry.getValue().getLastFlushTime()) >= asyncBufferFlushTimeout) .filter( entry -> entry.getValue().getBulkRequestBuilder() != null && entry.getValue() .getBulkRequestBuilder() .numberOfActions() > 0) .forEach( entry -> { LOGGER.debug( "Flushing bulk request buffer for type {}, size: {}", entry.getKey(), entry.getValue().getBulkRequestBuilder().numberOfActions()); indexBulkRequest(entry.getKey()); }); } private static class BulkRequests { private long lastFlushTime; private BulkRequestBuilderWrapper bulkRequestBuilder; public long getLastFlushTime() { return lastFlushTime; } public void setLastFlushTime(long lastFlushTime) { this.lastFlushTime = lastFlushTime; } public BulkRequestBuilderWrapper getBulkRequestBuilder() { return bulkRequestBuilder; } public void setBulkRequestBuilder(BulkRequestBuilder bulkRequestBuilder) { this.bulkRequestBuilder = new BulkRequestBuilderWrapper(bulkRequestBuilder); } BulkRequests(long lastFlushTime, BulkRequestBuilder bulkRequestBuilder) { this.lastFlushTime = lastFlushTime; this.bulkRequestBuilder = new BulkRequestBuilderWrapper(bulkRequestBuilder); } } }
6,952
0
Create_ds/conductor/es6-persistence/src/main/java/com/netflix/conductor/es6/dao/query
Create_ds/conductor/es6-persistence/src/main/java/com/netflix/conductor/es6/dao/query/parser/GroupedExpression.java
/*
 * Copyright 2020 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.es6.dao.query.parser;

import java.io.InputStream;

import org.elasticsearch.index.query.QueryBuilder;

import com.netflix.conductor.es6.dao.query.parser.internal.AbstractNode;
import com.netflix.conductor.es6.dao.query.parser.internal.ParserException;

/**
 * A parenthesized sub-expression: {@code '(' expression ')'}. Filter
 * construction is delegated entirely to the wrapped {@link Expression}.
 */
public class GroupedExpression extends AbstractNode implements FilterProvider {

    private Expression expression;

    public GroupedExpression(InputStream is) throws ParserException {
        super(is);
    }

    @Override
    protected void _parse() throws Exception {
        // Consume '(' , the inner expression, then ')'; anything else is a
        // syntax error reported by assertExpected.
        assertExpected(read(1), "(");
        this.expression = new Expression(is);
        assertExpected(read(1), ")");
    }

    @Override
    public String toString() {
        return "(" + expression + ")";
    }

    /**
     * @return the expression wrapped by the parentheses
     */
    public Expression getExpression() {
        return expression;
    }

    @Override
    public QueryBuilder getFilterBuilder() {
        return expression.getFilterBuilder();
    }
}
6,953
0
Create_ds/conductor/es6-persistence/src/main/java/com/netflix/conductor/es6/dao/query
Create_ds/conductor/es6-persistence/src/main/java/com/netflix/conductor/es6/dao/query/parser/FilterProvider.java
/*
 * Copyright 2020 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.es6.dao.query.parser;

import org.elasticsearch.index.query.QueryBuilder;

/**
 * Implemented by parse-tree nodes that can be translated into an
 * ElasticSearch query.
 */
public interface FilterProvider {

    /**
     * Builds the ElasticSearch filter corresponding to this parsed node.
     *
     * @return the {@link QueryBuilder} for this node
     */
    // Interface members are implicitly public; the redundant modifier was
    // removed per standard Java style.
    QueryBuilder getFilterBuilder();
}
6,954
0
Create_ds/conductor/es6-persistence/src/main/java/com/netflix/conductor/es6/dao/query
Create_ds/conductor/es6-persistence/src/main/java/com/netflix/conductor/es6/dao/query/parser/NameValue.java
/*
 * Copyright 2020 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.es6.dao.query.parser;

import java.io.InputStream;

import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;

import com.netflix.conductor.es6.dao.query.parser.internal.AbstractNode;
import com.netflix.conductor.es6.dao.query.parser.internal.ComparisonOp;
import com.netflix.conductor.es6.dao.query.parser.internal.ComparisonOp.Operators;
import com.netflix.conductor.es6.dao.query.parser.internal.ConstValue;
import com.netflix.conductor.es6.dao.query.parser.internal.ListConst;
import com.netflix.conductor.es6.dao.query.parser.internal.Name;
import com.netflix.conductor.es6.dao.query.parser.internal.ParserException;
import com.netflix.conductor.es6.dao.query.parser.internal.Range;

/**
 * <pre>
 * Represents an expression of the form as below:
 * key OPR value
 * OPR is the comparison operator which could be one of the following:
 * &gt;, &lt;, = , !=, IN, BETWEEN, STARTS_WITH, IS
 * </pre>
 *
 * Exactly one of {@code value}, {@code range} or {@code valueList} is
 * populated, depending on the operator.
 */
public class NameValue extends AbstractNode implements FilterProvider {

    private Name name;
    private ComparisonOp op;
    private ConstValue value;   // populated for scalar operators (=, !=, <, >, IS, STARTS_WITH)
    private Range range;        // populated only for BETWEEN
    private ListConst valueList; // populated only for IN

    public NameValue(InputStream is) throws ParserException {
        super(is);
    }

    @Override
    protected void _parse() throws Exception {
        this.name = new Name(is);
        this.op = new ComparisonOp(is);
        // Exactly one operand form follows the operator. Chaining with
        // else-if fixes the previous structure in which BETWEEN also fell
        // through and attempted to parse a trailing ConstValue from the
        // stream after the Range had already been consumed. Trailing
        // whitespace is still consumed by AbstractNode.parse()'s final
        // skipWhitespace(), so boolean-operator lookahead is unaffected.
        if (this.op.getOperator().equals(Operators.BETWEEN.value())) {
            this.range = new Range(is);
        } else if (this.op.getOperator().equals(Operators.IN.value())) {
            this.valueList = new ListConst(is);
        } else {
            this.value = new ConstValue(is);
        }
    }

    @Override
    public String toString() {
        // Render whichever operand form was parsed; previously IN/BETWEEN
        // expressions printed "null" for the scalar value.
        if (range != null) {
            return "" + name + op + range;
        }
        if (valueList != null) {
            return "" + name + op + valueList;
        }
        return "" + name + op + value;
    }

    /**
     * @return the field name (left-hand side)
     */
    public Name getName() {
        return name;
    }

    /**
     * @return the comparison operator
     */
    public ComparisonOp getOp() {
        return op;
    }

    /**
     * @return the scalar constant value, or null for IN/BETWEEN expressions
     */
    public ConstValue getValue() {
        return value;
    }

    @Override
    public QueryBuilder getFilterBuilder() {
        if (op.getOperator().equals(Operators.EQUALS.value())) {
            return QueryBuilders.queryStringQuery(
                    name.getName() + ":" + value.getValue().toString());
        } else if (op.getOperator().equals(Operators.BETWEEN.value())) {
            return QueryBuilders.rangeQuery(name.getName())
                    .from(range.getLow())
                    .to(range.getHigh());
        } else if (op.getOperator().equals(Operators.IN.value())) {
            return QueryBuilders.termsQuery(name.getName(), valueList.getList());
        } else if (op.getOperator().equals(Operators.NOT_EQUALS.value())) {
            return QueryBuilders.queryStringQuery(
                    "NOT " + name.getName() + ":" + value.getValue().toString());
        } else if (op.getOperator().equals(Operators.GREATER_THAN.value())) {
            // Strictly greater than: both bounds exclusive with no upper limit.
            return QueryBuilders.rangeQuery(name.getName())
                    .from(value.getValue())
                    .includeLower(false)
                    .includeUpper(false);
        } else if (op.getOperator().equals(Operators.IS.value())) {
            if (value.getSysConstant().equals(ConstValue.SystemConsts.NULL)) {
                // "IS NULL": match documents where the field does not exist.
                return QueryBuilders.boolQuery()
                        .mustNot(
                                QueryBuilders.boolQuery()
                                        .must(QueryBuilders.matchAllQuery())
                                        .mustNot(QueryBuilders.existsQuery(name.getName())));
            } else if (value.getSysConstant().equals(ConstValue.SystemConsts.NOT_NULL)) {
                // "IS NOT NULL": match documents where the field exists.
                return QueryBuilders.boolQuery()
                        .mustNot(
                                QueryBuilders.boolQuery()
                                        .must(QueryBuilders.matchAllQuery())
                                        .must(QueryBuilders.existsQuery(name.getName())));
            }
        } else if (op.getOperator().equals(Operators.LESS_THAN.value())) {
            // Strictly less than: both bounds exclusive with no lower limit.
            return QueryBuilders.rangeQuery(name.getName())
                    .to(value.getValue())
                    .includeLower(false)
                    .includeUpper(false);
        } else if (op.getOperator().equals(Operators.STARTS_WITH.value())) {
            return QueryBuilders.prefixQuery(name.getName(), value.getUnquotedValue());
        }
        throw new IllegalStateException("Incorrect/unsupported operators");
    }
}
6,955
0
Create_ds/conductor/es6-persistence/src/main/java/com/netflix/conductor/es6/dao/query
Create_ds/conductor/es6-persistence/src/main/java/com/netflix/conductor/es6/dao/query/parser/Expression.java
/*
 * Copyright 2020 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.es6.dao.query.parser;

import java.io.BufferedInputStream;
import java.io.ByteArrayInputStream;
import java.io.InputStream;

import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;

import com.netflix.conductor.es6.dao.query.parser.internal.AbstractNode;
import com.netflix.conductor.es6.dao.query.parser.internal.BooleanOp;
import com.netflix.conductor.es6.dao.query.parser.internal.ParserException;

/**
 * Root of the query grammar: either a single name/value comparison, a
 * parenthesized group, or a binary combination of the two joined by
 * {@code AND}/{@code OR}.
 */
public class Expression extends AbstractNode implements FilterProvider {

    private NameValue nameVal;
    private GroupedExpression ge;
    private BooleanOp op;
    private Expression rhs;

    public Expression(InputStream is) throws ParserException {
        super(is);
    }

    @Override
    protected void _parse() throws Exception {
        // A leading '(' starts a grouped sub-expression; anything else must
        // be a name/value comparison.
        byte[] lookahead = peek(1);
        if (lookahead[0] == '(') {
            this.ge = new GroupedExpression(is);
        } else {
            this.nameVal = new NameValue(is);
        }
        // If a boolean operator follows, the remainder of the stream forms
        // the right-hand side of a binary expression.
        if (isBoolOpr(peek(3))) {
            this.op = new BooleanOp(is);
            this.rhs = new Expression(is);
        }
    }

    public boolean isBinaryExpr() {
        return this.op != null;
    }

    public BooleanOp getOperator() {
        return this.op;
    }

    public Expression getRightHandSide() {
        return this.rhs;
    }

    public boolean isNameValue() {
        return this.nameVal != null;
    }

    public NameValue getNameValue() {
        return this.nameVal;
    }

    public GroupedExpression getGroupedExpression() {
        return this.ge;
    }

    @Override
    public QueryBuilder getFilterBuilder() {
        QueryBuilder lhs =
                (nameVal != null) ? nameVal.getFilterBuilder() : ge.getFilterBuilder();
        if (!isBinaryExpr()) {
            return lhs;
        }
        QueryBuilder rhsFilter = rhs.getFilterBuilder();
        // AND maps to must/must, OR to should/should.
        return op.isAnd()
                ? QueryBuilders.boolQuery().must(lhs).must(rhsFilter)
                : QueryBuilders.boolQuery().should(lhs).should(rhsFilter);
    }

    @Override
    public String toString() {
        String lhs = String.valueOf(nameVal == null ? ge : nameVal);
        return isBinaryExpr() ? lhs + op + rhs : lhs;
    }

    /**
     * Parses the given query string into an expression tree.
     *
     * @param value the structured query text
     * @return the parsed expression
     * @throws ParserException if the text is not a valid expression
     */
    public static Expression fromString(String value) throws ParserException {
        return new Expression(new BufferedInputStream(new ByteArrayInputStream(value.getBytes())));
    }
}
6,956
0
Create_ds/conductor/es6-persistence/src/main/java/com/netflix/conductor/es6/dao/query/parser
Create_ds/conductor/es6-persistence/src/main/java/com/netflix/conductor/es6/dao/query/parser/internal/BooleanOp.java
/*
 * Copyright 2020 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.es6.dao.query.parser.internal;

import java.io.InputStream;

/**
 * The boolean connective between two expressions: {@code AND} or {@code OR}.
 */
public class BooleanOp extends AbstractNode {

    private String value;

    public BooleanOp(InputStream is) throws ParserException {
        super(is);
    }

    @Override
    protected void _parse() throws Exception {
        // Peek up to three bytes, decide which operator is present, then
        // consume exactly that many bytes from the stream.
        byte[] lookahead = peek(3);
        String operator;
        if (lookahead.length > 1 && lookahead[0] == 'O' && lookahead[1] == 'R') {
            operator = "OR";
        } else if (lookahead.length > 2
                && lookahead[0] == 'A'
                && lookahead[1] == 'N'
                && lookahead[2] == 'D') {
            operator = "AND";
        } else {
            throw new ParserException("No valid boolean operator found...");
        }
        this.value = operator;
        read(operator.length());
    }

    @Override
    public String toString() {
        return " " + value + " ";
    }

    public String getOperator() {
        return value;
    }

    public boolean isAnd() {
        return "AND".equals(value);
    }

    public boolean isOr() {
        return "OR".equals(value);
    }
}
6,957
0
Create_ds/conductor/es6-persistence/src/main/java/com/netflix/conductor/es6/dao/query/parser
Create_ds/conductor/es6-persistence/src/main/java/com/netflix/conductor/es6/dao/query/parser/internal/ConstValue.java
/*
 * Copyright 2020 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.es6.dao.query.parser.internal;

import java.io.InputStream;

/**
 * Constant value can be:
 *
 * <ol>
 *   <li>List of values (a,b,c)
 *   <li>Range of values (m AND n)
 *   <li>A value (x)
 *   <li>A value is either a string or a number
 * </ol>
 *
 * Quoted strings are re-wrapped in double quotes; the special tokens
 * {@code null} / {@code not null} are recognized as system constants.
 */
public class ConstValue extends AbstractNode {

    public enum SystemConsts {
        NULL("null"),
        NOT_NULL("not null");
        private final String value;

        SystemConsts(String value) {
            this.value = value;
        }

        public String value() {
            return value;
        }
    }

    // Strings are normalized to be wrapped in double quotes regardless of the
    // quote character used in the input.
    private static final String QUOTE = "\"";

    // Parsed value: a quoted string, a numeric string, or a SystemConsts text.
    private Object value;

    // Non-null only when the parsed value is "null" / "not null".
    private SystemConsts sysConsts;

    public ConstValue(InputStream is) throws ParserException {
        super(is);
    }

    @Override
    protected void _parse() throws Exception {
        // Look ahead 4 bytes to classify the token without consuming it.
        byte[] peeked = peek(4);
        String sp = new String(peeked).trim();
        // Read a constant value (number or a string)
        if (peeked[0] == '"' || peeked[0] == '\'') {
            this.value = readString(is);
        } else if (sp.toLowerCase().startsWith("not")) {
            // NOTE(review): any unquoted token starting with "not" is taken
            // as the NOT NULL system constant — confirm callers never pass
            // bare identifiers beginning with "not".
            this.value = SystemConsts.NOT_NULL.value();
            sysConsts = SystemConsts.NOT_NULL;
            read(SystemConsts.NOT_NULL.value().length());
        } else if (sp.equalsIgnoreCase(SystemConsts.NULL.value())) {
            this.value = SystemConsts.NULL.value();
            sysConsts = SystemConsts.NULL;
            read(SystemConsts.NULL.value().length());
        } else {
            this.value = readNumber(is);
        }
    }

    // Consumes consecutive numeric characters (digits, '-', '.', 'e') and
    // stops — via mark/reset — at the first non-numeric byte, leaving it in
    // the stream.
    private String readNumber(InputStream is) throws Exception {
        StringBuilder sb = new StringBuilder();
        while (is.available() > 0) {
            is.mark(1);
            char c = (char) is.read();
            if (!isNumeric(c)) {
                is.reset();
                break;
            } else {
                sb.append(c);
            }
        }
        return sb.toString().trim();
    }

    /**
     * Reads an escaped string
     *
     * @throws Exception
     */
    // The opening quote character (single or double) is consumed first and
    // also acts as the closing delimiter; backslash escapes the next char.
    private String readString(InputStream is) throws Exception {
        char delim = (char) read(1)[0];
        StringBuilder sb = new StringBuilder();
        boolean valid = false;
        while (is.available() > 0) {
            char c = (char) is.read();
            if (c == delim) {
                valid = true;
                break;
            } else if (c == '\\') {
                // read the next character as part of the value
                c = (char) is.read();
                sb.append(c);
            } else {
                sb.append(c);
            }
        }
        if (!valid) {
            throw new ParserException(
                    "String constant is not quoted with <" + delim + "> : " + sb.toString());
        }
        return QUOTE + sb.toString() + QUOTE;
    }

    public Object getValue() {
        return value;
    }

    @Override
    public String toString() {
        return "" + value;
    }

    // Strips the normalizing double quotes added by readString, if present.
    public String getUnquotedValue() {
        String result = toString();
        if (result.length() >= 2 && result.startsWith(QUOTE) && result.endsWith(QUOTE)) {
            result = result.substring(1, result.length() - 1);
        }
        return result;
    }

    public boolean isSysConstant() {
        return this.sysConsts != null;
    }

    public SystemConsts getSysConstant() {
        return this.sysConsts;
    }
}
6,958
0
Create_ds/conductor/es6-persistence/src/main/java/com/netflix/conductor/es6/dao/query/parser
Create_ds/conductor/es6-persistence/src/main/java/com/netflix/conductor/es6/dao/query/parser/internal/ParserException.java
/* * Copyright 2020 Netflix, Inc. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.es6.dao.query.parser.internal; @SuppressWarnings("serial") public class ParserException extends Exception { public ParserException(String message) { super(message); } public ParserException(String message, Throwable cause) { super(message, cause); } }
6,959
0
Create_ds/conductor/es6-persistence/src/main/java/com/netflix/conductor/es6/dao/query/parser
Create_ds/conductor/es6-persistence/src/main/java/com/netflix/conductor/es6/dao/query/parser/internal/AbstractNode.java
/*
 * Copyright 2020 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.es6.dao.query.parser.internal;

import java.io.InputStream;
import java.math.BigDecimal;
import java.util.HashSet;
import java.util.Set;
import java.util.regex.Pattern;

/**
 * Base class for every node of the query parse tree. Provides a small
 * recursive-descent toolkit over an {@link InputStream}: peek/read with
 * mark/reset, whitespace skipping, and assertion helpers. Subclasses
 * implement {@link #_parse()}; parsing is triggered from the constructor.
 * The stream must support mark/reset (a BufferedInputStream in practice —
 * see Expression.fromString).
 */
public abstract class AbstractNode {

    public static final Pattern WHITESPACE = Pattern.compile("\\s");

    // Single-character comparison operators recognized by isComparisonOpr.
    protected static Set<Character> comparisonOprs = new HashSet<>();

    static {
        comparisonOprs.add('>');
        comparisonOprs.add('<');
        comparisonOprs.add('=');
    }

    protected InputStream is;

    protected AbstractNode(InputStream is) throws ParserException {
        this.is = is;
        this.parse();
    }

    // A token is a number iff BigDecimal can parse it.
    protected boolean isNumber(String test) {
        try {
            // If you can convert to a big decimal value, then it is a number.
            new BigDecimal(test);
            return true;
        } catch (NumberFormatException e) {
            // Ignore
        }
        return false;
    }

    // True when the peeked bytes start with "OR" or "AND".
    protected boolean isBoolOpr(byte[] buffer) {
        if (buffer.length > 1 && buffer[0] == 'O' && buffer[1] == 'R') {
            return true;
        } else {
            return buffer.length > 2 && buffer[0] == 'A' && buffer[1] == 'N' && buffer[2] == 'D';
        }
    }

    // True for "IN", "!=", or a single-char operator (<, >, =).
    protected boolean isComparisonOpr(byte[] buffer) {
        if (buffer[0] == 'I' && buffer[1] == 'N') {
            return true;
        } else if (buffer[0] == '!' && buffer[1] == '=') {
            return true;
        } else {
            return comparisonOprs.contains((char) buffer[0]);
        }
    }

    // Returns the next 'length' bytes without consuming them (mark/reset).
    protected byte[] peek(int length) throws Exception {
        return read(length, true);
    }

    // Consumes and returns the next 'length' bytes.
    protected byte[] read(int length) throws Exception {
        return read(length, false);
    }

    // Reads one whitespace-or-operator-delimited token; operator characters
    // (=, >, <, !) terminate the token without being consumed.
    protected String readToken() throws Exception {
        skipWhitespace();
        StringBuilder sb = new StringBuilder();
        while (is.available() > 0) {
            char c = (char) peek(1)[0];
            if (c == ' ' || c == '\t' || c == '\n' || c == '\r') {
                is.skip(1);
                break;
            } else if (c == '=' || c == '>' || c == '<' || c == '!') {
                // do not skip
                break;
            }
            sb.append(c);
            is.skip(1);
        }
        return sb.toString().trim();
    }

    // Characters permitted in a numeric literal (sign, exponent, digits, dot).
    protected boolean isNumeric(char c) {
        return c == '-' || c == 'e' || (c >= '0' && c <= '9') || c == '.';
    }

    protected void assertExpected(byte[] found, String expected) throws ParserException {
        assertExpected(new String(found), expected);
    }

    protected void assertExpected(String found, String expected) throws ParserException {
        if (!found.equals(expected)) {
            throw new ParserException("Expected " + expected + ", found " + found);
        }
    }

    protected void assertExpected(char found, char expected) throws ParserException {
        if (found != expected) {
            throw new ParserException("Expected " + expected + ", found " + found);
        }
    }

    // "Exception-friendly for": runs consumer for i in [0, length), letting
    // checked exceptions propagate.
    protected static void efor(int length, FunctionThrowingException<Integer> consumer)
            throws Exception {
        for (int i = 0; i < length; i++) {
            consumer.accept(i);
        }
    }

    protected abstract void _parse() throws Exception;

    // Public stuff here
    // Template method: trims whitespace around the subclass-specific parse
    // and wraps any non-parser failure in a ParserException.
    private void parse() throws ParserException {
        // skip white spaces
        skipWhitespace();
        try {
            _parse();
        } catch (Exception e) {
            if (!(e instanceof ParserException)) {
                throw new ParserException("Error parsing", e);
            } else {
                throw (ParserException) e;
            }
        }
        skipWhitespace();
    }

    // Private methods
    // NOTE(review): reading past end-of-stream stores (byte) -1 in the
    // buffer rather than failing — callers appear to rely on available()
    // checks to avoid this; confirm before depending on peeked bytes at EOF.
    private byte[] read(int length, boolean peekOnly) throws Exception {
        byte[] buf = new byte[length];
        if (peekOnly) {
            is.mark(length);
        }
        efor(length, (Integer c) -> buf[c] = (byte) is.read());
        if (peekOnly) {
            is.reset();
        }
        return buf;
    }

    // Consumes any run of spaces/tabs/newlines at the current position.
    protected void skipWhitespace() throws ParserException {
        try {
            while (is.available() > 0) {
                byte c = peek(1)[0];
                if (c == ' ' || c == '\t' || c == '\n' || c == '\r') {
                    // skip
                    read(1);
                } else {
                    break;
                }
            }
        } catch (Exception e) {
            throw new ParserException(e.getMessage(), e);
        }
    }
}
6,960
0
Create_ds/conductor/es6-persistence/src/main/java/com/netflix/conductor/es6/dao/query/parser
Create_ds/conductor/es6-persistence/src/main/java/com/netflix/conductor/es6/dao/query/parser/internal/FunctionThrowingException.java
/* * Copyright 2020 Netflix, Inc. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.es6.dao.query.parser.internal; @FunctionalInterface public interface FunctionThrowingException<T> { void accept(T t) throws Exception; }
6,961
0
Create_ds/conductor/es6-persistence/src/main/java/com/netflix/conductor/es6/dao/query/parser
Create_ds/conductor/es6-persistence/src/main/java/com/netflix/conductor/es6/dao/query/parser/internal/ListConst.java
/*
 * Copyright 2020 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.es6.dao.query.parser.internal;

import java.io.InputStream;
import java.util.LinkedList;
import java.util.List;

/** List of constants */
public class ListConst extends AbstractNode {

    private List<Object> values;

    public ListConst(InputStream is) throws ParserException {
        super(is);
    }

    @Override
    protected void _parse() throws Exception {
        // A list literal must open with '(' and is read up to the matching ')'.
        assertExpected(read(1), "(");
        this.values = readList();
    }

    // Splits the comma-separated items until the closing parenthesis. The
    // element in progress when ')' (or end of stream) is reached is always
    // appended, trimmed, as the final item.
    private List<Object> readList() throws Exception {
        List<Object> items = new LinkedList<>();
        StringBuilder current = new StringBuilder();
        boolean closed = false;
        while (is.available() > 0) {
            char c = (char) is.read();
            if (c == ')') {
                closed = true;
                break;
            }
            if (c == ',') {
                items.add(current.toString().trim());
                current.setLength(0);
            } else {
                current.append(c);
            }
        }
        items.add(current.toString().trim());
        if (!closed) {
            throw new ParserException("Expected ')' but never encountered in the stream");
        }
        return items;
    }

    public List<Object> getList() {
        return values;
    }

    @Override
    public String toString() {
        return values.toString();
    }
}
6,962
0
Create_ds/conductor/es6-persistence/src/main/java/com/netflix/conductor/es6/dao/query/parser
Create_ds/conductor/es6-persistence/src/main/java/com/netflix/conductor/es6/dao/query/parser/internal/Range.java
/*
 * Copyright 2020 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.es6.dao.query.parser.internal;

import java.io.InputStream;

/**
 * Parses the operand of a BETWEEN expression: {@code <low> AND <high>}.
 *
 * <p>Both bounds are read as numeric tokens (per the inherited {@code isNumeric} check). Fix over
 * the previous version: the lower bound is now validated the same way as the upper bound — an
 * empty {@code low} (e.g. input {@code " AND 5"}) previously parsed "successfully" and produced a
 * range with a missing lower value downstream; it now fails fast with a clear error.
 */
public class Range extends AbstractNode {

    private String low;
    private String high;

    public Range(InputStream is) throws ParserException {
        super(is);
    }

    @Override
    protected void _parse() throws Exception {
        this.low = readNumber(is);
        if ("".equals(this.low)) {
            // Symmetric with the upper-bound check below; previously unvalidated.
            throw new ParserException("Missing the lower range value...");
        }
        skipWhitespace();
        byte[] peeked = read(3);
        assertExpected(peeked, "AND");
        skipWhitespace();
        String num = readNumber(is);
        if ("".equals(num)) {
            throw new ParserException("Missing the upper range value...");
        }
        this.high = num;
    }

    /**
     * Reads a maximal run of numeric characters from the stream; non-numeric lookahead is pushed
     * back via mark/reset so the caller can continue parsing.
     *
     * @param input the stream to read from (parameter renamed so it no longer shadows the
     *     inherited {@code is} field)
     * @return the numeric token, possibly empty if no digit was found
     */
    private String readNumber(InputStream input) throws Exception {
        StringBuilder sb = new StringBuilder();
        while (input.available() > 0) {
            input.mark(1);
            char c = (char) input.read();
            if (!isNumeric(c)) {
                input.reset();
                break;
            } else {
                sb.append(c);
            }
        }
        return sb.toString().trim();
    }

    /**
     * @return the low
     */
    public String getLow() {
        return low;
    }

    /**
     * @return the high
     */
    public String getHigh() {
        return high;
    }

    @Override
    public String toString() {
        return low + " AND " + high;
    }
}
6,963
0
Create_ds/conductor/es6-persistence/src/main/java/com/netflix/conductor/es6/dao/query/parser
Create_ds/conductor/es6-persistence/src/main/java/com/netflix/conductor/es6/dao/query/parser/internal/Name.java
/*
 * Copyright 2020 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.es6.dao.query.parser.internal;

import java.io.InputStream;

/**
 * Query-AST node for a field name: the identifier on the left-hand side of a comparison that the
 * search will be evaluated against.
 */
public class Name extends AbstractNode {

    // The raw token read from the input stream.
    private String token;

    public Name(InputStream is) throws ParserException {
        super(is);
    }

    @Override
    protected void _parse() throws Exception {
        // A name is a single token; the inherited readToken() handles delimiting.
        this.token = readToken();
    }

    /** Returns the parsed field name. */
    public String getName() {
        return token;
    }

    @Override
    public String toString() {
        return token;
    }
}
6,964
0
Create_ds/conductor/es6-persistence/src/main/java/com/netflix/conductor/es6/dao/query/parser
Create_ds/conductor/es6-persistence/src/main/java/com/netflix/conductor/es6/dao/query/parser/internal/ComparisonOp.java
/*
 * Copyright 2020 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.es6.dao.query.parser.internal;

import java.io.InputStream;

/**
 * Query-AST node for a comparison operator: one of {@code =}, {@code >}, {@code <}, {@code !=},
 * {@code IN}, {@code IS}, {@code BETWEEN}, {@code STARTS_WITH}.
 *
 * <p>The operator is recognized by peeking (without consuming) up to the length of the longest
 * operator, matching byte-by-byte, and only then consuming exactly the matched operator's length
 * from the stream so the operand parser sees the remaining input.
 */
public class ComparisonOp extends AbstractNode {

    /** The supported operators and their literal query-text spellings. */
    public enum Operators {
        BETWEEN("BETWEEN"),
        EQUALS("="),
        LESS_THAN("<"),
        GREATER_THAN(">"),
        IN("IN"),
        NOT_EQUALS("!="),
        IS("IS"),
        STARTS_WITH("STARTS_WITH");

        private final String value;

        Operators(String value) {
            this.value = value;
        }

        public String value() {
            return value;
        }
    }

    // Computed once: the length of the longest operator spelling ("STARTS_WITH"),
    // which bounds how many bytes _parse() needs to peek.
    static {
        int max = 0;
        for (Operators op : Operators.values()) {
            max = Math.max(max, op.value().length());
        }
        maxOperatorLength = max;
    }

    private static final int maxOperatorLength;

    private static final int betweenLen = Operators.BETWEEN.value().length();
    private static final int startsWithLen = Operators.STARTS_WITH.value().length();

    // The matched operator's literal spelling.
    private String value;

    public ComparisonOp(InputStream is) throws ParserException {
        super(is);
    }

    @Override
    protected void _parse() throws Exception {
        // NOTE(review): assumes peek(n) may return fewer than n bytes near end-of-stream
        // (hence the length guards on the BETWEEN / STARTS_WITH branches) and that indices
        // 0..1 are always safe to read — confirm against AbstractNode.peek's contract.
        byte[] peeked = peek(maxOperatorLength);
        if (peeked[0] == '=' || peeked[0] == '>' || peeked[0] == '<') {
            // Single-character operators.
            this.value = new String(peeked, 0, 1);
        } else if (peeked[0] == 'I' && peeked[1] == 'N') {
            this.value = "IN";
        } else if (peeked[0] == 'I' && peeked[1] == 'S') {
            this.value = "IS";
        } else if (peeked[0] == '!' && peeked[1] == '=') {
            this.value = "!=";
        } else if (peeked.length >= betweenLen
                && peeked[0] == 'B'
                && peeked[1] == 'E'
                && peeked[2] == 'T'
                && peeked[3] == 'W'
                && peeked[4] == 'E'
                && peeked[5] == 'E'
                && peeked[6] == 'N') {
            this.value = Operators.BETWEEN.value();
        } else if (peeked.length == startsWithLen
                && new String(peeked).equals(Operators.STARTS_WITH.value())) {
            // STARTS_WITH is exactly maxOperatorLength bytes, so the full peek must equal it.
            this.value = Operators.STARTS_WITH.value();
        } else {
            throw new ParserException(
                    "Expecting an operator (=, >, <, !=, BETWEEN, IN, STARTS_WITH), but found none. Peeked=>"
                            + new String(peeked));
        }
        // Consume only the bytes that the matched operator actually occupies.
        read(this.value.length());
    }

    @Override
    public String toString() {
        // Padded with spaces so adjacent AST nodes concatenate into readable query text.
        return " " + value + " ";
    }

    /** Returns the matched operator's literal spelling (e.g. "=", "BETWEEN"). */
    public String getOperator() {
        return value;
    }
}
6,965
0
Create_ds/conductor/grpc-server/src/test/java/com/netflix/conductor/grpc/server
Create_ds/conductor/grpc-server/src/test/java/com/netflix/conductor/grpc/server/service/WorkflowServiceImplTest.java
/*
 * Copyright 2020 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.grpc.server.service;

import java.util.Collections;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;

import org.junit.Before;
import org.junit.Test;
import org.mockito.Mock;

import com.netflix.conductor.common.run.SearchResult;
import com.netflix.conductor.common.run.Workflow;
import com.netflix.conductor.common.run.WorkflowSummary;
import com.netflix.conductor.grpc.SearchPb;
import com.netflix.conductor.grpc.WorkflowServicePb;
import com.netflix.conductor.proto.WorkflowPb;
import com.netflix.conductor.proto.WorkflowSummaryPb;
import com.netflix.conductor.service.WorkflowService;

import io.grpc.stub.StreamObserver;

import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.*;
import static org.mockito.MockitoAnnotations.initMocks;

/**
 * Unit tests for the gRPC {@code WorkflowServiceImpl} facade.
 *
 * <p>The underlying {@link WorkflowService} is mocked; each search test registers a
 * {@link StreamObserver} whose callbacks latch on a {@link CountDownLatch} and capture the emitted
 * value (or error) into an {@link AtomicReference} for assertion.
 *
 * <p>NOTE(review): the {@code streamAlive.await(10, MILLISECONDS)} calls ignore the boolean
 * return value, so a slow machine could reach the assertions before the observer fires —
 * consider asserting the await result. Also, {@code initMocks} is deprecated in recent Mockito
 * in favor of {@code openMocks} — confirm the Mockito version before switching.
 */
public class WorkflowServiceImplTest {

    private static final String WORKFLOW_ID = "anyWorkflowId";
    private static final Boolean RESUME_SUBWORKFLOW_TASKS = true;

    @Mock private WorkflowService workflowService;

    private WorkflowServiceImpl workflowServiceImpl;

    @Before
    public void init() {
        initMocks(this);
        // 5000 mirrors the service's max-search-size limit asserted in the exception tests.
        workflowServiceImpl = new WorkflowServiceImpl(workflowService, 5000);
    }

    /** retryWorkflow must delegate to the service with the id and resume-subworkflows flag. */
    @SuppressWarnings("unchecked")
    @Test
    public void givenWorkflowIdWhenRetryWorkflowThenRetriedSuccessfully() {
        // Given
        WorkflowServicePb.RetryWorkflowRequest req =
                WorkflowServicePb.RetryWorkflowRequest.newBuilder()
                        .setWorkflowId(WORKFLOW_ID)
                        .setResumeSubworkflowTasks(true)
                        .build();
        // When
        workflowServiceImpl.retryWorkflow(req, mock(StreamObserver.class));
        // Then
        verify(workflowService).retryWorkflow(WORKFLOW_ID, RESUME_SUBWORKFLOW_TASKS);
    }

    /** A page size above the 5000 limit must surface INVALID_ARGUMENT via onError. */
    @Test
    public void searchExceptionTest() throws InterruptedException {
        CountDownLatch streamAlive = new CountDownLatch(1);
        AtomicReference<Throwable> throwable = new AtomicReference<>();
        SearchPb.Request req =
                SearchPb.Request.newBuilder()
                        .setStart(1)
                        .setSize(50000)
                        .setSort("strings")
                        .setQuery("")
                        .setFreeText("")
                        .build();
        StreamObserver<WorkflowServicePb.WorkflowSummarySearchResult> streamObserver =
                new StreamObserver<>() {
                    @Override
                    public void onNext(WorkflowServicePb.WorkflowSummarySearchResult value) {}

                    @Override
                    public void onError(Throwable t) {
                        throwable.set(t);
                        streamAlive.countDown();
                    }

                    @Override
                    public void onCompleted() {
                        streamAlive.countDown();
                    }
                };

        workflowServiceImpl.search(req, streamObserver);

        streamAlive.await(10, TimeUnit.MILLISECONDS);

        assertEquals(
                "INVALID_ARGUMENT: Cannot return more than 5000 results",
                throwable.get().getMessage());
    }

    /** Same over-limit check for the V2 (full Workflow) search variant. */
    @Test
    public void searchV2ExceptionTest() throws InterruptedException {
        CountDownLatch streamAlive = new CountDownLatch(1);
        AtomicReference<Throwable> throwable = new AtomicReference<>();
        SearchPb.Request req =
                SearchPb.Request.newBuilder()
                        .setStart(1)
                        .setSize(50000)
                        .setSort("strings")
                        .setQuery("")
                        .setFreeText("")
                        .build();
        StreamObserver<WorkflowServicePb.WorkflowSearchResult> streamObserver =
                new StreamObserver<>() {
                    @Override
                    public void onNext(WorkflowServicePb.WorkflowSearchResult value) {}

                    @Override
                    public void onError(Throwable t) {
                        throwable.set(t);
                        streamAlive.countDown();
                    }

                    @Override
                    public void onCompleted() {
                        streamAlive.countDown();
                    }
                };

        workflowServiceImpl.searchV2(req, streamObserver);

        streamAlive.await(10, TimeUnit.MILLISECONDS);

        assertEquals(
                "INVALID_ARGUMENT: Cannot return more than 5000 results",
                throwable.get().getMessage());
    }

    /** A mocked single-hit summary search must be translated to the proto result. */
    @Test
    public void searchTest() throws InterruptedException {
        CountDownLatch streamAlive = new CountDownLatch(1);
        AtomicReference<WorkflowServicePb.WorkflowSummarySearchResult> result =
                new AtomicReference<>();
        SearchPb.Request req =
                SearchPb.Request.newBuilder()
                        .setStart(1)
                        .setSize(1)
                        .setSort("strings")
                        .setQuery("")
                        .setFreeText("")
                        .build();
        StreamObserver<WorkflowServicePb.WorkflowSummarySearchResult> streamObserver =
                new StreamObserver<>() {
                    @Override
                    public void onNext(WorkflowServicePb.WorkflowSummarySearchResult value) {
                        result.set(value);
                    }

                    @Override
                    public void onError(Throwable t) {
                        streamAlive.countDown();
                    }

                    @Override
                    public void onCompleted() {
                        streamAlive.countDown();
                    }
                };

        WorkflowSummary workflow = new WorkflowSummary();
        SearchResult<WorkflowSummary> searchResult = new SearchResult<>();
        searchResult.setTotalHits(1);
        searchResult.setResults(Collections.singletonList(workflow));

        when(workflowService.searchWorkflows(
                        anyInt(), anyInt(), anyList(), anyString(), anyString()))
                .thenReturn(searchResult);

        workflowServiceImpl.search(req, streamObserver);

        streamAlive.await(10, TimeUnit.MILLISECONDS);

        WorkflowServicePb.WorkflowSummarySearchResult workflowSearchResult = result.get();

        assertEquals(1, workflowSearchResult.getTotalHits());
        // An empty WorkflowSummary maps to the default proto instance.
        assertEquals(
                WorkflowSummaryPb.WorkflowSummary.newBuilder().build(),
                workflowSearchResult.getResultsList().get(0));
    }

    /** Same translation check for the search-by-tasks variant. */
    @Test
    public void searchByTasksTest() throws InterruptedException {
        CountDownLatch streamAlive = new CountDownLatch(1);
        AtomicReference<WorkflowServicePb.WorkflowSummarySearchResult> result =
                new AtomicReference<>();
        SearchPb.Request req =
                SearchPb.Request.newBuilder()
                        .setStart(1)
                        .setSize(1)
                        .setSort("strings")
                        .setQuery("")
                        .setFreeText("")
                        .build();
        StreamObserver<WorkflowServicePb.WorkflowSummarySearchResult> streamObserver =
                new StreamObserver<>() {
                    @Override
                    public void onNext(WorkflowServicePb.WorkflowSummarySearchResult value) {
                        result.set(value);
                    }

                    @Override
                    public void onError(Throwable t) {
                        streamAlive.countDown();
                    }

                    @Override
                    public void onCompleted() {
                        streamAlive.countDown();
                    }
                };

        WorkflowSummary workflow = new WorkflowSummary();
        SearchResult<WorkflowSummary> searchResult = new SearchResult<>();
        searchResult.setTotalHits(1);
        searchResult.setResults(Collections.singletonList(workflow));

        when(workflowService.searchWorkflowsByTasks(
                        anyInt(), anyInt(), anyList(), anyString(), anyString()))
                .thenReturn(searchResult);

        workflowServiceImpl.searchByTasks(req, streamObserver);

        streamAlive.await(10, TimeUnit.MILLISECONDS);

        WorkflowServicePb.WorkflowSummarySearchResult workflowSearchResult = result.get();

        assertEquals(1, workflowSearchResult.getTotalHits());
        assertEquals(
                WorkflowSummaryPb.WorkflowSummary.newBuilder().build(),
                workflowSearchResult.getResultsList().get(0));
    }

    /** V2 search returns full Workflow protos; stubbed with exact argument values. */
    @Test
    public void searchV2Test() throws InterruptedException {
        CountDownLatch streamAlive = new CountDownLatch(1);
        AtomicReference<WorkflowServicePb.WorkflowSearchResult> result = new AtomicReference<>();
        SearchPb.Request req =
                SearchPb.Request.newBuilder()
                        .setStart(1)
                        .setSize(1)
                        .setSort("strings")
                        .setQuery("")
                        .setFreeText("")
                        .build();
        StreamObserver<WorkflowServicePb.WorkflowSearchResult> streamObserver =
                new StreamObserver<>() {
                    @Override
                    public void onNext(WorkflowServicePb.WorkflowSearchResult value) {
                        result.set(value);
                    }

                    @Override
                    public void onError(Throwable t) {
                        streamAlive.countDown();
                    }

                    @Override
                    public void onCompleted() {
                        streamAlive.countDown();
                    }
                };

        Workflow workflow = new Workflow();
        SearchResult<Workflow> searchResult = new SearchResult<>();
        searchResult.setTotalHits(1);
        searchResult.setResults(Collections.singletonList(workflow));

        // NOTE(review): the service maps an empty free-text to "*" — this stub depends on that.
        when(workflowService.searchWorkflowsV2(1, 1, Collections.singletonList("strings"), "*", ""))
                .thenReturn(searchResult);

        workflowServiceImpl.searchV2(req, streamObserver);

        streamAlive.await(10, TimeUnit.MILLISECONDS);

        WorkflowServicePb.WorkflowSearchResult workflowSearchResult = result.get();

        assertEquals(1, workflowSearchResult.getTotalHits());
        assertEquals(
                WorkflowPb.Workflow.newBuilder().build(),
                workflowSearchResult.getResultsList().get(0));
    }

    /** V2 search-by-tasks variant of the previous test. */
    @Test
    public void searchByTasksV2Test() throws InterruptedException {
        CountDownLatch streamAlive = new CountDownLatch(1);
        AtomicReference<WorkflowServicePb.WorkflowSearchResult> result = new AtomicReference<>();
        SearchPb.Request req =
                SearchPb.Request.newBuilder()
                        .setStart(1)
                        .setSize(1)
                        .setSort("strings")
                        .setQuery("")
                        .setFreeText("")
                        .build();
        StreamObserver<WorkflowServicePb.WorkflowSearchResult> streamObserver =
                new StreamObserver<>() {
                    @Override
                    public void onNext(WorkflowServicePb.WorkflowSearchResult value) {
                        result.set(value);
                    }

                    @Override
                    public void onError(Throwable t) {
                        streamAlive.countDown();
                    }

                    @Override
                    public void onCompleted() {
                        streamAlive.countDown();
                    }
                };

        Workflow workflow = new Workflow();
        SearchResult<Workflow> searchResult = new SearchResult<>();
        searchResult.setTotalHits(1);
        searchResult.setResults(Collections.singletonList(workflow));

        when(workflowService.searchWorkflowsByTasksV2(
                        1, 1, Collections.singletonList("strings"), "*", ""))
                .thenReturn(searchResult);

        workflowServiceImpl.searchByTasksV2(req, streamObserver);

        streamAlive.await(10, TimeUnit.MILLISECONDS);

        WorkflowServicePb.WorkflowSearchResult workflowSearchResult = result.get();

        assertEquals(1, workflowSearchResult.getTotalHits());
        assertEquals(
                WorkflowPb.Workflow.newBuilder().build(),
                workflowSearchResult.getResultsList().get(0));
    }
}
6,966
0
Create_ds/conductor/grpc-server/src/test/java/com/netflix/conductor/grpc/server
Create_ds/conductor/grpc-server/src/test/java/com/netflix/conductor/grpc/server/service/TaskServiceImplTest.java
/*
 * Copyright 2020 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.grpc.server.service;

import java.util.Collections;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;

import org.junit.Before;
import org.junit.Test;
import org.mockito.Mock;

import com.netflix.conductor.common.metadata.tasks.Task;
import com.netflix.conductor.common.run.SearchResult;
import com.netflix.conductor.common.run.TaskSummary;
import com.netflix.conductor.grpc.SearchPb;
import com.netflix.conductor.grpc.TaskServicePb;
import com.netflix.conductor.proto.TaskPb;
import com.netflix.conductor.proto.TaskSummaryPb;
import com.netflix.conductor.service.ExecutionService;
import com.netflix.conductor.service.TaskService;

import io.grpc.stub.StreamObserver;

import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.*;
import static org.mockito.MockitoAnnotations.initMocks;

/**
 * Unit tests for the gRPC {@code TaskServiceImpl} facade.
 *
 * <p>{@link TaskService} and {@link ExecutionService} are mocked; each test registers a
 * {@link StreamObserver} whose callbacks latch on a {@link CountDownLatch} and capture the emitted
 * value (or error) for assertion.
 *
 * <p>NOTE(review): the {@code streamAlive.await(10, MILLISECONDS)} calls ignore the boolean
 * return value, so a slow machine could reach the assertions before the observer fires.
 */
public class TaskServiceImplTest {

    @Mock private TaskService taskService;

    @Mock private ExecutionService executionService;

    private TaskServiceImpl taskServiceImpl;

    @Before
    public void init() {
        initMocks(this);
        // 5000 mirrors the service's max-search-size limit asserted in the exception tests.
        taskServiceImpl = new TaskServiceImpl(executionService, taskService, 5000);
    }

    /** A page size above the 5000 limit must surface INVALID_ARGUMENT via onError. */
    @Test
    public void searchExceptionTest() throws InterruptedException {
        CountDownLatch streamAlive = new CountDownLatch(1);
        AtomicReference<Throwable> throwable = new AtomicReference<>();
        SearchPb.Request req =
                SearchPb.Request.newBuilder()
                        .setStart(1)
                        .setSize(50000)
                        .setSort("strings")
                        .setQuery("")
                        .setFreeText("*")
                        .build();
        StreamObserver<TaskServicePb.TaskSummarySearchResult> streamObserver =
                new StreamObserver<>() {
                    @Override
                    public void onNext(TaskServicePb.TaskSummarySearchResult value) {}

                    @Override
                    public void onError(Throwable t) {
                        throwable.set(t);
                        streamAlive.countDown();
                    }

                    @Override
                    public void onCompleted() {
                        streamAlive.countDown();
                    }
                };

        taskServiceImpl.search(req, streamObserver);

        streamAlive.await(10, TimeUnit.MILLISECONDS);

        assertEquals(
                "INVALID_ARGUMENT: Cannot return more than 5000 results",
                throwable.get().getMessage());
    }

    /** Same over-limit check for the V2 (full Task) search variant. */
    @Test
    public void searchV2ExceptionTest() throws InterruptedException {
        CountDownLatch streamAlive = new CountDownLatch(1);
        AtomicReference<Throwable> throwable = new AtomicReference<>();
        SearchPb.Request req =
                SearchPb.Request.newBuilder()
                        .setStart(1)
                        .setSize(50000)
                        .setSort("strings")
                        .setQuery("")
                        .setFreeText("*")
                        .build();
        StreamObserver<TaskServicePb.TaskSearchResult> streamObserver =
                new StreamObserver<>() {
                    @Override
                    public void onNext(TaskServicePb.TaskSearchResult value) {}

                    @Override
                    public void onError(Throwable t) {
                        throwable.set(t);
                        streamAlive.countDown();
                    }

                    @Override
                    public void onCompleted() {
                        streamAlive.countDown();
                    }
                };

        taskServiceImpl.searchV2(req, streamObserver);

        streamAlive.await(10, TimeUnit.MILLISECONDS);

        assertEquals(
                "INVALID_ARGUMENT: Cannot return more than 5000 results",
                throwable.get().getMessage());
    }

    /** A mocked single-hit summary search must be translated to the proto result. */
    @Test
    public void searchTest() throws InterruptedException {
        CountDownLatch streamAlive = new CountDownLatch(1);
        AtomicReference<TaskServicePb.TaskSummarySearchResult> result = new AtomicReference<>();
        SearchPb.Request req =
                SearchPb.Request.newBuilder()
                        .setStart(1)
                        .setSize(1)
                        .setSort("strings")
                        .setQuery("")
                        .setFreeText("*")
                        .build();
        StreamObserver<TaskServicePb.TaskSummarySearchResult> streamObserver =
                new StreamObserver<>() {
                    @Override
                    public void onNext(TaskServicePb.TaskSummarySearchResult value) {
                        result.set(value);
                    }

                    @Override
                    public void onError(Throwable t) {
                        streamAlive.countDown();
                    }

                    @Override
                    public void onCompleted() {
                        streamAlive.countDown();
                    }
                };

        TaskSummary taskSummary = new TaskSummary();
        SearchResult<TaskSummary> searchResult = new SearchResult<>();
        searchResult.setTotalHits(1);
        searchResult.setResults(Collections.singletonList(taskSummary));

        when(taskService.search(1, 1, "strings", "*", "")).thenReturn(searchResult);

        taskServiceImpl.search(req, streamObserver);

        streamAlive.await(10, TimeUnit.MILLISECONDS);

        TaskServicePb.TaskSummarySearchResult taskSummarySearchResult = result.get();

        assertEquals(1, taskSummarySearchResult.getTotalHits());
        // An empty TaskSummary maps to the default proto instance.
        assertEquals(
                TaskSummaryPb.TaskSummary.newBuilder().build(),
                taskSummarySearchResult.getResultsList().get(0));
    }

    /** V2 search returns full Task protos; a default Task maps with callbackFromWorker=true. */
    @Test
    public void searchV2Test() throws InterruptedException {
        CountDownLatch streamAlive = new CountDownLatch(1);
        AtomicReference<TaskServicePb.TaskSearchResult> result = new AtomicReference<>();
        SearchPb.Request req =
                SearchPb.Request.newBuilder()
                        .setStart(1)
                        .setSize(1)
                        .setSort("strings")
                        .setQuery("")
                        .setFreeText("*")
                        .build();
        StreamObserver<TaskServicePb.TaskSearchResult> streamObserver =
                new StreamObserver<>() {
                    @Override
                    public void onNext(TaskServicePb.TaskSearchResult value) {
                        result.set(value);
                    }

                    @Override
                    public void onError(Throwable t) {
                        streamAlive.countDown();
                    }

                    @Override
                    public void onCompleted() {
                        streamAlive.countDown();
                    }
                };

        Task task = new Task();
        SearchResult<Task> searchResult = new SearchResult<>();
        searchResult.setTotalHits(1);
        searchResult.setResults(Collections.singletonList(task));

        when(taskService.searchV2(1, 1, "strings", "*", "")).thenReturn(searchResult);

        taskServiceImpl.searchV2(req, streamObserver);

        streamAlive.await(10, TimeUnit.MILLISECONDS);

        TaskServicePb.TaskSearchResult taskSearchResult = result.get();

        assertEquals(1, taskSearchResult.getTotalHits());
        // NOTE(review): presumably Task.callbackFromWorker defaults to true in the domain
        // object, hence the expected proto — confirm against Task's field initializer.
        assertEquals(
                TaskPb.Task.newBuilder().setCallbackFromWorker(true).build(),
                taskSearchResult.getResultsList().get(0));
    }
}
6,967
0
Create_ds/conductor/grpc-server/src/test/java/com/netflix/conductor/grpc/server
Create_ds/conductor/grpc-server/src/test/java/com/netflix/conductor/grpc/server/service/HealthServiceImplTest.java
/*
 * Copyright 2020 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.grpc.server.service;

/**
 * Placeholder for gRPC health-check service tests.
 *
 * <p>The previous HealthCheckAggregator-based tests (serving / not-serving / exception paths,
 * exercised through an in-process gRPC server via {@code GrpcCleanupRule}) were disabled and have
 * been removed as dead commented-out code; they remain available in version control history.
 */
public class HealthServiceImplTest {
    // SBMTODO: Move this to the Spring Boot health check and reintroduce the
    // serving / not-serving / exception-path tests against it.
}
6,968
0
Create_ds/conductor/grpc-server/src/main/java/com/netflix/conductor/grpc
Create_ds/conductor/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCServer.java
/* * Copyright 2020 Netflix, Inc. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.grpc.server; import java.io.IOException; import java.util.List; import javax.annotation.PostConstruct; import javax.annotation.PreDestroy; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import io.grpc.BindableService; import io.grpc.Server; import io.grpc.ServerBuilder; public class GRPCServer { private static final Logger LOGGER = LoggerFactory.getLogger(GRPCServer.class); private final Server server; public GRPCServer(int port, List<BindableService> services) { ServerBuilder<?> builder = ServerBuilder.forPort(port); services.forEach(builder::addService); server = builder.build(); } @PostConstruct public void start() throws IOException { server.start(); LOGGER.info("grpc: Server started, listening on " + server.getPort()); } @PreDestroy public void stop() { if (server != null) { LOGGER.info("grpc: server shutting down"); server.shutdown(); } } }
6,969
0
Create_ds/conductor/grpc-server/src/main/java/com/netflix/conductor/grpc
Create_ds/conductor/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCServerProperties.java
/*
 * Copyright 2020 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.grpc.server;

import org.springframework.boot.context.properties.ConfigurationProperties;

/**
 * Type-safe binding for the {@code conductor.grpc-server.*} configuration properties.
 *
 * <p>Field names map directly to property keys (e.g. {@code conductor.grpc-server.port}), so they
 * must not be renamed without a corresponding configuration change.
 */
@ConfigurationProperties("conductor.grpc-server")
public class GRPCServerProperties {

    /** The port at which the gRPC server will serve requests */
    private int port = 8090;

    /** Enables the reflection service for Protobuf services */
    private boolean reflectionEnabled = true;

    public int getPort() {
        return port;
    }

    public void setPort(int port) {
        this.port = port;
    }

    public boolean isReflectionEnabled() {
        return reflectionEnabled;
    }

    public void setReflectionEnabled(boolean reflectionEnabled) {
        this.reflectionEnabled = reflectionEnabled;
    }
}
6,970
0
Create_ds/conductor/grpc-server/src/main/java/com/netflix/conductor/grpc
Create_ds/conductor/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GrpcConfiguration.java
/* * Copyright 2020 Netflix, Inc. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.grpc.server; import java.util.List; import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.boot.context.properties.EnableConfigurationProperties; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import io.grpc.BindableService; import io.grpc.protobuf.services.ProtoReflectionService; @Configuration @ConditionalOnProperty(name = "conductor.grpc-server.enabled", havingValue = "true") @EnableConfigurationProperties(GRPCServerProperties.class) public class GrpcConfiguration { @Bean public GRPCServer grpcServer( List<BindableService> bindableServices, // all gRPC service implementations GRPCServerProperties grpcServerProperties) { if (grpcServerProperties.isReflectionEnabled()) { bindableServices.add(ProtoReflectionService.newInstance()); } return new GRPCServer(grpcServerProperties.getPort(), bindableServices); } }
6,971
0
Create_ds/conductor/grpc-server/src/main/java/com/netflix/conductor/grpc/server
Create_ds/conductor/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/WorkflowServiceImpl.java
/*
 * Copyright 2020 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.grpc.server.service;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;

import com.netflix.conductor.common.metadata.workflow.SkipTaskRequest;
import com.netflix.conductor.common.metadata.workflow.StartWorkflowRequest;
import com.netflix.conductor.common.run.SearchResult;
import com.netflix.conductor.common.run.Workflow;
import com.netflix.conductor.common.run.WorkflowSummary;
import com.netflix.conductor.core.exception.NotFoundException;
import com.netflix.conductor.grpc.ProtoMapper;
import com.netflix.conductor.grpc.SearchPb;
import com.netflix.conductor.grpc.WorkflowServiceGrpc;
import com.netflix.conductor.grpc.WorkflowServicePb;
import com.netflix.conductor.proto.RerunWorkflowRequestPb;
import com.netflix.conductor.proto.StartWorkflowRequestPb;
import com.netflix.conductor.proto.WorkflowPb;
import com.netflix.conductor.service.WorkflowService;

import io.grpc.Status;
import io.grpc.stub.StreamObserver;

/**
 * gRPC facade over {@link WorkflowService}: translates protobuf requests into service-layer
 * calls and maps results (and exceptions, via {@link GRPCHelper#onError}) back onto the
 * gRPC response stream.
 */
@Service("grpcWorkflowService")
public class WorkflowServiceImpl extends WorkflowServiceGrpc.WorkflowServiceImplBase {

    // BUG FIX: was LoggerFactory.getLogger(TaskServiceImpl.class) — a copy-paste error that
    // mis-attributed every log line from this service to the task service.
    private static final Logger LOGGER = LoggerFactory.getLogger(WorkflowServiceImpl.class);
    private static final ProtoMapper PROTO_MAPPER = ProtoMapper.INSTANCE;
    private static final GRPCHelper GRPC_HELPER = new GRPCHelper(LOGGER);

    private final WorkflowService workflowService;
    // Upper bound on a single search page; also the default page size when the request omits one.
    private final int maxSearchSize;

    public WorkflowServiceImpl(
            WorkflowService workflowService,
            @Value("${workflow.max.search.size:5000}") int maxSearchSize) {
        this.workflowService = workflowService;
        this.maxSearchSize = maxSearchSize;
    }

    /**
     * Starts a new workflow execution and streams back the generated workflow id.
     * Responds with NOT_FOUND when no workflow definition matches the requested name.
     */
    @Override
    public void startWorkflow(
            StartWorkflowRequestPb.StartWorkflowRequest pbRequest,
            StreamObserver<WorkflowServicePb.StartWorkflowResponse> response) {

        // TODO: better handling of optional 'version'
        final StartWorkflowRequest request = PROTO_MAPPER.fromProto(pbRequest);

        try {
            String id =
                    workflowService.startWorkflow(
                            pbRequest.getName(),
                            GRPC_HELPER.optional(request.getVersion()),
                            request.getCorrelationId(),
                            request.getPriority(),
                            request.getInput(),
                            request.getExternalInputPayloadStoragePath(),
                            request.getTaskToDomain(),
                            request.getWorkflowDef());
            response.onNext(
                    WorkflowServicePb.StartWorkflowResponse.newBuilder().setWorkflowId(id).build());
            response.onCompleted();
        } catch (NotFoundException nfe) {
            response.onError(
                    Status.NOT_FOUND
                            .withDescription("No such workflow found by name=" + request.getName())
                            .asRuntimeException());
        } catch (Exception e) {
            GRPC_HELPER.onError(response, e);
        }
    }

    /** Returns, keyed by correlation id, the workflows matching each requested correlation id. */
    @Override
    public void getWorkflows(
            WorkflowServicePb.GetWorkflowsRequest req,
            StreamObserver<WorkflowServicePb.GetWorkflowsResponse> response) {
        final String name = req.getName();
        final boolean includeClosed = req.getIncludeClosed();
        final boolean includeTasks = req.getIncludeTasks();

        WorkflowServicePb.GetWorkflowsResponse.Builder builder =
                WorkflowServicePb.GetWorkflowsResponse.newBuilder();

        for (String correlationId : req.getCorrelationIdList()) {
            List<Workflow> workflows =
                    workflowService.getWorkflows(name, correlationId, includeClosed, includeTasks);
            builder.putWorkflowsById(
                    correlationId,
                    WorkflowServicePb.GetWorkflowsResponse.Workflows.newBuilder()
                            .addAllWorkflows(
                                    workflows.stream().map(PROTO_MAPPER::toProto)::iterator)
                            .build());
        }

        response.onNext(builder.build());
        response.onCompleted();
    }

    /** Streams the current execution state of a single workflow, optionally with its tasks. */
    @Override
    public void getWorkflowStatus(
            WorkflowServicePb.GetWorkflowStatusRequest req,
            StreamObserver<WorkflowPb.Workflow> response) {
        try {
            Workflow workflow =
                    workflowService.getExecutionStatus(req.getWorkflowId(), req.getIncludeTasks());
            response.onNext(PROTO_MAPPER.toProto(workflow));
            response.onCompleted();
        } catch (Exception e) {
            GRPC_HELPER.onError(response, e);
        }
    }

    /** Deletes (and optionally archives) a workflow execution. */
    @Override
    public void removeWorkflow(
            WorkflowServicePb.RemoveWorkflowRequest req,
            StreamObserver<WorkflowServicePb.RemoveWorkflowResponse> response) {
        try {
            // NOTE: getWorkflodId() (sic) is the getter name generated from the proto definition;
            // the typo lives in the .proto file and cannot be fixed here without a proto change.
            workflowService.deleteWorkflow(req.getWorkflodId(), req.getArchiveWorkflow());
            response.onNext(WorkflowServicePb.RemoveWorkflowResponse.getDefaultInstance());
            response.onCompleted();
        } catch (Exception e) {
            GRPC_HELPER.onError(response, e);
        }
    }

    /** Lists ids of currently running workflows for a name/version within a time window. */
    @Override
    public void getRunningWorkflows(
            WorkflowServicePb.GetRunningWorkflowsRequest req,
            StreamObserver<WorkflowServicePb.GetRunningWorkflowsResponse> response) {
        try {
            List<String> workflowIds =
                    workflowService.getRunningWorkflows(
                            req.getName(), req.getVersion(), req.getStartTime(), req.getEndTime());
            response.onNext(
                    WorkflowServicePb.GetRunningWorkflowsResponse.newBuilder()
                            .addAllWorkflowIds(workflowIds)
                            .build());
            response.onCompleted();
        } catch (Exception e) {
            GRPC_HELPER.onError(response, e);
        }
    }

    /** Forces a decider evaluation pass on the given workflow. */
    @Override
    public void decideWorkflow(
            WorkflowServicePb.DecideWorkflowRequest req,
            StreamObserver<WorkflowServicePb.DecideWorkflowResponse> response) {
        try {
            workflowService.decideWorkflow(req.getWorkflowId());
            response.onNext(WorkflowServicePb.DecideWorkflowResponse.getDefaultInstance());
            response.onCompleted();
        } catch (Exception e) {
            GRPC_HELPER.onError(response, e);
        }
    }

    /** Pauses a running workflow. */
    @Override
    public void pauseWorkflow(
            WorkflowServicePb.PauseWorkflowRequest req,
            StreamObserver<WorkflowServicePb.PauseWorkflowResponse> response) {
        try {
            workflowService.pauseWorkflow(req.getWorkflowId());
            response.onNext(WorkflowServicePb.PauseWorkflowResponse.getDefaultInstance());
            response.onCompleted();
        } catch (Exception e) {
            GRPC_HELPER.onError(response, e);
        }
    }

    /** Resumes a previously paused workflow. */
    @Override
    public void resumeWorkflow(
            WorkflowServicePb.ResumeWorkflowRequest req,
            StreamObserver<WorkflowServicePb.ResumeWorkflowResponse> response) {
        try {
            workflowService.resumeWorkflow(req.getWorkflowId());
            response.onNext(WorkflowServicePb.ResumeWorkflowResponse.getDefaultInstance());
            response.onCompleted();
        } catch (Exception e) {
            GRPC_HELPER.onError(response, e);
        }
    }

    /** Skips a task (by reference name) in a running workflow. */
    @Override
    public void skipTaskFromWorkflow(
            WorkflowServicePb.SkipTaskRequest req,
            StreamObserver<WorkflowServicePb.SkipTaskResponse> response) {
        try {
            SkipTaskRequest skipTask = PROTO_MAPPER.fromProto(req.getRequest());
            workflowService.skipTaskFromWorkflow(
                    req.getWorkflowId(), req.getTaskReferenceName(), skipTask);
            response.onNext(WorkflowServicePb.SkipTaskResponse.getDefaultInstance());
            response.onCompleted();
        } catch (Exception e) {
            GRPC_HELPER.onError(response, e);
        }
    }

    /** Reruns a workflow from a given point and streams back the (new) workflow id. */
    @Override
    public void rerunWorkflow(
            RerunWorkflowRequestPb.RerunWorkflowRequest req,
            StreamObserver<WorkflowServicePb.RerunWorkflowResponse> response) {
        try {
            String id =
                    workflowService.rerunWorkflow(
                            req.getReRunFromWorkflowId(), PROTO_MAPPER.fromProto(req));
            response.onNext(
                    WorkflowServicePb.RerunWorkflowResponse.newBuilder().setWorkflowId(id).build());
            response.onCompleted();
        } catch (Exception e) {
            GRPC_HELPER.onError(response, e);
        }
    }

    /** Restarts a completed/terminated workflow, optionally against the latest definitions. */
    @Override
    public void restartWorkflow(
            WorkflowServicePb.RestartWorkflowRequest req,
            StreamObserver<WorkflowServicePb.RestartWorkflowResponse> response) {
        try {
            workflowService.restartWorkflow(req.getWorkflowId(), req.getUseLatestDefinitions());
            response.onNext(WorkflowServicePb.RestartWorkflowResponse.getDefaultInstance());
            response.onCompleted();
        } catch (Exception e) {
            GRPC_HELPER.onError(response, e);
        }
    }

    /** Retries the last failed task in a workflow, optionally resuming sub-workflow tasks. */
    @Override
    public void retryWorkflow(
            WorkflowServicePb.RetryWorkflowRequest req,
            StreamObserver<WorkflowServicePb.RetryWorkflowResponse> response) {
        try {
            workflowService.retryWorkflow(req.getWorkflowId(), req.getResumeSubworkflowTasks());
            response.onNext(WorkflowServicePb.RetryWorkflowResponse.getDefaultInstance());
            response.onCompleted();
        } catch (Exception e) {
            GRPC_HELPER.onError(response, e);
        }
    }

    /** Resets the callback times of all non-terminal tasks in the workflow. */
    @Override
    public void resetWorkflowCallbacks(
            WorkflowServicePb.ResetWorkflowCallbacksRequest req,
            StreamObserver<WorkflowServicePb.ResetWorkflowCallbacksResponse> response) {
        try {
            workflowService.resetWorkflow(req.getWorkflowId());
            response.onNext(WorkflowServicePb.ResetWorkflowCallbacksResponse.getDefaultInstance());
            response.onCompleted();
        } catch (Exception e) {
            GRPC_HELPER.onError(response, e);
        }
    }

    /** Terminates a workflow with the supplied reason. */
    @Override
    public void terminateWorkflow(
            WorkflowServicePb.TerminateWorkflowRequest req,
            StreamObserver<WorkflowServicePb.TerminateWorkflowResponse> response) {
        try {
            workflowService.terminateWorkflow(req.getWorkflowId(), req.getReason());
            response.onNext(WorkflowServicePb.TerminateWorkflowResponse.getDefaultInstance());
            response.onCompleted();
        } catch (Exception e) {
            GRPC_HELPER.onError(response, e);
        }
    }

    /**
     * Rejects the request with INVALID_ARGUMENT when the requested page size exceeds the
     * configured maximum.
     *
     * @return true if the request was rejected (the caller must return immediately)
     */
    private boolean rejectOversizedPage(int size, StreamObserver<?> response) {
        if (size > maxSearchSize) {
            response.onError(
                    Status.INVALID_ARGUMENT
                            .withDescription(
                                    "Cannot return more than " + maxSearchSize + " results")
                            .asRuntimeException());
            return true;
        }
        return false;
    }

    /** Shared summary-search path for {@link #search} and {@link #searchByTasks}. */
    private void doSearch(
            boolean searchByTask,
            SearchPb.Request req,
            StreamObserver<WorkflowServicePb.WorkflowSummarySearchResult> response) {
        final int start = req.getStart();
        final int size = GRPC_HELPER.optionalOr(req.getSize(), maxSearchSize);
        final List<String> sort = convertSort(req.getSort());
        final String freeText = GRPC_HELPER.optionalOr(req.getFreeText(), "*");
        final String query = req.getQuery();

        if (rejectOversizedPage(size, response)) {
            return;
        }

        SearchResult<WorkflowSummary> search;
        if (searchByTask) {
            search = workflowService.searchWorkflowsByTasks(start, size, sort, freeText, query);
        } else {
            search = workflowService.searchWorkflows(start, size, sort, freeText, query);
        }

        response.onNext(
                WorkflowServicePb.WorkflowSummarySearchResult.newBuilder()
                        .setTotalHits(search.getTotalHits())
                        .addAllResults(
                                search.getResults().stream().map(PROTO_MAPPER::toProto)::iterator)
                        .build());
        response.onCompleted();
    }

    /** Shared full-workflow search path for {@link #searchV2} and {@link #searchByTasksV2}. */
    private void doSearchV2(
            boolean searchByTask,
            SearchPb.Request req,
            StreamObserver<WorkflowServicePb.WorkflowSearchResult> response) {
        final int start = req.getStart();
        final int size = GRPC_HELPER.optionalOr(req.getSize(), maxSearchSize);
        final List<String> sort = convertSort(req.getSort());
        final String freeText = GRPC_HELPER.optionalOr(req.getFreeText(), "*");
        final String query = req.getQuery();

        if (rejectOversizedPage(size, response)) {
            return;
        }

        SearchResult<Workflow> search;
        if (searchByTask) {
            search = workflowService.searchWorkflowsByTasksV2(start, size, sort, freeText, query);
        } else {
            search = workflowService.searchWorkflowsV2(start, size, sort, freeText, query);
        }

        response.onNext(
                WorkflowServicePb.WorkflowSearchResult.newBuilder()
                        .setTotalHits(search.getTotalHits())
                        .addAllResults(
                                search.getResults().stream().map(PROTO_MAPPER::toProto)::iterator)
                        .build());
        response.onCompleted();
    }

    /** Splits a '|'-separated sort expression into its components; empty list when absent. */
    private List<String> convertSort(String sortStr) {
        List<String> list = new ArrayList<>();
        if (sortStr != null && !sortStr.isEmpty()) {
            list = Arrays.asList(sortStr.split("\\|"));
        }
        return list;
    }

    @Override
    public void search(
            SearchPb.Request request,
            StreamObserver<WorkflowServicePb.WorkflowSummarySearchResult> responseObserver) {
        doSearch(false, request, responseObserver);
    }

    @Override
    public void searchByTasks(
            SearchPb.Request request,
            StreamObserver<WorkflowServicePb.WorkflowSummarySearchResult> responseObserver) {
        doSearch(true, request, responseObserver);
    }

    @Override
    public void searchV2(
            SearchPb.Request request,
            StreamObserver<WorkflowServicePb.WorkflowSearchResult> responseObserver) {
        doSearchV2(false, request, responseObserver);
    }

    @Override
    public void searchByTasksV2(
            SearchPb.Request request,
            StreamObserver<WorkflowServicePb.WorkflowSearchResult> responseObserver) {
        doSearchV2(true, request, responseObserver);
    }
}
6,972
0
Create_ds/conductor/grpc-server/src/main/java/com/netflix/conductor/grpc/server
Create_ds/conductor/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/HealthServiceImpl.java
/* * Copyright 2020 Netflix, Inc. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.grpc.server.service; import org.springframework.stereotype.Service; import io.grpc.health.v1.HealthCheckRequest; import io.grpc.health.v1.HealthCheckResponse; import io.grpc.health.v1.HealthGrpc; import io.grpc.stub.StreamObserver; @Service("grpcHealthService") public class HealthServiceImpl extends HealthGrpc.HealthImplBase { // SBMTODO: Move this Spring boot health check @Override public void check( HealthCheckRequest request, StreamObserver<HealthCheckResponse> responseObserver) { responseObserver.onNext( HealthCheckResponse.newBuilder() .setStatus(HealthCheckResponse.ServingStatus.SERVING) .build()); responseObserver.onCompleted(); } }
6,973
0
Create_ds/conductor/grpc-server/src/main/java/com/netflix/conductor/grpc/server
Create_ds/conductor/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/EventServiceImpl.java
/* * Copyright 2020 Netflix, Inc. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.grpc.server.service; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.stereotype.Service; import com.netflix.conductor.grpc.EventServiceGrpc; import com.netflix.conductor.grpc.EventServicePb; import com.netflix.conductor.grpc.ProtoMapper; import com.netflix.conductor.proto.EventHandlerPb; import com.netflix.conductor.service.MetadataService; import io.grpc.stub.StreamObserver; @Service("grpcEventService") public class EventServiceImpl extends EventServiceGrpc.EventServiceImplBase { private static final Logger LOGGER = LoggerFactory.getLogger(EventServiceImpl.class); private static final ProtoMapper PROTO_MAPPER = ProtoMapper.INSTANCE; private final MetadataService metadataService; public EventServiceImpl(MetadataService metadataService) { this.metadataService = metadataService; } @Override public void addEventHandler( EventServicePb.AddEventHandlerRequest req, StreamObserver<EventServicePb.AddEventHandlerResponse> response) { metadataService.addEventHandler(PROTO_MAPPER.fromProto(req.getHandler())); response.onNext(EventServicePb.AddEventHandlerResponse.getDefaultInstance()); response.onCompleted(); } @Override public void updateEventHandler( EventServicePb.UpdateEventHandlerRequest req, StreamObserver<EventServicePb.UpdateEventHandlerResponse> response) { 
metadataService.updateEventHandler(PROTO_MAPPER.fromProto(req.getHandler())); response.onNext(EventServicePb.UpdateEventHandlerResponse.getDefaultInstance()); response.onCompleted(); } @Override public void removeEventHandler( EventServicePb.RemoveEventHandlerRequest req, StreamObserver<EventServicePb.RemoveEventHandlerResponse> response) { metadataService.removeEventHandlerStatus(req.getName()); response.onNext(EventServicePb.RemoveEventHandlerResponse.getDefaultInstance()); response.onCompleted(); } @Override public void getEventHandlers( EventServicePb.GetEventHandlersRequest req, StreamObserver<EventHandlerPb.EventHandler> response) { metadataService.getAllEventHandlers().stream() .map(PROTO_MAPPER::toProto) .forEach(response::onNext); response.onCompleted(); } @Override public void getEventHandlersForEvent( EventServicePb.GetEventHandlersForEventRequest req, StreamObserver<EventHandlerPb.EventHandler> response) { metadataService.getEventHandlersForEvent(req.getEvent(), req.getActiveOnly()).stream() .map(PROTO_MAPPER::toProto) .forEach(response::onNext); response.onCompleted(); } }
6,974
0
Create_ds/conductor/grpc-server/src/main/java/com/netflix/conductor/grpc/server
Create_ds/conductor/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/GRPCHelper.java
/*
 * Copyright 2020 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.grpc.server.service;

import java.util.Arrays;

import javax.annotation.Nonnull;

import org.apache.commons.lang3.exception.ExceptionUtils;
import org.slf4j.Logger;

import com.google.rpc.DebugInfo;

import io.grpc.Metadata;
import io.grpc.Status;
import io.grpc.StatusException;
import io.grpc.protobuf.lite.ProtoLiteUtils;
import io.grpc.stub.StreamObserver;

/**
 * Small utility shared by the gRPC service implementations: converts internal exceptions
 * into rich gRPC status responses, and translates ProtoBuf "missing field" conventions
 * (empty string, zero integer) into the nullable values the Conductor service layer expects.
 */
public class GRPCHelper {

    private final Logger logger;

    /**
     * Trailer key for the serialized {@link DebugInfo} payload. The Java gRPC API has no
     * first-class setter for 'grpc-status-details-bin', so the trailer is crafted manually,
     * mirroring what the Go and C++ server implementations do; this lets clients in those
     * languages read the error details through their native 'details' accessors.
     */
    private static final Metadata.Key<DebugInfo> STATUS_DETAILS_KEY =
            Metadata.Key.of(
                    "grpc-status-details-bin",
                    ProtoLiteUtils.metadataMarshaller(DebugInfo.getDefaultInstance()));

    public GRPCHelper(Logger log) {
        this.logger = log;
    }

    /**
     * Wraps an unexpected server-side exception in a {@link StatusException} carrying the
     * INTERNAL code plus a {@link DebugInfo} trailer with the full stack trace.
     *
     * <p>gRPC serializes a status into HTTP/2 trailers: 'grpc-status' (a
     * {@link com.google.rpc.Code}), 'grpc-message' (the description), and optionally
     * 'grpc-status-details-bin' (an arbitrary serialized ProtoBuf describing the error —
     * {@link DebugInfo} by Google convention for internal failures). {@link Status} exposes
     * setters for the first two; the third is attached here through
     * {@link Status#asException(Metadata)}, which accepts arbitrary extra trailers.
     *
     * @param throwable the exception to convert
     * @return a {@link StatusException} whose trailers fully describe the failure
     */
    private StatusException throwableToStatusException(Throwable throwable) {
        String[] stackFrames = ExceptionUtils.getStackFrames(throwable);
        DebugInfo debugInfo =
                DebugInfo.newBuilder()
                        .addAllStackEntries(Arrays.asList(stackFrames))
                        .setDetail(ExceptionUtils.getMessage(throwable))
                        .build();

        Metadata trailers = new Metadata();
        trailers.put(STATUS_DETAILS_KEY, debugInfo);

        return Status.INTERNAL
                .withDescription(throwable.getMessage())
                .withCause(throwable)
                .asException(trailers);
    }

    /** Logs the failure and terminates the response stream with a detailed INTERNAL status. */
    void onError(StreamObserver<?> response, Throwable t) {
        logger.error("internal exception during GRPC request", t);
        response.onError(throwableToStatusException(t));
    }

    /**
     * Maps a ProtoBuf string field to the nullable form the Conductor APIs expect:
     * an empty string counts as "missing" and becomes null.
     *
     * @param str a non-null string from a ProtoBuf object
     * @return the string itself, or null when it is empty
     */
    String optional(@Nonnull String str) {
        if (str.isEmpty()) {
            return null;
        }
        return str;
    }

    /**
     * Returns {@code defaults} when the ProtoBuf string field is "missing" (empty),
     * otherwise the field's own value.
     *
     * @param str the input string
     * @param defaults the fallback value
     * @return {@code str} when non-empty; {@code defaults} otherwise
     */
    String optionalOr(@Nonnull String str, String defaults) {
        if (str.isEmpty()) {
            return defaults;
        }
        return str;
    }

    /**
     * Maps a ProtoBuf integer field to the nullable form the Conductor APIs expect:
     * a zero value counts as "missing" and becomes null.
     *
     * @param i a non-null Integer from a ProtoBuf object
     * @return the Integer itself, or null when it is zero
     */
    Integer optional(@Nonnull Integer i) {
        if (i == 0) {
            return null;
        }
        return i;
    }

    /**
     * Returns {@code defaults} when the ProtoBuf integer field is "missing" (zero),
     * otherwise the field's own value.
     *
     * @param i the input Integer
     * @param defaults the fallback value
     * @return {@code i} when non-zero; {@code defaults} otherwise
     */
    Integer optionalOr(@Nonnull Integer i, int defaults) {
        if (i == 0) {
            return defaults;
        }
        return i;
    }
}
6,975
0
Create_ds/conductor/grpc-server/src/main/java/com/netflix/conductor/grpc/server
Create_ds/conductor/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/MetadataServiceImpl.java
/*
 * Copyright 2020 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.grpc.server.service;

import java.util.List;
import java.util.stream.Collectors;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Service;

import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.core.exception.NotFoundException;
import com.netflix.conductor.grpc.MetadataServiceGrpc;
import com.netflix.conductor.grpc.MetadataServicePb;
import com.netflix.conductor.grpc.ProtoMapper;
import com.netflix.conductor.proto.TaskDefPb;
import com.netflix.conductor.proto.WorkflowDefPb;
import com.netflix.conductor.service.MetadataService;

import io.grpc.Status;
import io.grpc.stub.StreamObserver;

/**
 * gRPC facade over {@link MetadataService} for workflow/task definition CRUD.
 *
 * <p>CONSISTENCY FIX: all handlers now funnel unexpected exceptions through
 * {@link GRPCHelper#onError}, matching the workflow/task service implementations. Previously a
 * runtime exception here propagated to the transport and surfaced to clients as an opaque
 * UNKNOWN status instead of an INTERNAL status with DebugInfo trailers.
 */
@Service("grpcMetadataService")
public class MetadataServiceImpl extends MetadataServiceGrpc.MetadataServiceImplBase {

    private static final Logger LOGGER = LoggerFactory.getLogger(MetadataServiceImpl.class);
    private static final ProtoMapper PROTO_MAPPER = ProtoMapper.INSTANCE;
    private static final GRPCHelper GRPC_HELPER = new GRPCHelper(LOGGER);

    private final MetadataService service;

    public MetadataServiceImpl(MetadataService service) {
        this.service = service;
    }

    /** Registers a new workflow definition. */
    @Override
    public void createWorkflow(
            MetadataServicePb.CreateWorkflowRequest req,
            StreamObserver<MetadataServicePb.CreateWorkflowResponse> response) {
        try {
            WorkflowDef workflow = PROTO_MAPPER.fromProto(req.getWorkflow());
            service.registerWorkflowDef(workflow);
            response.onNext(MetadataServicePb.CreateWorkflowResponse.getDefaultInstance());
            response.onCompleted();
        } catch (Exception e) {
            GRPC_HELPER.onError(response, e);
        }
    }

    /** Validates a workflow definition without persisting it. */
    @Override
    public void validateWorkflow(
            MetadataServicePb.ValidateWorkflowRequest req,
            StreamObserver<MetadataServicePb.ValidateWorkflowResponse> response) {
        try {
            WorkflowDef workflow = PROTO_MAPPER.fromProto(req.getWorkflow());
            service.validateWorkflowDef(workflow);
            response.onNext(MetadataServicePb.ValidateWorkflowResponse.getDefaultInstance());
            response.onCompleted();
        } catch (Exception e) {
            GRPC_HELPER.onError(response, e);
        }
    }

    /** Updates a batch of workflow definitions. */
    @Override
    public void updateWorkflows(
            MetadataServicePb.UpdateWorkflowsRequest req,
            StreamObserver<MetadataServicePb.UpdateWorkflowsResponse> response) {
        try {
            List<WorkflowDef> workflows =
                    req.getDefsList().stream()
                            .map(PROTO_MAPPER::fromProto)
                            .collect(Collectors.toList());
            service.updateWorkflowDef(workflows);
            response.onNext(MetadataServicePb.UpdateWorkflowsResponse.getDefaultInstance());
            response.onCompleted();
        } catch (Exception e) {
            GRPC_HELPER.onError(response, e);
        }
    }

    /**
     * Fetches a workflow definition by name and optional version. Responds with NOT_FOUND
     * when no such definition exists.
     */
    @Override
    public void getWorkflow(
            MetadataServicePb.GetWorkflowRequest req,
            StreamObserver<MetadataServicePb.GetWorkflowResponse> response) {
        try {
            WorkflowDef workflowDef =
                    service.getWorkflowDef(req.getName(), GRPC_HELPER.optional(req.getVersion()));
            WorkflowDefPb.WorkflowDef workflow = PROTO_MAPPER.toProto(workflowDef);
            response.onNext(
                    MetadataServicePb.GetWorkflowResponse.newBuilder()
                            .setWorkflow(workflow)
                            .build());
            response.onCompleted();
        } catch (NotFoundException e) {
            // TODO replace this with gRPC exception interceptor.
            response.onError(
                    Status.NOT_FOUND
                            .withDescription("No such workflow found by name=" + req.getName())
                            .asRuntimeException());
        } catch (Exception e) {
            GRPC_HELPER.onError(response, e);
        }
    }

    /** Registers a batch of task definitions. */
    @Override
    public void createTasks(
            MetadataServicePb.CreateTasksRequest req,
            StreamObserver<MetadataServicePb.CreateTasksResponse> response) {
        try {
            service.registerTaskDef(
                    req.getDefsList().stream()
                            .map(PROTO_MAPPER::fromProto)
                            .collect(Collectors.toList()));
            response.onNext(MetadataServicePb.CreateTasksResponse.getDefaultInstance());
            response.onCompleted();
        } catch (Exception e) {
            GRPC_HELPER.onError(response, e);
        }
    }

    /** Updates an existing task definition. */
    @Override
    public void updateTask(
            MetadataServicePb.UpdateTaskRequest req,
            StreamObserver<MetadataServicePb.UpdateTaskResponse> response) {
        try {
            TaskDef task = PROTO_MAPPER.fromProto(req.getTask());
            service.updateTaskDef(task);
            response.onNext(MetadataServicePb.UpdateTaskResponse.getDefaultInstance());
            response.onCompleted();
        } catch (Exception e) {
            GRPC_HELPER.onError(response, e);
        }
    }

    /** Fetches a task definition by type; responds with NOT_FOUND when absent. */
    @Override
    public void getTask(
            MetadataServicePb.GetTaskRequest req,
            StreamObserver<MetadataServicePb.GetTaskResponse> response) {
        try {
            TaskDef def = service.getTaskDef(req.getTaskType());
            if (def != null) {
                TaskDefPb.TaskDef task = PROTO_MAPPER.toProto(def);
                response.onNext(
                        MetadataServicePb.GetTaskResponse.newBuilder().setTask(task).build());
                response.onCompleted();
            } else {
                response.onError(
                        Status.NOT_FOUND
                                .withDescription(
                                        "No such TaskDef found by taskType=" + req.getTaskType())
                                .asRuntimeException());
            }
        } catch (Exception e) {
            GRPC_HELPER.onError(response, e);
        }
    }

    /** Unregisters the task definition for the given type. */
    @Override
    public void deleteTask(
            MetadataServicePb.DeleteTaskRequest req,
            StreamObserver<MetadataServicePb.DeleteTaskResponse> response) {
        try {
            service.unregisterTaskDef(req.getTaskType());
            response.onNext(MetadataServicePb.DeleteTaskResponse.getDefaultInstance());
            response.onCompleted();
        } catch (Exception e) {
            GRPC_HELPER.onError(response, e);
        }
    }
}
6,976
0
Create_ds/conductor/grpc-server/src/main/java/com/netflix/conductor/grpc/server
Create_ds/conductor/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/TaskServiceImpl.java
/*
 * Copyright 2020 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.grpc.server.service;

import java.util.List;
import java.util.Map;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;

import com.netflix.conductor.common.metadata.tasks.Task;
import com.netflix.conductor.common.metadata.tasks.TaskExecLog;
import com.netflix.conductor.common.metadata.tasks.TaskResult;
import com.netflix.conductor.common.run.SearchResult;
import com.netflix.conductor.common.run.TaskSummary;
import com.netflix.conductor.grpc.ProtoMapper;
import com.netflix.conductor.grpc.SearchPb;
import com.netflix.conductor.grpc.TaskServiceGrpc;
import com.netflix.conductor.grpc.TaskServicePb;
import com.netflix.conductor.proto.TaskPb;
import com.netflix.conductor.service.ExecutionService;
import com.netflix.conductor.service.TaskService;

import io.grpc.Status;
import io.grpc.stub.StreamObserver;

/**
 * gRPC facade over {@link TaskService}/{@link ExecutionService} for worker task polling,
 * task updates, logs, queue introspection, and task search.
 */
@Service("grpcTaskService")
public class TaskServiceImpl extends TaskServiceGrpc.TaskServiceImplBase {

    private static final Logger LOGGER = LoggerFactory.getLogger(TaskServiceImpl.class);
    private static final ProtoMapper PROTO_MAPPER = ProtoMapper.INSTANCE;
    private static final GRPCHelper GRPC_HELPER = new GRPCHelper(LOGGER);

    // Default long-poll wait (ms) for single-task poll and the batchPoll default.
    private static final int POLL_TIMEOUT_MS = 100;
    // Hard cap (ms) on client-requested long-poll timeouts.
    private static final int MAX_POLL_TIMEOUT_MS = 5000;

    private final TaskService taskService;
    // Upper bound on a single search page; also the default page size when the request omits one.
    private final int maxSearchSize;
    private final ExecutionService executionService;

    public TaskServiceImpl(
            ExecutionService executionService,
            TaskService taskService,
            @Value("${workflow.max.search.size:5000}") int maxSearchSize) {
        this.executionService = executionService;
        this.taskService = taskService;
        this.maxSearchSize = maxSearchSize;
    }

    /** Polls for at most one pending task of the given type; empty response when none. */
    @Override
    public void poll(
            TaskServicePb.PollRequest req, StreamObserver<TaskServicePb.PollResponse> response) {
        try {
            List<Task> tasks =
                    executionService.poll(
                            req.getTaskType(),
                            req.getWorkerId(),
                            GRPC_HELPER.optional(req.getDomain()),
                            1,
                            POLL_TIMEOUT_MS);
            if (!tasks.isEmpty()) {
                TaskPb.Task t = PROTO_MAPPER.toProto(tasks.get(0));
                response.onNext(TaskServicePb.PollResponse.newBuilder().setTask(t).build());
            }
            response.onCompleted();
        } catch (Exception e) {
            GRPC_HELPER.onError(response, e);
        }
    }

    /**
     * Long-polls for up to {@code count} tasks, streaming each one back. Rejects timeouts
     * above {@link #MAX_POLL_TIMEOUT_MS} with INVALID_ARGUMENT.
     */
    @Override
    public void batchPoll(
            TaskServicePb.BatchPollRequest req, StreamObserver<TaskPb.Task> response) {
        final int count = GRPC_HELPER.optionalOr(req.getCount(), 1);
        final int timeout = GRPC_HELPER.optionalOr(req.getTimeout(), POLL_TIMEOUT_MS);

        if (timeout > MAX_POLL_TIMEOUT_MS) {
            response.onError(
                    Status.INVALID_ARGUMENT
                            .withDescription(
                                    "longpoll timeout cannot be longer than "
                                            + MAX_POLL_TIMEOUT_MS
                                            + "ms")
                            .asRuntimeException());
            return;
        }

        try {
            List<Task> polledTasks =
                    taskService.batchPoll(
                            req.getTaskType(),
                            req.getWorkerId(),
                            GRPC_HELPER.optional(req.getDomain()),
                            count,
                            timeout);
            // FIX: parameterized SLF4J logging instead of eager string concatenation, so the
            // task list is only rendered when INFO is enabled.
            LOGGER.info("polled tasks: {}", polledTasks);
            polledTasks.stream().map(PROTO_MAPPER::toProto).forEach(response::onNext);
            response.onCompleted();
        } catch (Exception e) {
            GRPC_HELPER.onError(response, e);
        }
    }

    /** Applies a worker's task result and echoes back the task id. */
    @Override
    public void updateTask(
            TaskServicePb.UpdateTaskRequest req,
            StreamObserver<TaskServicePb.UpdateTaskResponse> response) {
        try {
            TaskResult task = PROTO_MAPPER.fromProto(req.getResult());
            taskService.updateTask(task);

            response.onNext(
                    TaskServicePb.UpdateTaskResponse.newBuilder()
                            .setTaskId(task.getTaskId())
                            .build());
            response.onCompleted();
        } catch (Exception e) {
            GRPC_HELPER.onError(response, e);
        }
    }

    /** Appends a log line to a task's execution log. */
    @Override
    public void addLog(
            TaskServicePb.AddLogRequest req,
            StreamObserver<TaskServicePb.AddLogResponse> response) {
        taskService.log(req.getTaskId(), req.getLog());
        response.onNext(TaskServicePb.AddLogResponse.getDefaultInstance());
        response.onCompleted();
    }

    /** Returns all execution-log entries recorded for a task. */
    @Override
    public void getTaskLogs(
            TaskServicePb.GetTaskLogsRequest req,
            StreamObserver<TaskServicePb.GetTaskLogsResponse> response) {
        List<TaskExecLog> logs = taskService.getTaskLogs(req.getTaskId());
        response.onNext(
                TaskServicePb.GetTaskLogsResponse.newBuilder()
                        .addAllLogs(logs.stream().map(PROTO_MAPPER::toProto)::iterator)
                        .build());
        response.onCompleted();
    }

    /** Fetches a task by id; responds with NOT_FOUND when it does not exist. */
    @Override
    public void getTask(
            TaskServicePb.GetTaskRequest req,
            StreamObserver<TaskServicePb.GetTaskResponse> response) {
        try {
            Task task = taskService.getTask(req.getTaskId());
            if (task == null) {
                response.onError(
                        Status.NOT_FOUND
                                .withDescription("No such task found by id=" + req.getTaskId())
                                .asRuntimeException());
            } else {
                response.onNext(
                        TaskServicePb.GetTaskResponse.newBuilder()
                                .setTask(PROTO_MAPPER.toProto(task))
                                .build());
                response.onCompleted();
            }
        } catch (Exception e) {
            GRPC_HELPER.onError(response, e);
        }
    }

    /** Returns queue depth per requested task type. */
    @Override
    public void getQueueSizesForTasks(
            TaskServicePb.QueueSizesRequest req,
            StreamObserver<TaskServicePb.QueueSizesResponse> response) {
        Map<String, Integer> sizes = taskService.getTaskQueueSizes(req.getTaskTypesList());
        response.onNext(
                TaskServicePb.QueueSizesResponse.newBuilder().putAllQueueForTask(sizes).build());
        response.onCompleted();
    }

    /** Returns summary depth information for every task queue. */
    @Override
    public void getQueueInfo(
            TaskServicePb.QueueInfoRequest req,
            StreamObserver<TaskServicePb.QueueInfoResponse> response) {
        Map<String, Long> queueInfo = taskService.getAllQueueDetails();

        response.onNext(
                TaskServicePb.QueueInfoResponse.newBuilder().putAllQueues(queueInfo).build());
        response.onCompleted();
    }

    /** Returns verbose per-shard queue information (size and unacked count per shard). */
    @Override
    public void getQueueAllInfo(
            TaskServicePb.QueueAllInfoRequest req,
            StreamObserver<TaskServicePb.QueueAllInfoResponse> response) {
        Map<String, Map<String, Map<String, Long>>> info = taskService.allVerbose();
        TaskServicePb.QueueAllInfoResponse.Builder queuesBuilder =
                TaskServicePb.QueueAllInfoResponse.newBuilder();

        for (Map.Entry<String, Map<String, Map<String, Long>>> queue : info.entrySet()) {
            final String queueName = queue.getKey();
            final Map<String, Map<String, Long>> queueShards = queue.getValue();

            TaskServicePb.QueueAllInfoResponse.QueueInfo.Builder queueInfoBuilder =
                    TaskServicePb.QueueAllInfoResponse.QueueInfo.newBuilder();

            for (Map.Entry<String, Map<String, Long>> shard : queueShards.entrySet()) {
                final String shardName = shard.getKey();
                final Map<String, Long> shardInfo = shard.getValue();

                // FIXME: make shardInfo an actual type
                // shardInfo is an immutable map with predefined keys, so we can always
                // access 'size' and 'uacked'. It would be better if shardInfo
                // were actually a POJO.
                queueInfoBuilder.putShards(
                        shardName,
                        TaskServicePb.QueueAllInfoResponse.ShardInfo.newBuilder()
                                .setSize(shardInfo.get("size"))
                                .setUacked(shardInfo.get("uacked"))
                                .build());
            }

            queuesBuilder.putQueues(queueName, queueInfoBuilder.build());
        }

        response.onNext(queuesBuilder.build());
        response.onCompleted();
    }

    /**
     * Rejects the request with INVALID_ARGUMENT when the requested page size exceeds the
     * configured maximum. Extracted from the duplicated checks in search/searchV2.
     *
     * @return true if the request was rejected (the caller must return immediately)
     */
    private boolean rejectOversizedPage(int size, StreamObserver<?> response) {
        if (size > maxSearchSize) {
            response.onError(
                    Status.INVALID_ARGUMENT
                            .withDescription(
                                    "Cannot return more than " + maxSearchSize + " results")
                            .asRuntimeException());
            return true;
        }
        return false;
    }

    /** Searches task summaries with free-text/query filters and paging. */
    @Override
    public void search(
            SearchPb.Request req,
            StreamObserver<TaskServicePb.TaskSummarySearchResult> response) {
        final int start = req.getStart();
        final int size = GRPC_HELPER.optionalOr(req.getSize(), maxSearchSize);
        final String sort = req.getSort();
        final String freeText = GRPC_HELPER.optionalOr(req.getFreeText(), "*");
        final String query = req.getQuery();

        if (rejectOversizedPage(size, response)) {
            return;
        }

        SearchResult<TaskSummary> searchResult =
                taskService.search(start, size, sort, freeText, query);

        response.onNext(
                TaskServicePb.TaskSummarySearchResult.newBuilder()
                        .setTotalHits(searchResult.getTotalHits())
                        .addAllResults(
                                searchResult.getResults().stream().map(PROTO_MAPPER::toProto)
                                        ::iterator)
                        .build());
        response.onCompleted();
    }

    /** Searches full task documents with free-text/query filters and paging. */
    @Override
    public void searchV2(
            SearchPb.Request req, StreamObserver<TaskServicePb.TaskSearchResult> response) {
        final int start = req.getStart();
        final int size = GRPC_HELPER.optionalOr(req.getSize(), maxSearchSize);
        final String sort = req.getSort();
        final String freeText = GRPC_HELPER.optionalOr(req.getFreeText(), "*");
        final String query = req.getQuery();

        if (rejectOversizedPage(size, response)) {
            return;
        }

        SearchResult<Task> searchResult = taskService.searchV2(start, size, sort, freeText, query);

        response.onNext(
                TaskServicePb.TaskSearchResult.newBuilder()
                        .setTotalHits(searchResult.getTotalHits())
                        .addAllResults(
                                searchResult.getResults().stream().map(PROTO_MAPPER::toProto)
                                        ::iterator)
                        .build());
        response.onCompleted();
    }
}
6,977
0
Create_ds/conductor/test-harness/src/test/java/com/netflix
Create_ds/conductor/test-harness/src/test/java/com/netflix/conductor/ConductorTestApp.java
/* * Copyright 2022 Netflix, Inc. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package com.netflix.conductor; import java.io.IOException; import org.springframework.boot.SpringApplication; import org.springframework.boot.autoconfigure.SpringBootApplication; import org.springframework.boot.autoconfigure.jdbc.DataSourceAutoConfiguration; /** Copy of com.netflix.conductor.Conductor for use by @SpringBootTest in AbstractSpecification. */ // Prevents from the datasource beans to be loaded, AS they are needed only for specific databases. // In case that SQL database is selected this class will be imported back in the appropriate // database persistence module. @SpringBootApplication(exclude = DataSourceAutoConfiguration.class) public class ConductorTestApp { public static void main(String[] args) throws IOException { SpringApplication.run(ConductorTestApp.class, args); } }
6,978
0
Create_ds/conductor/test-harness/src/test/java/com/netflix/conductor/test
Create_ds/conductor/test-harness/src/test/java/com/netflix/conductor/test/integration/AbstractEndToEndTest.java
/*
 * Copyright 2020 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.test.integration;

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.io.Reader;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Optional;

import org.apache.http.HttpHost;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestClientBuilder;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.test.context.TestPropertySource;
import org.testcontainers.elasticsearch.ElasticsearchContainer;
import org.testcontainers.utility.DockerImageName;

import com.netflix.conductor.common.metadata.events.EventHandler;
import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.tasks.TaskType;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.common.run.Workflow;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;

/**
 * Transport-agnostic base class for end-to-end tests. Subclasses supply the concrete
 * client (HTTP or gRPC) by implementing the abstract accessor/registration methods at
 * the bottom; the tests themselves only exercise those abstractions. A singleton
 * Elasticsearch testcontainer backs indexing for all subclasses.
 */
@TestPropertySource(
        properties = {"conductor.indexing.enabled=true", "conductor.elasticsearch.version=6"})
public abstract class AbstractEndToEndTest {

    private static final Logger log = LoggerFactory.getLogger(AbstractEndToEndTest.class);

    // Prefix used when callers of createAndRegisterTaskDefinitions pass a null prefix.
    private static final String TASK_DEFINITION_PREFIX = "task_";
    private static final String DEFAULT_DESCRIPTION = "description";
    // Represents null value deserialized from the redis in memory db
    private static final String DEFAULT_NULL_VALUE = "null";
    protected static final String DEFAULT_EMAIL_ADDRESS = "test@harness.com";

    private static final ElasticsearchContainer container =
            new ElasticsearchContainer(
                    DockerImageName.parse("docker.elastic.co/elasticsearch/elasticsearch-oss")
                            .withTag("6.8.12")); // this should match the client version

    // Low-level ES client used only for test setup/teardown; created in initializeEs().
    private static RestClient restClient;

    // Initialization happens in a static block so the container is initialized
    // only once for all the sub-class tests in a CI environment
    // container is stopped when JVM exits
    // https://www.testcontainers.org/test_framework_integration/manual_lifecycle_control/#singleton-containers
    static {
        container.start();
        String httpHostAddress = container.getHttpHostAddress();
        // Spring reads this system property to point Conductor's indexing at the container.
        System.setProperty("conductor.elasticsearch.url", "http://" + httpHostAddress);
        log.info("Initialized Elasticsearch {}", container.getContainerId());
    }

    /** Builds the shared REST client against the container's dynamically assigned host:port. */
    @BeforeClass
    public static void initializeEs() {
        String httpHostAddress = container.getHttpHostAddress();
        String host = httpHostAddress.split(":")[0];
        int port = Integer.parseInt(httpHostAddress.split(":")[1]);

        RestClientBuilder restClientBuilder = RestClient.builder(new HttpHost(host, port, "http"));
        restClient = restClientBuilder.build();
    }

    /**
     * Deletes every ES index after each test class so subclasses start from a clean slate,
     * then closes the REST client.
     *
     * <p>NOTE(review): restClient is dereferenced before the {@code != null} check below, and
     * the two readers are never closed — harmless for a JVM-exit-scoped test helper, but worth
     * tightening (try-with-resources) if this is ever reused.
     */
    @AfterClass
    public static void cleanupEs() throws Exception {
        // deletes all indices
        Response beforeResponse = restClient.performRequest(new Request("GET", "/_cat/indices"));
        Reader streamReader = new InputStreamReader(beforeResponse.getEntity().getContent());
        BufferedReader bufferedReader = new BufferedReader(streamReader);

        String line;
        while ((line = bufferedReader.readLine()) != null) {
            // _cat/indices rows are whitespace-separated; column 2 is the index name.
            String[] fields = line.split("\\s");
            String endpoint = String.format("/%s", fields[2]);
            restClient.performRequest(new Request("DELETE", endpoint));
        }

        if (restClient != null) {
            restClient.close();
        }
    }

    /**
     * An ephemeral (unregistered) workflow referencing pre-registered task definitions
     * should start, and the server should echo back the inline workflow definition.
     */
    @Test
    public void testEphemeralWorkflowsWithStoredTasks() {
        String workflowExecutionName = "testEphemeralWorkflow";

        createAndRegisterTaskDefinitions("storedTaskDef", 5);

        WorkflowDef workflowDefinition = createWorkflowDefinition(workflowExecutionName);
        WorkflowTask workflowTask1 = createWorkflowTask("storedTaskDef1");
        WorkflowTask workflowTask2 = createWorkflowTask("storedTaskDef2");
        workflowDefinition.getTasks().addAll(Arrays.asList(workflowTask1, workflowTask2));

        String workflowId = startWorkflow(workflowExecutionName, workflowDefinition);
        assertNotNull(workflowId);

        Workflow workflow = getWorkflow(workflowId, true);
        WorkflowDef ephemeralWorkflow = workflow.getWorkflowDefinition();
        assertNotNull(ephemeralWorkflow);
        assertEquals(workflowDefinition, ephemeralWorkflow);
    }

    /**
     * An ephemeral workflow whose tasks carry their own inline task definitions should
     * start without any prior registration, and each task should keep its definition.
     */
    @Test
    public void testEphemeralWorkflowsWithEphemeralTasks() {
        String workflowExecutionName = "ephemeralWorkflowWithEphemeralTasks";

        WorkflowDef workflowDefinition = createWorkflowDefinition(workflowExecutionName);

        WorkflowTask workflowTask1 = createWorkflowTask("ephemeralTask1");
        TaskDef taskDefinition1 = createTaskDefinition("ephemeralTaskDef1");
        workflowTask1.setTaskDefinition(taskDefinition1);

        WorkflowTask workflowTask2 = createWorkflowTask("ephemeralTask2");
        TaskDef taskDefinition2 = createTaskDefinition("ephemeralTaskDef2");
        workflowTask2.setTaskDefinition(taskDefinition2);

        workflowDefinition.getTasks().addAll(Arrays.asList(workflowTask1, workflowTask2));

        String workflowId = startWorkflow(workflowExecutionName, workflowDefinition);
        assertNotNull(workflowId);

        Workflow workflow = getWorkflow(workflowId, true);
        WorkflowDef ephemeralWorkflow = workflow.getWorkflowDefinition();
        assertNotNull(ephemeralWorkflow);
        assertEquals(workflowDefinition, ephemeralWorkflow);

        List<WorkflowTask> ephemeralTasks = ephemeralWorkflow.getTasks();
        assertEquals(2, ephemeralTasks.size());
        for (WorkflowTask ephemeralTask : ephemeralTasks) {
            assertNotNull(ephemeralTask.getTaskDefinition());
        }
    }

    /**
     * Mixing one inline task definition and one stored task definition in a single
     * ephemeral workflow should preserve both: the inline one verbatim, the stored one
     * resolved from the metadata store.
     */
    @Test
    public void testEphemeralWorkflowsWithEphemeralAndStoredTasks() {
        createAndRegisterTaskDefinitions("storedTask", 1);

        WorkflowDef workflowDefinition =
                createWorkflowDefinition("testEphemeralWorkflowsWithEphemeralAndStoredTasks");

        WorkflowTask workflowTask1 = createWorkflowTask("ephemeralTask1");
        TaskDef taskDefinition1 = createTaskDefinition("ephemeralTaskDef1");
        workflowTask1.setTaskDefinition(taskDefinition1);

        WorkflowTask workflowTask2 = createWorkflowTask("storedTask0");

        workflowDefinition.getTasks().add(workflowTask1);
        workflowDefinition.getTasks().add(workflowTask2);

        String workflowExecutionName = "ephemeralWorkflowWithEphemeralAndStoredTasks";

        String workflowId = startWorkflow(workflowExecutionName, workflowDefinition);
        assertNotNull(workflowId);

        Workflow workflow = getWorkflow(workflowId, true);
        WorkflowDef ephemeralWorkflow = workflow.getWorkflowDefinition();
        assertNotNull(ephemeralWorkflow);
        assertEquals(workflowDefinition, ephemeralWorkflow);

        TaskDef storedTaskDefinition = getTaskDefinition("storedTask0");
        List<WorkflowTask> tasks = ephemeralWorkflow.getTasks();
        assertEquals(2, tasks.size());
        assertEquals(workflowTask1, tasks.get(0));
        TaskDef currentStoredTaskDefinition = tasks.get(1).getTaskDefinition();
        assertNotNull(currentStoredTaskDefinition);
        assertEquals(storedTaskDefinition, currentStoredTaskDefinition);
    }

    /**
     * A registered event handler should be retrievable by its event name, and exactly
     * one handler should come back for the name registered here.
     */
    @Test
    public void testEventHandler() {
        String eventName = "conductor:test_workflow:complete_task_with_event";
        EventHandler eventHandler = new EventHandler();
        eventHandler.setName("test_complete_task_event");
        EventHandler.Action completeTaskAction = new EventHandler.Action();
        completeTaskAction.setAction(EventHandler.Action.Type.complete_task);
        completeTaskAction.setComplete_task(new EventHandler.TaskDetails());
        completeTaskAction.getComplete_task().setTaskRefName("test_task");
        completeTaskAction.getComplete_task().setWorkflowId("test_id");
        completeTaskAction.getComplete_task().setOutput(new HashMap<>());
        eventHandler.getActions().add(completeTaskAction);
        eventHandler.setEvent(eventName);
        eventHandler.setActive(true);

        registerEventHandler(eventHandler);

        Iterator<EventHandler> it = getEventHandlers(eventName, true);
        EventHandler result = it.next();
        assertFalse(it.hasNext());
        assertEquals(eventHandler.getName(), result.getName());
    }

    /**
     * Builds a SIMPLE workflow task named {@code name}, with every optional string field
     * explicitly set to the sentinel {@code DEFAULT_NULL_VALUE} (see constant comment).
     */
    protected WorkflowTask createWorkflowTask(String name) {
        WorkflowTask workflowTask = new WorkflowTask();
        workflowTask.setName(name);
        workflowTask.setWorkflowTaskType(TaskType.SIMPLE);
        workflowTask.setTaskReferenceName(name);
        workflowTask.setDescription(getDefaultDescription(name));
        workflowTask.setDynamicTaskNameParam(DEFAULT_NULL_VALUE);
        workflowTask.setCaseValueParam(DEFAULT_NULL_VALUE);
        workflowTask.setCaseExpression(DEFAULT_NULL_VALUE);
        workflowTask.setDynamicForkTasksParam(DEFAULT_NULL_VALUE);
        workflowTask.setDynamicForkTasksInputParamName(DEFAULT_NULL_VALUE);
        workflowTask.setSink(DEFAULT_NULL_VALUE);
        workflowTask.setEvaluatorType(DEFAULT_NULL_VALUE);
        workflowTask.setExpression(DEFAULT_NULL_VALUE);
        return workflowTask;
    }

    /** Builds a minimal task definition with only the name populated. */
    protected TaskDef createTaskDefinition(String name) {
        TaskDef taskDefinition = new TaskDef();
        taskDefinition.setName(name);
        return taskDefinition;
    }

    /** Builds a minimal workflow definition with name, description, and owner e-mail set. */
    protected WorkflowDef createWorkflowDefinition(String workflowName) {
        WorkflowDef workflowDefinition = new WorkflowDef();
        workflowDefinition.setName(workflowName);
        workflowDefinition.setDescription(getDefaultDescription(workflowName));
        workflowDefinition.setFailureWorkflow(DEFAULT_NULL_VALUE);
        workflowDefinition.setOwnerEmail(DEFAULT_EMAIL_ADDRESS);
        return workflowDefinition;
    }

    /**
     * Creates {@code numberOfTaskDefinitions} task definitions named {@code prefix + i}
     * (falling back to {@code TASK_DEFINITION_PREFIX} when the prefix is null), registers
     * them through the subclass client, and returns them.
     */
    protected List<TaskDef> createAndRegisterTaskDefinitions(
            String prefixTaskDefinition, int numberOfTaskDefinitions) {
        String prefix = Optional.ofNullable(prefixTaskDefinition).orElse(TASK_DEFINITION_PREFIX);
        List<TaskDef> definitions = new LinkedList<>();
        for (int i = 0; i < numberOfTaskDefinitions; i++) {
            TaskDef def =
                    new TaskDef(
                            prefix + i,
                            "task " + i + DEFAULT_DESCRIPTION,
                            DEFAULT_EMAIL_ADDRESS,
                            3,
                            60,
                            60);
            def.setTimeoutPolicy(TaskDef.TimeoutPolicy.RETRY);
            definitions.add(def);
        }
        this.registerTaskDefinitions(definitions);
        return definitions;
    }

    private String getDefaultDescription(String nameResource) {
        return nameResource + " " + DEFAULT_DESCRIPTION;
    }

    // --- transport-specific hooks implemented by the HTTP and gRPC subclasses ---

    protected abstract String startWorkflow(
            String workflowExecutionName, WorkflowDef workflowDefinition);

    protected abstract Workflow getWorkflow(String workflowId, boolean includeTasks);

    protected abstract TaskDef getTaskDefinition(String taskName);

    protected abstract void registerTaskDefinitions(List<TaskDef> taskDefinitionList);

    protected abstract void registerWorkflowDefinition(WorkflowDef workflowDefinition);

    protected abstract void registerEventHandler(EventHandler eventHandler);

    protected abstract Iterator<EventHandler> getEventHandlers(String event, boolean activeOnly);
}
6,979
0
Create_ds/conductor/test-harness/src/test/java/com/netflix/conductor/test/integration
Create_ds/conductor/test-harness/src/test/java/com/netflix/conductor/test/integration/grpc/GrpcEndToEndTest.java
/* * Copyright 2022 Netflix, Inc. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.test.integration.grpc; import org.junit.Before; import com.netflix.conductor.client.grpc.EventClient; import com.netflix.conductor.client.grpc.MetadataClient; import com.netflix.conductor.client.grpc.TaskClient; import com.netflix.conductor.client.grpc.WorkflowClient; public class GrpcEndToEndTest extends AbstractGrpcEndToEndTest { @Before public void init() { taskClient = new TaskClient("localhost", 8092); workflowClient = new WorkflowClient("localhost", 8092); metadataClient = new MetadataClient("localhost", 8092); eventClient = new EventClient("localhost", 8092); } }
6,980
0
Create_ds/conductor/test-harness/src/test/java/com/netflix/conductor/test/integration
Create_ds/conductor/test-harness/src/test/java/com/netflix/conductor/test/integration/grpc/AbstractGrpcEndToEndTest.java
/*
 * Copyright 2020 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.test.integration.grpc;

import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;

import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.TestPropertySource;
import org.springframework.test.context.junit4.SpringRunner;

import com.netflix.conductor.client.grpc.EventClient;
import com.netflix.conductor.client.grpc.MetadataClient;
import com.netflix.conductor.client.grpc.TaskClient;
import com.netflix.conductor.client.grpc.WorkflowClient;
import com.netflix.conductor.common.metadata.events.EventHandler;
import com.netflix.conductor.common.metadata.tasks.Task;
import com.netflix.conductor.common.metadata.tasks.Task.Status;
import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.tasks.TaskDef.TimeoutPolicy;
import com.netflix.conductor.common.metadata.tasks.TaskResult;
import com.netflix.conductor.common.metadata.workflow.StartWorkflowRequest;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.common.run.SearchResult;
import com.netflix.conductor.common.run.TaskSummary;
import com.netflix.conductor.common.run.Workflow;
import com.netflix.conductor.common.run.Workflow.WorkflowStatus;
import com.netflix.conductor.common.run.WorkflowSummary;
import com.netflix.conductor.test.integration.AbstractEndToEndTest;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;

/**
 * End-to-end base class for the gRPC transport. Implements the transport hooks from
 * {@link AbstractEndToEndTest} using the gRPC clients, and runs a full lifecycle test
 * (register defs, start workflow, poll/complete tasks, search, terminate, restart)
 * against the embedded gRPC server started on port 8092.
 */
@RunWith(SpringRunner.class)
@SpringBootTest(
        properties = {"conductor.grpc-server.enabled=true", "conductor.grpc-server.port=8092"})
@TestPropertySource(locations = "classpath:application-integrationtest.properties")
public abstract class AbstractGrpcEndToEndTest extends AbstractEndToEndTest {

    // Populated by the concrete subclass's @Before (see GrpcEndToEndTest#init).
    protected static TaskClient taskClient;
    protected static WorkflowClient workflowClient;
    protected static MetadataClient metadataClient;
    protected static EventClient eventClient;

    @Override
    protected String startWorkflow(String workflowExecutionName, WorkflowDef workflowDefinition) {
        StartWorkflowRequest workflowRequest =
                new StartWorkflowRequest()
                        .withName(workflowExecutionName)
                        .withWorkflowDef(workflowDefinition);
        return workflowClient.startWorkflow(workflowRequest);
    }

    @Override
    protected Workflow getWorkflow(String workflowId, boolean includeTasks) {
        return workflowClient.getWorkflow(workflowId, includeTasks);
    }

    @Override
    protected TaskDef getTaskDefinition(String taskName) {
        return metadataClient.getTaskDef(taskName);
    }

    @Override
    protected void registerTaskDefinitions(List<TaskDef> taskDefinitionList) {
        metadataClient.registerTaskDefs(taskDefinitionList);
    }

    @Override
    protected void registerWorkflowDefinition(WorkflowDef workflowDefinition) {
        metadataClient.registerWorkflowDef(workflowDefinition);
    }

    @Override
    protected void registerEventHandler(EventHandler eventHandler) {
        eventClient.registerEventHandler(eventHandler);
    }

    @Override
    protected Iterator<EventHandler> getEventHandlers(String event, boolean activeOnly) {
        return eventClient.getEventHandlers(event, activeOnly);
    }

    /**
     * Full-lifecycle smoke test over the gRPC API.
     *
     * <p>Fixes two copy-paste bugs from the original: the totalHits assertions after
     * {@code taskClient.search(...)} and {@code taskClient.searchV2(...)} were checking
     * {@code searchResultV2Advanced} instead of the result just obtained.
     */
    @Test
    public void testAll() throws Exception {
        assertNotNull(taskClient);
        List<TaskDef> defs = new LinkedList<>();
        for (int i = 0; i < 5; i++) {
            TaskDef def = new TaskDef("t" + i, "task " + i, DEFAULT_EMAIL_ADDRESS, 3, 60, 60);
            def.setTimeoutPolicy(TimeoutPolicy.RETRY);
            defs.add(def);
        }
        metadataClient.registerTaskDefs(defs);

        for (int i = 0; i < 5; i++) {
            final String taskName = "t" + i;
            TaskDef def = metadataClient.getTaskDef(taskName);
            assertNotNull(def);
            assertEquals(taskName, def.getName());
        }

        WorkflowDef def = createWorkflowDefinition("test");
        WorkflowTask t0 = createWorkflowTask("t0");
        WorkflowTask t1 = createWorkflowTask("t1");
        def.getTasks().add(t0);
        def.getTasks().add(t1);

        metadataClient.registerWorkflowDef(def);
        WorkflowDef found = metadataClient.getWorkflowDef(def.getName(), null);
        assertNotNull(found);
        assertEquals(def, found);

        String correlationId = "test_corr_id";
        StartWorkflowRequest startWf = new StartWorkflowRequest();
        startWf.setName(def.getName());
        startWf.setCorrelationId(correlationId);

        String workflowId = workflowClient.startWorkflow(startWf);
        assertNotNull(workflowId);

        Workflow workflow = workflowClient.getWorkflow(workflowId, false);
        assertEquals(0, workflow.getTasks().size());
        assertEquals(workflowId, workflow.getWorkflowId());

        workflow = workflowClient.getWorkflow(workflowId, true);
        assertNotNull(workflow);
        assertEquals(WorkflowStatus.RUNNING, workflow.getStatus());
        assertEquals(1, workflow.getTasks().size());
        assertEquals(t0.getTaskReferenceName(), workflow.getTasks().get(0).getReferenceTaskName());
        assertEquals(workflowId, workflow.getWorkflowId());

        List<String> runningIds = workflowClient.getRunningWorkflow(def.getName(), def.getVersion());
        assertNotNull(runningIds);
        assertEquals(1, runningIds.size());
        assertEquals(workflowId, runningIds.get(0));

        // Polling an unknown task type must yield an empty (not null) list.
        List<Task> polled = taskClient.batchPollTasksByTaskType("non existing task", "test", 1, 100);
        assertNotNull(polled);
        assertEquals(0, polled.size());

        polled = taskClient.batchPollTasksByTaskType(t0.getName(), "test", 1, 100);
        assertNotNull(polled);
        assertEquals(1, polled.size());
        assertEquals(t0.getName(), polled.get(0).getTaskDefName());

        Task task = polled.get(0);
        task.getOutputData().put("key1", "value1");
        task.setStatus(Status.COMPLETED);
        taskClient.updateTask(new TaskResult(task));

        // The completed task must no longer be pollable.
        polled = taskClient.batchPollTasksByTaskType(t0.getName(), "test", 1, 100);
        assertNotNull(polled);
        assertTrue(polled.toString(), polled.isEmpty());

        workflow = workflowClient.getWorkflow(workflowId, true);
        assertNotNull(workflow);
        assertEquals(WorkflowStatus.RUNNING, workflow.getStatus());
        assertEquals(2, workflow.getTasks().size());
        assertEquals(t0.getTaskReferenceName(), workflow.getTasks().get(0).getReferenceTaskName());
        assertEquals(t1.getTaskReferenceName(), workflow.getTasks().get(1).getReferenceTaskName());
        assertEquals(Status.COMPLETED, workflow.getTasks().get(0).getStatus());
        assertEquals(Status.SCHEDULED, workflow.getTasks().get(1).getStatus());

        Task taskById = taskClient.getTaskDetails(task.getTaskId());
        assertNotNull(taskById);
        assertEquals(task.getTaskId(), taskById.getTaskId());

        // Give the indexer a moment to flush before running the search assertions.
        Thread.sleep(1000);

        SearchResult<WorkflowSummary> searchResult =
                workflowClient.search("workflowType='" + def.getName() + "'");
        assertNotNull(searchResult);
        assertEquals(1, searchResult.getTotalHits());
        assertEquals(workflow.getWorkflowId(), searchResult.getResults().get(0).getWorkflowId());

        SearchResult<Workflow> searchResultV2 =
                workflowClient.searchV2("workflowType='" + def.getName() + "'");
        assertNotNull(searchResultV2);
        assertEquals(1, searchResultV2.getTotalHits());
        assertEquals(workflow.getWorkflowId(), searchResultV2.getResults().get(0).getWorkflowId());

        SearchResult<WorkflowSummary> searchResultAdvanced =
                workflowClient.search(0, 1, null, null, "workflowType='" + def.getName() + "'");
        assertNotNull(searchResultAdvanced);
        assertEquals(1, searchResultAdvanced.getTotalHits());
        assertEquals(
                workflow.getWorkflowId(), searchResultAdvanced.getResults().get(0).getWorkflowId());

        SearchResult<Workflow> searchResultV2Advanced =
                workflowClient.searchV2(0, 1, null, null, "workflowType='" + def.getName() + "'");
        assertNotNull(searchResultV2Advanced);
        assertEquals(1, searchResultV2Advanced.getTotalHits());
        assertEquals(
                workflow.getWorkflowId(),
                searchResultV2Advanced.getResults().get(0).getWorkflowId());

        SearchResult<TaskSummary> taskSearchResult =
                taskClient.search("taskType='" + t0.getName() + "'");
        assertNotNull(taskSearchResult);
        // BUGFIX: previously asserted searchResultV2Advanced.getTotalHits() (copy-paste).
        assertEquals(1, taskSearchResult.getTotalHits());
        assertEquals(t0.getName(), taskSearchResult.getResults().get(0).getTaskDefName());

        SearchResult<TaskSummary> taskSearchResultAdvanced =
                taskClient.search(0, 1, null, null, "taskType='" + t0.getName() + "'");
        assertNotNull(taskSearchResultAdvanced);
        assertEquals(1, taskSearchResultAdvanced.getTotalHits());
        assertEquals(t0.getName(), taskSearchResultAdvanced.getResults().get(0).getTaskDefName());

        SearchResult<Task> taskSearchResultV2 =
                taskClient.searchV2("taskType='" + t0.getName() + "'");
        assertNotNull(taskSearchResultV2);
        // BUGFIX: previously asserted searchResultV2Advanced.getTotalHits() (copy-paste).
        assertEquals(1, taskSearchResultV2.getTotalHits());
        assertEquals(
                t0.getTaskReferenceName(),
                taskSearchResultV2.getResults().get(0).getReferenceTaskName());

        SearchResult<Task> taskSearchResultV2Advanced =
                taskClient.searchV2(0, 1, null, null, "taskType='" + t0.getName() + "'");
        assertNotNull(taskSearchResultV2Advanced);
        assertEquals(1, taskSearchResultV2Advanced.getTotalHits());
        assertEquals(
                t0.getTaskReferenceName(),
                taskSearchResultV2Advanced.getResults().get(0).getReferenceTaskName());

        workflowClient.terminateWorkflow(workflowId, "terminate reason");
        workflow = workflowClient.getWorkflow(workflowId, true);
        assertNotNull(workflow);
        assertEquals(WorkflowStatus.TERMINATED, workflow.getStatus());

        workflowClient.restart(workflowId, false);
        workflow = workflowClient.getWorkflow(workflowId, true);
        assertNotNull(workflow);
        assertEquals(WorkflowStatus.RUNNING, workflow.getStatus());
        assertEquals(1, workflow.getTasks().size());
    }
}
6,981
0
Create_ds/conductor/test-harness/src/test/java/com/netflix/conductor/test/integration
Create_ds/conductor/test-harness/src/test/java/com/netflix/conductor/test/integration/http/HttpEndToEndTest.java
/* * Copyright 2022 Netflix, Inc. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.test.integration.http; import org.junit.Before; import com.netflix.conductor.client.http.EventClient; import com.netflix.conductor.client.http.MetadataClient; import com.netflix.conductor.client.http.TaskClient; import com.netflix.conductor.client.http.WorkflowClient; public class HttpEndToEndTest extends AbstractHttpEndToEndTest { @Before public void init() { apiRoot = String.format("http://localhost:%d/api/", port); taskClient = new TaskClient(); taskClient.setRootURI(apiRoot); workflowClient = new WorkflowClient(); workflowClient.setRootURI(apiRoot); metadataClient = new MetadataClient(); metadataClient.setRootURI(apiRoot); eventClient = new EventClient(); eventClient.setRootURI(apiRoot); } }
6,982
0
Create_ds/conductor/test-harness/src/test/java/com/netflix/conductor/test/integration
Create_ds/conductor/test-harness/src/test/java/com/netflix/conductor/test/integration/http/AbstractHttpEndToEndTest.java
/* * Copyright 2020 Netflix, Inc. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.test.integration.http; import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.stream.Collectors; import org.junit.Test; import org.junit.runner.RunWith; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.boot.test.context.SpringBootTest.WebEnvironment; import org.springframework.boot.web.server.LocalServerPort; import org.springframework.test.context.TestPropertySource; import org.springframework.test.context.junit4.SpringRunner; import com.netflix.conductor.client.exception.ConductorClientException; import com.netflix.conductor.client.http.EventClient; import com.netflix.conductor.client.http.MetadataClient; import com.netflix.conductor.client.http.TaskClient; import com.netflix.conductor.client.http.WorkflowClient; import com.netflix.conductor.common.metadata.events.EventHandler; import com.netflix.conductor.common.metadata.tasks.Task; import com.netflix.conductor.common.metadata.tasks.Task.Status; import com.netflix.conductor.common.metadata.tasks.TaskDef; import com.netflix.conductor.common.metadata.tasks.TaskResult; import com.netflix.conductor.common.metadata.tasks.TaskType; import com.netflix.conductor.common.metadata.workflow.StartWorkflowRequest; import com.netflix.conductor.common.metadata.workflow.WorkflowDef; import 
com.netflix.conductor.common.metadata.workflow.WorkflowTask; import com.netflix.conductor.common.run.SearchResult; import com.netflix.conductor.common.run.TaskSummary; import com.netflix.conductor.common.run.Workflow; import com.netflix.conductor.common.run.Workflow.WorkflowStatus; import com.netflix.conductor.common.run.WorkflowSummary; import com.netflix.conductor.common.validation.ValidationError; import com.netflix.conductor.test.integration.AbstractEndToEndTest; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; @RunWith(SpringRunner.class) @SpringBootTest(webEnvironment = WebEnvironment.RANDOM_PORT) @TestPropertySource(locations = "classpath:application-integrationtest.properties") public abstract class AbstractHttpEndToEndTest extends AbstractEndToEndTest { @LocalServerPort protected int port; protected static String apiRoot; protected static TaskClient taskClient; protected static WorkflowClient workflowClient; protected static MetadataClient metadataClient; protected static EventClient eventClient; @Override protected String startWorkflow(String workflowExecutionName, WorkflowDef workflowDefinition) { StartWorkflowRequest workflowRequest = new StartWorkflowRequest() .withName(workflowExecutionName) .withWorkflowDef(workflowDefinition); return workflowClient.startWorkflow(workflowRequest); } @Override protected Workflow getWorkflow(String workflowId, boolean includeTasks) { return workflowClient.getWorkflow(workflowId, includeTasks); } @Override protected TaskDef getTaskDefinition(String taskName) { return metadataClient.getTaskDef(taskName); } @Override protected void registerTaskDefinitions(List<TaskDef> taskDefinitionList) { metadataClient.registerTaskDefs(taskDefinitionList); } @Override protected void registerWorkflowDefinition(WorkflowDef workflowDefinition) { 
metadataClient.registerWorkflowDef(workflowDefinition); } @Override protected void registerEventHandler(EventHandler eventHandler) { eventClient.registerEventHandler(eventHandler); } @Override protected Iterator<EventHandler> getEventHandlers(String event, boolean activeOnly) { return eventClient.getEventHandlers(event, activeOnly).iterator(); } @Test public void testAll() throws Exception { createAndRegisterTaskDefinitions("t", 5); WorkflowDef def = new WorkflowDef(); def.setName("test"); def.setOwnerEmail(DEFAULT_EMAIL_ADDRESS); WorkflowTask t0 = new WorkflowTask(); t0.setName("t0"); t0.setWorkflowTaskType(TaskType.SIMPLE); t0.setTaskReferenceName("t0"); WorkflowTask t1 = new WorkflowTask(); t1.setName("t1"); t1.setWorkflowTaskType(TaskType.SIMPLE); t1.setTaskReferenceName("t1"); def.getTasks().add(t0); def.getTasks().add(t1); metadataClient.registerWorkflowDef(def); WorkflowDef workflowDefinitionFromSystem = metadataClient.getWorkflowDef(def.getName(), null); assertNotNull(workflowDefinitionFromSystem); assertEquals(def, workflowDefinitionFromSystem); String correlationId = "test_corr_id"; StartWorkflowRequest startWorkflowRequest = new StartWorkflowRequest() .withName(def.getName()) .withCorrelationId(correlationId) .withPriority(50) .withInput(new HashMap<>()); String workflowId = workflowClient.startWorkflow(startWorkflowRequest); assertNotNull(workflowId); Workflow workflow = workflowClient.getWorkflow(workflowId, false); assertEquals(0, workflow.getTasks().size()); assertEquals(workflowId, workflow.getWorkflowId()); workflow = workflowClient.getWorkflow(workflowId, true); assertNotNull(workflow); assertEquals(WorkflowStatus.RUNNING, workflow.getStatus()); assertEquals(1, workflow.getTasks().size()); assertEquals(t0.getTaskReferenceName(), workflow.getTasks().get(0).getReferenceTaskName()); assertEquals(workflowId, workflow.getWorkflowId()); int queueSize = taskClient.getQueueSizeForTask(workflow.getTasks().get(0).getTaskType()); assertEquals(1, queueSize); 
List<String> runningIds = workflowClient.getRunningWorkflow(def.getName(), def.getVersion()); assertNotNull(runningIds); assertEquals(1, runningIds.size()); assertEquals(workflowId, runningIds.get(0)); List<Task> polled = taskClient.batchPollTasksByTaskType("non existing task", "test", 1, 100); assertNotNull(polled); assertEquals(0, polled.size()); polled = taskClient.batchPollTasksByTaskType(t0.getName(), "test", 1, 100); assertNotNull(polled); assertEquals(1, polled.size()); assertEquals(t0.getName(), polled.get(0).getTaskDefName()); Task task = polled.get(0); task.getOutputData().put("key1", "value1"); task.setStatus(Status.COMPLETED); taskClient.updateTask(new TaskResult(task)); polled = taskClient.batchPollTasksByTaskType(t0.getName(), "test", 1, 100); assertNotNull(polled); assertTrue(polled.toString(), polled.isEmpty()); workflow = workflowClient.getWorkflow(workflowId, true); assertNotNull(workflow); assertEquals(WorkflowStatus.RUNNING, workflow.getStatus()); assertEquals(2, workflow.getTasks().size()); assertEquals(t0.getTaskReferenceName(), workflow.getTasks().get(0).getReferenceTaskName()); assertEquals(t1.getTaskReferenceName(), workflow.getTasks().get(1).getReferenceTaskName()); assertEquals(Task.Status.COMPLETED, workflow.getTasks().get(0).getStatus()); assertEquals(Task.Status.SCHEDULED, workflow.getTasks().get(1).getStatus()); Task taskById = taskClient.getTaskDetails(task.getTaskId()); assertNotNull(taskById); assertEquals(task.getTaskId(), taskById.getTaskId()); queueSize = taskClient.getQueueSizeForTask(workflow.getTasks().get(1).getTaskType()); assertEquals(1, queueSize); Thread.sleep(1000); SearchResult<WorkflowSummary> searchResult = workflowClient.search("workflowType='" + def.getName() + "'"); assertNotNull(searchResult); assertEquals(1, searchResult.getTotalHits()); assertEquals(workflow.getWorkflowId(), searchResult.getResults().get(0).getWorkflowId()); SearchResult<Workflow> searchResultV2 = workflowClient.searchV2("workflowType='" + 
def.getName() + "'"); assertNotNull(searchResultV2); assertEquals(1, searchResultV2.getTotalHits()); assertEquals(workflow.getWorkflowId(), searchResultV2.getResults().get(0).getWorkflowId()); SearchResult<WorkflowSummary> searchResultAdvanced = workflowClient.search(0, 1, null, null, "workflowType='" + def.getName() + "'"); assertNotNull(searchResultAdvanced); assertEquals(1, searchResultAdvanced.getTotalHits()); assertEquals( workflow.getWorkflowId(), searchResultAdvanced.getResults().get(0).getWorkflowId()); SearchResult<Workflow> searchResultV2Advanced = workflowClient.searchV2(0, 1, null, null, "workflowType='" + def.getName() + "'"); assertNotNull(searchResultV2Advanced); assertEquals(1, searchResultV2Advanced.getTotalHits()); assertEquals( workflow.getWorkflowId(), searchResultV2Advanced.getResults().get(0).getWorkflowId()); SearchResult<TaskSummary> taskSearchResult = taskClient.search("taskType='" + t0.getName() + "'"); assertNotNull(taskSearchResult); assertEquals(1, searchResultV2Advanced.getTotalHits()); assertEquals(t0.getName(), taskSearchResult.getResults().get(0).getTaskDefName()); SearchResult<TaskSummary> taskSearchResultAdvanced = taskClient.search(0, 1, null, null, "taskType='" + t0.getName() + "'"); assertNotNull(taskSearchResultAdvanced); assertEquals(1, taskSearchResultAdvanced.getTotalHits()); assertEquals(t0.getName(), taskSearchResultAdvanced.getResults().get(0).getTaskDefName()); SearchResult<Task> taskSearchResultV2 = taskClient.searchV2("taskType='" + t0.getName() + "'"); assertNotNull(taskSearchResultV2); assertEquals(1, searchResultV2Advanced.getTotalHits()); assertEquals( t0.getTaskReferenceName(), taskSearchResultV2.getResults().get(0).getReferenceTaskName()); SearchResult<Task> taskSearchResultV2Advanced = taskClient.searchV2(0, 1, null, null, "taskType='" + t0.getName() + "'"); assertNotNull(taskSearchResultV2Advanced); assertEquals(1, taskSearchResultV2Advanced.getTotalHits()); assertEquals( t0.getTaskReferenceName(), 
taskSearchResultV2Advanced.getResults().get(0).getReferenceTaskName()); workflowClient.terminateWorkflow(workflowId, "terminate reason"); workflow = workflowClient.getWorkflow(workflowId, true); assertNotNull(workflow); assertEquals(WorkflowStatus.TERMINATED, workflow.getStatus()); workflowClient.restart(workflowId, false); workflow = workflowClient.getWorkflow(workflowId, true); assertNotNull(workflow); assertEquals(WorkflowStatus.RUNNING, workflow.getStatus()); assertEquals(1, workflow.getTasks().size()); workflowClient.skipTaskFromWorkflow(workflowId, "t1"); } @Test(expected = ConductorClientException.class) public void testMetadataWorkflowDefinition() { String workflowDefName = "testWorkflowDefMetadata"; WorkflowDef def = new WorkflowDef(); def.setName(workflowDefName); def.setVersion(1); WorkflowTask t0 = new WorkflowTask(); t0.setName("t0"); t0.setWorkflowTaskType(TaskType.SIMPLE); t0.setTaskReferenceName("t0"); WorkflowTask t1 = new WorkflowTask(); t1.setName("t1"); t1.setWorkflowTaskType(TaskType.SIMPLE); t1.setTaskReferenceName("t1"); def.getTasks().add(t0); def.getTasks().add(t1); metadataClient.registerWorkflowDef(def); metadataClient.unregisterWorkflowDef(workflowDefName, 1); try { metadataClient.getWorkflowDef(workflowDefName, 1); } catch (ConductorClientException e) { int statusCode = e.getStatus(); String errorMessage = e.getMessage(); boolean retryable = e.isRetryable(); assertEquals(404, statusCode); assertEquals( "No such workflow found by name: testWorkflowDefMetadata, version: 1", errorMessage); assertFalse(retryable); throw e; } } @Test(expected = ConductorClientException.class) public void testInvalidResource() { MetadataClient metadataClient = new MetadataClient(); metadataClient.setRootURI(String.format("%sinvalid", apiRoot)); WorkflowDef def = new WorkflowDef(); def.setName("testWorkflowDel"); def.setVersion(1); try { metadataClient.registerWorkflowDef(def); } catch (ConductorClientException e) { int statusCode = e.getStatus(); boolean 
retryable = e.isRetryable(); assertEquals(404, statusCode); assertFalse(retryable); throw e; } } @Test(expected = ConductorClientException.class) public void testUpdateWorkflow() { TaskDef taskDef = new TaskDef(); taskDef.setName("taskUpdate"); ArrayList<TaskDef> tasks = new ArrayList<>(); tasks.add(taskDef); metadataClient.registerTaskDefs(tasks); WorkflowDef def = new WorkflowDef(); def.setName("testWorkflowDel"); def.setVersion(1); WorkflowTask workflowTask = new WorkflowTask(); workflowTask.setName("taskUpdate"); workflowTask.setTaskReferenceName("taskUpdate"); List<WorkflowTask> workflowTaskList = new ArrayList<>(); workflowTaskList.add(workflowTask); def.setTasks(workflowTaskList); List<WorkflowDef> workflowList = new ArrayList<>(); workflowList.add(def); metadataClient.registerWorkflowDef(def); def.setVersion(2); metadataClient.updateWorkflowDefs(workflowList); WorkflowDef def1 = metadataClient.getWorkflowDef(def.getName(), 2); assertNotNull(def1); try { metadataClient.getTaskDef("test"); } catch (ConductorClientException e) { int statuCode = e.getStatus(); assertEquals(404, statuCode); assertEquals("No such taskType found by name: test", e.getMessage()); assertFalse(e.isRetryable()); throw e; } } @Test public void testStartWorkflow() { StartWorkflowRequest startWorkflowRequest = new StartWorkflowRequest(); try { workflowClient.startWorkflow(startWorkflowRequest); fail("StartWorkflow#name is null but NullPointerException was not thrown"); } catch (NullPointerException e) { assertEquals("Workflow name cannot be null or empty", e.getMessage()); } catch (Exception e) { fail("StartWorkflow#name is null but NullPointerException was not thrown"); } } @Test(expected = ConductorClientException.class) public void testUpdateTask() { TaskResult taskResult = new TaskResult(); try { taskClient.updateTask(taskResult); } catch (ConductorClientException e) { assertEquals(400, e.getStatus()); assertEquals("Validation failed, check below errors for detail.", e.getMessage()); 
assertFalse(e.isRetryable()); List<ValidationError> errors = e.getValidationErrors(); List<String> errorMessages = errors.stream().map(ValidationError::getMessage).collect(Collectors.toList()); assertEquals(2, errors.size()); assertTrue(errorMessages.contains("Workflow Id cannot be null or empty")); throw e; } } @Test(expected = ConductorClientException.class) public void testGetWorfklowNotFound() { try { workflowClient.getWorkflow("w123", true); } catch (ConductorClientException e) { assertEquals(404, e.getStatus()); assertEquals("No such workflow found by id: w123", e.getMessage()); assertFalse(e.isRetryable()); throw e; } } @Test(expected = ConductorClientException.class) public void testEmptyCreateWorkflowDef() { try { WorkflowDef workflowDef = new WorkflowDef(); metadataClient.registerWorkflowDef(workflowDef); } catch (ConductorClientException e) { assertEquals(400, e.getStatus()); assertEquals("Validation failed, check below errors for detail.", e.getMessage()); assertFalse(e.isRetryable()); List<ValidationError> errors = e.getValidationErrors(); List<String> errorMessages = errors.stream().map(ValidationError::getMessage).collect(Collectors.toList()); assertTrue(errorMessages.contains("WorkflowDef name cannot be null or empty")); assertTrue(errorMessages.contains("WorkflowTask list cannot be empty")); throw e; } } @Test(expected = ConductorClientException.class) public void testUpdateWorkflowDef() { try { WorkflowDef workflowDef = new WorkflowDef(); List<WorkflowDef> workflowDefList = new ArrayList<>(); workflowDefList.add(workflowDef); metadataClient.updateWorkflowDefs(workflowDefList); } catch (ConductorClientException e) { assertEquals(400, e.getStatus()); assertEquals("Validation failed, check below errors for detail.", e.getMessage()); assertFalse(e.isRetryable()); List<ValidationError> errors = e.getValidationErrors(); List<String> errorMessages = errors.stream().map(ValidationError::getMessage).collect(Collectors.toList()); assertEquals(3, 
errors.size()); assertTrue(errorMessages.contains("WorkflowTask list cannot be empty")); assertTrue(errorMessages.contains("WorkflowDef name cannot be null or empty")); assertTrue(errorMessages.contains("ownerEmail cannot be empty")); throw e; } } @Test public void testTaskByTaskId() { try { taskClient.getTaskDetails("test999"); } catch (ConductorClientException e) { assertEquals(404, e.getStatus()); assertEquals("No such task found by taskId: test999", e.getMessage()); } } @Test public void testListworkflowsByCorrelationId() { workflowClient.getWorkflows("test", "test12", false, false); } @Test(expected = ConductorClientException.class) public void testCreateInvalidWorkflowDef() { try { WorkflowDef workflowDef = new WorkflowDef(); List<WorkflowDef> workflowDefList = new ArrayList<>(); workflowDefList.add(workflowDef); metadataClient.registerWorkflowDef(workflowDef); } catch (ConductorClientException e) { assertEquals(3, e.getValidationErrors().size()); assertEquals(400, e.getStatus()); assertEquals("Validation failed, check below errors for detail.", e.getMessage()); assertFalse(e.isRetryable()); List<ValidationError> errors = e.getValidationErrors(); List<String> errorMessages = errors.stream().map(ValidationError::getMessage).collect(Collectors.toList()); assertTrue(errorMessages.contains("WorkflowDef name cannot be null or empty")); assertTrue(errorMessages.contains("WorkflowTask list cannot be empty")); assertTrue(errorMessages.contains("ownerEmail cannot be empty")); throw e; } } @Test(expected = ConductorClientException.class) public void testUpdateTaskDefNameNull() { TaskDef taskDef = new TaskDef(); try { metadataClient.updateTaskDef(taskDef); } catch (ConductorClientException e) { assertEquals(2, e.getValidationErrors().size()); assertEquals(400, e.getStatus()); assertEquals("Validation failed, check below errors for detail.", e.getMessage()); assertFalse(e.isRetryable()); List<ValidationError> errors = e.getValidationErrors(); List<String> errorMessages = 
errors.stream().map(ValidationError::getMessage).collect(Collectors.toList()); assertTrue(errorMessages.contains("TaskDef name cannot be null or empty")); assertTrue(errorMessages.contains("ownerEmail cannot be empty")); throw e; } } @Test(expected = IllegalArgumentException.class) public void testGetTaskDefNotExisting() { metadataClient.getTaskDef(""); } @Test(expected = ConductorClientException.class) public void testUpdateWorkflowDefNameNull() { WorkflowDef workflowDef = new WorkflowDef(); List<WorkflowDef> list = new ArrayList<>(); list.add(workflowDef); try { metadataClient.updateWorkflowDefs(list); } catch (ConductorClientException e) { assertEquals(3, e.getValidationErrors().size()); assertEquals(400, e.getStatus()); assertEquals("Validation failed, check below errors for detail.", e.getMessage()); assertFalse(e.isRetryable()); List<ValidationError> errors = e.getValidationErrors(); List<String> errorMessages = errors.stream().map(ValidationError::getMessage).collect(Collectors.toList()); assertTrue(errorMessages.contains("WorkflowDef name cannot be null or empty")); assertTrue(errorMessages.contains("WorkflowTask list cannot be empty")); assertTrue(errorMessages.contains("ownerEmail cannot be empty")); throw e; } } }
6,983
0
Create_ds/conductor/test-harness/src/test/java/com/netflix/conductor/test
Create_ds/conductor/test-harness/src/test/java/com/netflix/conductor/test/utils/UserTask.java
/* * Copyright 2022 Netflix, Inc. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.test.utils; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; import com.netflix.conductor.core.execution.WorkflowExecutor; import com.netflix.conductor.core.execution.tasks.WorkflowSystemTask; import com.netflix.conductor.model.TaskModel; import com.netflix.conductor.model.WorkflowModel; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.util.concurrent.Uninterruptibles; @Component(UserTask.NAME) public class UserTask extends WorkflowSystemTask { private static final Logger LOGGER = LoggerFactory.getLogger(UserTask.class); public static final String NAME = "USER_TASK"; private final ObjectMapper objectMapper; private static final TypeReference<Map<String, Map<String, List<Object>>>> mapStringListObjects = new TypeReference<>() {}; @Autowired public UserTask(ObjectMapper objectMapper) { super(NAME); this.objectMapper = objectMapper; LOGGER.info("Initialized system task - {}", getClass().getCanonicalName()); } @Override public void start(WorkflowModel workflow, TaskModel task, WorkflowExecutor executor) { 
Uninterruptibles.sleepUninterruptibly(1, TimeUnit.SECONDS); if (task.getWorkflowTask().isAsyncComplete()) { task.setStatus(TaskModel.Status.IN_PROGRESS); } else { Map<String, Map<String, List<Object>>> map = objectMapper.convertValue(task.getInputData(), mapStringListObjects); Map<String, Object> output = new HashMap<>(); Map<String, List<Object>> defaultLargeInput = new HashMap<>(); defaultLargeInput.put("TEST_SAMPLE", Collections.singletonList("testDefault")); output.put( "size", map.getOrDefault("largeInput", defaultLargeInput).get("TEST_SAMPLE").size()); task.setOutputData(output); task.setStatus(TaskModel.Status.COMPLETED); } } @Override public boolean isAsync() { return true; } }
6,984
0
Create_ds/conductor/test-harness/src/test/java/com/netflix/conductor/test
Create_ds/conductor/test-harness/src/test/java/com/netflix/conductor/test/utils/MockExternalPayloadStorage.java
/* * Copyright 2021 Netflix, Inc. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.test.utils; import java.io.*; import java.nio.file.Files; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.UUID; import org.apache.commons.io.IOUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.stereotype.Component; import com.netflix.conductor.common.metadata.workflow.SubWorkflowParams; import com.netflix.conductor.common.metadata.workflow.WorkflowDef; import com.netflix.conductor.common.metadata.workflow.WorkflowTask; import com.netflix.conductor.common.run.ExternalStorageLocation; import com.netflix.conductor.common.utils.ExternalPayloadStorage; import com.fasterxml.jackson.databind.ObjectMapper; import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_SIMPLE; import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_SUB_WORKFLOW; /** A {@link ExternalPayloadStorage} implementation that stores payload in file. 
*/ @ConditionalOnProperty(name = "conductor.external-payload-storage.type", havingValue = "mock") @Component public class MockExternalPayloadStorage implements ExternalPayloadStorage { private static final Logger LOGGER = LoggerFactory.getLogger(MockExternalPayloadStorage.class); private final ObjectMapper objectMapper; private final File payloadDir; @Autowired public MockExternalPayloadStorage(ObjectMapper objectMapper) throws IOException { this.objectMapper = objectMapper; this.payloadDir = Files.createTempDirectory("payloads").toFile(); LOGGER.info( "{} initialized in directory: {}", this.getClass().getSimpleName(), payloadDir.getAbsolutePath()); } @Override public ExternalStorageLocation getLocation( Operation operation, PayloadType payloadType, String path) { ExternalStorageLocation location = new ExternalStorageLocation(); location.setPath(UUID.randomUUID() + ".json"); return location; } @Override public void upload(String path, InputStream payload, long payloadSize) { File file = new File(payloadDir, path); String filePath = file.getAbsolutePath(); try { if (!file.exists() && file.createNewFile()) { LOGGER.debug("Created file: {}", filePath); } IOUtils.copy(payload, new FileOutputStream(file)); LOGGER.debug("Written to {}", filePath); } catch (IOException e) { // just handle this exception here and return empty map so that test will fail in case // this exception is thrown LOGGER.error("Error writing to {}", filePath); } finally { try { if (payload != null) { payload.close(); } } catch (IOException e) { LOGGER.warn("Unable to close input stream when writing to file"); } } } @Override public InputStream download(String path) { try { LOGGER.debug("Reading from {}", path); return new FileInputStream(new File(payloadDir, path)); } catch (IOException e) { LOGGER.error("Error reading {}", path, e); return null; } } public void upload(String path, Map<String, Object> payload) { try { InputStream bais = new 
ByteArrayInputStream(objectMapper.writeValueAsBytes(payload)); upload(path, bais, 0); } catch (IOException e) { LOGGER.error("Error serializing map to json", e); } } public InputStream readOutputDotJson() { return MockExternalPayloadStorage.class.getResourceAsStream("/output.json"); } @SuppressWarnings("unchecked") public Map<String, Object> curateDynamicForkLargePayload() { Map<String, Object> dynamicForkLargePayload = new HashMap<>(); try { InputStream inputStream = readOutputDotJson(); Map<String, Object> largePayload = objectMapper.readValue(inputStream, Map.class); WorkflowTask simpleWorkflowTask = new WorkflowTask(); simpleWorkflowTask.setName("integration_task_10"); simpleWorkflowTask.setTaskReferenceName("t10"); simpleWorkflowTask.setType(TASK_TYPE_SIMPLE); simpleWorkflowTask.setInputParameters( Collections.singletonMap("p1", "${workflow.input.imageType}")); WorkflowDef subWorkflowDef = new WorkflowDef(); subWorkflowDef.setName("one_task_workflow"); subWorkflowDef.setVersion(1); subWorkflowDef.setTasks(Collections.singletonList(simpleWorkflowTask)); SubWorkflowParams subWorkflowParams = new SubWorkflowParams(); subWorkflowParams.setName("one_task_workflow"); subWorkflowParams.setVersion(1); subWorkflowParams.setWorkflowDef(subWorkflowDef); WorkflowTask subWorkflowTask = new WorkflowTask(); subWorkflowTask.setName("large_payload_subworkflow"); subWorkflowTask.setType(TASK_TYPE_SUB_WORKFLOW); subWorkflowTask.setTaskReferenceName("large_payload_subworkflow"); subWorkflowTask.setInputParameters(largePayload); subWorkflowTask.setSubWorkflowParam(subWorkflowParams); dynamicForkLargePayload.put("dynamicTasks", List.of(subWorkflowTask)); dynamicForkLargePayload.put( "dynamicTasksInput", Map.of("large_payload_subworkflow", largePayload)); } catch (IOException e) { // just handle this exception here and return empty map so that test will fail in case // this exception is thrown } return dynamicForkLargePayload; } public Map<String, Object> downloadPayload(String 
path) { InputStream inputStream = download(path); if (inputStream != null) { try { Map<String, Object> largePayload = objectMapper.readValue(inputStream, Map.class); return largePayload; } catch (IOException e) { LOGGER.error("Error in downloading payload for path {}", path, e); } } return new HashMap<>(); } public Map<String, Object> createLargePayload(int repeat) { Map<String, Object> largePayload = new HashMap<>(); try { InputStream inputStream = readOutputDotJson(); Map<String, Object> payload = objectMapper.readValue(inputStream, Map.class); for (int i = 0; i < repeat; i++) { largePayload.put(String.valueOf(i), payload); } } catch (IOException e) { // just handle this exception here and return empty map so that test will fail in case // this exception is thrown } return largePayload; } }
6,985
0
Create_ds/conductor/client/src/test/java/com/netflix/conductor/client
Create_ds/conductor/client/src/test/java/com/netflix/conductor/client/config/TestPropertyFactory.java
/* * Copyright 2020 Netflix, Inc. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.client.config; import org.junit.Test; import com.netflix.conductor.client.worker.Worker; import com.netflix.conductor.common.metadata.tasks.TaskResult; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; public class TestPropertyFactory { @Test public void testIdentity() { Worker worker = Worker.create("Test2", TaskResult::new); assertNotNull(worker.getIdentity()); boolean paused = worker.paused(); assertFalse("Paused? 
" + paused, paused); } @Test public void test() { int val = PropertyFactory.getInteger("workerB", "pollingInterval", 100); assertEquals("got: " + val, 2, val); assertEquals( 100, PropertyFactory.getInteger("workerB", "propWithoutValue", 100).intValue()); assertFalse( PropertyFactory.getBoolean( "workerB", "paused", true)); // Global value set to 'false' assertTrue( PropertyFactory.getBoolean( "workerA", "paused", false)); // WorkerA value set to 'true' assertEquals( 42, PropertyFactory.getInteger("workerA", "batchSize", 42) .intValue()); // No global value set, so will return the default value // supplied assertEquals( 84, PropertyFactory.getInteger("workerB", "batchSize", 42) .intValue()); // WorkerB's value set to 84 assertEquals("domainA", PropertyFactory.getString("workerA", "domain", null)); assertEquals("domainB", PropertyFactory.getString("workerB", "domain", null)); assertNull(PropertyFactory.getString("workerC", "domain", null)); // Non Existent } @Test public void testProperty() { Worker worker = Worker.create("Test", TaskResult::new); boolean paused = worker.paused(); assertTrue("Paused? " + paused, paused); } }
6,986
0
Create_ds/conductor/client/src/test/java/com/netflix/conductor/client
Create_ds/conductor/client/src/test/java/com/netflix/conductor/client/testing/AbstractWorkflowTests.java
/* * Copyright 2023 Netflix, Inc. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.client.testing; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.util.HashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.TestInstance; import com.netflix.conductor.client.http.MetadataClient; import com.netflix.conductor.client.http.WorkflowClient; import com.netflix.conductor.common.config.ObjectMapperProvider; import com.netflix.conductor.common.metadata.tasks.TaskResult; import com.netflix.conductor.common.metadata.tasks.TaskType; import com.netflix.conductor.common.metadata.workflow.WorkflowDef; import com.netflix.conductor.common.metadata.workflow.WorkflowTask; import com.netflix.conductor.common.run.Workflow; import com.netflix.conductor.common.run.WorkflowTestRequest; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; import static org.junit.jupiter.api.Assertions.assertNotNull; @TestInstance(TestInstance.Lifecycle.PER_CLASS) public abstract class AbstractWorkflowTests { protected static ObjectMapper objectMapper = new ObjectMapperProvider().getObjectMapper(); protected static TypeReference<Map<String, List<WorkflowTestRequest.TaskMock>>> mockType = new TypeReference<Map<String, List<WorkflowTestRequest.TaskMock>>>() {}; protected MetadataClient 
metadataClient; protected WorkflowClient workflowClient; @BeforeAll public void setup() { String baseURL = "http://localhost:8080/api/"; metadataClient = new MetadataClient(); metadataClient.setRootURI(baseURL); workflowClient = new WorkflowClient(); workflowClient.setRootURI(baseURL); } protected WorkflowTestRequest getWorkflowTestRequest(WorkflowDef def) throws IOException { WorkflowTestRequest testRequest = new WorkflowTestRequest(); testRequest.setInput(new HashMap<>()); testRequest.setName(def.getName()); testRequest.setVersion(def.getVersion()); testRequest.setWorkflowDef(def); Map<String, List<WorkflowTestRequest.TaskMock>> taskRefToMockOutput = new HashMap<>(); for (WorkflowTask task : def.collectTasks()) { List<WorkflowTestRequest.TaskMock> taskRuns = new LinkedList<>(); WorkflowTestRequest.TaskMock mock = new WorkflowTestRequest.TaskMock(); mock.setStatus(TaskResult.Status.COMPLETED); Map<String, Object> output = new HashMap<>(); output.put("response", Map.of()); mock.setOutput(output); taskRuns.add(mock); taskRefToMockOutput.put(task.getTaskReferenceName(), taskRuns); if (task.getType().equals(TaskType.SUB_WORKFLOW.name())) { Object inlineSubWorkflowDefObj = task.getSubWorkflowParam().getWorkflowDefinition(); if (inlineSubWorkflowDefObj != null) { // If not null, it represents WorkflowDef object WorkflowDef inlineSubWorkflowDef = (WorkflowDef) inlineSubWorkflowDefObj; WorkflowTestRequest subWorkflowTestRequest = getWorkflowTestRequest(inlineSubWorkflowDef); testRequest .getSubWorkflowTestRequest() .put(task.getTaskReferenceName(), subWorkflowTestRequest); } else { // Inline definition is null String subWorkflowName = task.getSubWorkflowParam().getName(); // Load up the sub workflow from the JSON WorkflowDef subWorkflowDef = getWorkflowDef("/workflows/" + subWorkflowName + ".json"); assertNotNull(subWorkflowDef); WorkflowTestRequest subWorkflowTestRequest = getWorkflowTestRequest(subWorkflowDef); testRequest .getSubWorkflowTestRequest() 
.put(task.getTaskReferenceName(), subWorkflowTestRequest); } } } testRequest.setTaskRefToMockOutput(taskRefToMockOutput); return testRequest; } protected WorkflowDef getWorkflowDef(String path) throws IOException { InputStream inputStream = AbstractWorkflowTests.class.getResourceAsStream(path); if (inputStream == null) { throw new IOException("No file found at " + path); } return objectMapper.readValue(new InputStreamReader(inputStream), WorkflowDef.class); } protected Workflow getWorkflow(String path) throws IOException { InputStream inputStream = AbstractWorkflowTests.class.getResourceAsStream(path); if (inputStream == null) { throw new IOException("No file found at " + path); } return objectMapper.readValue(new InputStreamReader(inputStream), Workflow.class); } protected Map<String, List<WorkflowTestRequest.TaskMock>> getTestInputs(String path) throws IOException { InputStream inputStream = AbstractWorkflowTests.class.getResourceAsStream(path); if (inputStream == null) { throw new IOException("No file found at " + path); } return objectMapper.readValue(new InputStreamReader(inputStream), mockType); } }
6,987
0
Create_ds/conductor/client/src/test/java/com/netflix/conductor/client
Create_ds/conductor/client/src/test/java/com/netflix/conductor/client/testing/LoanWorkflowTest.java
/*
 * Copyright 2023 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.client.testing;

import java.io.IOException;
import java.math.BigDecimal;
import java.util.List;
import java.util.Map;

import com.netflix.conductor.common.metadata.tasks.Task;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.run.Workflow;
import com.netflix.conductor.common.run.WorkflowTestRequest;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;

/** Unit test for the loan workflow, driven by mocked task outputs read from a file. */
public class LoanWorkflowTest extends AbstractWorkflowTests {

    /** Uses mock inputs to verify the workflow execution and input/outputs of the tasks */
    // Tests are commented out since it requires a running server
    // @Test
    public void verifyWorkflowExecutionWithMockInputs() throws IOException {
        WorkflowDef workflowDef = getWorkflowDef("/workflows/calculate_loan_workflow.json");
        assertNotNull(workflowDef);

        Map<String, List<WorkflowTestRequest.TaskMock>> mockedTaskRuns =
                getTestInputs("/test_data/loan_workflow_input.json");
        assertNotNull(mockedTaskRuns);

        LoanWorkflowInput loanRequest = new LoanWorkflowInput();
        loanRequest.setUserEmail("user@example.com");
        loanRequest.setLoanAmount(new BigDecimal(11_000));

        WorkflowTestRequest request = new WorkflowTestRequest();
        request.setWorkflowDef(workflowDef);
        request.setInput(objectMapper.convertValue(loanRequest, Map.class));
        request.setTaskRefToMockOutput(mockedTaskRuns);
        request.setName(workflowDef.getName());
        request.setVersion(workflowDef.getVersion());

        Workflow run = workflowClient.testWorkflow(request);
        assertNotNull(run);

        // The workflow must have completed successfully
        assertEquals(Workflow.WorkflowStatus.COMPLETED, run.getStatus());

        // The workflow input captured exactly what was sent
        assertEquals(
                loanRequest.getLoanAmount().toString(),
                String.valueOf(run.getInput().get("loanAmount")));
        assertEquals(loanRequest.getUserEmail(), run.getInput().get("userEmail"));

        // Exactly three tasks were executed, in order
        assertEquals(3, run.getTasks().size());
        Task fetchUserDetails = run.getTasks().get(0);
        Task getCreditScore = run.getTasks().get(1);
        Task calculateLoanAmount = run.getTasks().get(2);

        // fetch user details received the user's email from the workflow input
        assertEquals(
                loanRequest.getUserEmail(), fetchUserDetails.getInputData().get("userEmail"));

        // ...and produced the expected account number
        int userAccountNo = 12345;
        assertEquals(userAccountNo, fetchUserDetails.getOutputData().get("userAccount"));

        // get credit score was fed the account number emitted by fetch user details
        assertEquals(userAccountNo, getCreditScore.getInputData().get("userAccountNumber"));

        // ...and produced the expected rating
        int expectedCreditRating = 750;
        assertEquals(expectedCreditRating, getCreditScore.getOutputData().get("creditRating"));

        // calculate loan amount saw the requested amount (from workflow input) and the
        // credit rating (from the previous task)
        assertEquals(
                loanRequest.getLoanAmount().toString(),
                String.valueOf(calculateLoanAmount.getInputData().get("loanAmount")));
        assertEquals(
                expectedCreditRating, calculateLoanAmount.getInputData().get("creditRating"));

        int authorizedLoanAmount = 10_000;
        assertEquals(
                authorizedLoanAmount,
                calculateLoanAmount.getOutputData().get("authorizedLoanAmount"));

        // Finally, the workflow-level outputs carry the same values
        assertEquals(userAccountNo, run.getOutput().get("accountNumber"));
        assertEquals(expectedCreditRating, run.getOutput().get("creditRating"));
        assertEquals(authorizedLoanAmount, run.getOutput().get("authorizedLoanAmount"));

        System.out.println(run);
    }
}
6,988
0
Create_ds/conductor/client/src/test/java/com/netflix/conductor/client
Create_ds/conductor/client/src/test/java/com/netflix/conductor/client/testing/SubWorkflowTest.java
/*
 * Copyright 2023 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.client.testing;

import java.io.IOException;
import java.util.List;

import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.run.Workflow;
import com.netflix.conductor.common.run.WorkflowTestRequest;

import static org.junit.jupiter.api.Assertions.*;

/** Demonstrates how to test workflows that contain sub-workflows */
public class SubWorkflowTest extends AbstractWorkflowTests {

    /**
     * Runs the kitchensink workflow with default mocks for every defined task, plus explicit
     * mocks for the tasks spawned at runtime by the dynamic fork (which do not appear in the
     * definition and so are not covered by {@link #getWorkflowTestRequest}).
     */
    // @Test
    // Tests are commented out since it requires a running server
    public void verifySubWorkflowExecutions() throws IOException {
        WorkflowDef def = getWorkflowDef("/workflows/kitchensink.json");
        assertNotNull(def);

        // The sub-workflow must be registered on the server so the parent can reference it
        WorkflowDef subWorkflowDef = getWorkflowDef("/workflows/PopulationMinMax.json");
        metadataClient.registerWorkflowDef(subWorkflowDef);

        WorkflowTestRequest testRequest = getWorkflowTestRequest(def);

        // The following are the dynamic tasks which are not present in the workflow definition but
        // are created by dynamic fork
        testRequest
                .getTaskRefToMockOutput()
                .put("_x_test_worker_0_0", List.of(new WorkflowTestRequest.TaskMock()));
        testRequest
                .getTaskRefToMockOutput()
                .put("_x_test_worker_0_1", List.of(new WorkflowTestRequest.TaskMock()));
        testRequest
                .getTaskRefToMockOutput()
                .put("_x_test_worker_0_2", List.of(new WorkflowTestRequest.TaskMock()));
        testRequest
                .getTaskRefToMockOutput()
                .put("simple_task_1__1", List.of(new WorkflowTestRequest.TaskMock()));
        testRequest
                .getTaskRefToMockOutput()
                .put("simple_task_5", List.of(new WorkflowTestRequest.TaskMock()));

        Workflow execution = workflowClient.testWorkflow(testRequest);
        assertNotNull(execution);

        // Verify that the workflow COMPLETES
        assertEquals(Workflow.WorkflowStatus.COMPLETED, execution.getStatus());

        // That the workflow executes a wait task
        assertTrue(
                execution.getTasks().stream()
                        .anyMatch(t -> t.getReferenceTaskName().equals("wait")));

        // That the call_made variable was set to True
        assertEquals(true, execution.getVariables().get("call_made"));

        // Total number of tasks executed are 17
        assertEquals(17, execution.getTasks().size());
    }
}
6,989
0
Create_ds/conductor/client/src/test/java/com/netflix/conductor/client
Create_ds/conductor/client/src/test/java/com/netflix/conductor/client/testing/LoanWorkflowInput.java
/*
 * Copyright 2023 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.client.testing;

import java.math.BigDecimal;

/**
 * Input payload for the loan workflow tests: the applicant's email address and the requested
 * loan amount. Serialized to the workflow input map via Jackson bean properties
 * ({@code userEmail}, {@code loanAmount}).
 */
public class LoanWorkflowInput {

    private String email;
    private BigDecimal amount;

    /** @return the applicant's email address (may be null if never set) */
    public String getUserEmail() {
        return email;
    }

    public void setUserEmail(String userEmail) {
        this.email = userEmail;
    }

    /** @return the requested loan amount (may be null if never set) */
    public BigDecimal getLoanAmount() {
        return amount;
    }

    public void setLoanAmount(BigDecimal loanAmount) {
        this.amount = loanAmount;
    }
}
6,990
0
Create_ds/conductor/client/src/test/java/com/netflix/conductor/client
Create_ds/conductor/client/src/test/java/com/netflix/conductor/client/testing/RegressionTest.java
/*
 * Copyright 2023 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.client.testing;

import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeoutException;

import com.netflix.conductor.common.metadata.tasks.Task;
import com.netflix.conductor.common.metadata.tasks.TaskResult;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.run.Workflow;
import com.netflix.conductor.common.run.WorkflowTestRequest;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;

/**
 * This test demonstrates how to use execution data from the previous executed workflows as golden
 * input and output and use them to regression test the workflow definition.
 *
 * <p>Regression tests are useful ensuring any changes to the workflow definition does not change
 * the behavior.
 */
public class RegressionTest extends AbstractWorkflowTests {

    // @Test
    // Tests are commented out since it requires a running server
    // Uses a previously executed successful run to verify the workflow execution, and it's output.
    public void verifyWorkflowOutput()
            throws IOException, ExecutionException, InterruptedException, TimeoutException {
        // Workflow Definition
        WorkflowDef def = getWorkflowDef("/workflows/workflow1.json");
        // Golden output to verify against
        Workflow workflow = getWorkflow("/test_data/workflow1_run.json");

        WorkflowTestRequest testRequest = new WorkflowTestRequest();
        testRequest.setInput(new HashMap<>());
        testRequest.setName(def.getName());
        testRequest.setVersion(def.getVersion());
        testRequest.setWorkflowDef(def);

        // Mock every task from the golden run with its recorded status and output.
        // FIX: the mocks were previously always keyed by the FIRST task of the definition
        // (def.getTasks().get(0).getTaskReferenceName()), so in a multi-task workflow every
        // iteration overwrote the same single entry and all other tasks were left unmocked.
        // Key each mock by the executed task's own reference name instead, and append
        // (computeIfAbsent) so repeated executions of the same task (retries) are replayed
        // in order rather than overwritten.
        Map<String, List<WorkflowTestRequest.TaskMock>> taskRefToMockOutput = new HashMap<>();
        for (Task task : workflow.getTasks()) {
            WorkflowTestRequest.TaskMock mock = new WorkflowTestRequest.TaskMock();
            // NOTE(review): assumes every recorded Task status name also exists in
            // TaskResult.Status; a non-terminal status such as SCHEDULED would make
            // valueOf throw IllegalArgumentException — confirm against the golden run data.
            mock.setStatus(TaskResult.Status.valueOf(task.getStatus().name()));
            mock.setOutput(task.getOutputData());
            taskRefToMockOutput
                    .computeIfAbsent(task.getReferenceTaskName(), ref -> new ArrayList<>())
                    .add(mock);
        }
        testRequest.setTaskRefToMockOutput(taskRefToMockOutput);

        // Replay the workflow against the mocks and compare with the golden run
        Workflow execution = workflowClient.testWorkflow(testRequest);
        assertNotNull(execution);
        assertEquals(workflow.getTasks().size(), execution.getTasks().size());
    }
}
6,991
0
Create_ds/conductor/client/src/test/java/com/netflix/conductor/client
Create_ds/conductor/client/src/test/java/com/netflix/conductor/client/automator/TaskPollExecutorTest.java
/*
 * Copyright 2022 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.client.automator;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.UUID;
import java.util.concurrent.*;

import org.junit.Test;
import org.mockito.Mockito;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;

import com.netflix.appinfo.InstanceInfo;
import com.netflix.conductor.client.exception.ConductorClientException;
import com.netflix.conductor.client.http.TaskClient;
import com.netflix.conductor.client.worker.Worker;
import com.netflix.conductor.common.metadata.tasks.Task;
import com.netflix.conductor.common.metadata.tasks.TaskExecLog;
import com.netflix.conductor.common.metadata.tasks.TaskResult;
import com.netflix.discovery.EurekaClient;

import static com.netflix.conductor.common.metadata.tasks.TaskResult.Status.COMPLETED;
import static com.netflix.conductor.common.metadata.tasks.TaskResult.Status.IN_PROGRESS;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.*;

/**
 * Unit tests for {@link TaskPollExecutor} using a mocked {@link TaskClient} and {@link Worker}.
 * Most tests schedule {@code pollAndExecute} on a periodic executor and use a
 * {@link CountDownLatch}, counted down inside a Mockito {@code doAnswer} on
 * {@code updateTask}/{@code batchPollTasksInDomain}, to wait for the asynchronous execution to
 * reach the point being asserted.
 */
public class TaskPollExecutorTest {

    private static final String TEST_TASK_DEF_NAME = "test";
    private static final Map<String, Integer> TASK_THREAD_MAP =
            Collections.singletonMap(TEST_TASK_DEF_NAME, 1);

    /** A worker that throws an Error must result in the task being updated as FAILED. */
    @Test
    public void testTaskExecutionException() throws InterruptedException {
        Worker worker =
                Worker.create(
                        TEST_TASK_DEF_NAME,
                        task -> {
                            throw new NoSuchMethodError();
                        });
        TaskClient taskClient = Mockito.mock(TaskClient.class);
        TaskPollExecutor taskPollExecutor =
                new TaskPollExecutor(
                        null, taskClient, 1, new HashMap<>(), "test-worker-%d", TASK_THREAD_MAP);
        when(taskClient.batchPollTasksInDomain(any(), any(), any(), anyInt(), anyInt()))
                .thenReturn(Arrays.asList(testTask()));
        when(taskClient.ack(any(), any())).thenReturn(true);

        CountDownLatch latch = new CountDownLatch(1);
        doAnswer(
                        invocation -> {
                            // The "%d" thread-name format must have been expanded by the executor
                            assertEquals("test-worker-1", Thread.currentThread().getName());
                            Object[] args = invocation.getArguments();
                            TaskResult result = (TaskResult) args[0];
                            assertEquals(TaskResult.Status.FAILED, result.getStatus());
                            latch.countDown();
                            return null;
                        })
                .when(taskClient)
                .updateTask(any());
        Executors.newSingleThreadScheduledExecutor()
                .scheduleAtFixedRate(
                        () -> taskPollExecutor.pollAndExecute(worker), 0, 1, TimeUnit.SECONDS);

        latch.await();
        verify(taskClient).updateTask(any());
    }

    /** Three sequential polls of the same task each trigger an execute and an update, in order. */
    @SuppressWarnings("rawtypes")
    @Test
    public void testMultipleTasksExecution() throws InterruptedException {
        String outputKey = "KEY";
        Task task = testTask();
        Worker worker = mock(Worker.class);
        when(worker.getPollingInterval()).thenReturn(3000);
        when(worker.getTaskDefName()).thenReturn(TEST_TASK_DEF_NAME);
        when(worker.execute(any()))
                .thenAnswer(
                        new Answer() {
                            private int count = 0;
                            Map<String, Object> outputMap = new HashMap<>();

                            public TaskResult answer(InvocationOnMock invocation)
                                    throws InterruptedException {
                                // Sleep for 2 seconds to simulate task execution
                                Thread.sleep(2000L);
                                TaskResult taskResult = new TaskResult(task);

                                // Set the output of this task execution
                                outputMap.put(outputKey, count++);
                                taskResult.setOutputData(outputMap);
                                return taskResult;
                            }
                        });

        TaskClient taskClient = Mockito.mock(TaskClient.class);
        TaskPollExecutor taskPollExecutor =
                new TaskPollExecutor(
                        null, taskClient, 1, new HashMap<>(), "test-worker-", TASK_THREAD_MAP);
        when(taskClient.batchPollTasksInDomain(any(), any(), any(), anyInt(), anyInt()))
                .thenReturn(Arrays.asList(task));
        when(taskClient.ack(any(), any())).thenReturn(true);
        CountDownLatch latch = new CountDownLatch(3);
        doAnswer(
                        new Answer() {
                            private int count = 0;

                            public TaskResult answer(InvocationOnMock invocation) {
                                Object[] args = invocation.getArguments();
                                TaskResult result = (TaskResult) args[0];
                                assertEquals(IN_PROGRESS, result.getStatus());
                                // Updates must arrive in execution order: 0, 1, 2
                                assertEquals(count, result.getOutputData().get(outputKey));
                                count++;
                                latch.countDown();
                                return null;
                            }
                        })
                .when(taskClient)
                .updateTask(any());

        Executors.newSingleThreadScheduledExecutor()
                .scheduleAtFixedRate(
                        () -> taskPollExecutor.pollAndExecute(worker), 0, 1, TimeUnit.SECONDS);

        latch.await();

        // execute() is called 3 times on the worker (once for each task)
        verify(worker, times(3)).execute(any());
        verify(taskClient, times(3)).updateTask(any());
    }

    /**
     * If uploading a large payload keeps failing, updateTask must never be called and the
     * worker's onErrorUpdate hook must fire instead.
     */
    @SuppressWarnings("unchecked")
    @Test
    public void testLargePayloadCanFailUpdateWithRetry() throws InterruptedException {
        Task task = testTask();

        Worker worker = mock(Worker.class);
        when(worker.getPollingInterval()).thenReturn(3000);
        when(worker.getTaskDefName()).thenReturn(TEST_TASK_DEF_NAME);
        when(worker.execute(any())).thenReturn(new TaskResult(task));

        TaskClient taskClient = Mockito.mock(TaskClient.class);
        when(taskClient.batchPollTasksInDomain(any(), any(), any(), anyInt(), anyInt()))
                .thenReturn(Arrays.asList(task));
        when(taskClient.ack(any(), any())).thenReturn(true);

        doAnswer(
                        invocation -> {
                            Object[] args = invocation.getArguments();
                            TaskResult result = (TaskResult) args[0];
                            // Each retry must start from a clean reason, proving the result
                            // object is not reused across attempts
                            assertNull(result.getReasonForIncompletion());
                            result.setReasonForIncompletion("some_reason_1");
                            throw new ConductorClientException();
                        })
                .when(taskClient)
                .evaluateAndUploadLargePayload(any(Map.class), any());

        TaskPollExecutor taskPollExecutor =
                new TaskPollExecutor(
                        null, taskClient, 1, new HashMap<>(), "test-worker-", TASK_THREAD_MAP);
        CountDownLatch latch = new CountDownLatch(1);
        doAnswer(
                        invocation -> {
                            latch.countDown();
                            return null;
                        })
                .when(worker)
                .onErrorUpdate(any());

        Executors.newSingleThreadScheduledExecutor()
                .scheduleAtFixedRate(
                        () -> taskPollExecutor.pollAndExecute(worker), 0, 1, TimeUnit.SECONDS);
        latch.await();

        // When evaluateAndUploadLargePayload fails indefinitely, task update shouldn't be called.
        verify(taskClient, times(0)).updateTask(any());
    }

    /**
     * When the payload is externalized, the update must carry the storage path and a null
     * output map instead of the inline output.
     */
    @Test
    public void testLargePayloadLocationUpdate() throws InterruptedException {
        Task task = testTask();
        String largePayloadLocation = "large_payload_location";

        Worker worker = mock(Worker.class);
        when(worker.getPollingInterval()).thenReturn(3000);
        when(worker.getTaskDefName()).thenReturn(TEST_TASK_DEF_NAME);
        when(worker.execute(any())).thenReturn(new TaskResult(task));

        TaskClient taskClient = Mockito.mock(TaskClient.class);
        when(taskClient.batchPollTasksInDomain(any(), any(), any(), anyInt(), anyInt()))
                .thenReturn(Arrays.asList(task));
        when(taskClient.ack(any(), any())).thenReturn(true);

        //noinspection unchecked
        when(taskClient.evaluateAndUploadLargePayload(any(Map.class), any()))
                .thenReturn(Optional.of(largePayloadLocation));

        TaskPollExecutor taskPollExecutor =
                new TaskPollExecutor(
                        null, taskClient, 1, new HashMap<>(), "test-worker-", TASK_THREAD_MAP);
        CountDownLatch latch = new CountDownLatch(1);
        doAnswer(
                        invocation -> {
                            Object[] args = invocation.getArguments();
                            TaskResult result = (TaskResult) args[0];
                            assertNull(result.getOutputData());
                            assertEquals(
                                    largePayloadLocation,
                                    result.getExternalOutputPayloadStoragePath());
                            latch.countDown();
                            return null;
                        })
                .when(taskClient)
                .updateTask(any());

        Executors.newSingleThreadScheduledExecutor()
                .scheduleAtFixedRate(
                        () -> taskPollExecutor.pollAndExecute(worker), 0, 1, TimeUnit.SECONDS);
        latch.await();

        verify(taskClient, times(1)).updateTask(any());
    }

    /** A poll that throws must be survived; the next successful poll still executes the task. */
    @Test
    public void testTaskPollException() throws InterruptedException {
        Task task = testTask();

        Worker worker = mock(Worker.class);
        when(worker.getPollingInterval()).thenReturn(3000);
        when(worker.getTaskDefName()).thenReturn(TEST_TASK_DEF_NAME);
        when(worker.execute(any())).thenReturn(new TaskResult(task));

        TaskClient taskClient = Mockito.mock(TaskClient.class);
        // First poll fails, second poll returns the task
        when(taskClient.batchPollTasksInDomain(any(), any(), any(), anyInt(), anyInt()))
                .thenThrow(ConductorClientException.class)
                .thenReturn(Arrays.asList(task));

        TaskPollExecutor taskPollExecutor =
                new TaskPollExecutor(
                        null, taskClient, 1, new HashMap<>(), "test-worker-", TASK_THREAD_MAP);
        CountDownLatch latch = new CountDownLatch(1);
        doAnswer(
                        invocation -> {
                            Object[] args = invocation.getArguments();
                            TaskResult result = (TaskResult) args[0];
                            assertEquals(IN_PROGRESS, result.getStatus());
                            assertEquals(task.getTaskId(), result.getTaskId());
                            latch.countDown();
                            return null;
                        })
                .when(taskClient)
                .updateTask(any());

        Executors.newSingleThreadScheduledExecutor()
                .scheduleAtFixedRate(
                        () -> taskPollExecutor.pollAndExecute(worker), 0, 1, TimeUnit.SECONDS);
        latch.await();
        verify(taskClient).updateTask(any());
    }

    /** Happy path: a polled task is executed and its result is sent via updateTask. */
    @Test
    public void testTaskPoll() throws InterruptedException {
        Task task = testTask();

        Worker worker = mock(Worker.class);
        when(worker.getPollingInterval()).thenReturn(3000);
        when(worker.getTaskDefName()).thenReturn("test");
        when(worker.execute(any())).thenReturn(new TaskResult(task));

        TaskClient taskClient = Mockito.mock(TaskClient.class);
        when(taskClient.batchPollTasksInDomain(any(), any(), any(), anyInt(), anyInt()))
                .thenReturn(Arrays.asList(task));

        TaskPollExecutor taskPollExecutor =
                new TaskPollExecutor(
                        null, taskClient, 1, new HashMap<>(), "test-worker-", TASK_THREAD_MAP);
        CountDownLatch latch = new CountDownLatch(1);
        doAnswer(
                        invocation -> {
                            Object[] args = invocation.getArguments();
                            TaskResult result = (TaskResult) args[0];
                            assertEquals(IN_PROGRESS, result.getStatus());
                            assertEquals(task.getTaskId(), result.getTaskId());
                            latch.countDown();
                            return null;
                        })
                .when(taskClient)
                .updateTask(any());

        Executors.newSingleThreadScheduledExecutor()
                .scheduleAtFixedRate(
                        () -> taskPollExecutor.pollAndExecute(worker), 0, 1, TimeUnit.SECONDS);
        latch.await();
        verify(taskClient).updateTask(any());
    }

    /** The task-to-domain mapping must be passed through to the batch poll call. */
    @Test
    public void testTaskPollDomain() throws InterruptedException {
        TaskClient taskClient = Mockito.mock(TaskClient.class);
        String testDomain = "foo";
        Map<String, String> taskToDomain = new HashMap<>();
        taskToDomain.put(TEST_TASK_DEF_NAME, testDomain);
        TaskPollExecutor taskPollExecutor =
                new TaskPollExecutor(
                        null, taskClient, 1, taskToDomain, "test-worker-", TASK_THREAD_MAP);

        String workerName = "test-worker";
        Worker worker = mock(Worker.class);
        when(worker.getTaskDefName()).thenReturn(TEST_TASK_DEF_NAME);
        when(worker.getIdentity()).thenReturn(workerName);

        CountDownLatch latch = new CountDownLatch(1);
        doAnswer(
                        invocation -> {
                            latch.countDown();
                            return null;
                        })
                .when(taskClient)
                .batchPollTasksInDomain(any(), any(), any(), anyInt(), anyInt());

        Executors.newSingleThreadScheduledExecutor()
                .scheduleAtFixedRate(
                        () -> taskPollExecutor.pollAndExecute(worker), 0, 1, TimeUnit.SECONDS);
        latch.await();
        verify(taskClient).batchPollTasksInDomain(any(), any(), any(), anyInt(), anyInt());
    }

    /**
     * Task configured to poll even when discovery (Eureka) reports the instance as not UP:
     * polling and updating must still happen.
     */
    @Test
    public void testPollOutOfDiscoveryForTask() throws InterruptedException {
        Task task = testTask();

        EurekaClient client = mock(EurekaClient.class);
        when(client.getInstanceRemoteStatus()).thenReturn(InstanceInfo.InstanceStatus.UNKNOWN);

        Worker worker = mock(Worker.class);
        when(worker.getPollingInterval()).thenReturn(3000);
        when(worker.getTaskDefName()).thenReturn("task_run_always");
        when(worker.execute(any())).thenReturn(new TaskResult(task));

        TaskClient taskClient = Mockito.mock(TaskClient.class);
        when(taskClient.batchPollTasksInDomain(any(), any(), any(), anyInt(), anyInt()))
                .thenReturn(Arrays.asList(new Task()))
                .thenReturn(Arrays.asList(task));

        TaskPollExecutor taskPollExecutor =
                new TaskPollExecutor(
                        client,
                        taskClient,
                        1,
                        new HashMap<>(),
                        "test-worker-",
                        Collections.singletonMap("task_run_always", 1));
        CountDownLatch latch = new CountDownLatch(1);
        doAnswer(
                        invocation -> {
                            Object[] args = invocation.getArguments();
                            TaskResult result = (TaskResult) args[0];
                            assertEquals(IN_PROGRESS, result.getStatus());
                            assertEquals(task.getTaskId(), result.getTaskId());
                            latch.countDown();
                            return null;
                        })
                .when(taskClient)
                .updateTask(any());

        Executors.newSingleThreadScheduledExecutor()
                .scheduleAtFixedRate(
                        () -> taskPollExecutor.pollAndExecute(worker), 0, 1, TimeUnit.SECONDS);
        latch.await();
        verify(taskClient).updateTask(any());
    }

    /**
     * By default (no poll-out-of-discovery override), a task must NOT run while discovery
     * reports the instance as not UP.
     */
    @Test
    public void testPollOutOfDiscoveryAsDefaultFalseForTask()
            throws ExecutionException, InterruptedException {
        Task task = testTask();

        EurekaClient client = mock(EurekaClient.class);
        when(client.getInstanceRemoteStatus()).thenReturn(InstanceInfo.InstanceStatus.UNKNOWN);

        Worker worker = mock(Worker.class);
        when(worker.getPollingInterval()).thenReturn(3000);
        when(worker.getTaskDefName()).thenReturn("task_do_not_run_always");
        when(worker.execute(any())).thenReturn(new TaskResult(task));

        TaskClient taskClient = Mockito.mock(TaskClient.class);
        when(taskClient.batchPollTasksInDomain(any(), any(), any(), anyInt(), anyInt()))
                .thenReturn(Arrays.asList(task));

        TaskPollExecutor taskPollExecutor =
                new TaskPollExecutor(
                        client, taskClient, 1, new HashMap<>(), "test-worker-", TASK_THREAD_MAP);
        CountDownLatch latch = new CountDownLatch(1);
        doAnswer(
                        invocation -> {
                            Object[] args = invocation.getArguments();
                            TaskResult result = (TaskResult) args[0];
                            assertEquals(IN_PROGRESS, result.getStatus());
                            assertEquals(task.getTaskId(), result.getTaskId());
                            latch.countDown();
                            return null;
                        })
                .when(taskClient)
                .updateTask(any());

        // Single one-shot schedule; wait for it to finish, then assert nothing was updated
        ScheduledFuture f =
                Executors.newSingleThreadScheduledExecutor()
                        .schedule(
                                () -> taskPollExecutor.pollAndExecute(worker),
                                0,
                                TimeUnit.SECONDS);
        f.get();
        verify(taskClient, times(0)).updateTask(any());
    }

    /**
     * Same as above, but the task explicitly opts OUT of polling while out of discovery —
     * no update must happen.
     */
    @Test
    public void testPollOutOfDiscoveryAsExplicitFalseForTask()
            throws ExecutionException, InterruptedException {
        Task task = testTask();

        EurekaClient client = mock(EurekaClient.class);
        when(client.getInstanceRemoteStatus()).thenReturn(InstanceInfo.InstanceStatus.UNKNOWN);

        Worker worker = mock(Worker.class);
        when(worker.getPollingInterval()).thenReturn(3000);
        when(worker.getTaskDefName()).thenReturn("task_explicit_do_not_run_always");
        when(worker.execute(any())).thenReturn(new TaskResult(task));

        TaskClient taskClient = Mockito.mock(TaskClient.class);
        when(taskClient.batchPollTasksInDomain(any(), any(), any(), anyInt(), anyInt()))
                .thenReturn(Arrays.asList(task));

        TaskPollExecutor taskPollExecutor =
                new TaskPollExecutor(
                        client, taskClient, 1, new HashMap<>(), "test-worker-", TASK_THREAD_MAP);
        CountDownLatch latch = new CountDownLatch(1);
        doAnswer(
                        invocation -> {
                            Object[] args = invocation.getArguments();
                            TaskResult result = (TaskResult) args[0];
                            assertEquals(IN_PROGRESS, result.getStatus());
                            assertEquals(task.getTaskId(), result.getTaskId());
                            latch.countDown();
                            return null;
                        })
                .when(taskClient)
                .updateTask(any());

        ScheduledFuture f =
                Executors.newSingleThreadScheduledExecutor()
                        .schedule(
                                () -> taskPollExecutor.pollAndExecute(worker),
                                0,
                                TimeUnit.SECONDS);
        f.get();
        verify(taskClient, times(0)).updateTask(any());
    }

    /** When discovery reports UP, the poll-out-of-discovery override is irrelevant — task runs. */
    @Test
    public void testPollOutOfDiscoveryIsIgnoredWhenDiscoveryIsUp() throws InterruptedException {
        Task task = testTask();

        EurekaClient client = mock(EurekaClient.class);
        when(client.getInstanceRemoteStatus()).thenReturn(InstanceInfo.InstanceStatus.UP);

        Worker worker = mock(Worker.class);
        when(worker.getPollingInterval()).thenReturn(3000);
        when(worker.getTaskDefName()).thenReturn("task_ignore_override");
        when(worker.execute(any())).thenReturn(new TaskResult(task));

        TaskClient taskClient = Mockito.mock(TaskClient.class);
        when(taskClient.batchPollTasksInDomain(any(), any(), any(), anyInt(), anyInt()))
                .thenReturn(Arrays.asList(task));

        TaskPollExecutor taskPollExecutor =
                new TaskPollExecutor(
                        client,
                        taskClient,
                        1,
                        new HashMap<>(),
                        "test-worker-",
                        Collections.singletonMap("task_ignore_override", 1));
        CountDownLatch latch = new CountDownLatch(1);
        doAnswer(
                        invocation -> {
                            Object[] args = invocation.getArguments();
                            TaskResult result = (TaskResult) args[0];
                            assertEquals(IN_PROGRESS, result.getStatus());
                            assertEquals(task.getTaskId(), result.getTaskId());
                            latch.countDown();
                            return null;
                        })
                .when(taskClient)
                .updateTask(any());

        Executors.newSingleThreadScheduledExecutor()
                .scheduleAtFixedRate(
                        () -> taskPollExecutor.pollAndExecute(worker), 0, 1, TimeUnit.SECONDS);
        latch.await();
        verify(taskClient).updateTask(any());
    }

    /**
     * A per-task thread count (with an invalid global thread count of -1) must still allow
     * polling for that task.
     */
    @Test
    public void testTaskThreadCount() throws InterruptedException {
        TaskClient taskClient = Mockito.mock(TaskClient.class);

        Map<String, Integer> taskThreadCount = new HashMap<>();
        taskThreadCount.put(TEST_TASK_DEF_NAME, 1);

        TaskPollExecutor taskPollExecutor =
                new TaskPollExecutor(
                        null, taskClient, -1, new HashMap<>(), "test-worker-", taskThreadCount);

        String workerName = "test-worker";
        Worker worker = mock(Worker.class);
        when(worker.getTaskDefName()).thenReturn(TEST_TASK_DEF_NAME);
        when(worker.getIdentity()).thenReturn(workerName);

        CountDownLatch latch = new CountDownLatch(1);
        doAnswer(
                        invocation -> {
                            latch.countDown();
                            return null;
                        })
                .when(taskClient)
                .batchPollTasksInDomain(any(), any(), any(), anyInt(), anyInt());

        Executors.newSingleThreadScheduledExecutor()
                .scheduleAtFixedRate(
                        () -> taskPollExecutor.pollAndExecute(worker), 0, 1, TimeUnit.SECONDS);
        latch.await();
        verify(taskClient).batchPollTasksInDomain(any(), any(), any(), anyInt(), anyInt());
    }

    /**
     * With lease extension enabled and a short response timeout, the executor must register
     * the task in its leaseExtendMap by the time the update arrives.
     */
    @Test
    public void testTaskLeaseExtend() throws InterruptedException {
        Task task = testTask();
        task.setResponseTimeoutSeconds(1);

        Worker worker = mock(Worker.class);
        when(worker.getPollingInterval()).thenReturn(3000);
        when(worker.getTaskDefName()).thenReturn("test");
        when(worker.execute(any())).thenReturn(new TaskResult(task));
        when(worker.leaseExtendEnabled()).thenReturn(true);

        TaskClient taskClient = Mockito.mock(TaskClient.class);
        when(taskClient.batchPollTasksInDomain(any(), any(), any(), anyInt(), anyInt()))
                .thenReturn(Arrays.asList(task));

        TaskResult result = new TaskResult(task);
        result.getLogs().add(new TaskExecLog("lease extend"));
        result.setExtendLease(true);

        TaskPollExecutor taskPollExecutor =
                new TaskPollExecutor(
                        null, taskClient, 1, new HashMap<>(), "test-worker-", TASK_THREAD_MAP);
        CountDownLatch latch = new CountDownLatch(1);
        doAnswer(
                        invocation -> {
                            assertTrue(
                                    taskPollExecutor.leaseExtendMap.containsKey(task.getTaskId()));
                            latch.countDown();
                            return null;
                        })
                .when(taskClient)
                .updateTask(any());

        Executors.newSingleThreadScheduledExecutor()
                .scheduleAtFixedRate(
                        () -> taskPollExecutor.pollAndExecute(worker), 0, 5, TimeUnit.SECONDS);
        latch.await();
    }

    /** A batch poll of 10 tasks must fan out to 10 executions and 10 COMPLETED updates. */
    @Test
    public void testBatchTasksExecution() throws InterruptedException {
        int threadCount = 10;
        TaskClient taskClient = Mockito.mock(TaskClient.class);
        Map<String, Integer> taskThreadCount = new HashMap<>();
        taskThreadCount.put(TEST_TASK_DEF_NAME, threadCount);

        String workerName = "test-worker";
        Worker worker = mock(Worker.class);
        when(worker.getPollingInterval()).thenReturn(3000);
        when(worker.getBatchPollTimeoutInMS()).thenReturn(1000);
        when(worker.getTaskDefName()).thenReturn(TEST_TASK_DEF_NAME);
        when(worker.getIdentity()).thenReturn(workerName);

        List<Task> tasks = new ArrayList<>();
        for (int i = 0; i < threadCount; i++) {
            Task task = testTask();
            tasks.add(task);
            when(worker.execute(task))
                    .thenAnswer(
                            new Answer() {
                                Map<String, Object> outputMap = new HashMap<>();

                                public TaskResult answer(InvocationOnMock invocation)
                                        throws InterruptedException {
                                    // Sleep for 1 seconds to simulate task execution
                                    Thread.sleep(1000L);
                                    TaskResult taskResult = new TaskResult(task);

                                    // Set the output of this task execution
                                    outputMap.put("key", "value");
                                    taskResult.setOutputData(outputMap);
                                    taskResult.setStatus(COMPLETED);
                                    return taskResult;
                                }
                            });
        }
        when(taskClient.batchPollTasksInDomain(
                        TEST_TASK_DEF_NAME, null, workerName, threadCount, 1000))
                .thenReturn(tasks);
        when(taskClient.ack(any(), any())).thenReturn(true);

        TaskPollExecutor taskPollExecutor =
                new TaskPollExecutor(
                        null, taskClient, 1, new HashMap<>(), "test-worker-", taskThreadCount);
        CountDownLatch latch = new CountDownLatch(threadCount);
        doAnswer(
                        new Answer() {
                            public TaskResult answer(InvocationOnMock invocation) {
                                Object[] args = invocation.getArguments();
                                TaskResult result = (TaskResult) args[0];
                                assertEquals(COMPLETED, result.getStatus());
                                assertEquals("value", result.getOutputData().get("key"));
                                latch.countDown();
                                return null;
                            }
                        })
                .when(taskClient)
                .updateTask(any());

        Executors.newSingleThreadScheduledExecutor()
                .scheduleAtFixedRate(
                        () -> taskPollExecutor.pollAndExecute(worker), 0, 1, TimeUnit.SECONDS);
        latch.await();

        // execute() is called 10 times on the worker (once for each task)
        verify(worker, times(threadCount)).execute(any());
        verify(taskClient, times(threadCount)).updateTask(any());
    }

    /** Builds a minimal IN_PROGRESS task with a random id for the test task definition. */
    private Task testTask() {
        Task task = new Task();
        task.setTaskId(UUID.randomUUID().toString());
        task.setStatus(Task.Status.IN_PROGRESS);
        task.setTaskDefName(TEST_TASK_DEF_NAME);
        return task;
    }
}
6,992
0
Create_ds/conductor/client/src/test/java/com/netflix/conductor/client
Create_ds/conductor/client/src/test/java/com/netflix/conductor/client/automator/TaskRunnerConfigurerTest.java
/*
 * Copyright 2020 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.netflix.conductor.client.automator;

import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicInteger;

import org.junit.Before;
import org.junit.Test;
import org.mockito.Mockito;

import com.netflix.conductor.client.exception.ConductorClientException;
import com.netflix.conductor.client.http.TaskClient;
import com.netflix.conductor.client.worker.Worker;
import com.netflix.conductor.common.metadata.tasks.Task;
import com.netflix.conductor.common.metadata.tasks.TaskResult;

import static com.netflix.conductor.common.metadata.tasks.TaskResult.Status.COMPLETED;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyInt;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

/**
 * Unit tests for {@link TaskRunnerConfigurer}: builder validation, thread-pool sizing (shared vs.
 * per-task), and end-to-end polling/execution with mocked workers and a mocked {@link TaskClient}.
 */
public class TaskRunnerConfigurerTest {

    private static final String TEST_TASK_DEF_NAME = "test";

    // Mocked Conductor task client shared by the builder-centric tests.
    private TaskClient client;

    @Before
    public void setup() {
        client = Mockito.mock(TaskClient.class);
    }

    /** The builder must reject a null worker list. */
    @Test(expected = NullPointerException.class)
    public void testNoWorkersException() {
        new TaskRunnerConfigurer.Builder(null, null).build();
    }

    /** Setting both a shared thread count and per-task thread counts is an invalid combination. */
    @Test(expected = ConductorClientException.class)
    public void testInvalidThreadConfig() {
        Worker worker1 = Worker.create("task1", TaskResult::new);
        Worker worker2 = Worker.create("task2", TaskResult::new);
        Map<String, Integer> taskThreadCount = new HashMap<>();
        taskThreadCount.put(worker1.getTaskDefName(), 2);
        taskThreadCount.put(worker2.getTaskDefName(), 3);
        new TaskRunnerConfigurer.Builder(client, Arrays.asList(worker1, worker2))
                .withThreadCount(10)
                .withTaskThreadCount(taskThreadCount)
                .build();
    }

    /** Workers missing from the per-task map get a default thread count of 1. */
    @Test
    public void testMissingTaskThreadConfig() {
        Worker worker1 = Worker.create("task1", TaskResult::new);
        Worker worker2 = Worker.create("task2", TaskResult::new);
        Map<String, Integer> taskThreadCount = new HashMap<>();
        taskThreadCount.put(worker1.getTaskDefName(), 2);
        TaskRunnerConfigurer configurer =
                new TaskRunnerConfigurer.Builder(client, Arrays.asList(worker1, worker2))
                        .withTaskThreadCount(taskThreadCount)
                        .build();

        assertFalse(configurer.getTaskThreadCount().isEmpty());
        assertEquals(2, configurer.getTaskThreadCount().size());
        assertEquals(2, configurer.getTaskThreadCount().get("task1").intValue());
        assertEquals(1, configurer.getTaskThreadCount().get("task2").intValue());
    }

    /** With per-task thread counts, the shared thread count is unused (-1) after init. */
    @Test
    public void testPerTaskThreadPool() {
        Worker worker1 = Worker.create("task1", TaskResult::new);
        Worker worker2 = Worker.create("task2", TaskResult::new);
        Map<String, Integer> taskThreadCount = new HashMap<>();
        taskThreadCount.put(worker1.getTaskDefName(), 2);
        taskThreadCount.put(worker2.getTaskDefName(), 3);
        TaskRunnerConfigurer configurer =
                new TaskRunnerConfigurer.Builder(client, Arrays.asList(worker1, worker2))
                        .withTaskThreadCount(taskThreadCount)
                        .build();
        configurer.init();

        assertEquals(-1, configurer.getThreadCount());
        assertEquals(2, configurer.getTaskThreadCount().get("task1").intValue());
        assertEquals(3, configurer.getTaskThreadCount().get("task2").intValue());
    }

    /**
     * Shared-pool mode: without explicit config the thread count defaults to the number of workers;
     * with explicit builder values they are reflected verbatim after init.
     */
    @Test
    public void testSharedThreadPool() {
        Worker worker = Worker.create(TEST_TASK_DEF_NAME, TaskResult::new);
        TaskRunnerConfigurer configurer =
                new TaskRunnerConfigurer.Builder(client, Arrays.asList(worker, worker, worker))
                        .build();
        configurer.init();

        // Defaults: one thread per worker, plus library default retry/shutdown settings.
        assertEquals(3, configurer.getThreadCount());
        assertEquals(500, configurer.getSleepWhenRetry());
        assertEquals(3, configurer.getUpdateRetryCount());
        assertEquals(10, configurer.getShutdownGracePeriodSeconds());
        assertFalse(configurer.getTaskThreadCount().isEmpty());
        assertEquals(1, configurer.getTaskThreadCount().size());
        assertEquals(3, configurer.getTaskThreadCount().get(TEST_TASK_DEF_NAME).intValue());

        configurer =
                new TaskRunnerConfigurer.Builder(client, Collections.singletonList(worker))
                        .withThreadCount(100)
                        .withSleepWhenRetry(100)
                        .withUpdateRetryCount(10)
                        .withShutdownGracePeriodSeconds(15)
                        .withWorkerNamePrefix("test-worker-")
                        .build();
        assertEquals(100, configurer.getThreadCount());
        configurer.init();

        // Explicit builder values survive init() unchanged.
        assertEquals(100, configurer.getThreadCount());
        assertEquals(100, configurer.getSleepWhenRetry());
        assertEquals(10, configurer.getUpdateRetryCount());
        assertEquals(15, configurer.getShutdownGracePeriodSeconds());
        assertEquals("test-worker-", configurer.getWorkerNamePrefix());
        assertFalse(configurer.getTaskThreadCount().isEmpty());
        assertEquals(1, configurer.getTaskThreadCount().size());
        assertEquals(100, configurer.getTaskThreadCount().get(TEST_TASK_DEF_NAME).intValue());
    }

    /**
     * End-to-end: two workers polling distinct task types through one configurer; each worker must
     * execute and report exactly one task.
     */
    @Test
    public void testMultipleWorkersExecution() throws Exception {
        String task1Name = "task1";
        Worker worker1 = mock(Worker.class);
        when(worker1.getPollingInterval()).thenReturn(3000);
        when(worker1.getTaskDefName()).thenReturn(task1Name);
        when(worker1.getIdentity()).thenReturn("worker1");
        when(worker1.execute(any()))
                .thenAnswer(
                        invocation -> {
                            // Sleep for 2 seconds to simulate task execution
                            Thread.sleep(2000);
                            TaskResult taskResult = new TaskResult();
                            taskResult.setStatus(COMPLETED);
                            return taskResult;
                        });

        String task2Name = "task2";
        Worker worker2 = mock(Worker.class);
        when(worker2.getPollingInterval()).thenReturn(3000);
        when(worker2.getTaskDefName()).thenReturn(task2Name);
        when(worker2.getIdentity()).thenReturn("worker2");
        when(worker2.execute(any()))
                .thenAnswer(
                        invocation -> {
                            // Sleep for 2 seconds to simulate task execution
                            Thread.sleep(2000);
                            TaskResult taskResult = new TaskResult();
                            taskResult.setStatus(COMPLETED);
                            return taskResult;
                        });

        Task task1 = testTask(task1Name);
        Task task2 = testTask(task2Name);
        TaskClient taskClient = Mockito.mock(TaskClient.class);
        TaskRunnerConfigurer configurer =
                new TaskRunnerConfigurer.Builder(taskClient, Arrays.asList(worker1, worker2))
                        .withThreadCount(2)
                        .withSleepWhenRetry(100000)
                        .withUpdateRetryCount(1)
                        .withWorkerNamePrefix("test-worker-")
                        .build();

        // Route each poll to the matching task by the polled task-def name.
        when(taskClient.batchPollTasksInDomain(any(), any(), any(), anyInt(), anyInt()))
                .thenAnswer(
                        invocation -> {
                            Object[] args = invocation.getArguments();
                            String taskName = args[0].toString();
                            if (taskName.equals(task1Name)) {
                                return Arrays.asList(task1);
                            } else if (taskName.equals(task2Name)) {
                                return Arrays.asList(task2);
                            } else {
                                return Collections.emptyList();
                            }
                        });
        when(taskClient.ack(any(), any())).thenReturn(true);

        AtomicInteger task1Counter = new AtomicInteger(0);
        AtomicInteger task2Counter = new AtomicInteger(0);
        CountDownLatch latch = new CountDownLatch(2);
        // Count completions per worker identity; latch releases once both report.
        doAnswer(
                        invocation -> {
                            Object[] args = invocation.getArguments();
                            TaskResult result = (TaskResult) args[0];
                            assertEquals(COMPLETED, result.getStatus());
                            if (result.getWorkerId().equals("worker1")) {
                                task1Counter.incrementAndGet();
                            } else if (result.getWorkerId().equals("worker2")) {
                                task2Counter.incrementAndGet();
                            }
                            latch.countDown();
                            return null;
                        })
                .when(taskClient)
                .updateTask(any());
        configurer.init();
        latch.await();

        assertEquals(1, task1Counter.get());
        assertEquals(1, task2Counter.get());
    }

    /** Builds a minimal IN_PROGRESS task with a random id for the given task definition. */
    private Task testTask(String taskDefName) {
        Task task = new Task();
        task.setTaskId(UUID.randomUUID().toString());
        task.setStatus(Task.Status.IN_PROGRESS);
        task.setTaskDefName(taskDefName);
        return task;
    }
}
6,993
0
Create_ds/conductor/client/src/test/java/com/netflix/conductor/client
Create_ds/conductor/client/src/test/java/com/netflix/conductor/client/automator/PollingSemaphoreTest.java
/* * Copyright 2020 Netflix, Inc. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.client.automator; import java.util.ArrayList; import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.stream.IntStream; import org.junit.Test; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; public class PollingSemaphoreTest { @Test public void testBlockAfterAvailablePermitsExhausted() throws Exception { int threads = 5; ExecutorService executorService = Executors.newFixedThreadPool(threads); PollingSemaphore pollingSemaphore = new PollingSemaphore(threads); List<CompletableFuture<Void>> futuresList = new ArrayList<>(); IntStream.range(0, threads) .forEach( t -> futuresList.add( CompletableFuture.runAsync( () -> pollingSemaphore.acquireSlots(1), executorService))); CompletableFuture<Void> allFutures = CompletableFuture.allOf( futuresList.toArray(new CompletableFuture[futuresList.size()])); allFutures.get(); assertEquals(0, pollingSemaphore.availableSlots()); assertFalse(pollingSemaphore.acquireSlots(1)); executorService.shutdown(); } @Test public void testAllowsPollingWhenPermitBecomesAvailable() throws Exception { int threads = 5; ExecutorService executorService = Executors.newFixedThreadPool(threads); PollingSemaphore pollingSemaphore = new 
PollingSemaphore(threads); List<CompletableFuture<Void>> futuresList = new ArrayList<>(); IntStream.range(0, threads) .forEach( t -> futuresList.add( CompletableFuture.runAsync( () -> pollingSemaphore.acquireSlots(1), executorService))); CompletableFuture<Void> allFutures = CompletableFuture.allOf( futuresList.toArray(new CompletableFuture[futuresList.size()])); allFutures.get(); assertEquals(0, pollingSemaphore.availableSlots()); pollingSemaphore.complete(1); assertTrue(pollingSemaphore.availableSlots() > 0); assertTrue(pollingSemaphore.acquireSlots(1)); executorService.shutdown(); } }
6,994
0
Create_ds/conductor/client/src/test/java/com/netflix/conductor/client
Create_ds/conductor/client/src/test/java/com/netflix/conductor/client/sample/SampleWorker.java
/* * Copyright 2020 Netflix, Inc. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.client.sample; import com.netflix.conductor.client.worker.Worker; import com.netflix.conductor.common.metadata.tasks.Task; import com.netflix.conductor.common.metadata.tasks.TaskResult; import com.netflix.conductor.common.metadata.tasks.TaskResult.Status; public class SampleWorker implements Worker { private final String taskDefName; public SampleWorker(String taskDefName) { this.taskDefName = taskDefName; } @Override public String getTaskDefName() { return taskDefName; } @Override public TaskResult execute(Task task) { TaskResult result = new TaskResult(task); result.setStatus(Status.COMPLETED); // Register the output of the task result.getOutputData().put("outputKey1", "value"); result.getOutputData().put("oddEven", 1); result.getOutputData().put("mod", 4); return result; } }
6,995
0
Create_ds/conductor/client/src/test/java/com/netflix/conductor/client
Create_ds/conductor/client/src/test/java/com/netflix/conductor/client/sample/Main.java
/* * Copyright 2020 Netflix, Inc. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.client.sample; import java.util.Arrays; import com.netflix.conductor.client.automator.TaskRunnerConfigurer; import com.netflix.conductor.client.http.TaskClient; import com.netflix.conductor.client.worker.Worker; public class Main { public static void main(String[] args) { TaskClient taskClient = new TaskClient(); taskClient.setRootURI("http://localhost:8080/api/"); // Point this to the server API int threadCount = 2; // number of threads used to execute workers. To avoid starvation, should be // same or more than number of workers Worker worker1 = new SampleWorker("task_1"); Worker worker2 = new SampleWorker("task_5"); // Create TaskRunnerConfigurer TaskRunnerConfigurer configurer = new TaskRunnerConfigurer.Builder(taskClient, Arrays.asList(worker1, worker2)) .withThreadCount(threadCount) .build(); // Start the polling and execution of tasks configurer.init(); } }
6,996
0
Create_ds/conductor/client/src/test/java/com/netflix/conductor/client
Create_ds/conductor/client/src/test/java/com/netflix/conductor/client/worker/TestWorkflowTask.java
/* * Copyright 2020 Netflix, Inc. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.client.worker; import java.io.InputStream; import java.util.List; import org.junit.Before; import org.junit.Test; import com.netflix.conductor.common.config.ObjectMapperProvider; import com.netflix.conductor.common.metadata.tasks.Task; import com.netflix.conductor.common.metadata.tasks.TaskType; import com.netflix.conductor.common.metadata.workflow.WorkflowTask; import com.fasterxml.jackson.databind.ObjectMapper; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; public class TestWorkflowTask { private ObjectMapper objectMapper; @Before public void setup() { objectMapper = new ObjectMapperProvider().getObjectMapper(); } @Test public void test() throws Exception { WorkflowTask task = new WorkflowTask(); task.setType("Hello"); task.setName("name"); String json = objectMapper.writeValueAsString(task); WorkflowTask read = objectMapper.readValue(json, WorkflowTask.class); assertNotNull(read); assertEquals(task.getName(), read.getName()); assertEquals(task.getType(), read.getType()); task = new WorkflowTask(); task.setWorkflowTaskType(TaskType.SUB_WORKFLOW); task.setName("name"); json = objectMapper.writeValueAsString(task); read = objectMapper.readValue(json, WorkflowTask.class); assertNotNull(read); assertEquals(task.getName(), read.getName()); assertEquals(task.getType(), read.getType()); 
assertEquals(TaskType.SUB_WORKFLOW.name(), read.getType()); } @SuppressWarnings("unchecked") @Test public void testObjectMapper() throws Exception { try (InputStream stream = TestWorkflowTask.class.getResourceAsStream("/tasks.json")) { List<Task> tasks = objectMapper.readValue(stream, List.class); assertNotNull(tasks); assertEquals(1, tasks.size()); } } }
6,997
0
Create_ds/conductor/client/src/main/java/com/netflix/conductor/client
Create_ds/conductor/client/src/main/java/com/netflix/conductor/client/config/DefaultConductorClientConfiguration.java
/* * Copyright 2020 Netflix, Inc. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.client.config; /** * A default implementation of {@link ConductorClientConfiguration} where external payload storage * is disabled. */ public class DefaultConductorClientConfiguration implements ConductorClientConfiguration { @Override public int getWorkflowInputPayloadThresholdKB() { return 5120; } @Override public int getWorkflowInputMaxPayloadThresholdKB() { return 10240; } @Override public int getTaskOutputPayloadThresholdKB() { return 3072; } @Override public int getTaskOutputMaxPayloadThresholdKB() { return 10240; } @Override public boolean isExternalPayloadStorageEnabled() { return false; } }
6,998
0
Create_ds/conductor/client/src/main/java/com/netflix/conductor/client
Create_ds/conductor/client/src/main/java/com/netflix/conductor/client/config/ConductorClientConfiguration.java
/* * Copyright 2018 Netflix, Inc. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.client.config; public interface ConductorClientConfiguration { /** * @return the workflow input payload size threshold in KB, beyond which the payload will be * processed based on {@link * ConductorClientConfiguration#isExternalPayloadStorageEnabled()}. */ int getWorkflowInputPayloadThresholdKB(); /** * @return the max value of workflow input payload size threshold in KB, beyond which the * payload will be rejected regardless external payload storage is enabled. */ int getWorkflowInputMaxPayloadThresholdKB(); /** * @return the task output payload size threshold in KB, beyond which the payload will be * processed based on {@link * ConductorClientConfiguration#isExternalPayloadStorageEnabled()}. */ int getTaskOutputPayloadThresholdKB(); /** * @return the max value of task output payload size threshold in KB, beyond which the payload * will be rejected regardless external payload storage is enabled. */ int getTaskOutputMaxPayloadThresholdKB(); /** * @return the flag which controls the use of external storage for storing workflow/task input * and output JSON payloads with size greater than threshold. If it is set to true, the * payload is stored in external location. If it is set to false, the payload is rejected * and the task/workflow execution fails. */ boolean isExternalPayloadStorageEnabled(); }
6,999