index int64 0 0 | repo_id stringlengths 26 205 | file_path stringlengths 51 246 | content stringlengths 8 433k | __index_level_0__ int64 0 10k |
|---|---|---|---|---|
0 | Create_ds/karyon/karyon2-core/src/main/java/netflix/karyon | Create_ds/karyon/karyon2-core/src/main/java/netflix/karyon/health/AlwaysHealthyHealthCheck.java | /*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package netflix.karyon.health;
/**
* Default health check handler implementation which always returns healthy. There is always a single instance of this
* class which can be obtained via {@link #INSTANCE}
*
* @author Nitesh Kant
*/
public class AlwaysHealthyHealthCheck implements HealthCheckHandler {
// Shared instance referenced by the class Javadoc. NOTE(review): the
// constructor below is public, so callers can still create additional
// (equivalent, stateless) instances -- confirm whether it should be private.
public static final AlwaysHealthyHealthCheck INSTANCE = new AlwaysHealthyHealthCheck();
public AlwaysHealthyHealthCheck() {
}
@Override
public int getStatus() {
// Unconditionally reports HTTP 200 (OK), i.e. always healthy.
return 200;
}
}
| 3,300 |
0 | Create_ds/karyon/karyon2-admin-web/src/test/java/netflix | Create_ds/karyon/karyon2-admin-web/src/test/java/netflix/adminresources/WebAdminTest.java | /*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package netflix.adminresources;
import com.google.common.collect.ImmutableMap;
import com.netflix.config.ConfigurationManager;
import netflix.admin.AdminConfigImpl;
import netflix.adminresources.resources.MaskedResourceHelper;
import org.apache.http.HttpResponse;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.util.EntityUtils;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.ws.rs.core.MediaType;
import java.util.Map;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
/**
* @author Amit Joshi
*/
/**
 * End-to-end tests for the karyon admin HTTP endpoints: boots an
 * {@link AdminResourcesContainer} on an ephemeral port and issues real HTTP
 * requests against the REST (JSON) and HTML resources it exposes.
 *
 * @author Amit Joshi
 */
public class WebAdminTest {
    private static final Logger LOG = LoggerFactory.getLogger(WebAdminTest.class);
    private static AdminResourcesContainer container;
    private static int adminServerPort;
    // endpoint URL -> expected Content-Type of the response
    private static Map<String, String> REST_END_POINTS;

    @BeforeClass
    public static void setUp() throws Exception {
        // Seed the properties the masking test asserts on; AWS_SECRET_KEY is
        // registered as a masked property and must never appear in responses.
        System.setProperty("AWS_SECRET_KEY", "super-secret-aws-key");
        System.setProperty("AWS_ACCESS_ID", "super-aws-access-id");
        System.setProperty(MaskedResourceHelper.MASKED_PROPERTY_NAMES, "AWS_SECRET_KEY");
        // Port 0 asks the container to bind to a free ephemeral port.
        System.setProperty(AdminConfigImpl.CONTAINER_LISTEN_PORT, "0");
        adminServerPort = startServerAndGetListeningPort();
        buildUpRestEndpointsToTest();
    }

    private static void buildUpRestEndpointsToTest() {
        final String localhostUrlBase = String.format("http://localhost:%d/", adminServerPort);
        REST_END_POINTS = new ImmutableMap.Builder<String, String>()
                .put(localhostUrlBase + "webadmin/archprops", MediaType.APPLICATION_JSON)
                .put(localhostUrlBase + "admin/archprops", MediaType.TEXT_HTML)
                .put(localhostUrlBase + "admin/env", MediaType.TEXT_HTML)
                .put(localhostUrlBase + "webadmin/env", MediaType.APPLICATION_JSON)
                .put(localhostUrlBase + "admin/jars", MediaType.TEXT_HTML)
                .put(localhostUrlBase + "webadmin/jars", MediaType.APPLICATION_JSON)
                .put(localhostUrlBase + "webadmin/jmx?key=root&_=1366497431351", MediaType.APPLICATION_JSON)
                .put(localhostUrlBase + "admin/jmx", MediaType.TEXT_HTML)
                .build();
    }

    @AfterClass
    public static void tearDown() throws Exception {
        ConfigurationManager.getConfigInstance().clearProperty("AWS_SECRET_KEY");
        ConfigurationManager.getConfigInstance().clearProperty("AWS_ACCESS_ID");
        ConfigurationManager.getConfigInstance().clearProperty(MaskedResourceHelper.MASKED_PROPERTY_NAMES);
        if (container != null) {
            container.shutdown();
        }
    }

    /** Every known endpoint must answer 200 with its advertised content type. */
    @Test
    public void testRestEndPoints() throws Exception {
        HttpClient client = new DefaultHttpClient();
        for (Map.Entry<String, String> restEndPoint : REST_END_POINTS.entrySet()) {
            final String endPoint = restEndPoint.getKey();
            LOG.info("REST endpoint " + endPoint);
            HttpGet restGet = new HttpGet(endPoint);
            HttpResponse response = client.execute(restGet);
            assertEquals(200, response.getStatusLine().getStatusCode());
            assertEquals(restEndPoint.getValue(), response.getEntity().getContentType().getValue());
            // need to consume full response before make another rest call with
            // the default SingleClientConnManager used with DefaultHttpClient
            EntityUtils.consume(response.getEntity());
        }
    }

    /** An unknown admin path must yield a 404. */
    @Test
    public void testInvalidEndpoint() throws Exception {
        final String localhostUrlBase = String.format("http://localhost:%d/", adminServerPort);
        final HttpClient client = new DefaultHttpClient();
        // localhostUrlBase already ends with '/'; the previous extra "/" here
        // produced a double-slash URL ("//admin/not-there").
        HttpGet badGet = new HttpGet(localhostUrlBase + "admin/not-there");
        final HttpResponse resp = client.execute(badGet);
        assertEquals(404, resp.getStatusLine().getStatusCode());
        // release the connection back to the single-connection manager
        EntityUtils.consume(resp.getEntity());
    }

    /** Masked properties must be redacted in archprops output; unmasked ones must not. */
    @Test
    public void testMaskedResources() throws Exception {
        HttpClient client = new DefaultHttpClient();
        final String endPoint = String.format("http://localhost:%d/webadmin/archprops", adminServerPort);
        LOG.info("REST endpoint " + endPoint);
        HttpGet restGet = new HttpGet(endPoint);
        HttpResponse response = client.execute(restGet);
        assertEquals(200, response.getStatusLine().getStatusCode());
        assertEquals(MediaType.APPLICATION_JSON, response.getEntity().getContentType().getValue());
        String responseStr = EntityUtils.toString(response.getEntity());
        LOG.info("responseStr: " + responseStr);
        assertTrue(responseStr.contains("[\"AWS_SECRET_KEY\",\"" + MaskedResourceHelper.MASKED_PROPERTY_VALUE + "\"]"));
        assertTrue(responseStr.contains("[\"AWS_ACCESS_ID\",\"super-aws-access-id\"]"));
        // need to consume full response before make another rest call with
        // the default SingleClientConnManager used with DefaultHttpClient
        EntityUtils.consume(response.getEntity());
    }

    /** Starts the admin container and returns the ephemeral port it bound to. */
    private static int startServerAndGetListeningPort() throws Exception {
        container = new AdminResourcesContainer();
        container.init();
        return container.getServerPort();
    }
}
| 3,301 |
0 | Create_ds/karyon/karyon2-admin-web/src/test/java/netflix/adminresources | Create_ds/karyon/karyon2-admin-web/src/test/java/netflix/adminresources/tableview/PropsTableViewTest.java | package netflix.adminresources.tableview;
import com.google.gson.JsonArray;
import com.google.gson.JsonElement;
import org.junit.Test;
import java.util.List;
import static junit.framework.Assert.assertTrue;
/**
 * Tests for {@link PropsTableView}: column metadata, all-column and per-column
 * filtering, pagination, and sorting over the Archaius property set.
 */
public class PropsTableViewTest {
    // terms are lower-case because PropsTableView matches against lower-cased rows
    public static final String ALL_COLS_SEARCH_STR = "java";
    public static final String PROP_NAME_SEARCH_STR = "java";
    public static final String PROP_VALUE_SEARCH_STR = "java";

    /** Extracts row {@code i} and verifies it is a two-element [key, value] pair. */
    private static JsonArray kvRow(JsonArray data, int i) {
        final JsonArray row = data.get(i).getAsJsonArray();
        assertTrue(row.size() == 2);
        return row;
    }

    @Test
    public void verifyColumnNames() {
        final PropsTableView ptv = new PropsTableView();
        final List<String> columns = ptv.getColumns();
        assertTrue(columns != null);
        assertTrue(columns.size() == 2);
        assertTrue(columns.get(0).equals("Key"));
        assertTrue(columns.get(1).equals("Value"));
    }

    @Test
    public void getDataTest() {
        final PropsTableView ptv = new PropsTableView();
        final JsonArray data = ptv.getData();
        assertTrue(data != null);
        assertTrue(data.size() > 0);
    }

    /** Every returned row must match the all-columns term in key OR value. */
    @Test
    public void searchTermTest() {
        final PropsTableView ptv = new PropsTableView();
        ptv.setAllColumnsSearchTerm(ALL_COLS_SEARCH_STR);
        final JsonArray data = ptv.getData();
        assertTrue(data != null);
        int totalElms = data.size();
        assertTrue(totalElms > 0);
        for (int i = 0; i < totalElms; i++) {
            final JsonArray propKVArray = kvRow(data, i);
            final String propKey = propKVArray.get(0).getAsString().toLowerCase();
            final String propValue = propKVArray.get(1).getAsString().toLowerCase();
            assertTrue("Property " + propKey + " does not contain " + ALL_COLS_SEARCH_STR,
                    propKey.contains(ALL_COLS_SEARCH_STR) || propValue.contains(ALL_COLS_SEARCH_STR));
        }
    }

    /** Key-column filter must constrain the key of every returned row. */
    @Test
    public void propNameSearchTest() {
        final PropsTableView ptv = new PropsTableView();
        ptv.setColumnSearchTerm(PropsTableView.KEY, PROP_NAME_SEARCH_STR);
        final JsonArray data = ptv.getData();
        assertTrue(data != null);
        int totalElms = data.size();
        assertTrue(totalElms > 0);
        for (int i = 0; i < totalElms; i++) {
            final JsonArray propKVArray = kvRow(data, i);
            final String propKey = propKVArray.get(0).getAsString().toLowerCase();
            assertTrue("Property " + propKey + " does not contain " + PROP_NAME_SEARCH_STR,
                    propKey.contains(PROP_NAME_SEARCH_STR));
        }
    }

    /** Value-column filter must constrain the value of every returned row. */
    @Test
    public void propValueSearchTest() {
        final PropsTableView ptv = new PropsTableView();
        ptv.setColumnSearchTerm(PropsTableView.VALUE, PROP_VALUE_SEARCH_STR);
        final JsonArray data = ptv.getData();
        assertTrue(data != null);
        int totalElms = data.size();
        assertTrue(totalElms > 0);
        for (int i = 0; i < totalElms; i++) {
            final JsonArray propKVArray = kvRow(data, i);
            final String propValue = propKVArray.get(1).getAsString().toLowerCase();
            assertTrue("Property " + propValue + " does not contain " + PROP_VALUE_SEARCH_STR,
                    propValue.contains(PROP_VALUE_SEARCH_STR));
        }
    }

    @Test
    public void paginationTest() {
        final PropsTableView ptv = new PropsTableView();
        ptv.setCurrentPageInfo(0, 10);
        final JsonArray data = ptv.getData();
        assertTrue(data != null);
        assertTrue(data.size() == 10);
        // The second page of size 10 starts at index 10, not 11 (the previous
        // value silently skipped the row at index 10).
        ptv.setCurrentPageInfo(10, 10);
        final JsonArray nextPageData = ptv.getData();
        assertTrue(nextPageData != null);
        assertTrue(nextPageData.size() == 10);
    }

    @Test
    public void sortKeyAscendingTest() {
        final PropsTableView ptv = new PropsTableView();
        ptv.enableColumnSort(PropsTableView.KEY, false); // ascending sort
        final JsonArray data = ptv.getData();
        assertTrue(data != null);
        int totalElms = data.size();
        assertTrue(totalElms > 0);
        String prevKey = null;
        for (int i = 0; i < totalElms; i++) {
            final JsonArray propKVArray = kvRow(data, i);
            final String propKey = propKVArray.get(0).getAsString();
            if (prevKey != null) {
                // Non-decreasing order: duplicate keys are legal, so use <=
                // (mirrors the >= check in sortValueDescending; the previous
                // strict < failed whenever two properties shared a key).
                assertTrue(prevKey.compareTo(propKey) <= 0);
            }
            prevKey = propKey;
        }
    }

    @Test
    public void sortValueDescending() {
        final PropsTableView ptv = new PropsTableView();
        ptv.enableColumnSort(PropsTableView.VALUE, true); // descending on value
        final JsonArray data = ptv.getData();
        assertTrue(data != null);
        int totalElms = data.size();
        assertTrue(totalElms > 0);
        String prevValue = null;
        for (int i = 0; i < totalElms; i++) {
            final JsonArray propKVArray = kvRow(data, i);
            final String propValue = propKVArray.get(1).getAsString();
            if (prevValue != null) {
                // verify non-increasing order
                assertTrue(String.format("%s - %s sort failed", prevValue, propValue),
                        prevValue.compareTo(propValue) >= 0);
            }
            prevValue = propValue;
        }
    }
}
| 3,302 |
0 | Create_ds/karyon/karyon2-admin-web/src/main/java/netflix/adminresources | Create_ds/karyon/karyon2-admin-web/src/main/java/netflix/adminresources/tableview/TableViewResource.java | package netflix.adminresources.tableview;
import com.google.gson.JsonArray;
import java.util.List;
/**
 * Contract for a tabular admin-console data source. Setter-style methods
 * return {@code this} so calls can be chained (see DataTableHelper).
 */
public interface TableViewResource {
/** Returns the ordered list of column names exposed by this table. */
List<String> getColumns();
/** Restricts results to rows whose given column matches {@code term}; chainable. */
TableViewResource setColumnSearchTerm(String column, String term);
/** Restricts results to rows where any column matches {@code term}; chainable. */
TableViewResource setAllColumnsSearchTerm(String term);
/** Enables sorting on {@code column}, descending when {@code isDescending}; chainable. */
TableViewResource enableColumnSort(String column, boolean isDescending);
/** Returns the current page of rows after filtering/sorting/pagination. */
JsonArray getData();
/** Total rows before filtering. NOTE(review): PropsTableView computes this during getData() -- callers should query it after getData(). */
int getTotalNumOfRecords();
/** Rows remaining after filters were applied. */
int getFilteredNumOfRecords();
/** Sets the zero-based pagination window of {@code count} rows from {@code startIndex}; chainable. */
TableViewResource setCurrentPageInfo(int startIndex, int count);
}
| 3,303 |
0 | Create_ds/karyon/karyon2-admin-web/src/main/java/netflix/adminresources | Create_ds/karyon/karyon2-admin-web/src/main/java/netflix/adminresources/tableview/PropsTableView.java | package netflix.adminresources.tableview;
import com.google.common.collect.Lists;
import com.google.gson.JsonArray;
import com.google.gson.JsonPrimitive;
import netflix.adminresources.resources.PropertiesHelper;
import netflix.adminresources.resources.model.Property;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
/**
 * {@link TableViewResource} backed by the full application property set
 * (via {@link PropertiesHelper#getAllProperties()}). Rows are two-element
 * [key, value] JSON arrays; filtering, sorting and pagination are applied in
 * that order on every {@link #getData()} call.
 */
public class PropsTableView implements TableViewResource {
    private static final Logger LOG = LoggerFactory.getLogger(PropsTableView.class);

    public static final String KEY = "Key";
    public static final String VALUE = "Value";

    // LinkedHashMap so getColumns() always reports [Key, Value] in order.
    private final Map<String, Column> columnMap = new LinkedHashMap<String, Column>();

    private String allColumnSearchTerm;
    private PageInfo currentPage;
    private int totalRecords;       // recomputed on each getData()
    private int numFilteredRecords; // rows surviving the filters

    /** Per-column search/sort state. */
    private static class Column {
        String name;
        String searchTerm;
        boolean enableSort;
        boolean isDescendingSort;
    }

    /** Zero-based pagination window. */
    private static class PageInfo {
        int startIndex;
        int count;
    }

    public PropsTableView() {
        LOG.debug("Creating a new propsTableViewResource ");
        Column keyCol = new Column();
        keyCol.name = KEY;
        Column valCol = new Column();
        valCol.name = VALUE;
        columnMap.put(KEY, keyCol);
        columnMap.put(VALUE, valCol);
    }

    @Override
    public List<String> getColumns() {
        return new ArrayList<String>(columnMap.keySet());
    }

    @Override
    public TableViewResource setColumnSearchTerm(String column, String term) {
        if (columnMap.containsKey(column)) {
            columnMap.get(column).searchTerm = term;
        }
        return this;
    }

    @Override
    public TableViewResource setAllColumnsSearchTerm(String term) {
        allColumnSearchTerm = term;
        return this;
    }

    @Override
    public TableViewResource enableColumnSort(String column, boolean isDescending) {
        if (columnMap.containsKey(column)) {
            columnMap.get(column).isDescendingSort = isDescending;
            columnMap.get(column).enableSort = true;
        }
        return this;
    }

    /**
     * Snapshots all properties, applies filter -> sort -> pagination, and
     * renders each surviving property as a [name, value] JsonArray. Also
     * refreshes {@link #getTotalNumOfRecords()} and
     * {@link #getFilteredNumOfRecords()}.
     */
    @Override
    public JsonArray getData() {
        JsonArray props = new JsonArray();
        List<Property> properties = PropertiesHelper.getAllProperties();
        totalRecords = properties.size();
        numFilteredRecords = totalRecords; // filters below overwrite if active
        List<Property> propsFiltered = applyFilter(properties);
        List<Property> propsSorted = applySorting(propsFiltered);
        List<Property> propsCurrentPage = applyPagination(propsSorted);
        for (Property property : propsCurrentPage) {
            JsonArray propArr = new JsonArray();
            propArr.add(new JsonPrimitive(property.getName()));
            propArr.add(new JsonPrimitive(property.getValue()));
            props.add(propArr);
        }
        return props;
    }

    @Override
    public int getTotalNumOfRecords() {
        return totalRecords;
    }

    @Override
    public int getFilteredNumOfRecords() {
        return numFilteredRecords;
    }

    @Override
    public TableViewResource setCurrentPageInfo(int startIndex, int count) {
        PageInfo pageInfo = new PageInfo();
        pageInfo.count = count;
        pageInfo.startIndex = startIndex;
        currentPage = pageInfo;
        return this;
    }

    /** Returns the requested page window, or the full list when unpaginated. */
    private List<Property> applyPagination(List<Property> properties) {
        if (!isPaginated()) {
            return properties;
        }
        // Clamp the window to the list bounds; equivalent to the previous
        // index-scanning loop but O(page size) via subList.
        final int size = properties.size();
        final int from = Math.min(Math.max(currentPage.startIndex, 0), size);
        final int to = Math.min(Math.max(currentPage.startIndex + currentPage.count, from), size);
        return Lists.newArrayList(properties.subList(from, to));
    }

    private boolean isPaginated() {
        return currentPage != null;
    }

    /** All-columns filter takes precedence over per-column filters. */
    private List<Property> applyFilter(List<Property> properties) {
        if (allColumnSearchTerm != null && !allColumnSearchTerm.isEmpty()) {
            return applyAllColumnsFilter(properties);
        } else if (columnSearchTermExists()) {
            return applyColumnFilters(properties);
        }
        return properties;
    }

    /** AND-combines the per-column search terms; matching is case-insensitive. */
    private List<Property> applyColumnFilters(List<Property> properties) {
        // Lower-case the terms once so matching is case-insensitive, mirroring
        // applyAllColumnsFilter. Previously the raw terms were compared against
        // lower-cased row text, so terms containing upper-case characters could
        // never match.
        final String propKeySearchTerm = lowerCaseOrNull(getPropKeySearchTerm());
        final String propValueSearchTerm = lowerCaseOrNull(getPropValueSearchTerm());
        List<Property> filteredList = Lists.newArrayList();
        for (Property property : properties) {
            String propName = property.getName().toLowerCase();
            String propValue = property.getValue().toLowerCase();
            boolean matched = true;
            if (propKeySearchTerm != null) {
                matched = propName.contains(propKeySearchTerm);
            }
            if (propValueSearchTerm != null) {
                matched = matched && propValue.contains(propValueSearchTerm);
            }
            if (matched) {
                filteredList.add(property);
            }
        }
        numFilteredRecords = filteredList.size();
        return filteredList;
    }

    /** Keeps rows where either column contains the all-columns term (case-insensitive). */
    private List<Property> applyAllColumnsFilter(List<Property> properties) {
        final String searchTermLowerCase = allColumnSearchTerm.toLowerCase();
        List<Property> filteredList = Lists.newArrayList();
        for (Property property : properties) {
            String propName = property.getName().toLowerCase();
            String propValue = property.getValue().toLowerCase();
            if (propName.contains(searchTermLowerCase) || propValue.contains(searchTermLowerCase)) {
                filteredList.add(property);
            }
        }
        numFilteredRecords = filteredList.size();
        return filteredList;
    }

    private boolean columnSearchTermExists() {
        for (Column col : columnMap.values()) {
            if (col.searchTerm != null) {
                return true;
            }
        }
        return false;
    }

    private static String lowerCaseOrNull(String term) {
        return term == null ? null : term.toLowerCase();
    }

    private String getPropKeySearchTerm() {
        return columnMap.get(KEY).searchTerm;
    }

    private String getPropValueSearchTerm() {
        return columnMap.get(VALUE).searchTerm;
    }

    /** Sorts in place on the first column with sorting enabled (Key checked first). */
    private List<Property> applySorting(List<Property> propsFiltered) {
        Column sortOnColumn = null;
        for (Column column : columnMap.values()) {
            if (column.enableSort) {
                sortOnColumn = column;
                break;
            }
        }
        if (sortOnColumn != null) {
            final boolean sortOnKey = sortOnColumn.name.equals(KEY);
            final boolean isDescending = sortOnColumn.isDescendingSort;
            Collections.sort(propsFiltered, new Comparator<Property>() {
                @Override
                public int compare(Property property, Property property2) {
                    if (sortOnKey) {
                        return isDescending
                                ? property2.getName().compareTo(property.getName())
                                : property.getName().compareTo(property2.getName());
                    }
                    // sort on value
                    return isDescending
                            ? property2.getValue().compareTo(property.getValue())
                            : property.getValue().compareTo(property2.getValue());
                }
            });
        }
        return propsFiltered;
    }
}
| 3,304 |
0 | Create_ds/karyon/karyon2-admin-web/src/main/java/netflix/adminresources | Create_ds/karyon/karyon2-admin-web/src/main/java/netflix/adminresources/tableview/DataTableHelper.java | package netflix.adminresources.tableview;
import com.google.gson.JsonArray;
import com.google.gson.JsonObject;
import javax.ws.rs.core.MultivaluedMap;
/**
 * Bridges DataTables (jQuery plugin) server-side-processing query parameters
 * to a {@link TableViewResource} and builds the JSON envelope the plugin
 * expects (iTotalRecords / iTotalDisplayRecords / sEcho / aaData).
 */
public class DataTableHelper {
    public static JsonObject buildOutput(TableViewResource tableViewResource, MultivaluedMap<String, String> queryParams) {
        JsonObject output = new JsonObject();
        applyQueryParams(tableViewResource, queryParams);
        // getData() must run before the record counts are read: implementations
        // such as PropsTableView compute the totals during getData().
        JsonArray data = tableViewResource.getData();
        // sEcho is echoed back verbatim so DataTables can pair responses with
        // requests; it may be null if the client omitted it.
        final String sEcho = queryParams.getFirst("sEcho");
        output.addProperty("iTotalRecords", tableViewResource.getTotalNumOfRecords());
        output.addProperty("iTotalDisplayRecords", tableViewResource.getFilteredNumOfRecords());
        output.addProperty("sEcho", sEcho);
        output.add("aaData", data);
        return output;
    }

    /**
     * apply pagination, search, sort params
     * <p/>
     * Sample query from DataTables -
     * sEcho=1&iColumns=2&sColumns=&iDisplayStart=0&iDisplayLength=25&mDataProp_0=0&mDataProp_1=1&sSearch=&
     * bRegex=false&sSearch_0=&bRegex_0=false&bSearchable_0=true&sSearch_1=&bRegex_1=false&bSearchable_1=true&
     * iSortingCols=1&iSortCol_0=0&sSortDir_0=asc&bSortable_0=true&bSortable_1=true
     */
    private static void applyQueryParams(TableViewResource resource, MultivaluedMap<String, String> queryParams) {
        final String allColsSearch = queryParams.getFirst("sSearch");
        final String displayStart = queryParams.getFirst("iDisplayStart");
        final String displayLen = queryParams.getFirst("iDisplayLength");
        String sortColumnIndex = queryParams.getFirst("iSortCol_0");
        String sortColumnDir = queryParams.getFirst("sSortDir_0");
        if (sortColumnDir == null || sortColumnIndex == null) {
            // defaults: ascending on the first column
            sortColumnDir = "asc";
            sortColumnIndex = "0";
        }
        int colIndex = Integer.parseInt(sortColumnIndex);
        String sortColumnName = resource.getColumns().get(colIndex);
        // Search and sort are applied unconditionally; previously they were
        // chained inside the pagination 'if' and silently ignored whenever the
        // request lacked iDisplayStart/iDisplayLength.
        resource.setAllColumnsSearchTerm(allColsSearch)
                .enableColumnSort(sortColumnName, !sortColumnDir.equalsIgnoreCase("asc"));
        if (displayLen != null && displayStart != null) {
            final int iDisplayLen = Integer.parseInt(displayLen);
            final int iDisplayStart = Integer.parseInt(displayStart);
            resource.setCurrentPageInfo(iDisplayStart, iDisplayLen);
        }
    }
}
| 3,305 |
0 | Create_ds/karyon/karyon2-admin-web/src/main/java/netflix/adminresources | Create_ds/karyon/karyon2-admin-web/src/main/java/netflix/adminresources/resources/EnvironmentResource.java | /*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package netflix.adminresources.resources;
import com.google.common.annotations.Beta;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
/**
* @author Nitesh Kant
*/
@Path("/env")
@Beta
@Produces(MediaType.APPLICATION_JSON)
/**
 * Exposes the process environment variables as JSON, with any variable listed
 * in {@link MaskedResourceHelper#getMaskedPropertiesSet()} redacted.
 *
 * @author Nitesh Kant
 */
public class EnvironmentResource {
    /** JSON envelope: {"env": {name: value, ...}}. */
    public static class EnvResponse {
        private Map<String, String> env;
        public EnvResponse(Map<String, String> env) {
            this.env = env;
        }
        public Map<String, String> getEnv() {
            return env;
        }
    }

    @GET
    public Response getEnvironmentVars() {
        // System.getenv() is immutable; copy it so masked entries can be overwritten.
        final Map<String, String> envVars = new HashMap<String, String>(System.getenv());
        // Redact every configured masked name that is actually present.
        for (String maskedName : MaskedResourceHelper.getMaskedPropertiesSet()) {
            if (envVars.containsKey(maskedName)) {
                envVars.put(maskedName, MaskedResourceHelper.MASKED_PROPERTY_VALUE);
            }
        }
        final Gson gson = new GsonBuilder().serializeNulls().create();
        return Response.ok(gson.toJson(new EnvResponse(envVars))).build();
    }
}
| 3,306 |
0 | Create_ds/karyon/karyon2-admin-web/src/main/java/netflix/adminresources | Create_ds/karyon/karyon2-admin-web/src/main/java/netflix/adminresources/resources/PropertiesHelper.java | package netflix.adminresources.resources;
import com.netflix.config.ConfigurationManager;
import netflix.adminresources.resources.model.Property;
import org.apache.commons.configuration.AbstractConfiguration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.*;
/**
 * Helpers for snapshotting the Archaius configuration as {@link Property}
 * objects, with configured sensitive properties masked.
 */
public class PropertiesHelper {
    private static final Logger LOG = LoggerFactory.getLogger(PropertiesHelper.class);

    /**
     * Returns every configuration property as a {@link Property}, substituting
     * {@link MaskedResourceHelper#MASKED_PROPERTY_VALUE} for masked keys.
     * Keys whose value lookup fails (e.g. a null value) are logged and skipped.
     */
    public static List<Property> getAllProperties() {
        final AbstractConfiguration configInstance = ConfigurationManager.getConfigInstance();
        final List<Property> properties = new ArrayList<Property>();
        final Iterator keys;
        try {
            keys = configInstance.getKeys();
        } catch (Exception e) {
            LOG.error("Exception fetching all property keys ", e);
            // Previously execution fell through with a null iterator and threw
            // NullPointerException at keys.hasNext(); return an empty snapshot.
            return properties;
        }
        final Set<String> maskedResources = MaskedResourceHelper.getMaskedPropertiesSet();
        while (keys.hasNext()) {
            final String key = (String) keys.next();
            try {
                // mask the specified properties
                final Object value = maskedResources.contains(key)
                        ? MaskedResourceHelper.MASKED_PROPERTY_VALUE
                        : configInstance.getProperty(key);
                properties.add(new Property(key, value.toString(), null));
            } catch (Exception e) {
                // e.g. a null value; skip this key but keep iterating
                LOG.info("Exception fetching property value for key " + key, e);
            }
        }
        return properties;
    }

    /** Flattens a property list into a name -&gt; value map (later entries win). */
    public static Map<String, String> buildPropertiesMap(List<Property> properties) {
        Map<String, String> propsMap = new HashMap<>();
        for (Property prop : properties) {
            propsMap.put(prop.getName(), prop.getValue());
        }
        return propsMap;
    }
}
| 3,307 |
0 | Create_ds/karyon/karyon2-admin-web/src/main/java/netflix/adminresources | Create_ds/karyon/karyon2-admin-web/src/main/java/netflix/adminresources/resources/PropertiesResource.java | /*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package netflix.adminresources.resources;
import com.google.common.annotations.Beta;
import com.google.gson.Gson;
import com.google.gson.JsonObject;
import com.google.inject.Inject;
import netflix.adminresources.tableview.DataTableHelper;
import netflix.adminresources.tableview.PropsTableView;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriInfo;
@Beta
@Path("/archprops")
@Produces(MediaType.APPLICATION_JSON)
/**
 * DataTables-compatible JSON endpoint over the application properties.
 * Answers 400 when no {@link PropsTableView} binding was provided.
 */
public class PropertiesResource {
    private static final Logger LOG = LoggerFactory.getLogger(PropertiesResource.class);

    @Inject(optional = true)
    private PropsTableView adminPropsResource;

    @GET
    public Response getProperties(@Context UriInfo uriInfo) {
        // Guard clause: the table view is an optional binding.
        if (adminPropsResource == null) {
            return Response.status(Response.Status.BAD_REQUEST).build();
        }
        final MultivaluedMap<String, String> queryParams = uriInfo.getQueryParameters();
        final JsonObject output = DataTableHelper.buildOutput(adminPropsResource, queryParams);
        return Response.ok().entity(new Gson().toJson(output)).build();
    }
}
| 3,308 |
0 | Create_ds/karyon/karyon2-admin-web/src/main/java/netflix/adminresources | Create_ds/karyon/karyon2-admin-web/src/main/java/netflix/adminresources/resources/PairResponse.java | /*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package netflix.adminresources.resources;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
/**
* @author Nitesh Kant
*/
/**
 * Simple serializable list of name/value pairs, used as a JSON response body.
 *
 * @author Nitesh Kant
 */
public class PairResponse {
    // package-private so serializers can reach the field directly
    List<Pair> data = new ArrayList<Pair>();

    public PairResponse() {
    }

    /** Builds one pair per map entry; keys are stringified via String.valueOf. */
    public PairResponse(Map<String, ?> input) {
        this();
        for (Map.Entry<String, ?> entry : input.entrySet()) {
            data.add(new Pair(String.valueOf(entry.getKey()), entry.getValue()));
        }
    }

    /** Appends a single name/value pair. */
    public void addEntry(String name, String value) {
        data.add(new Pair(name, value));
    }

    /** One immutable name/value entry. */
    private static class Pair {
        private String name;
        private Object value;

        private Pair(String name, Object value) {
            this.name = name;
            this.value = value;
        }

        public String getName() {
            return name;
        }

        public Object getValue() {
            return value;
        }
    }
}
| 3,309 |
0 | Create_ds/karyon/karyon2-admin-web/src/main/java/netflix/adminresources | Create_ds/karyon/karyon2-admin-web/src/main/java/netflix/adminresources/resources/AllPropsResource.java | package netflix.adminresources.resources;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import netflix.adminresources.resources.model.Property;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import java.util.List;
import java.util.Map;
@Path("/allprops")
@Produces(MediaType.APPLICATION_JSON)
/**
 * Returns the complete (masked) application property set as a single JSON
 * object: {"props": {name: value, ...}}.
 */
public class AllPropsResource {
    /** JSON envelope wrapping the flattened property map. */
    public static class PropsResponse {
        private Map<String, String> props;
        public PropsResponse(Map<String, String> props) {
            this.props = props;
        }
        public Map<String, String> getProps() {
            return props;
        }
    }

    @GET
    public Response getAllProperties() {
        final Map<String, String> propsMap =
                PropertiesHelper.buildPropertiesMap(PropertiesHelper.getAllProperties());
        final Gson gson = new GsonBuilder().serializeNulls().create();
        return Response.ok(gson.toJson(new PropsResponse(propsMap))).build();
    }
}
| 3,310 |
0 | Create_ds/karyon/karyon2-admin-web/src/main/java/netflix/adminresources | Create_ds/karyon/karyon2-admin-web/src/main/java/netflix/adminresources/resources/GuiceResource.java | package netflix.adminresources.resources;
import com.google.gson.GsonBuilder;
import com.google.inject.Inject;
import com.google.inject.Injector;
import com.netflix.governator.ProvisionMetrics;
import com.netflix.governator.ProvisionMetrics.Element;
import com.netflix.governator.ProvisionMetrics.Visitor;
import java.io.ByteArrayOutputStream;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.List;
import java.util.Stack;
import java.util.concurrent.TimeUnit;
import javax.inject.Singleton;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
@Path("/guice")
@Produces(MediaType.APPLICATION_JSON)
@Singleton
public final class GuiceResource {
// Field-injected by Guice; package-private so no setters are needed.
@Inject
Injector injector;
@Inject
JsonGrapher grapher;
@Inject
ProvisionMetrics metrics;
/**
 * Renders the injector's binding graph as JSON via the injected grapher.
 * NOTE(review): graphs injector.getParent(), not this injector itself --
 * presumably the parent holds the application bindings; confirm.
 */
@GET
@Path("keys")
public String get() throws Exception {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
PrintWriter out = new PrintWriter(baos);
grapher.setOut(out);
grapher.graph(injector.getParent());
return baos.toString("UTF-8");
}
/** Immutable tree node: a name, child nodes, and a non-negative duration value. */
public static class Node {
private final String name;
private final List<Node> children;
private final long value;
public Node(String name, List<Node> children, long value) {
this.name = name;
this.children = children;
// clamp negative durations to zero
this.value = value < 0 ? 0 : value;
}
public String getName() {
return name;
}
public List<Node> getChildren() {
return children;
}
public long getValue() {
return value;
}
}
/** Serializes the provision-metrics tree built by {@link #list()} to JSON. */
@GET
@Path("metrics")
public String getProvision() throws Exception {
return new GsonBuilder()
.serializeNulls()
.create()
.toJson(list());
}
/**
 * Walks the ProvisionMetrics visitor into a Node tree. The stack tracks the
 * children-list of the node currently being built: each visited element is
 * appended to the list on top, its own (initially empty) children list is
 * pushed, the element's sub-entries are visited recursively via
 * entry.accept(this), and the list is popped afterwards.
 */
public Node list() {
final List<Node> result = new ArrayList<>();
final Stack<List<Node>> stack = new Stack<>();
stack.push(result);
metrics.accept(new Visitor() {
@Override
public void visit(Element entry) {
final List<Node> children = new ArrayList<Node>();
stack.peek().add(new Node(entry.getKey().toString(), children, entry.getTotalDuration(TimeUnit.MILLISECONDS)));
stack.push(children);
entry.accept(this);
stack.pop();
}
});
// Root "app" node sums only the top-level durations (children are assumed
// to be included in their parents' totals -- TODO confirm against
// ProvisionMetrics semantics).
long duration = 0;
for (Node node : result) {
duration += node.getValue();
}
return new Node("app", result, duration);
}
}
| 3,311 |
0 | Create_ds/karyon/karyon2-admin-web/src/main/java/netflix/adminresources | Create_ds/karyon/karyon2-admin-web/src/main/java/netflix/adminresources/resources/JarsInfoResource.java | /*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package netflix.adminresources.resources;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import java.io.InputStream;
import java.net.URL;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.List;
import java.util.jar.Attributes;
import java.util.jar.Manifest;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import com.google.common.annotations.Beta;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @author pkamath
* @author Nitesh Kant
*/
@Beta
@Path("/jars")
@Produces(MediaType.APPLICATION_JSON)
public class JarsInfoResource {
private static final Logger logger = LoggerFactory.getLogger(JarsInfoResource.class);
private static final String JAR_PATTERN = "^jar:file:(.+)!/META-INF/MANIFEST.MF$";
private final List<JarManifest> jarManifests;
private final ArrayList<JarInfo> jarInfos;
public JarsInfoResource() {
jarManifests = loadJarManifests();
jarInfos = new ArrayList<>();
for (JarManifest jm : jarManifests) {
jarInfos.add(jm.toJarInfo());
}
}
public static class JarsInfoResponse {
private List<JarInfo> jars;
public JarsInfoResponse(List<JarInfo> jarInfos) {
this.jars = jarInfos;
}
public List<JarInfo> getJars() {
return jars;
}
}
@GET
public Response getAllJarsInfo() {
GsonBuilder gsonBuilder = new GsonBuilder().serializeNulls();
Gson gson = gsonBuilder.create();
String jarsJson = gson.toJson(new JarsInfoResponse(jarInfos));
return Response.ok(jarsJson).build();
}
@GET
@Path("/{id}")
public Response getJarManifest(@PathParam("id") int jarId) {
GsonBuilder gsonBuilder = new GsonBuilder().serializeNulls();
Gson gson = gsonBuilder.create();
String propsJson = gson.toJson(new KaryonAdminResponse(jarManifests.get(jarId)));
return Response.ok(propsJson).build();
}
private static List<JarManifest> loadJarManifests() {
List<JarManifest> jarManifests = new ArrayList<>();
Pattern pattern = Pattern.compile(JAR_PATTERN);
try {
ClassLoader cl = Thread.currentThread().getContextClassLoader();
Enumeration<URL> urls = cl.getResources("META-INF/MANIFEST.MF");
int id = 0;
while (urls.hasMoreElements()) {
URL manifestURL = urls.nextElement();
InputStream is = manifestURL.openStream();
String key = manifestURL.toString();
Matcher matcher = pattern.matcher(key);
if (matcher.matches()) {
key = matcher.group(1);
}
jarManifests.add(new JarManifest(id, key, new Manifest(is)));
is.close();
id++;
}
} catch (Exception e) {
logger.error("Failed to load environment jar information.", e);
}
return jarManifests;
}
private static class JarManifest {
private final int id;
private final String jarName;
private final Manifest manifest;
private JarManifest(int id, String jarName, Manifest manifest) {
this.id = id;
this.jarName = jarName;
this.manifest = manifest;
}
public String getJarName() {
return jarName;
}
public Manifest getManifest() {
return manifest;
}
public JarInfo toJarInfo() {
return new JarInfo(id, jarName, manifest.getMainAttributes());
}
}
private static class JarInfo {
public static final String LIBRARY_OWNER = "Library-Owner";
public static final String BUILD_DATE = "Build-Date";
public static final String STATUS = "Status";
public static final String IMPLEMENTATION_VERSION = "Implementation-Version";
public static final String IMPLEMENTATION_TITLE = "Implementation-Title";
public static final String SPECIFICATION_VERSION = "Specification-Version";
public static final String UNAVAILABLE = "-";
private final int id;
private final String name;
private final String libraryOwner;
private final String buildDate;
private final String status;
private final String implementationVersion;
private final String implementationTitle;
private final String specificationVersion;
private JarInfo(int id, String jar, Attributes mainAttributes) {
this.id = id;
this.name = jar;
libraryOwner = valueOf(mainAttributes, LIBRARY_OWNER);
buildDate = valueOf(mainAttributes, BUILD_DATE);
status = valueOf(mainAttributes, STATUS);
implementationTitle = valueOf(mainAttributes, IMPLEMENTATION_TITLE);
implementationVersion = valueOf(mainAttributes, IMPLEMENTATION_VERSION);
specificationVersion = valueOf(mainAttributes, SPECIFICATION_VERSION);
}
public String getStatus() {
return status;
}
public String getLibraryOwner() {
return libraryOwner;
}
public String getBuildDate() {
return buildDate;
}
public String getName() {
return name;
}
public String getImplementationVersion() {
return implementationVersion;
}
public String getImplementationTitle() {
return implementationTitle;
}
public String getSpecificationVersion() {
return specificationVersion;
}
private static String valueOf(Attributes mainAttributes, String tag) {
String value = mainAttributes.getValue(tag);
return value == null ? UNAVAILABLE : value;
}
}
}
| 3,312 |
0 | Create_ds/karyon/karyon2-admin-web/src/main/java/netflix/adminresources | Create_ds/karyon/karyon2-admin-web/src/main/java/netflix/adminresources/resources/JsonGrapher.java | package netflix.adminresources.resources;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;
import com.google.common.collect.Maps;
import com.google.inject.Inject;
import com.google.inject.Key;
import com.google.inject.grapher.BindingEdge;
import com.google.inject.grapher.DependencyEdge;
import com.google.inject.grapher.ImplementationNode;
import com.google.inject.grapher.InstanceNode;
import com.google.inject.grapher.InterfaceNode;
import com.google.inject.grapher.KaryonAbstractInjectorGrapher;
import com.google.inject.grapher.NameFactory;
import com.google.inject.grapher.Node;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
/**
 * Renders a Guice injector's bindings as indented JSON.
 * Not thread-safe: {@link #setOut(PrintWriter)} must be called before graphing,
 * and the {@code nodes} map is rebuilt on every {@code reset()}/graph cycle.
 */
final class JsonGrapher extends KaryonAbstractInjectorGrapher {
    // Accumulated graph nodes keyed by binding key; populated by the new*Node callbacks.
    private final Map<Key<?>, GraphNode> nodes = Maps.newHashMap();
    private final NameFactory nameFactory;
    // Destination for the rendered JSON; must be set via setOut before graphing.
    private PrintWriter out;
    // Kind of binding a GraphNode represents (serialized into the JSON output).
    private enum Type {
        Instance,
        Implementation,
        Interface
    }
    /**
     * One node of the rendered graph. Getters appear unused to the compiler but are
     * invoked reflectively by Jackson during serialization — hence the suppression;
     * their names define the JSON field names, so do not rename them.
     */
    @SuppressWarnings("unused")
    private class GraphNode {
        private final List<GraphNode> boundTo = new ArrayList<>();
        private final Node node;
        private final Type type;
        private final List<GraphNode> dependencies = new ArrayList<>();
        private long duration;
        private long order = -1;
        public GraphNode(Node node, Type type) {
            this.node = node;
            this.type = type;
        }
        public void boundTo(GraphNode id) {
            // Skip self-bindings so a node never lists itself as its own target.
            if (!getName().equals(id.getName())) {
                this.boundTo.add(id);
            }
        }
        public void dependsOn(GraphNode id) {
            this.dependencies.add(id);
        }
        public String getAnnotation() {
            return nameFactory.getAnnotationName(node.getId().getKey());
        }
        public Type getType() {
            return type;
        }
        public String getSource() {
            return nameFactory.getSourceName(node.getSource());
        }
        // "ClassName" or "ClassName:annotation" for annotated bindings.
        public String getName() {
            String annot = nameFactory.getAnnotationName(node.getId().getKey());
            return annot.isEmpty()
                ? nameFactory.getClassName(node.getId().getKey())
                : nameFactory.getClassName(node.getId().getKey()) + ":" + annot;
        }
        public List<String> getDependencies() {
            List<String> dep = new ArrayList<>();
            for (GraphNode node : dependencies) {
                // Null entries can occur when a dependency edge points at a key
                // that never produced a node; skip them rather than NPE.
                if (node != null) {
                    dep.add(node.getName());
                }
            }
            return dep;
        }
        public List<String> getBoundTo() {
            List<String> dep = new ArrayList<>();
            for (GraphNode node : boundTo) {
                dep.add(node.getName());
            }
            return dep;
        }
        public void setDuration(long duration) {
            this.duration = duration;
        }
        public long getDuration() {
            return duration;
        }
        public void setOrder(int order) {
            this.order = order;
        }
        public long getOrder() {
            return this.order;
        }
    }
    @Inject
    JsonGrapher(NameFactory nameFactory) {
        this.nameFactory = nameFactory;
    }
    @Override
    protected void reset() {
        nodes.clear();
    }
    /** Serializes every collected node to {@code out} as indented JSON. */
    @Override
    protected void postProcess() throws IOException {
        ObjectMapper m = new ObjectMapper();
        m.writer()
            .with(SerializationFeature.INDENT_OUTPUT)
            .writeValue(out, nodes.values());
    }
    public String quotes(String str) {
        return "\"" + str + "\"";
    }
    @Override
    protected void newInterfaceNode(InterfaceNode node) {
        nodes.put(node.getId().getKey(), new GraphNode(node, Type.Interface));
    }
    @Override
    protected void newImplementationNode(ImplementationNode node) {
        nodes.put(node.getId().getKey(), new GraphNode(node, Type.Implementation));
    }
    @Override
    protected void newInstanceNode(InstanceNode node) {
        nodes.put(node.getId().getKey(), new GraphNode(node, Type.Instance));
    }
    @Override
    protected void newDependencyEdge(DependencyEdge edge) {
        nodes.get(edge.getFromId().getKey())
            .dependsOn(nodes.get(edge.getToId().getKey()));
    }
    @Override
    protected void newBindingEdge(BindingEdge edge) {
        nodes.get(edge.getFromId().getKey())
            .boundTo(nodes.get(edge.getToId().getKey()));
    }
    public void setOut(PrintWriter out) {
        this.out = out;
    }
}
0 | Create_ds/karyon/karyon2-admin-web/src/main/java/netflix/adminresources | Create_ds/karyon/karyon2-admin-web/src/main/java/netflix/adminresources/resources/KaryonAdminResponse.java | /*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package netflix.adminresources.resources;
/**
* @author Nitesh Kant
*/
/**
 * Generic envelope for karyon admin endpoints: the payload is serialized under a
 * single {@code data} key. Instances are immutable.
 *
 * @author Nitesh Kant
 */
public class KaryonAdminResponse {

    // Payload exposed under the "data" key; made final — it was never reassigned.
    private final Object data;

    /**
     * Creates a response wrapper around the given payload.
     *
     * @param data payload to expose; may be {@code null}
     */
    public KaryonAdminResponse(Object data) {
        this.data = data;
    }

    /**
     * @return the wrapped payload, possibly {@code null}
     */
    public Object getData() {
        return data;
    }
}
| 3,314 |
0 | Create_ds/karyon/karyon2-admin-web/src/main/java/netflix/adminresources | Create_ds/karyon/karyon2-admin-web/src/main/java/netflix/adminresources/resources/KaryonWebAdminModule.java | package netflix.adminresources.resources;
import netflix.adminresources.KaryonAdminModule;
/**
 * Guice module for the web admin console. Currently adds no bindings beyond those of
 * {@link KaryonAdminModule}; it exists as the web-facing entry point / extension hook.
 */
public class KaryonWebAdminModule extends KaryonAdminModule {
    @Override
    protected void configure() {
        // Delegates entirely to the base admin module's bindings.
        super.configure();
    }
}
| 3,315 |
0 | Create_ds/karyon/karyon2-admin-web/src/main/java/netflix/adminresources | Create_ds/karyon/karyon2-admin-web/src/main/java/netflix/adminresources/resources/MaskedResourceHelper.java | package netflix.adminresources.resources;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Splitter;
import com.netflix.config.DynamicPropertyFactory;
import java.util.HashSet;
import java.util.Set;
/**
 * Resolves which property / environment-variable names must be masked in the admin
 * console. The names are read from comma-separated Archaius dynamic properties.
 */
public class MaskedResourceHelper {
    @VisibleForTesting
    public static final String MASKED_PROPERTY_NAMES = "netflix.platform.admin.resources.masked.property.names";
    public static final String MASKED_ENV_NAMES = "netflix.platform.admin.resources.masked.env.names";
    public static final String MASKED_PROPERTY_VALUE = "**** MASKED ****";

    // Comma-separated lists, tolerant of surrounding whitespace and empty entries.
    private static final Splitter SPLITTER = Splitter.on(',')
            .trimResults()
            .omitEmptyStrings();

    /**
     * Names of properties whose values must be masked, per the current value of
     * {@link #MASKED_PROPERTY_NAMES}. The list property itself is always included
     * so the masked-names configuration is never displayed either.
     */
    public static Set<String> getMaskedPropertiesSet() {
        final String configured = DynamicPropertyFactory.getInstance()
                .getStringProperty(MASKED_PROPERTY_NAMES, "").get();
        final Set<String> masked = toNameSet(configured);
        masked.add(MASKED_PROPERTY_NAMES);
        return masked;
    }

    /**
     * Names of environment variables whose values must be masked, per the current
     * value of {@link #MASKED_ENV_NAMES}.
     */
    public static Set<String> getMaskedEnvSet() {
        final String configured = DynamicPropertyFactory.getInstance()
                .getStringProperty(MASKED_ENV_NAMES, "").get();
        return toNameSet(configured);
    }

    /** Splits a comma-separated list into a mutable set of trimmed, non-empty names. */
    private static Set<String> toNameSet(String commaSeparatedNames) {
        final Set<String> names = new HashSet<String>();
        for (String name : SPLITTER.split(commaSeparatedNames)) {
            names.add(name);
        }
        return names;
    }
}
| 3,316 |
0 | Create_ds/karyon/karyon2-admin-web/src/main/java/netflix/adminresources/resources | Create_ds/karyon/karyon2-admin-web/src/main/java/netflix/adminresources/resources/jmx/JMXResource.java | /*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package netflix.adminresources.resources.jmx;
import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.management.Attribute;
import javax.management.AttributeList;
import javax.management.MBeanAttributeInfo;
import javax.management.MBeanInfo;
import javax.management.MBeanOperationInfo;
import javax.management.MBeanParameterInfo;
import javax.management.MBeanServer;
import javax.management.ObjectName;
import javax.management.openmbean.CompositeDataSupport;
import javax.ws.rs.Consumes;
import javax.ws.rs.DefaultValue;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.core.Response;
import java.io.StringWriter;
import java.lang.management.ManagementFactory;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.TreeMap;
/**
* Resource to expose JMX via JSON
*
* @author elandau
*
*/
@Produces({ MediaType.APPLICATION_JSON, MediaType.TEXT_HTML })
@Path("/jmx")
public class JMXResource {
    private static final Logger LOG = LoggerFactory.getLogger(JMXResource.class);
    // Composite attribute key some MBeans use to expose their scalar value.
    private static final String CURRENT_VALUE = "CurrentValue";
    // Facade over the platform MBeanServer (process-wide singleton).
    private JmxService jmx;
    public JMXResource() {
        LOG.info("JMXResource created");
        jmx = JmxService.getInstance();
    }
    /**
     * Return JSON representing the entire tree of MBeans in DynaTree format.
     * Always emits the full tree of every domain; the response is marked
     * non-cacheable via Pragma/Cache-Control/Expires headers.
     *
     * @param key   requested node key; NOTE(review): only logged, never used to
     *              filter the tree — confirm whether filtering was intended
     * @param mode  currently unused
     * @param jsonp when non-empty, the JSON array is wrapped in a JSONP callback
     *              of this name
     */
    @GET
    public Response getMBeans(
            @QueryParam("key") @DefaultValue("root") String key,
            @QueryParam("mode") @DefaultValue("") String mode,
            @QueryParam("jsonp") @DefaultValue("") String jsonp)
            throws Exception {
        LOG.info("key" + key);
        DynaTreeNode root = new DynaTreeNode();
        for (String domain : jmx.getDomainList()) {
            root.putChild(jmx.getDomainTree(domain).setTitle(domain)
                    .setMode("domain"));
        }
        StringWriter out = new StringWriter();
        if (jsonp.isEmpty()) {
            root.getChildrenJSONArray().write(out);
        } else {
            out.append(jsonp).append("(");
            root.getChildrenJSONArray().write(out);
            out.append(");");
        }
        return Response.ok(out.toString()).header("Pragma", "no-cache")
                .header("Cache-Control", "no-cache").header("Expires", "0")
                .build();
    }
    /**
     * Return all the attributes and operations for a single mbean.
     * When {@code key} contains a {@code *} wildcard, attributes for every matching
     * bean are aggregated (and operations are omitted; {@code multikey} is true).
     *
     * @param key
     *            Exact object name of MBean in String form
     * @param jsonp
     *            when non-empty, wraps the JSON in a JSONP callback of this name
     */
    @GET
    @Path("{key}")
    public Response getMBean(@PathParam("key") String key,
            @QueryParam("jsonp") @DefaultValue("") String jsonp)
            throws Exception {
        LOG.info("key: " + key);
        JSONObject json = new JSONObject();
        ObjectName name = new ObjectName(key);
        json.put("domain", name.getDomain());
        json.put("property", name.getKeyPropertyList());
        if (key.contains("*")) {
            JSONObject keys = new JSONObject();
            for (Entry<String, Map<String, String>> attrs : jmx
                    .getMBeanAttributesByRegex(key).entrySet()) {
                keys.put(attrs.getKey(), attrs.getValue());
            }
            json.put("attributes", keys);
            json.put("multikey", true);
        } else {
            json.put("attributes", jmx.getMBeanAttributes(key));
            json.put("multikey", false);
            MBeanOperationInfo[] operations = jmx.getMBeanOperations(key);
            JSONArray ar = new JSONArray();
            for (MBeanOperationInfo operation : operations) {
                JSONObject obj = new JSONObject();
                obj.put("name", operation.getName());
                obj.put("description", operation.getDescription());
                obj.put("returnType", operation.getReturnType());
                obj.put("impact", operation.getImpact());
                JSONArray params = new JSONArray();
                for (MBeanParameterInfo param : operation.getSignature()) {
                    JSONObject p = new JSONObject();
                    p.put("name", param.getName());
                    p.put("type", param.getType());
                    params.put(p);
                }
                obj.put("params", params);
                ar.put(obj);
            }
            json.put("operations", ar);
        }
        StringWriter out = new StringWriter();
        if (jsonp.isEmpty()) {
            json.write(out);
        } else {
            out.append(jsonp).append("(");
            json.write(out);
            out.append(");");
        }
        return Response.ok(out.toString()).type(MediaType.APPLICATION_JSON)
                .build();
    }
    /**
     * Execute an operation on an mbean. All operation arguments are passed as
     * Strings, ordered alphabetically by form-field name (TreeMap iteration) —
     * assumes callers name their fields so this ordering matches the signature;
     * TODO confirm.
     * NOTE(review): {@code jsonp} has no {@code @DefaultValue}, unlike the other
     * endpoints, so a request without it makes {@code jsonp.isEmpty()} below throw
     * NPE — confirm and align.
     *
     * @param formParams form fields used as operation arguments ("op" is skipped)
     * @param key        exact ObjectName of the target MBean
     * @param jsonp      optional JSONP callback name
     * @param name       name of the operation to invoke; an unknown name yields HTTP 500
     */
    @POST
    @Consumes(MediaType.APPLICATION_FORM_URLENCODED)
    @Path("{key}/{op}")
    public Response invokeMbeanOperation(
            MultivaluedMap<String, String> formParams,
            @PathParam("key") String key, @QueryParam("jsonp") String jsonp,
            @PathParam("op") String name) throws Exception {
        LOG.info("invoke " + key + " op=" + name);
        MBeanServer mBeanServer = ManagementFactory.getPlatformMBeanServer();
        Map<String, String> params = new TreeMap<String, String>();
        for (Entry<String, List<String>> entry : formParams.entrySet()) {
            if (entry.getKey().equals("op"))
                continue;
            if (entry.getValue().size() > 0)
                params.put(entry.getKey(), entry.getValue().get(0));
            else
                params.put(entry.getKey(), "");
        }
        ObjectName objName = new ObjectName(key);
        MBeanInfo info = mBeanServer.getMBeanInfo(objName);
        for (MBeanOperationInfo op : info.getOperations()) {
            if (op.getName().equals(name)) {
                List<String> signature = new ArrayList<String>();
                for (MBeanParameterInfo s : op.getSignature()) {
                    signature.add(s.getType());
                }
                Object result = mBeanServer.invoke(objName, name, params
                        .values().toArray(new String[params.size()]), signature
                        .toArray(new String[signature.size()]));
                JSONObject json = new JSONObject();
                json.put("key", key);
                json.put("operation", name);
                if (result != null) {
                    json.put("response", result.toString());
                }
                json.put("type", op.getReturnType());
                StringWriter out = new StringWriter();
                if (jsonp.isEmpty()) {
                    json.write(out);
                } else {
                    out.append(jsonp).append("(");
                    json.write(out);
                    out.append(");");
                }
                return Response.ok(out.toString())
                        .type(MediaType.APPLICATION_JSON).build();
            }
        }
        // No operation with the requested name found on the bean.
        return Response.serverError().build();
    }
    /**
     * Return all the attributes and operations for a MBeans whose
     * {@link ObjectName}s match the passed in regex.
     *
     * @param objNameRegex
     *            regex for {@link ObjectName} of MBean(s). The
     *            {@link ObjectName} documentation explains what kinds of regex
     *            expressions are valid
     * @param jsonp
     *            if non-empty, Jsonp output is returned instead of Json
     */
    @GET
    @Path("/mbeans")
    public Response getMBeansByRegex(
            @QueryParam("objNameRegex") String objNameRegex,
            @QueryParam("jsonp") @DefaultValue("") String jsonp) {
        try {
            ObjectName objNameForRegex = new ObjectName(objNameRegex);
            MBeanServer mBeanServer = ManagementFactory
                    .getPlatformMBeanServer();
            Set<ObjectName> objs = mBeanServer
                    .queryNames(objNameForRegex, null);
            JSONObject result = new JSONObject();
            for (ObjectName objName : objs) {
                JSONObject json = new JSONObject();
                // NOTE(review): the catch block retries the identical call, which
                // will almost certainly rethrow and abort the whole request —
                // looks like leftover code; confirm intent.
                try {
                    json.put("attributes", emitAttributes(objName));
                } catch (Exception e) {
                    json.put("attributes", emitAttributes(objName));
                }
                json.put("operations", emitOperations(objName));
                result.put(objName.getCanonicalName(), json);
            }
            StringWriter out = new StringWriter();
            if (jsonp.isEmpty()) {
                result.write(out);
            } else {
                out.append(jsonp).append("(");
                result.write(out);
                out.append(");");
            }
            return Response.ok(out.toString()).type(MediaType.APPLICATION_JSON)
                    .build();
        } catch (Exception e) {
            LOG.error(
                    "Error while retrieving mbeans for regex:" + objNameRegex,
                    e);
            return Response.serverError().entity(e.getMessage()).build();
        }
    }
    /**
     * Generate JSON for the MBean attributes. Composite values exposing a
     * "CurrentValue" entry are flattened to that entry; all other values are
     * rendered via toString(). Attributes with null names or values are skipped.
     *
     * @return JSON object mapping attribute name to its string value
     * @throws Exception
     */
    private JSONObject emitAttributes(ObjectName objName) throws Exception {
        MBeanServer mBeanServer = ManagementFactory.getPlatformMBeanServer();
        MBeanInfo mBeanInfo = mBeanServer.getMBeanInfo(objName);
        JSONObject resp = new JSONObject();
        if (mBeanInfo != null) {
            MBeanAttributeInfo[] attrs = mBeanInfo.getAttributes();
            if (attrs != null) {
                List<String> attrNames = new ArrayList<String>(attrs.length);
                for (MBeanAttributeInfo attr : attrs) {
                    attrNames.add(attr.getName());
                }
                AttributeList attrList = mBeanServer.getAttributes(objName,
                        attrNames.toArray(new String[0]));
                for (Attribute attr : attrList.asList()) {
                    Object value = attr.getValue();
                    String attrName = attr.getName();
                    if (attrName != null && value != null) {
                        String attrValue = null;
                        if (value instanceof CompositeDataSupport) {
                            CompositeDataSupport compositeValue = (CompositeDataSupport) value;
                            if (compositeValue != null) {
                                try {
                                    if (compositeValue.containsKey(CURRENT_VALUE)) {
                                        Object curValue = compositeValue
                                                .get(CURRENT_VALUE);
                                        attrValue = (curValue == null ? "null"
                                                : curValue.toString());
                                    }
                                }
                                catch (Exception e) {
                                    // Fall back to the composite's own rendering.
                                    attrValue = compositeValue.toString();
                                }
                            }
                        }
                        if (attrValue == null) {
                            attrValue = value.toString();
                        }
                        resp.put(attrName, (attrValue == null ? "null"
                                : attrValue));
                    }
                }
            }
        }
        return resp;
    }
    /**
     * Generate JSON for the MBean operations: name, description, return type,
     * impact, and the parameter signature of each operation.
     *
     * @param objName
     * @return JSON array of operation descriptors
     * @throws Exception
     */
    private JSONArray emitOperations(ObjectName objName) throws Exception {
        MBeanServer mBeanServer = ManagementFactory.getPlatformMBeanServer();
        MBeanInfo mBeanInfo = mBeanServer.getMBeanInfo(objName);
        JSONArray ar = new JSONArray();
        MBeanOperationInfo[] operations = mBeanInfo.getOperations();
        for (MBeanOperationInfo operation : operations) {
            JSONObject obj = new JSONObject();
            obj.put("name", operation.getName());
            obj.put("description", operation.getDescription());
            obj.put("returnType", operation.getReturnType());
            obj.put("impact", operation.getImpact());
            JSONArray params = new JSONArray();
            for (MBeanParameterInfo param : operation.getSignature()) {
                JSONObject p = new JSONObject();
                p.put("name", param.getName());
                p.put("type", param.getType());
                params.put(p);
            }
            obj.put("params", params);
            ar.put(obj);
        }
        return ar;
    }
}
| 3,317 |
0 | Create_ds/karyon/karyon2-admin-web/src/main/java/netflix/adminresources/resources | Create_ds/karyon/karyon2-admin-web/src/main/java/netflix/adminresources/resources/jmx/DynaTreeNode.java | package netflix.adminresources.resources.jmx;
import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Map;
import java.util.TreeMap;
/**
* Represents a single node in a tree and simplifies adding children and
* serializing the final tree into JSON.
*
* @author elandau
*/
/**
 * Represents a single node in a tree and simplifies adding children and
 * serializing the final tree into JSON.
 *
 * @author elandau
 */
public class DynaTreeNode {
    private static final Logger LOG = LoggerFactory.getLogger(DynaTreeNode.class);

    // Display label shown in the tree UI; also the key under which a child is stored.
    private String title;
    // DynaTree node key (e.g. an ObjectName pattern for this subtree).
    private String key;
    // Rendering mode, e.g. "domain", "inner" or "leaf".
    private String mode;
    // Lazily created, sorted by title; null until a child is added.
    private Map<String, DynaTreeNode> children;
    // When true the UI renders the node without a hyperlink.
    private boolean noLink = true;

    public DynaTreeNode() {
    }

    public DynaTreeNode setTitle(String title) {
        this.title = title;
        return this;
    }

    public String getTitle() {
        return this.title;
    }

    public DynaTreeNode setNoLink(boolean noLink) {
        this.noLink = noLink;
        return this;
    }

    public boolean getNoLink() {
        return this.noLink;
    }

    public DynaTreeNode setKey(String key) {
        this.key = key;
        return this;
    }

    public String getKey() {
        return this.key;
    }

    public DynaTreeNode setMode(String mode) {
        this.mode = mode;
        return this;
    }

    public String getMode() {
        return this.mode;
    }

    /** Returns the child map, creating it (title-sorted) on first access. */
    public Map<String, DynaTreeNode> getChildren() {
        if (children == null) {
            children = new TreeMap<String, DynaTreeNode>();
        }
        return children;
    }

    public DynaTreeNode getChild(String title) {
        return getChildren().get(title);
    }

    /** Adds (or replaces) a child, keyed by the child's title. */
    public void putChild(DynaTreeNode child) {
        getChildren().put(child.title, child);
    }

    /** Serializes this node (and, recursively, its children) in DynaTree format. */
    public JSONObject toJSONObject() throws JSONException {
        return new JSONObject().put("title", title).put("key", key).put("noLink", noLink).put("mode", mode)
                .put("expand", true).put("children", getChildrenJSONArray());
    }

    /**
     * Serializes the children to a JSON array, or returns {@code null} when this node
     * has none. A child that fails to serialize is logged and skipped (best-effort).
     */
    public JSONArray getChildrenJSONArray() {
        JSONArray ar = null;
        if (children != null) {
            ar = new JSONArray();
            for (DynaTreeNode a : children.values()) {
                try {
                    ar.put(a.toJSONObject());
                } catch (JSONException e) {
                    // Previously printed the stack trace to stderr; log it instead.
                    LOG.error("Failed to serialize DynaTree node '" + a.getTitle() + "' to JSON", e);
                }
            }
        }
        return ar;
    }
}
| 3,318 |
0 | Create_ds/karyon/karyon2-admin-web/src/main/java/netflix/adminresources/resources | Create_ds/karyon/karyon2-admin-web/src/main/java/netflix/adminresources/resources/jmx/JmxService.java | package netflix.adminresources.resources.jmx;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.management.Attribute;
import javax.management.AttributeList;
import javax.management.InstanceNotFoundException;
import javax.management.IntrospectionException;
import javax.management.MBeanAttributeInfo;
import javax.management.MBeanInfo;
import javax.management.MBeanOperationInfo;
import javax.management.MBeanServer;
import javax.management.MalformedObjectNameException;
import javax.management.ObjectName;
import javax.management.ReflectionException;
import javax.management.openmbean.CompositeDataSupport;
import java.lang.management.ManagementFactory;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
 * Singleton facade over the platform MBeanServer: lists domains, builds DynaTree
 * subtrees per domain, and reads attributes/operations of individual beans.
 */
public class JmxService {
    private static final Logger LOG = LoggerFactory.getLogger(JmxService.class);
    // Tokenizes an ObjectName key-property list: quoted value, unquoted value, or empty.
    private static final String CSV_PATTERN = "\"([^\"]+?)\",?|([^,]+),?|,";
    // Composite attribute key some MBeans use to expose their scalar value.
    private static final String CURRENT_VALUE = "CurrentValue";
    private static final String MODE_DOMAIN = "domain";
    private static final String MODE_INNER = "inner";
    private static final String MODE_LEAF = "leaf";
    // Initialization-on-demand holder: lazy, thread-safe singleton creation.
    private static class Holder {
        private static final JmxService instance = new JmxService();
    }
    private final MBeanServer mBeanServer;
    private static Pattern csvRE = Pattern.compile(CSV_PATTERN);
    private JmxService() {
        mBeanServer = ManagementFactory.getPlatformMBeanServer();
    }
    public static JmxService getInstance() {
        return Holder.instance;
    }
    /**
     * Return the list of all domains on this server
     * @return
     */
    public List<String> getDomainList() {
        return Lists.newArrayList(mBeanServer.getDomains());
    }
    /**
     * Return subtree of nodes for a domain. Each bean's key-property list is split
     * into path segments, building intermediate "inner" nodes and a "leaf" node per
     * bean. All failures are logged and an (possibly partial) tree is returned.
     */
    public DynaTreeNode getDomainTree(String domainName) {
        DynaTreeNode domainNode = new DynaTreeNode()
            .setTitle(domainName)
            .setKey(domainName)
            .setMode(MODE_DOMAIN);
        try {
            // List all objects in the domain
            ObjectName name = new ObjectName(domainName + ":*");
            Set<ObjectName> objs = mBeanServer.queryNames(name, null);
            // Convert object names to a tree
            for (ObjectName objName : objs) {
                MBeanInfo info = mBeanServer.getMBeanInfo(objName);
                Matcher m = csvRE.matcher(objName.getKeyPropertyListString());
                DynaTreeNode node = domainNode;
                StringBuilder innerKey = new StringBuilder();
                innerKey.append(domainName).append(":");
                while (m.find()) {
                    // Each match is one "key=value" segment; strip trailing comma and quotes.
                    String title = StringUtils.removeEnd(m.group(), ",");
                    String key = StringUtils.substringBefore(title, "=");
                    String value = StringUtils.substringAfter(title, "=");
                    value = StringUtils.removeStart(value, "\"");
                    value = StringUtils.removeEnd (value, "\"");
                    innerKey.append(title).append(",");
                    DynaTreeNode next = node.getChild(value);
                    if (next == null) {
                        next = new DynaTreeNode()
                            .setTitle(value)
                            .setMode(MODE_INNER)
                            .setKey(innerKey.toString() + "*")
                            .setNoLink(false);
                        node.putChild(next);
                    }
                    node = next;
                }
                // Deepest node becomes the leaf for this bean.
                node.setKey(objName.getCanonicalName())
                    .setMode(MODE_LEAF);
                if (   info.getAttributes()    != null
                    || info.getOperations()    != null
                    || info.getNotifications() != null) {
                    node.setNoLink(false);
                }
            }
        } catch (MalformedObjectNameException e) {
            LOG.error("Exception in getDomainTree ", e);
        } catch (IntrospectionException e) {
            LOG.error("Exception in getDomainTree ", e);
        } catch (ReflectionException e) {
            LOG.error("Exception in getDomainTree ", e);
        } catch (InstanceNotFoundException e) {
            LOG.error("Exception in getDomainTree ", e);
        } catch (RuntimeException e) {
            LOG.error("Exception in getDomainTree ", e);
        }
        return domainNode;
    }
    /**
     * Return all keys in a domain
     * @param domainName
     * @return
     */
    public List<String> getDomainKeys(String domainName) {
        return getKeysFromRegex(domainName + ":*");
    }
    /**
     * Return the list of all keys matching a regex. Any failure aborts the scan and
     * is logged; a partial list may be returned.
     * @param regex
     * @return
     */
    public List<String> getKeysFromRegex(String regex) {
        List<String> keys = Lists.newArrayList();
        try {
            // List all objects matching the pattern
            ObjectName name = new ObjectName(regex);
            Set<ObjectName> objs = mBeanServer.queryNames(name, null);
            for (ObjectName objName : objs) {
                // NOTE(review): result unused; the call acts as an existence check
                // whose exception aborts the whole loop — confirm that is intended.
                MBeanInfo info = mBeanServer.getMBeanInfo(objName);
                keys.add(objName.getCanonicalName());
            }
        } catch (Exception e) {
            LOG.error("Exception in getKeysFromRegex ", e);
        }
        return keys;
    }
    /**
     * Return a map of all attributes for objects matching the regex, keyed by
     * canonical ObjectName, in query order.
     * @param regex
     * @return
     * @throws Exception
     */
    public Map<String, Map<String, String>> getMBeanAttributesByRegex(String regex) throws Exception {
        Map<String, Map<String, String>> result = Maps.newLinkedHashMap();
        ObjectName name = new ObjectName(regex);
        Set<ObjectName> objs = mBeanServer.queryNames(name, null);
        for (ObjectName objName : objs) {
            result.put(objName.getCanonicalName(), getMBeanAttributes(objName));
        }
        return result;
    }
    /**
     * Get list of all attributes of the specified key
     * @param key
     * @return
     */
    public Map<String, String> getMBeanAttributes(String key) throws Exception {
        return getMBeanAttributes(new ObjectName(key));
    }
    /**
     * Get list of all attributes of an object. Composite values exposing a
     * "CurrentValue" entry are flattened to that entry; others use toString().
     * @param objName
     * @return map of attribute name to rendered value
     * @throws Exception
     */
    private Map<String, String> getMBeanAttributes(ObjectName objName) throws Exception {
        Map<String, String> response = Maps.newLinkedHashMap();
        // Look for the object
        MBeanInfo mBeanInfo = mBeanServer.getMBeanInfo(objName);
        if (mBeanInfo != null) {
            // Does it have attributes?
            MBeanAttributeInfo[] attrs = mBeanInfo.getAttributes();
            if (attrs != null) {
                // List all attributes
                List<String> attrNames = Lists.newArrayList();
                for (MBeanAttributeInfo attr : attrs) {
                    attrNames.add(attr.getName());
                }
                AttributeList attrList = mBeanServer.getAttributes(objName, attrNames.toArray(new String[0]));
                // Process each attribute
                for (Attribute attr : attrList.asList()) {
                    String attrName = attr.getName();
                    Object value = attr.getValue();
                    String attrValue = null;
                    // Attribute has data
                    if (value != null) {
                        // Special case of CompositeDataSupport
                        if (value instanceof CompositeDataSupport) {
                            CompositeDataSupport compositeValue = (CompositeDataSupport) value;
                            if (compositeValue != null) {
                                try {
                                    if (compositeValue.containsKey(CURRENT_VALUE)) {
                                        Object curValue = compositeValue.get(CURRENT_VALUE);
                                        attrValue = (curValue == null ? "null" : curValue.toString());
                                    }
                                }
                                catch (Exception e) {
                                    // Fall back to the composite's own rendering.
                                    attrValue = compositeValue.toString();
                                }
                            }
                        }
                        if (attrValue == null) {
                            attrValue = value.toString();
                        }
                    }
                    else {
                        // NOTE(review): likely meant attrValue = "none"; as written
                        // the local is overwritten and the attribute is stored with
                        // a null value — confirm before changing output.
                        value = "none";
                    }
                    response.put(attrName, attrValue);
                }
            }
        }
        return response;
    }
    /**
     * Return all operations for the specified mbean name
     * @param name
     * @return
     * @throws Exception
     */
    public MBeanOperationInfo[] getMBeanOperations(String name) throws Exception {
        MBeanServer mBeanServer = ManagementFactory.getPlatformMBeanServer();
        MBeanInfo mBeanInfo = mBeanServer.getMBeanInfo(new ObjectName(name));
        return mBeanInfo.getOperations();
    }
}
| 3,319 |
0 | Create_ds/karyon/karyon2-admin-web/src/main/java/netflix/adminresources/resources | Create_ds/karyon/karyon2-admin-web/src/main/java/netflix/adminresources/resources/model/PropertySource.java | package netflix.adminresources.resources.model;
/**
 * Value object pairing a configuration-source name with the value that source
 * supplies for a property. Plain JavaBean (no-arg constructor plus setters)
 * so it can be serialized by the admin resource layer.
 */
public class PropertySource {

    private String sourceName;
    private String value;

    /** No-arg constructor required by bean serialization. */
    public PropertySource() {
    }

    public PropertySource(String sourceName, String value) {
        this.sourceName = sourceName;
        this.value = value;
    }

    /** @return name of the configuration source that supplied the value */
    public String getSourceName() {
        return sourceName;
    }

    public void setSourceName(String sourceName) {
        this.sourceName = sourceName;
    }

    /** @return the property value as seen by this source */
    public String getValue() {
        return value;
    }

    public void setValue(String value) {
        this.value = value;
    }

    @Override
    public String toString() {
        return String.format("PropertySource [sourceName=%s, value=%s]", sourceName, value);
    }
}
| 3,320 |
0 | Create_ds/karyon/karyon2-admin-web/src/main/java/netflix/adminresources/resources | Create_ds/karyon/karyon2-admin-web/src/main/java/netflix/adminresources/resources/model/Property.java | package netflix.adminresources.resources.model;
import javax.xml.bind.annotation.XmlRootElement;
import java.util.List;
/**
 * Value object describing a configuration property: its name, effective value,
 * and the list of sources that contribute a value for it. Exposed as an XML
 * root element for the admin resources.
 */
@XmlRootElement(name = "property")
public class Property {

    private String name;
    private String value;
    private List<PropertySource> sources;

    /** No-arg constructor required by JAXB / bean serialization. */
    public Property() {
    }

    public Property(String name, String value, List<PropertySource> sources) {
        this.name = name;
        this.value = value;
        this.sources = sources;
    }

    /** @return the property name */
    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    /** @return the property's effective value */
    public String getValue() {
        return value;
    }

    public void setValue(String value) {
        this.value = value;
    }

    /** @return the sources contributing a value for this property */
    public List<PropertySource> getSources() {
        return sources;
    }

    public void setSources(List<PropertySource> sources) {
        this.sources = sources;
    }

    @Override
    public String toString() {
        return String.format("Property [name=%s, value=%s, sources=%s]", name, value, sources);
    }
}
| 3,321 |
0 | Create_ds/karyon/karyon2-admin-web/src/main/java/netflix/adminresources | Create_ds/karyon/karyon2-admin-web/src/main/java/netflix/adminresources/pages/GuicePage.java | package netflix.adminresources.pages;
import com.google.inject.Module;
import java.util.Arrays;
import java.util.List;
import netflix.adminresources.AbstractAdminPageInfo;
import netflix.adminresources.AdminPage;
/**
 * Registers the "Guice" page with the admin console; discovered via the
 * {@link AdminPage} classpath scan.
 */
@AdminPage
public class GuicePage extends AbstractAdminPageInfo {
    // Page identifier and display label passed to the page registry.
    public static final String PAGE_ID = "guice";
    public static final String NAME = "Guice";
    public GuicePage() {
        super(PAGE_ID, NAME);
    }
    @Override
    public List<Module> getGuiceModules() {
        // Installs the grapher name/port-id bindings used by the injector views.
        return Arrays.<Module>asList(new KaryonGrapherModule());
    }
}
| 3,322 |
0 | Create_ds/karyon/karyon2-admin-web/src/main/java/netflix/adminresources | Create_ds/karyon/karyon2-admin-web/src/main/java/netflix/adminresources/pages/GuiceProvisionPage.java | package netflix.adminresources.pages;
import com.google.inject.Module;
import java.util.Arrays;
import java.util.List;
import netflix.adminresources.AbstractAdminPageInfo;
import netflix.adminresources.AdminPage;
/**
 * Registers the "GuiceProvision" page with the admin console; discovered via
 * the {@link AdminPage} classpath scan.
 */
@AdminPage
public class GuiceProvisionPage extends AbstractAdminPageInfo {
    // Page identifier and display label passed to the page registry.
    public static final String PAGE_ID = "guiceprovision";
    public static final String NAME = "GuiceProvision";
    public GuiceProvisionPage() {
        super(PAGE_ID, NAME);
    }
    @Override
    public List<Module> getGuiceModules() {
        // Installs the grapher name/port-id bindings used by the injector views.
        return Arrays.<Module>asList(new KaryonGrapherModule());
    }
}
| 3,323 |
0 | Create_ds/karyon/karyon2-admin-web/src/main/java/netflix/adminresources | Create_ds/karyon/karyon2-admin-web/src/main/java/netflix/adminresources/pages/PropertiesPage.java | package netflix.adminresources.pages;
import netflix.adminresources.AbstractAdminPageInfo;
import netflix.adminresources.AdminPage;
/**
 * Registers the Archaius properties page (id "archprops") with the admin
 * console; discovered via the {@link AdminPage} classpath scan.
 */
@AdminPage
public class PropertiesPage extends AbstractAdminPageInfo {
    // Page identifier and display label passed to the page registry.
    public static final String PAGE_ID = "archprops";
    public static final String NAME = "Archaius";
    public PropertiesPage() {
        super(PAGE_ID, NAME);
    }
}
| 3,324 |
0 | Create_ds/karyon/karyon2-admin-web/src/main/java/netflix/adminresources | Create_ds/karyon/karyon2-admin-web/src/main/java/netflix/adminresources/pages/EnvPage.java | package netflix.adminresources.pages;
import netflix.adminresources.AbstractAdminPageInfo;
import netflix.adminresources.AdminPage;
/**
 * Registers the "Environment" page (id "env") with the admin console;
 * discovered via the {@link AdminPage} classpath scan.
 */
@AdminPage
public class EnvPage extends AbstractAdminPageInfo {
    // Page identifier and display label passed to the page registry.
    public static final String PAGE_ID = "env";
    public static final String NAME = "Environment";
    public EnvPage() {
        super(PAGE_ID, NAME);
    }
}
| 3,325 |
0 | Create_ds/karyon/karyon2-admin-web/src/main/java/netflix/adminresources | Create_ds/karyon/karyon2-admin-web/src/main/java/netflix/adminresources/pages/JmxPage.java | package netflix.adminresources.pages;
import netflix.adminresources.AbstractAdminPageInfo;
import netflix.adminresources.AdminPage;
/**
 * Registers the "JMX" page with the admin console; discovered via the
 * {@link AdminPage} classpath scan.
 */
@AdminPage
public class JmxPage extends AbstractAdminPageInfo {
    // Page identifier and display label passed to the page registry.
    public static final String PAGE_ID = "jmx";
    public static final String NAME = "JMX";
    public JmxPage() {
        super(PAGE_ID, NAME);
    }
}
| 3,326 |
0 | Create_ds/karyon/karyon2-admin-web/src/main/java/netflix/adminresources | Create_ds/karyon/karyon2-admin-web/src/main/java/netflix/adminresources/pages/GuiceGraphPage.java | package netflix.adminresources.pages;
import com.google.inject.Module;
import java.util.Arrays;
import java.util.List;
import netflix.adminresources.AbstractAdminPageInfo;
import netflix.adminresources.AdminPage;
/**
 * Registers the "GuiceGraph" page with the admin console; discovered via the
 * {@link AdminPage} classpath scan.
 */
@AdminPage
public class GuiceGraphPage extends AbstractAdminPageInfo {
    // Page identifier and display label passed to the page registry.
    public static final String PAGE_ID = "guicegraph";
    public static final String NAME = "GuiceGraph";
    public GuiceGraphPage() {
        super(PAGE_ID, NAME);
    }
    @Override
    public List<Module> getGuiceModules() {
        // Installs the grapher name/port-id bindings used by the injector views.
        return Arrays.<Module>asList(new KaryonGrapherModule());
    }
}
| 3,327 |
0 | Create_ds/karyon/karyon2-admin-web/src/main/java/netflix/adminresources | Create_ds/karyon/karyon2-admin-web/src/main/java/netflix/adminresources/pages/KaryonGrapherModule.java | package netflix.adminresources.pages;
import com.google.inject.AbstractModule;
import com.google.inject.grapher.NameFactory;
import com.google.inject.grapher.ShortNameFactory;
import com.google.inject.grapher.graphviz.PortIdFactory;
import com.google.inject.grapher.graphviz.PortIdFactoryImpl;
/**
 * Guice module configuring the injector grapher to use short class names and
 * the default port-id scheme when rendering graphs.
 * <p>
 * Instances are value-equal by class so that installing this module from
 * several admin pages results in a single effective installation (Guice
 * de-duplicates equal modules).
 */
public class KaryonGrapherModule extends AbstractModule {
    @Override
    protected void configure() {
        bind(NameFactory.class).to(ShortNameFactory.class);
        bind(PortIdFactory.class).to(PortIdFactoryImpl.class);
    }
    @Override
    public int hashCode() {
        return getClass().hashCode();
    }
    @Override
    public boolean equals(Object obj) {
        // BUG FIX: the previous implementation dereferenced obj unconditionally,
        // throwing NullPointerException for equals(null) and violating the
        // Object.equals contract.
        return obj != null && getClass().equals(obj.getClass());
    }
}
| 3,328 |
0 | Create_ds/karyon/karyon2-admin-web/src/main/java/netflix/adminresources | Create_ds/karyon/karyon2-admin-web/src/main/java/netflix/adminresources/pages/JarsPage.java | package netflix.adminresources.pages;
import netflix.adminresources.AbstractAdminPageInfo;
import netflix.adminresources.AdminPage;
/**
 * Registers the "Jars" page with the admin console; discovered via the
 * {@link AdminPage} classpath scan.
 */
@AdminPage
public class JarsPage extends AbstractAdminPageInfo {
    // Page identifier and display label passed to the page registry.
    public static final String PAGE_ID = "jars";
    public static final String NAME = "Jars";
    public JarsPage() {
        super(PAGE_ID, NAME);
    }
}
| 3,329 |
0 | Create_ds/karyon/karyon2-admin-web/src/main/java/netflix/adminresources | Create_ds/karyon/karyon2-admin-web/src/main/java/netflix/adminresources/pages/AdminPageResource.java | package netflix.adminresources.pages;
import com.google.inject.Inject;
import com.google.inject.Singleton;
import com.sun.jersey.api.view.Viewable;
import netflix.admin.AdminConfigImpl;
import netflix.admin.AdminContainerConfig;
import netflix.adminresources.AdminPageInfo;
import netflix.adminresources.AdminPageRegistry;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.annotation.PostConstruct;
import javax.ws.rs.*;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
/**
 * Jersey root resource serving the HTML views of the Karyon admin console.
 * Pages are rendered as FreeMarker templates via Jersey {@link Viewable}s;
 * the set of available pages comes from the {@link AdminPageRegistry}.
 */
@Path("/")
@Produces(MediaType.TEXT_HTML)
@Singleton
public class AdminPageResource {
    private static final Logger LOG = LoggerFactory.getLogger(AdminPageResource.class);
    // Optional injections: init() supplies defaults when no bindings exist.
    @Inject(optional = true)
    private AdminContainerConfig adminContainerConfig;
    @Inject(optional = true)
    private AdminPageRegistry adminPageRegistry;
    /**
     * Fills in default implementations for dependencies Guice did not inject.
     * NOTE(review): the handlers below dereference adminContainerConfig before
     * re-checking it for null, so they rely on this @PostConstruct hook having
     * run -- confirm the hosting container honors @PostConstruct.
     */
    @PostConstruct
    public void init() {
        if (adminPageRegistry == null) {
            adminPageRegistry = new AdminPageRegistry();
        }
        if (adminContainerConfig == null) {
            adminContainerConfig = new AdminConfigImpl();
        }
    }
    /** Renders the console home page listing all registered admin pages. */
    @GET()
    public Viewable showIndex() {
        Map<String, Object> model = new HashMap<String, Object>();
        model.put("ajax_base", adminContainerConfig.ajaxDataResourceContext());
        model.put("template_base", adminContainerConfig.templateResourceContext());
        if (adminPageRegistry != null) {
            final Collection<AdminPageInfo> adminPages = adminPageRegistry.getAllPages();
            model.put("adminPages", adminPages);
        }
        if (adminContainerConfig != null) {
            model.put("homeScriptResources", adminContainerConfig.homeScriptResources());
        }
        return new Viewable("/webadmin/home.ftl", model, adminContainerConfig.getClass());
    }
    /**
     * Renders the template of a single registered page, merging the page's own
     * data model into the view model. Responds 404 for unknown page ids.
     */
    @GET
    @Path("/{view}")
    public Viewable showViewIndex(
            @PathParam("view") String view,
            @QueryParam("id") @DefaultValue("") String id
    ) {
        LOG.info(view);
        Map<String, Object> model = new HashMap<String, Object>();
        model.put("id", id);
        model.put("ajax_base", adminContainerConfig.ajaxDataResourceContext());
        model.put("template_base", adminContainerConfig.templateResourceContext());
        if (adminPageRegistry != null && adminPageRegistry.getPageInfo(view) != null) {
            AdminPageInfo pageInfo = adminPageRegistry.getPageInfo(view);
            final Map<String, Object> pageDataModel = pageInfo.getDataModel();
            if (pageDataModel != null) {
                model.putAll(pageDataModel);
            }
            return new Viewable(pageInfo.getPageTemplate(), model, pageInfo.getClass());
        }
        throw new WebApplicationException(Response.Status.NOT_FOUND);
    }
    /** Renders the "create" template for the named view. */
    @POST
    @Path("/{view}")
    public Viewable createObject(
            @PathParam("view") String view,
            @QueryParam("id") @DefaultValue("") String id
    ) {
        LOG.info(view);
        Map<String, Object> model = new HashMap<String, Object>();
        model.put("id", id);
        return new Viewable("/webadmin/" + view + "/create.ftl", model, adminContainerConfig.getClass());
    }
    /** Renders the detail view of a single JMX MBean identified by id/key. */
    @GET
    @Path("/jmx/mbean")
    public Viewable showJMXMbean(@QueryParam("id") String id, @QueryParam("key") String key) {
        LOG.info("");
        Map<String, Object> model = new HashMap<String, Object>();
        model.put("id", id);
        model.put("key", key);
        model.put("ajax_base", adminContainerConfig.ajaxDataResourceContext());
        return new Viewable("/webadmin/jmx/view.ftl", model, adminContainerConfig.getClass());
    }
}
| 3,330 |
0 | Create_ds/karyon/karyon2-admin-web/src/main/java/com/google/inject | Create_ds/karyon/karyon2-admin-web/src/main/java/com/google/inject/grapher/KaryonAbstractInjectorGrapher.java | package com.google.inject.grapher;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import com.google.inject.Binding;
import com.google.inject.ConfigurationException;
import com.google.inject.Injector;
import com.google.inject.Key;
import java.io.IOException;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
public abstract class KaryonAbstractInjectorGrapher implements InjectorGrapher {
private final RootKeySetCreator rootKeySetCreator;
private final AliasCreator aliasCreator;
private final NodeCreator nodeCreator;
private final EdgeCreator edgeCreator;
/**
* Parameters used to override default settings of the grapher.
*
* @since 4.0
*/
public static final class GrapherParameters {
private RootKeySetCreator rootKeySetCreator = new DefaultRootKeySetCreator();
private AliasCreator aliasCreator = new ProviderAliasCreator();
private NodeCreator nodeCreator = new DefaultNodeCreator();
private EdgeCreator edgeCreator = new DefaultEdgeCreator();
public RootKeySetCreator getRootKeySetCreator() {
return rootKeySetCreator;
}
public GrapherParameters setRootKeySetCreator(
RootKeySetCreator rootKeySetCreator) {
this.rootKeySetCreator = rootKeySetCreator;
return this;
}
public AliasCreator getAliasCreator() {
return aliasCreator;
}
public GrapherParameters setAliasCreator(AliasCreator aliasCreator) {
this.aliasCreator = aliasCreator;
return this;
}
public NodeCreator getNodeCreator() {
return nodeCreator;
}
public GrapherParameters setNodeCreator(NodeCreator nodeCreator) {
this.nodeCreator = nodeCreator;
return this;
}
public EdgeCreator getEdgeCreator() {
return edgeCreator;
}
public GrapherParameters setEdgeCreator(EdgeCreator edgeCreator) {
this.edgeCreator = edgeCreator;
return this;
}
}
public KaryonAbstractInjectorGrapher() {
this(new GrapherParameters());
}
public KaryonAbstractInjectorGrapher(GrapherParameters options) {
this.rootKeySetCreator = options.getRootKeySetCreator();
this.aliasCreator = options.getAliasCreator();
this.nodeCreator = options.getNodeCreator();
this.edgeCreator = options.getEdgeCreator();
}
@Override
public final void graph(Injector injector) throws IOException {
graph(injector, rootKeySetCreator.getRootKeys(injector));
}
@Override
public final void graph(Injector injector, Set<Key<?>> root)
throws IOException {
reset();
Iterable<Binding<?>> bindings = getBindings(injector, root);
Map<NodeId, NodeId> aliases = resolveAliases(aliasCreator
.createAliases(bindings));
createNodes(nodeCreator.getNodes(bindings), aliases);
createEdges(edgeCreator.getEdges(bindings), aliases);
postProcess();
}
/** Resets the state of the grapher before rendering a new graph. */
protected abstract void reset() throws IOException;
/** Adds a new interface node to the graph. */
protected abstract void newInterfaceNode(InterfaceNode node)
throws IOException;
/** Adds a new implementation node to the graph. */
protected abstract void newImplementationNode(ImplementationNode node)
throws IOException;
/** Adds a new instance node to the graph. */
protected abstract void newInstanceNode(InstanceNode node)
throws IOException;
/** Adds a new dependency edge to the graph. */
protected abstract void newDependencyEdge(DependencyEdge edge)
throws IOException;
/** Adds a new binding edge to the graph. */
protected abstract void newBindingEdge(BindingEdge edge) throws IOException;
/**
* Performs any post processing required after all nodes and edges have been
* added.
*/
protected abstract void postProcess() throws IOException;
private void createNodes(Iterable<Node> nodes, Map<NodeId, NodeId> aliases)
throws IOException {
for (Node node : nodes) {
NodeId originalId = node.getId();
NodeId resolvedId = resolveAlias(aliases, originalId);
node = node.copy(resolvedId);
// Only render nodes that aren't aliased to some other node.
if (resolvedId.equals(originalId)) {
if (node instanceof InterfaceNode) {
newInterfaceNode((InterfaceNode) node);
} else if (node instanceof ImplementationNode) {
newImplementationNode((ImplementationNode) node);
} else {
newInstanceNode((InstanceNode) node);
}
}
}
}
private void createEdges(Iterable<Edge> edges, Map<NodeId, NodeId> aliases)
throws IOException {
for (Edge edge : edges) {
edge = edge.copy(resolveAlias(aliases, edge.getFromId()),
resolveAlias(aliases, edge.getToId()));
if (!edge.getFromId().equals(edge.getToId())) {
if (edge instanceof BindingEdge) {
newBindingEdge((BindingEdge) edge);
} else {
newDependencyEdge((DependencyEdge) edge);
}
}
}
}
private NodeId resolveAlias(Map<NodeId, NodeId> aliases, NodeId nodeId) {
return aliases.containsKey(nodeId) ? aliases.get(nodeId) : nodeId;
}
/**
* Transitively resolves aliases. Given aliases (X to Y) and (Y to Z), it
* will return mappings (X to Z) and (Y to Z).
*/
private Map<NodeId, NodeId> resolveAliases(Iterable<Alias> aliases) {
Map<NodeId, NodeId> resolved = Maps.newHashMap();
Map<NodeId, Set<NodeId>> inverse = Maps.newHashMap();
for (Alias alias : aliases) {
NodeId from = alias.getFromId();
NodeId to = alias.getToId();
if (resolved.containsKey(to)) {
to = resolved.get(to);
}
resolved.put(from, to);
if (inverse.get(to) == null) {
inverse.put(to, Sets.<NodeId> newHashSet());
}
inverse.get(to).add(from);
Set<NodeId> prev = inverse.get(from);
if (prev != null) {
for (NodeId id : prev) {
resolved.remove(id);
inverse.get(from).remove(id);
resolved.put(id, to);
inverse.get(to).add(id);
}
}
}
return resolved;
}
/**
* Returns the bindings for the root keys and their transitive dependencies.
*/
private Iterable<Binding<?>> getBindings(Injector injector, Set<Key<?>> root) {
Set<Key<?>> keys = Sets.newHashSet(root);
Set<Key<?>> visitedKeys = Sets.newHashSet();
List<Binding<?>> bindings = Lists.newArrayList();
TransitiveDependencyVisitor keyVisitor = new TransitiveDependencyVisitor();
while (!keys.isEmpty()) {
Iterator<Key<?>> iterator = keys.iterator();
Key<?> key = iterator.next();
iterator.remove();
if (!visitedKeys.contains(key)) {
try {
Binding<?> binding = injector.getBinding(key);
bindings.add(binding);
visitedKeys.add(key);
keys.addAll(binding.acceptTargetVisitor(keyVisitor));
}
catch (ConfigurationException e) {
System.out.println("Missing binding for : " + key);
visitedKeys.add(key);
}
}
}
return bindings;
}
} | 3,331 |
0 | Create_ds/sagemaker-spark-container/test/resources/code/java/hello-java-spark/src/main/java/com/amazonaws/sagemaker/spark | Create_ds/sagemaker-spark-container/test/resources/code/java/hello-java-spark/src/main/java/com/amazonaws/sagemaker/spark/test/HelloJavaSparkApp.java | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You
* may not use this file except in compliance with the License. A copy of
* the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is
* distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
* ANY KIND, either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package com.amazonaws.sagemaker.spark.test;
import org.apache.commons.cli.BasicParser;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.lang3.StringUtils;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.types.DataTypes;
/**
 * Sample Spark driver: reads a JSON sales data set, runs a few example SQL and
 * DataFrame operations (filter, group-by average, a doubling UDF), and writes
 * the result back out as JSON.
 */
public class HelloJavaSparkApp {
    public static void main(final String[] args) {
        System.out.println("Hello World, this is Java-Spark!");
        final CommandLine parsedArgs = parseArgs(args);
        final String inputPath = parsedArgs.getOptionValue("input");
        final String outputPath = parsedArgs.getOptionValue("output");
        final SparkSession spark = SparkSession.builder()
                .appName("Hello Spark App")
                .getOrCreate();
        System.out.println("Got a Spark session with version: " + spark.version());
        // Load test data set
        System.out.println("Reading input from: " + inputPath);
        final Dataset salesDF = spark.read().json(inputPath);
        salesDF.printSchema();
        salesDF.createOrReplaceTempView("sales");
        final Dataset topDF = spark.sql("SELECT date, sale FROM sales WHERE sale > 750 SORT BY sale DESC");
        // Show the first 20 rows of the dataframe
        topDF.show();
        final Dataset avgDF = salesDF.groupBy("date").avg().orderBy("date");
        System.out.println("Collected average sales: " + StringUtils.join(avgDF.collectAsList()));
        // Define a UDF that doubles an integer column
        spark.sqlContext().udf().register("double", (Long n) -> n + n, DataTypes.LongType);
        final Dataset saleDoubleDF = salesDF
                .selectExpr("date", "sale", "double(sale) as sale_double")
                .orderBy("date", "sale");
        saleDoubleDF.show();
        System.out.println("Writing output to: " + outputPath);
        // coalesce(1) so this sample emits a single JSON part file.
        saleDoubleDF.coalesce(1).write().json(outputPath);
        spark.stop();
    }
    /**
     * Parses the required --input/--output options; on failure prints usage
     * and rethrows as an unchecked exception.
     */
    private static CommandLine parseArgs(final String[] args) {
        final Options options = new Options();
        final CommandLineParser parser = new BasicParser();
        final Option input = new Option("i", "input", true, "input path");
        input.setRequired(true);
        options.addOption(input);
        final Option output = new Option("o", "output", true, "output path");
        output.setRequired(true);
        options.addOption(output);
        try {
            return parser.parse(options, args);
        } catch (ParseException e) {
            // BUG FIX: the usage text previously referred to "HelloScalaSparkApp",
            // a copy-paste leftover from the Scala variant of this sample.
            new HelpFormatter().printHelp("HelloJavaSparkApp --input /opt/ml/input/foo --output /opt/ml/output/bar",
                    options);
            throw new RuntimeException(e);
        }
    }
}
| 3,332 |
0 | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples/helloworld/HelloWorldActivities.java | /**
* Copyright 2012-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.simpleworkflow.flow.examples.helloworld;
import com.amazonaws.services.simpleworkflow.flow.annotations.Activities;
import com.amazonaws.services.simpleworkflow.flow.annotations.Activity;
import com.amazonaws.services.simpleworkflow.flow.annotations.ActivityRegistrationOptions;
/**
 * Contract of the hello world activities. The generated activities client is
 * derived from this interface.
 */
@Activities
public interface HelloWorldActivities {
    // Short timeouts fit this trivial activity: 30s from scheduling to worker
    // pickup, 10s from pickup to completion.
    @Activity(name = "PrintHello", version = "1.0")
    @ActivityRegistrationOptions(defaultTaskScheduleToStartTimeoutSeconds = 30, defaultTaskStartToCloseTimeoutSeconds = 10)
    void printHello(String name);
}
| 3,333 |
0 | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples/helloworld/HelloWorldWorkflowImpl.java | /**
* Copyright 2012-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.simpleworkflow.flow.examples.helloworld;
/**
 * Implementation of the hello world workflow: delegates the greeting to the
 * PrintHello activity through the generated activities client.
 */
public class HelloWorldWorkflowImpl implements HelloWorldWorkflow{
    // Generated asynchronous client used to schedule activity tasks.
    HelloWorldActivitiesClient client = new HelloWorldActivitiesClientImpl();
    @Override
    public void helloWorld(String name) {
        client.printHello(name);
    }
}
0 | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples/helloworld/ActivityHost.java | /**
* Copyright 2012-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.simpleworkflow.flow.examples.helloworld;
import java.io.IOException;
import java.util.concurrent.TimeUnit;
import com.amazonaws.services.simpleworkflow.AmazonSimpleWorkflow;
import com.amazonaws.services.simpleworkflow.flow.ActivityWorker;
import com.amazonaws.services.simpleworkflow.flow.examples.common.ConfigHelper;
/**
 * Standalone process that hosts the activity implementations for this sample.
 * Starts an {@link ActivityWorker} polling the "HelloWorld" task list and
 * keeps the JVM alive until a key is pressed on stdin.
 */
public class ActivityHost {

    private static final String ACTIVITIES_TASK_LIST = "HelloWorld";

    public static void main(String[] args) throws Exception {
        final ConfigHelper config = ConfigHelper.createConfig();
        final AmazonSimpleWorkflow swf = config.createSWFClient();
        final ActivityWorker worker = new ActivityWorker(swf, config.getDomain(), ACTIVITIES_TASK_LIST);

        // Register the activity implementations and begin polling for tasks.
        worker.addActivitiesImplementation(new HelloWorldActivitiesImpl());
        worker.start();
        System.out.println("Activity Worker Started for Task List: " + worker.getTaskListToPoll());

        // Drain in-flight activities (up to one minute) before the JVM exits.
        Runtime.getRuntime().addShutdownHook(new Thread() {
            @Override
            public void run() {
                try {
                    worker.shutdownAndAwaitTermination(1, TimeUnit.MINUTES);
                    System.out.println("Activity Worker Exited.");
                } catch (InterruptedException e) {
                    e.printStackTrace();
                }
            }
        });

        // Block until the operator presses a key, then exit (triggering the hook).
        System.out.println("Please press any key to terminate service.");
        try {
            System.in.read();
        } catch (IOException e) {
            e.printStackTrace();
        }
        System.exit(0);
    }
}
| 3,335 |
0 | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples/helloworld/HelloWorldWorkflow.java | /**
* Copyright 2012-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.simpleworkflow.flow.examples.helloworld;
import com.amazonaws.services.simpleworkflow.flow.annotations.Execute;
import com.amazonaws.services.simpleworkflow.flow.annotations.Workflow;
import com.amazonaws.services.simpleworkflow.flow.annotations.WorkflowRegistrationOptions;
/**
 * Contract of the hello world workflow. The generated workflow clients are
 * derived from this interface.
 */
@Workflow
@WorkflowRegistrationOptions(defaultExecutionStartToCloseTimeoutSeconds = 60)
public interface HelloWorldWorkflow {
    // Workflow entry point; invoking it through the external client starts a
    // new workflow execution.
    @Execute(version = "1.0")
    void helloWorld(String name);
}
| 3,336 |
0 | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples/helloworld/HelloWorldActivitiesImpl.java | /**
* Copyright 2012-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.simpleworkflow.flow.examples.helloworld;
/**
 * Implementation of the hello world activities: greets the given name on
 * standard output.
 */
public class HelloWorldActivitiesImpl implements HelloWorldActivities {
    @Override
    public void printHello(String name) {
        final String greeting = "Hello " + name + "!";
        System.out.println(greeting);
    }
}
| 3,337 |
0 | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples/helloworld/WorkflowExecutionStarter.java | /**
* Copyright 2012-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.simpleworkflow.flow.examples.helloworld;
import com.amazonaws.services.simpleworkflow.AmazonSimpleWorkflow;
import com.amazonaws.services.simpleworkflow.flow.examples.common.ConfigHelper;
import com.amazonaws.services.simpleworkflow.model.WorkflowExecution;
/**
 * Command-line starter that kicks off a single hello world workflow execution
 * through the generated external workflow client and prints its identifiers.
 */
public class WorkflowExecutionStarter {
    public static void main(String[] args) throws Exception {
        ConfigHelper configHelper = ConfigHelper.createConfig();
        AmazonSimpleWorkflow swfService = configHelper.createSWFClient();
        String domain = configHelper.getDomain();
        HelloWorldWorkflowClientExternalFactory clientFactory = new HelloWorldWorkflowClientExternalFactoryImpl(swfService,
                domain);
        HelloWorldWorkflowClientExternal workflow = clientFactory.getClient();
        // Start Workflow Execution
        workflow.helloWorld("User");
        // WorkflowExecution is available after workflow creation
        WorkflowExecution workflowExecution = workflow.getWorkflowExecution();
        System.out.println("Started helloWorld workflow with workflowId=\"" + workflowExecution.getWorkflowId()
                + "\" and runId=\"" + workflowExecution.getRunId() + "\"");
    }
}
| 3,338 |
0 | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples/helloworld/HelloWorldTest.java | /**
* Copyright 2012-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.simpleworkflow.flow.examples.helloworld;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import com.amazonaws.services.simpleworkflow.flow.annotations.Asynchronous;
import com.amazonaws.services.simpleworkflow.flow.core.Promise;
import com.amazonaws.services.simpleworkflow.flow.core.Task;
import com.amazonaws.services.simpleworkflow.flow.core.TryFinally;
import com.amazonaws.services.simpleworkflow.flow.junit.FlowBlockJUnit4ClassRunner;
import com.amazonaws.services.simpleworkflow.flow.junit.WorkflowTest;
/**
 * Unit tests for the HelloWorld workflow, run under the Flow framework's JUnit
 * runner which provides a test decision context so asynchronous code executes
 * deterministically in-process.
 */
@RunWith(FlowBlockJUnit4ClassRunner.class)
public class HelloWorldTest {

    /**
     * In-memory stub for the activities: instead of printing, it records the
     * greeting so the tests can assert on it.
     */
    private final class TestHelloWorldActivities implements HelloWorldActivities {

        // Last greeting produced by printHello; null until the activity runs.
        private String greeting;

        @Override
        public void printHello(String name) {
            greeting = "Hello " + name + "!";
        }

        public String getGreeting() {
            return greeting;
        }
    }

    // Provides the test decision context; registered as a JUnit rule so it is
    // set up/torn down around every test method.
    @Rule
    public WorkflowTest workflowTest = new WorkflowTest();

    private final HelloWorldWorkflowClientFactory workflowFactory = new HelloWorldWorkflowClientFactoryImpl();

    private TestHelloWorldActivities activitiesImplementation;

    @Before
    public void setUp() throws Exception {
        // Register the workflow implementation and the stub activities with the
        // test harness before each test.
        workflowTest.addWorkflowImplementationType(HelloWorldWorkflowImpl.class);
        activitiesImplementation = new TestHelloWorldActivities();
        workflowTest.addActivitiesImplementation(activitiesImplementation);
    }

    /**
     * Test through generated workflow client. As workflow unit tests run in
     * dummy workflow context the same client that is used for creation of child
     * workflows is used.
     */
    @Test
    public void testThroughClient() throws Exception {
        HelloWorldWorkflowClient workflow = workflowFactory.getClient();
        Promise<Void> done = workflow.helloWorld("World");
        assertGreeting(done);
    }

    // @Asynchronous defers the assertion until the workflow's completion
    // promise is ready; the parameter is only used for that ordering.
    @Asynchronous
    private void assertGreeting(Promise<Void> done) {
        Assert.assertEquals("Hello World!", activitiesImplementation.getGreeting());
    }

    @Test
    public void testThroughClientAssertWithTask() throws Exception {
        HelloWorldWorkflowClient workflow = workflowFactory.getClient();
        Promise<Void> done = workflow.helloWorld("AWS");
        // Task runs its body only after 'done' becomes ready — an alternative
        // to the @Asynchronous method used in testThroughClient.
        new Task(done) {

            @Override
            protected void doExecute() throws Throwable {
                Assert.assertEquals("Hello AWS!", activitiesImplementation.getGreeting());
            }
        };
    }

    /**
     * Instantiate workflow implementation object directly. Note that any object
     * that is part of workflow can be unit tested through direct instantiation.
     */
    @Test
    public void directTest() {
        final HelloWorldWorkflow workflow = new HelloWorldWorkflowImpl();
        new TryFinally() {

            @Override
            protected void doTry() throws Throwable {
                // helloWorld returns void so we use TryFinally
                // to wait for its completion
                workflow.helloWorld("SWF");
            }

            @Override
            protected void doFinally() throws Throwable {
                Assert.assertEquals("Hello SWF!", activitiesImplementation.getGreeting());
            }
        };
    }
}
| 3,339 |
0 | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples/helloworld/WorkflowHost.java | /**
* Copyright 2012-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.simpleworkflow.flow.examples.helloworld;
import java.io.IOException;
import java.util.concurrent.TimeUnit;
import com.amazonaws.services.simpleworkflow.AmazonSimpleWorkflow;
import com.amazonaws.services.simpleworkflow.flow.WorkflowWorker;
import com.amazonaws.services.simpleworkflow.flow.examples.common.ConfigHelper;
/**
 * Process that hosts the HelloWorld workflow logic: starts a
 * {@link WorkflowWorker} polling the decision task list and keeps running
 * until a key is pressed, draining the worker on shutdown.
 */
public class WorkflowHost {

    private static final String DECISION_TASK_LIST = "HelloWorldWorkflow";

    public static void main(String[] args) throws Exception {
        ConfigHelper config = ConfigHelper.createConfig();
        AmazonSimpleWorkflow swf = config.createSWFClient();

        final WorkflowWorker workflowWorker = new WorkflowWorker(swf, config.getDomain(), DECISION_TASK_LIST);
        workflowWorker.addWorkflowImplementationType(HelloWorldWorkflowImpl.class);
        workflowWorker.start();
        System.out.println("Workflow Host Service Started...");

        // Drain in-flight decision tasks before the JVM exits.
        Runtime.getRuntime().addShutdownHook(new Thread(new Runnable() {

            public void run() {
                try {
                    workflowWorker.shutdownAndAwaitTermination(1, TimeUnit.MINUTES);
                    System.out.println("Workflow Host Service Terminated...");
                }
                catch (InterruptedException e) {
                    e.printStackTrace();
                }
            }
        }));

        // Block until the operator presses a key, then exit; System.exit
        // triggers the shutdown hook above.
        System.out.println("Please press any key to terminate service.");
        try {
            System.in.read();
        }
        catch (IOException e) {
            e.printStackTrace();
        }
        System.exit(0);
    }
}
| 3,340 |
0 | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples/splitmerge/AverageCalculatorWorkflowImpl.java | /**
* Copyright 2012-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.simpleworkflow.flow.examples.splitmerge;
import com.amazonaws.services.simpleworkflow.flow.core.Promise;
/**
 * Workflow entry point for the split/merge sample: delegates the whole
 * computation to a {@link PartitionedAverageCalculator}.
 */
public class AverageCalculatorWorkflowImpl implements AverageCalculatorWorkflow {

    @Override
    public void average(String bucketName, String fileName, int numberOfWorkers) {
        // Build the calculator for this execution, kick off the distributed
        // average computation, and report the result once it becomes ready.
        PartitionedAverageCalculator partitionedCalculator = new PartitionedAverageCalculatorImpl(numberOfWorkers, bucketName);
        Promise<Double> computedAverage = partitionedCalculator.computeAverage(fileName);
        partitionedCalculator.reportResult(computedAverage);
    }
}
| 3,341 |
0 | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples/splitmerge/PartitionedAverageCalculatorImpl.java | /**
* Copyright 2012-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.simpleworkflow.flow.examples.splitmerge;
import java.util.ArrayList;
import java.util.List;
import com.amazonaws.services.simpleworkflow.flow.annotations.Asynchronous;
import com.amazonaws.services.simpleworkflow.flow.annotations.Wait;
import com.amazonaws.services.simpleworkflow.flow.core.Promise;
/**
 * Splits the average computation over up to {@code numberOfWorkers} chunk
 * activities (split phase) and merges their partial sums into the final
 * average (merge phase).
 */
public class PartitionedAverageCalculatorImpl implements PartitionedAverageCalculator {

    private final AverageCalculatorActivitiesClient client = new AverageCalculatorActivitiesClientImpl();

    private final int numberOfWorkers;

    private final String bucketName;

    /**
     * @param numberOfWorkers maximum number of parallel chunk activities
     * @param bucketName S3 bucket holding the input file
     */
    public PartitionedAverageCalculatorImpl(int numberOfWorkers, String bucketName) {
        this.numberOfWorkers = numberOfWorkers;
        this.bucketName = bucketName;
    }

    @Override
    public Promise<Double> computeAverage(String inputFile) {
        // First determine the number of rows, then fan out once it is known.
        Promise<Integer> dataSize = client.computeDataSizeForInputData(bucketName, inputFile);
        return computeAverageDistributed(inputFile, dataSize);
    }

    /**
     * Split phase: runs once {@code dataSize} is ready and schedules one
     * summation activity per chunk.
     */
    @Asynchronous
    private Promise<Double> computeAverageDistributed(String inputFile, Promise<Integer> dataSize) {
        int rowCount = dataSize.get();
        // Round the chunk size UP. The previous floor division
        // (rowCount / numberOfWorkers) silently dropped up to
        // numberOfWorkers - 1 trailing rows from the sum while still dividing
        // by the full row count, producing a wrong average whenever rowCount
        // was not an exact multiple of numberOfWorkers. An oversized final
        // chunk is safe: the activity issues a ranged S3 read, which is
        // clamped at end of object, so it simply reads fewer rows.
        int chunkSize = (rowCount + numberOfWorkers - 1) / numberOfWorkers;
        // Create an array list to hold the result returned by each worker
        List<Promise<Integer>> results = new ArrayList<Promise<Integer>>();
        for (int chunkNumber = 0; chunkNumber < numberOfWorkers; chunkNumber++) {
            // With a rounded-up chunk size the trailing workers may have no
            // rows left; skip chunks that would start past the end of the data.
            if (chunkNumber * chunkSize >= rowCount) {
                break;
            }
            // Splitting computation for each chunk as separate activity
            results.add(client.computeSumForChunk(bucketName, inputFile, chunkNumber, chunkSize));
        }
        // Merge phase
        return mergeSumAndComputeAverage(results, rowCount);
    }

    /**
     * Merge phase: waits for every partial sum (@Wait on the promise list),
     * then divides the grand total by the row count.
     */
    @Asynchronous
    private Promise<Double> mergeSumAndComputeAverage(@Wait List<Promise<Integer>> results, int dataSize) {
        int totalSum = 0;
        for (Promise<Integer> workerSum : results) {
            totalSum += workerSum.get();
        }
        return Promise.asPromise((double) totalSum / (double) dataSize);
    }

    /** Reports the final average once its promise becomes ready. */
    @Override
    @Asynchronous
    public void reportResult(Promise<Double> result) {
        client.reportResult(result);
    }
}
| 3,342 |
0 | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples/splitmerge/AverageCalculatorWorkflow.java | /**
* Copyright 2012-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.simpleworkflow.flow.examples.splitmerge;
import com.amazonaws.services.simpleworkflow.flow.annotations.Execute;
import com.amazonaws.services.simpleworkflow.flow.annotations.Workflow;
import com.amazonaws.services.simpleworkflow.flow.annotations.WorkflowRegistrationOptions;
/**
 * Workflow contract for the split/merge average-calculation sample.
 */
@Workflow
@WorkflowRegistrationOptions(
        defaultExecutionStartToCloseTimeoutSeconds = 300,
        defaultTaskStartToCloseTimeoutSeconds = 10)
public interface AverageCalculatorWorkflow {

    /**
     * Computes the average of the numbers stored in the given S3 object,
     * splitting the summation over {@code numberOfWorkers} parallel activities.
     *
     * @param bucketName S3 bucket containing the input file
     * @param fileName key of the input file within the bucket
     * @param numberOfWorkers number of chunk activities to fan out to
     */
    @Execute(name = "SplitMergeWorkflowExample", version = "1.0")
    void average(String bucketName, String fileName, final int numberOfWorkers);
}
| 3,343 |
0 | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples/splitmerge/SplitMergeConfigKeys.java | /**
* Copyright 2012-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.simpleworkflow.flow.examples.splitmerge;
/**
 * Configuration-file keys read by the split/merge sample's workflow starter.
 */
public final class SplitMergeConfigKeys {

    static final String S3_BUCKET_NAME = "SplitMerge.Input.BucketName";

    static final String S3_INPUT_FILENAME = "SplitMerge.Input.FileName";

    static final String NUMBER_OF_WORKERS = "SplitMerge.Input.NumberOfWorkers";

    /** Constants holder — not meant to be instantiated. */
    private SplitMergeConfigKeys() {
    }
}
| 3,344 |
0 | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples/splitmerge/PartitionedAverageCalculator.java | /**
* Copyright 2012-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.simpleworkflow.flow.examples.splitmerge;
import com.amazonaws.services.simpleworkflow.flow.core.Promise;
/**
 * Contract for the distributed average computation used by the split/merge
 * workflow: a split/compute step followed by a report step.
 */
public interface PartitionedAverageCalculator {

    /**
     * Starts computation of the average of the numbers in the given input file
     * and returns a promise for the result.
     */
    Promise<Double> computeAverage(String inputFile);

    /**
     * Reports the computed average once the given promise becomes ready.
     */
    void reportResult(Promise<Double> result);
}
| 3,345 |
0 | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples/splitmerge/AverageCalculatorActivitiesImpl.java | /**
* Copyright 2012-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.simpleworkflow.flow.examples.splitmerge;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.model.GetObjectRequest;
import com.amazonaws.services.s3.model.ObjectMetadata;
import com.amazonaws.services.s3.model.S3Object;
/**
 * Activity implementation for the split/merge sample: sizes the input object,
 * sums one chunk of it via a ranged S3 read, and reports the final average.
 */
public class AverageCalculatorActivitiesImpl implements AverageCalculatorActivities {

    // Each input row is a fixed-width record of ROW_SIZE bytes.
    private static final int ROW_SIZE = 7;

    private final AmazonS3 storage;

    public AverageCalculatorActivitiesImpl(AmazonS3 storage) {
        this.storage = storage;
    }

    /**
     * Returns the number of fixed-width rows in the S3 object, derived from
     * its content length without downloading it.
     */
    @Override
    public int computeDataSizeForInputData(String bucketName, String filename) {
        ObjectMetadata metadata = storage.getObjectMetadata(bucketName, filename);
        long size = metadata.getContentLength();
        // Divide in long arithmetic BEFORE narrowing: the previous
        // "(int) size / ROW_SIZE" cast the byte count to int first, which
        // overflows for objects larger than 2 GB.
        return (int) (size / ROW_SIZE);
    }

    /**
     * Downloads the byte range belonging to the given chunk and sums the
     * integers it contains, one per fixed-width row.
     *
     * @throws IOException if reading the S3 content stream fails
     */
    @Override
    public int computeSumForChunk(String bucketName, String filename, int chunkNumber, int chunkSize) throws IOException {
        int sum = 0;
        int from = chunkNumber * chunkSize;
        int to = from + chunkSize;
        // Use long arithmetic for the byte range so large chunk offsets cannot
        // overflow int (setRange takes longs).
        long offset = (long) chunkNumber * chunkSize * ROW_SIZE;
        long bytesToRead = (long) chunkSize * ROW_SIZE;
        // Request only this chunk's byte range from S3.
        GetObjectRequest getRequest = new GetObjectRequest(bucketName, filename);
        getRequest.setRange(offset, offset + bytesToRead - 1);
        S3Object obj = storage.getObject(getRequest);
        // Closing the outermost reader closes the wrapped stream chain down to
        // the S3 content stream, so a single close in finally suffices.
        BufferedReader reader = new BufferedReader(new InputStreamReader(obj.getObjectContent()));
        try {
            String line;
            // Compute sum for downloaded content
            while ((line = reader.readLine()) != null) {
                sum += Integer.parseInt(line);
            }
        }
        finally {
            reader.close();
        }
        System.out.printf("Sum from '%d' to '%d' is: '%d'\n", from + 1, to, sum);
        return sum;
    }

    /** Prints the final average. */
    @Override
    public void reportResult(double average) {
        System.out.printf("Average is: %6.2f.\n", average);
    }
}
| 3,346 |
0 | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples/splitmerge/ActivityHost.java | /**
* Copyright 2012-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.simpleworkflow.flow.examples.splitmerge;
import java.io.IOException;
import java.util.concurrent.TimeUnit;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.simpleworkflow.AmazonSimpleWorkflow;
import com.amazonaws.services.simpleworkflow.flow.ActivityWorker;
import com.amazonaws.services.simpleworkflow.flow.examples.common.ConfigHelper;
/**
* This is the process which hosts all Activities in this sample
*/
public class ActivityHost {

    private static final String ACTIVITIES_TASK_LIST = "AverageCalculator";

    public static void main(String[] args) throws Exception {
        ConfigHelper config = ConfigHelper.createConfig();
        AmazonSimpleWorkflow swf = config.createSWFClient();
        AmazonS3 s3 = config.createS3Client();

        final ActivityWorker activityWorker = new ActivityWorker(swf, config.getDomain(), ACTIVITIES_TASK_LIST);
        // Register the activity implementations this host serves.
        activityWorker.addActivitiesImplementation(new AverageCalculatorActivitiesImpl(s3));
        activityWorker.start();
        System.out.println("Activity Worker Started for Task List: " + activityWorker.getTaskListToPoll());

        // Drain in-flight activity tasks before the JVM exits.
        Runtime.getRuntime().addShutdownHook(new Thread(new Runnable() {

            public void run() {
                try {
                    activityWorker.shutdownAndAwaitTermination(1, TimeUnit.MINUTES);
                    System.out.println("Activity Worker Exited.");
                }
                catch (InterruptedException e) {
                    e.printStackTrace();
                }
            }
        }));

        // Block until the operator presses a key; System.exit triggers the
        // shutdown hook above.
        System.out.println("Please press any key to terminate service.");
        try {
            System.in.read();
        }
        catch (IOException e) {
            e.printStackTrace();
        }
        System.exit(0);
    }
}
| 3,347 |
0 | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples/splitmerge/AverageCalculatorActivities.java | /**
* Copyright 2012-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.simpleworkflow.flow.examples.splitmerge;
import java.io.IOException;
import com.amazonaws.services.simpleworkflow.flow.annotations.Activities;
import com.amazonaws.services.simpleworkflow.flow.annotations.Activity;
import com.amazonaws.services.simpleworkflow.flow.annotations.ActivityRegistrationOptions;
import com.amazonaws.services.simpleworkflow.flow.annotations.ExponentialRetry;
/**
 * Activity contract for the split/merge average-calculation sample.
 */
@Activities(version = "1.0")
@ActivityRegistrationOptions(defaultTaskScheduleToStartTimeoutSeconds = 30, defaultTaskStartToCloseTimeoutSeconds = 30)
public interface AverageCalculatorActivities {

    /**
     * Returns the number of rows in the input object.
     *
     * @param bucketName S3 bucket containing the input file
     * @param filename key of the input file within the bucket
     */
    @Activity(name = "computeDataSize")
    int computeDataSizeForInputData(String bucketName, String filename);

    /**
     * Sums the integers in one chunk of the input file; retried with
     * exponential backoff, e.g. to ride out transient S3 failures.
     *
     * @param chunkNumber zero-based index of the chunk to sum
     * @param chunkSize number of rows per chunk
     * @throws IOException if reading the chunk's content fails
     */
    @ExponentialRetry(initialRetryIntervalSeconds = 10, maximumAttempts = 10)
    int computeSumForChunk(String bucketName, String filename, int chunkNumber, int chunkSize) throws IOException;

    /** Reports the final computed average. */
    void reportResult(double average);
}
| 3,348 |
0 | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples/splitmerge/WorkflowExecutionStarter.java | /**
* Copyright 2012-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.simpleworkflow.flow.examples.splitmerge;
import com.amazonaws.services.simpleworkflow.AmazonSimpleWorkflow;
import com.amazonaws.services.simpleworkflow.flow.examples.common.ConfigHelper;
import com.amazonaws.services.simpleworkflow.model.WorkflowExecution;
public class WorkflowExecutionStarter {
private static AmazonSimpleWorkflow swfService;
private static String domain;
public static void main(String[] args) throws Exception {
// Load configuration
ConfigHelper configHelper = ConfigHelper.createConfig();
// Create the client for Simple Workflow Service
swfService = configHelper.createSWFClient();
domain = configHelper.getDomain();
// Start Workflow execution
String bucketName = configHelper.getValueFromConfig(SplitMergeConfigKeys.S3_BUCKET_NAME);
String fileName = configHelper.getValueFromConfig(SplitMergeConfigKeys.S3_INPUT_FILENAME);
String val = configHelper.getValueFromConfig(SplitMergeConfigKeys.NUMBER_OF_WORKERS);
int numberOfWorkers = Integer.parseInt(val);
AverageCalculatorWorkflowClientExternalFactory clientFactory = new AverageCalculatorWorkflowClientExternalFactoryImpl(swfService, domain);
AverageCalculatorWorkflowClientExternal workflow = clientFactory.getClient();
workflow.average(bucketName, fileName, numberOfWorkers);
// WorkflowExecution is available after workflow creation
WorkflowExecution workflowExecution = workflow.getWorkflowExecution();
System.out.println("Started periodic workflow with workflowId=\"" + workflowExecution.getWorkflowId()
+ "\" and runId=\"" + workflowExecution.getRunId() + "\"");
System.exit(0);
}
} | 3,349 |
0 | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples/splitmerge/WorkflowHost.java | /**
* Copyright 2012-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.simpleworkflow.flow.examples.splitmerge;
import java.io.IOException;
import java.util.concurrent.TimeUnit;
import com.amazonaws.services.simpleworkflow.AmazonSimpleWorkflow;
import com.amazonaws.services.simpleworkflow.flow.WorkflowWorker;
import com.amazonaws.services.simpleworkflow.flow.examples.common.ConfigHelper;
public class WorkflowHost {
private static final String DECISION_TASK_LIST = "AverageCalculatorWorkflow";
public static void main(String[] args) throws Exception {
ConfigHelper configHelper = ConfigHelper.createConfig();
AmazonSimpleWorkflow swfService = configHelper.createSWFClient();
String domain = configHelper.getDomain();
final WorkflowWorker worker = new WorkflowWorker(swfService, domain, DECISION_TASK_LIST);
worker.addWorkflowImplementationType(AverageCalculatorWorkflowImpl.class);
worker.start();
System.out.println("Workflow Host Service Started...");
Runtime.getRuntime().addShutdownHook(new Thread() {
public void run() {
try {
worker.shutdownAndAwaitTermination(1, TimeUnit.MINUTES);
System.out.println("Workflow Host Service Terminated...");
}
catch (InterruptedException e) {
e.printStackTrace();
}
}
});
System.out.println("Please press any key to terminate service.");
try {
System.in.read();
}
catch (IOException e) {
e.printStackTrace();
}
System.exit(0);
}
}
| 3,350 |
0 | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples/componentversion/ComponentVersionWorkflowMaxAllowedHeavyImpl.java | /**
* Copyright 2012-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.simpleworkflow.flow.examples.componentversion;
import com.amazonaws.services.simpleworkflow.flow.DecisionContextProviderImpl;
import com.amazonaws.services.simpleworkflow.flow.WorkflowContext;
import com.amazonaws.services.simpleworkflow.flow.annotations.WorkflowComponentImplementationVersion;
import com.amazonaws.services.simpleworkflow.flow.annotations.WorkflowComponentImplementationVersions;
@WorkflowComponentImplementationVersions(value = {
        @WorkflowComponentImplementationVersion(componentName = "HEAVY_LOAD_COMPONENT", minimumSupported = 0, maximumSupported = 1) })
public class ComponentVersionWorkflowMaxAllowedHeavyImpl extends ComponentVersionWorkflowImplBase {

    private WorkflowContext context = new DecisionContextProviderImpl().getDecisionContext().getWorkflowContext();

    @Override
    public void runComponentVersionWorkflow(String input) {
        // NOTE(review): presumably isImplementationVersion reports the component
        // version recorded for this execution, so executions started at version 1
        // take the heavy path and older ones replay the light path — confirm
        // against the Flow framework docs.
        boolean runHeavyPath = context.isImplementationVersion("HEAVY_LOAD_COMPONENT", 1);
        if (!runHeavyPath) {
            doLightLoadWork();
        }
        else {
            doHeavyLoadWork();
        }
    }
}
| 3,351 |
0 | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples/componentversion/ComponentVersionWorkerHostBase.java | /**
* Copyright 2012-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.simpleworkflow.flow.examples.componentversion;
import java.io.IOException;
import java.util.concurrent.TimeUnit;
import com.amazonaws.services.simpleworkflow.flow.ActivityWorker;
import com.amazonaws.services.simpleworkflow.flow.WorkflowWorker;
import com.amazonaws.services.simpleworkflow.flow.examples.common.ConfigHelper;
/**
 * Shared plumbing for the component-version sample hosts: starts an activity
 * worker and a workflow worker for a chosen workflow implementation, and wires
 * a shutdown hook that drains both workers before the JVM exits.
 */
public abstract class ComponentVersionWorkerHostBase {

    private static final String ACTIVITIES_TASK_LIST = "ComponentVersionActivityTaskList";

    private static final String DECISION_TASK_LIST = "ComponentVersionDecisionTaskList";

    // Loaded once per host; supplies the SWF client and domain name.
    private ConfigHelper configHelper;

    public ComponentVersionWorkerHostBase() throws IllegalArgumentException, IOException {
        configHelper = ConfigHelper.createConfig();
    }

    /**
     * Starts an ActivityWorker polling ACTIVITIES_TASK_LIST with the sample's
     * activity implementation registered, and returns it already running.
     */
    protected ActivityWorker startActivityWorker()
            throws InstantiationException, IllegalAccessException, SecurityException, NoSuchMethodException {
        final ActivityWorker activityWorker = new ActivityWorker(configHelper.createSWFClient(),
                configHelper.getDomain(), ACTIVITIES_TASK_LIST);
        ComponentVersionActivities componentVersionActivitiesImpl = new ComponentVersionActivitiesImpl();
        activityWorker.addActivitiesImplementation(componentVersionActivitiesImpl);
        activityWorker.start();
        System.out.println("Started activity worker");
        return activityWorker;
    }

    /**
     * Starts a WorkflowWorker polling DECISION_TASK_LIST for the given workflow
     * implementation class, and returns it already running. Each concrete host
     * passes a different implementation to demonstrate component versioning.
     */
    protected WorkflowWorker startWorkflowWorker(
            Class<? extends ComponentVersionWorkflow> componentVersionWorkflowImplClass)
            throws InstantiationException, IllegalAccessException {
        final WorkflowWorker workflowWorker = new WorkflowWorker(configHelper.createSWFClient(),
                configHelper.getDomain(), DECISION_TASK_LIST);
        workflowWorker.addWorkflowImplementationType(componentVersionWorkflowImplClass);
        workflowWorker.start();
        System.out.println("Started workflow worker: " + componentVersionWorkflowImplClass.getName());
        return workflowWorker;
    }

    /**
     * Registers a shutdown hook that drains the workflow worker first, then the
     * activity worker, then blocks the calling thread on stdin and exits the
     * JVM on the first key press (which is what triggers the hook).
     */
    protected void addShutDownHook(ActivityWorker activityWorker, WorkflowWorker workflowWorker) {
        Runtime.getRuntime().addShutdownHook(new Thread() {
            @Override
            public void run() {
                try {
                    workflowWorker.shutdownAndAwaitTermination(1, TimeUnit.MINUTES);
                    System.out.println("Workflow Host Service Terminated...");
                    activityWorker.shutdownAndAwaitTermination(1, TimeUnit.MINUTES);
                    System.out.println("Activity Worker Exited.");
                } catch (InterruptedException e) {
                    e.printStackTrace();
                }
            }
        });
        System.out.println("Please press any key to terminate service.");
        try {
            System.in.read();
        } catch (IOException e) {
            e.printStackTrace();
        }
        System.exit(0);
    }
}
0 | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples/componentversion/ComponentVersionWorkflowImplBase.java | /**
* Copyright 2012-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.simpleworkflow.flow.examples.componentversion;
import com.amazonaws.services.simpleworkflow.flow.core.Promise;
public abstract class ComponentVersionWorkflowImplBase implements ComponentVersionWorkflow {
protected ComponentVersionActivitiesClient activityClient = new ComponentVersionActivitiesClientImpl();
protected void doLightLoadWork() {
Promise<Void> waitFor = activityClient.doSomeWork();
activityClient.doMoreWork(waitFor);
}
protected void doHeavyLoadWork() {
Promise<Void> waitFor = activityClient.doMoreWork();
activityClient.doALotOfWork(waitFor);
}
}
| 3,353 |
0 | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples/componentversion/ComponentVersionWorkflowLightImpl.java | /**
* Copyright 2012-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.simpleworkflow.flow.examples.componentversion;
/**
 * Workflow implementation that always runs the light-load work profile
 * defined in {@link ComponentVersionWorkflowImplBase}.
 */
public class ComponentVersionWorkflowLightImpl extends ComponentVersionWorkflowImplBase {
    @Override
    public void runComponentVersionWorkflow(String input) {
        // The input argument is not used by this variant.
        doLightLoadWork();
    }
}
| 3,354 |
0 | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples/componentversion/ComponentVersionWorkflow.java | /**
* Copyright 2012-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.simpleworkflow.flow.examples.componentversion;
import com.amazonaws.services.simpleworkflow.flow.annotations.Execute;
import com.amazonaws.services.simpleworkflow.flow.annotations.Workflow;
import com.amazonaws.services.simpleworkflow.flow.annotations.WorkflowRegistrationOptions;
/**
 * Workflow contract for the component-version sample. Registered with a
 * 600-second default execution start-to-close timeout.
 */
@Workflow
@WorkflowRegistrationOptions(defaultExecutionStartToCloseTimeoutSeconds = 600)
public interface ComponentVersionWorkflow {
    // Workflow entry point; registered as version 1.0 of the workflow type.
    @Execute(version = "1.0")
    void runComponentVersionWorkflow(String input);
}
| 3,355 |
0 | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples/componentversion/ComponentVersionWorkflowHeavyImpl.java | /**
* Copyright 2012-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.simpleworkflow.flow.examples.componentversion;
/**
 * Workflow implementation that always runs the heavy-load work profile
 * defined in {@link ComponentVersionWorkflowImplBase}.
 */
public class ComponentVersionWorkflowHeavyImpl extends ComponentVersionWorkflowImplBase {
    @Override
    public void runComponentVersionWorkflow(String input) {
        // The input argument is not used by this variant.
        doHeavyLoadWork();
    }
}
| 3,356 |
0 | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples/componentversion/ComponentVersionActivities.java | /**
* Copyright 2012-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.simpleworkflow.flow.examples.componentversion;
import com.amazonaws.services.simpleworkflow.flow.annotations.Activities;
import com.amazonaws.services.simpleworkflow.flow.annotations.Activity;
import com.amazonaws.services.simpleworkflow.flow.annotations.ActivityRegistrationOptions;
/**
 * Activity contract for the component-version sample. Each activity carries
 * its own registration options: 30s schedule-to-start and 240s
 * start-to-close timeouts.
 */
@Activities
public interface ComponentVersionActivities {
    // Light-profile first step.
    @Activity(name = "doSomeWork", version = "1.0")
    @ActivityRegistrationOptions(defaultTaskScheduleToStartTimeoutSeconds = 30,
            defaultTaskStartToCloseTimeoutSeconds = 240)
    void doSomeWork();
    // Shared step: second in the light profile, first in the heavy profile.
    @Activity(name = "doMoreWork", version = "1.0")
    @ActivityRegistrationOptions(defaultTaskScheduleToStartTimeoutSeconds = 30,
            defaultTaskStartToCloseTimeoutSeconds = 240)
    void doMoreWork();
    // Heavy-profile final step.
    @Activity(name = "doALotOfWork", version = "1.0")
    @ActivityRegistrationOptions(defaultTaskScheduleToStartTimeoutSeconds = 30,
            defaultTaskStartToCloseTimeoutSeconds = 240)
    void doALotOfWork();
}
| 3,357 |
0 | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples/componentversion/ComponentVersionWorkflowMaxAllowedLightImpl.java | /**
* Copyright 2012-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.simpleworkflow.flow.examples.componentversion;
import com.amazonaws.services.simpleworkflow.flow.DecisionContextProviderImpl;
import com.amazonaws.services.simpleworkflow.flow.WorkflowContext;
import com.amazonaws.services.simpleworkflow.flow.annotations.WorkflowComponentImplementationVersion;
import com.amazonaws.services.simpleworkflow.flow.annotations.WorkflowComponentImplementationVersions;
/**
 * Workflow implementation that declares support for versions 0-1 of the
 * HEAVY_LOAD_COMPONENT but caps new executions at version 0 via
 * maximumAllowed = 0. Whether the heavy or light path runs for a given
 * execution is decided by the recorded implementation version.
 */
@WorkflowComponentImplementationVersions(value = {
        @WorkflowComponentImplementationVersion(componentName = "HEAVY_LOAD_COMPONENT", minimumSupported = 0, maximumSupported = 1, maximumAllowed = 0) })
public class ComponentVersionWorkflowMaxAllowedLightImpl extends ComponentVersionWorkflowImplBase {

    // Gives access to the component implementation version of this execution.
    private WorkflowContext workflowContext = new DecisionContextProviderImpl().getDecisionContext().getWorkflowContext();

    @Override
    public void runComponentVersionWorkflow(String input) {
        boolean heavyVersionActive = workflowContext.isImplementationVersion("HEAVY_LOAD_COMPONENT", 1);
        if (!heavyVersionActive) {
            doLightLoadWork();
        } else {
            doHeavyLoadWork();
        }
    }
}
| 3,358 |
0 | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples/componentversion/ComponentVersionActivitiesImpl.java | /**
* Copyright 2012-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.simpleworkflow.flow.examples.componentversion;
/**
 * Activity implementations for the component-version sample. Each activity
 * prints a start message, sleeps for a fixed number of seconds to simulate
 * work, then prints a completion message.
 */
public class ComponentVersionActivitiesImpl implements ComponentVersionActivities {

    @Override
    public void doSomeWork() {
        System.out.println("Start doing some work.");
        doWork(5);
        System.out.println("Some work finished.");
    }

    @Override
    public void doMoreWork() {
        System.out.println("Start doing more work.");
        doWork(10);
        System.out.println("More work finished.");
    }

    @Override
    public void doALotOfWork() {
        System.out.println("Start doing a lot of work.");
        doWork(15);
        System.out.println("A lot of work finished.");
    }

    /**
     * Simulates work by sleeping for {@code x} seconds.
     *
     * @param x number of seconds to sleep
     */
    private void doWork(int x) {
        try {
            Thread.sleep(x * 1000L);
        } catch (InterruptedException e) {
            e.printStackTrace();
            // Fix: restore the interrupt status so the hosting worker can
            // observe the interruption (e.g. during shutdown) instead of it
            // being silently swallowed by this catch block.
            Thread.currentThread().interrupt();
        }
    }
}
| 3,359 |
0 | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples/componentversion/ComponentVersionMaxAllowedLightWorkerHost.java | /**
* Copyright 2012-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.simpleworkflow.flow.examples.componentversion;
import java.io.IOException;
import com.amazonaws.services.simpleworkflow.flow.ActivityWorker;
import com.amazonaws.services.simpleworkflow.flow.WorkflowWorker;
/**
 * Process entry point hosting the activity worker and a workflow worker bound
 * to {@link ComponentVersionWorkflowMaxAllowedLightImpl}.
 */
public class ComponentVersionMaxAllowedLightWorkerHost extends ComponentVersionWorkerHostBase {
    public ComponentVersionMaxAllowedLightWorkerHost() throws IllegalArgumentException, IOException {
        super();
    }

    public static void main(String[] args) throws Exception {
        ComponentVersionMaxAllowedLightWorkerHost host = new ComponentVersionMaxAllowedLightWorkerHost();
        // Start the activity worker and a workflow worker bound to the
        // max-allowed-light workflow implementation.
        ActivityWorker activityWorker = host.startActivityWorker();
        WorkflowWorker workflowWorker = host.startWorkflowWorker(ComponentVersionWorkflowMaxAllowedLightImpl.class);
        // Ensure both workers are shut down cleanly when the JVM exits.
        host.addShutDownHook(activityWorker, workflowWorker);
    }
}
| 3,360 |
0 | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples/componentversion/ComponentVersionMaxAllowedHeavyWorkerHost.java | /**
* Copyright 2012-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.simpleworkflow.flow.examples.componentversion;
import java.io.IOException;
import com.amazonaws.services.simpleworkflow.flow.ActivityWorker;
import com.amazonaws.services.simpleworkflow.flow.WorkflowWorker;
/**
 * Process entry point hosting the activity worker and a workflow worker bound
 * to {@code ComponentVersionWorkflowMaxAllowedHeavyImpl}.
 */
public class ComponentVersionMaxAllowedHeavyWorkerHost extends ComponentVersionWorkerHostBase {
    public ComponentVersionMaxAllowedHeavyWorkerHost() throws IllegalArgumentException, IOException {
        super();
    }

    public static void main(String[] args) throws Exception {
        // Fix: this main previously instantiated
        // ComponentVersionMaxAllowedLightWorkerHost (copy-paste error); it
        // should host its own class.
        ComponentVersionMaxAllowedHeavyWorkerHost host = new ComponentVersionMaxAllowedHeavyWorkerHost();
        ActivityWorker activityWorker = host.startActivityWorker();
        WorkflowWorker workflowWorker = host.startWorkflowWorker(ComponentVersionWorkflowMaxAllowedHeavyImpl.class);
        // Ensure both workers are shut down cleanly when the JVM exits.
        host.addShutDownHook(activityWorker, workflowWorker);
    }
}
| 3,361 |
0 | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples/componentversion/ComponentVersionWorkflowStarter.java | /**
* Copyright 2012-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.simpleworkflow.flow.examples.componentversion;
import com.amazonaws.services.simpleworkflow.AmazonSimpleWorkflow;
import com.amazonaws.services.simpleworkflow.flow.examples.common.ConfigHelper;
import com.amazonaws.services.simpleworkflow.model.WorkflowExecution;
/**
 * Command-line starter that launches a single execution of the
 * component-version sample workflow and prints its workflow id and run id.
 */
public class ComponentVersionWorkflowStarter {
    public static void main(String[] args) throws Exception {
        // Load sample configuration and create the SWF client.
        ConfigHelper configHelper = ConfigHelper.createConfig();
        AmazonSimpleWorkflow swfService = configHelper.createSWFClient();
        String domain = configHelper.getDomain();
        ComponentVersionWorkflowClientExternalFactory clientFactory = new ComponentVersionWorkflowClientExternalFactoryImpl(
                swfService, domain);
        ComponentVersionWorkflowClientExternal workflow = clientFactory.getClient();
        // Start workflow execution
        System.out.println("Start running component version workflow!");
        workflow.runComponentVersionWorkflow("Get this work done!");
        // WorkflowExecution is available after workflow creation
        WorkflowExecution workflowExecution = workflow.getWorkflowExecution();
        // Fix: the message previously said "helloWorld workflow" — a leftover
        // from the HelloWorld sample; this starter runs the component-version
        // workflow.
        System.out.println("Started component version workflow with workflowId=\"" + workflowExecution.getWorkflowId()
                + "\" and runId=\"" + workflowExecution.getRunId() + "\"");
    }
}
| 3,362 |
0 | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples/componentversion/ComponentVersionLightWorkerHost.java | /**
* Copyright 2012-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.simpleworkflow.flow.examples.componentversion;
import java.io.IOException;
import com.amazonaws.services.simpleworkflow.flow.ActivityWorker;
import com.amazonaws.services.simpleworkflow.flow.WorkflowWorker;
/**
 * Process entry point hosting the activity worker and a workflow worker bound
 * to {@link ComponentVersionWorkflowLightImpl}.
 */
public class ComponentVersionLightWorkerHost extends ComponentVersionWorkerHostBase {
    public ComponentVersionLightWorkerHost() throws IllegalArgumentException, IOException {
        super();
    }

    public static void main(String[] args) throws Exception {
        ComponentVersionLightWorkerHost host = new ComponentVersionLightWorkerHost();
        // Start the activity worker and a workflow worker bound to the
        // light-load workflow implementation.
        ActivityWorker activityWorker = host.startActivityWorker();
        WorkflowWorker workflowWorker = host.startWorkflowWorker(ComponentVersionWorkflowLightImpl.class);
        // Ensure both workers are shut down cleanly when the JVM exits.
        host.addShutDownHook(activityWorker, workflowWorker);
    }
}
| 3,363 |
0 | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples/componentversion/ComponentVersionHeavyWorkerHost.java | /**
* Copyright 2012-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.simpleworkflow.flow.examples.componentversion;
import java.io.IOException;
import com.amazonaws.services.simpleworkflow.flow.ActivityWorker;
import com.amazonaws.services.simpleworkflow.flow.WorkflowWorker;
/**
 * Process entry point hosting the activity worker and a workflow worker bound
 * to {@link ComponentVersionWorkflowHeavyImpl}.
 */
public class ComponentVersionHeavyWorkerHost extends ComponentVersionWorkerHostBase {
    public ComponentVersionHeavyWorkerHost() throws IllegalArgumentException, IOException {
        super();
    }

    public static void main(String[] args) throws Exception {
        // Fix: this main previously instantiated ComponentVersionLightWorkerHost
        // (copy-paste error); it should host its own class.
        ComponentVersionHeavyWorkerHost host = new ComponentVersionHeavyWorkerHost();
        ActivityWorker activityWorker = host.startActivityWorker();
        WorkflowWorker workflowWorker = host.startWorkflowWorker(ComponentVersionWorkflowHeavyImpl.class);
        // Ensure both workers are shut down cleanly when the JVM exits.
        host.addShutDownHook(activityWorker, workflowWorker);
    }
}
| 3,364 |
0 | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples/booking/BookingConfigKeys.java | /**
* Copyright 2012-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.simpleworkflow.flow.examples.booking;
/**
 * String constants naming the configuration keys used by the Booking sample.
 */
public final class BookingConfigKeys {

    /** Task list polled by the booking activity worker. */
    public static final String ACTIVITY_WORKER_TASKLIST = "Booking.Activity.Worker.TaskList";

    /** Task list polled by the booking workflow worker. */
    public static final String WORKFLOW_WORKER_TASKLIST = "Booking.Workflow.Worker.TaskList";

    /** Configuration key for the workflow's request-id input. */
    public static final String WORKFLOW_INPUT_REQUESTID_KEY = "Booking.Input.RequestId";

    /** Configuration key for the workflow's customer-id input. */
    public static final String WORKFLOW_INPUT_CUSTOMERID_KEY = "Booking.Input.CustomerId";

    // Constants holder; not meant to be instantiated.
    private BookingConfigKeys() {
    }
}
| 3,365 |
0 | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples/booking/BookingActivities.java | /**
* Copyright 2012-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.simpleworkflow.flow.examples.booking;
import com.amazonaws.services.simpleworkflow.flow.annotations.Activities;
import com.amazonaws.services.simpleworkflow.flow.annotations.ActivityRegistrationOptions;
import com.amazonaws.services.simpleworkflow.flow.common.FlowConstants;
/**
 * Activity contract for the Booking sample. The type-level registration
 * options apply to all three activities: no heartbeat timeout, 300s
 * schedule-to-close, 60s schedule-to-start, 60s start-to-close.
 */
@Activities(version = "1.0")
@ActivityRegistrationOptions(
        defaultTaskHeartbeatTimeoutSeconds = FlowConstants.NONE,
        defaultTaskScheduleToCloseTimeoutSeconds = 300,
        defaultTaskScheduleToStartTimeoutSeconds = 60,
        defaultTaskStartToCloseTimeoutSeconds = 60)
public interface BookingActivities {
    // Car reservation step for the given booking request id.
    void reserveCar(int requestId);
    // Airline reservation step for the given booking request id.
    void reserveAirline(int requestId);
    // Confirmation step, keyed by customer id.
    void sendConfirmationActivity(int customerId);
}
| 3,366 |
0 | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples/booking/ActivityHost.java | /**
* Copyright 2012-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.simpleworkflow.flow.examples.booking;
import java.io.IOException;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.concurrent.TimeUnit;
import com.amazonaws.services.simpleworkflow.AmazonSimpleWorkflow;
import com.amazonaws.services.simpleworkflow.flow.ActivityWorker;
import com.amazonaws.services.simpleworkflow.flow.examples.common.ConfigHelper;
/**
 * This is the process which hosts all Activities in this sample.
 *
 * <p>Lifecycle: load configuration, start a single {@link ActivityWorker}
 * polling the configured task list, register a JVM shutdown hook that stops
 * the worker, then block on stdin until a key is pressed.
 */
public class ActivityHost {
    // Shared state populated by loadConfig() before the worker is started.
    private static AmazonSimpleWorkflow swfService;
    private static String domain;
    private static long domainRetentionPeriodInDays;
    private static ActivityWorker worker;
    // Lazily created singleton instance; guarded by the synchronized factory.
    private static ActivityHost activityWorker;

    // ActivityWorker Factory method
    public synchronized static ActivityHost getActivityHost() {
        if (activityWorker == null) {
            activityWorker = new ActivityHost();
        }
        return activityWorker;
    }

    public static void main(String[] args) throws Exception {
        // load configuration
        ConfigHelper configHelper = loadConfig();
        // Start Activity Worker
        getActivityHost().startWorker(configHelper);
        // Add a Shutdown hook to close ActivityWorker
        addShutDownHook();
        // Block until the user presses a key, then exit; System.exit triggers
        // the shutdown hook registered above.
        System.out.println("Please press any key to terminate service.");
        try {
            System.in.read();
        }
        catch (IOException e) {
            e.printStackTrace();
        }
        System.exit(0);
    }

    /**
     * Creates the activity implementations and starts the worker polling the
     * task list named by {@link BookingConfigKeys#ACTIVITY_WORKER_TASKLIST}.
     */
    private void startWorker(ConfigHelper configHelper) throws Exception {
        // Create activity implementations
        BookingActivities bookingActivitiesImpl = new BookingActivitiesImpl();
        // Start worker to poll the common task list
        String taskList = configHelper.getValueFromConfig(BookingConfigKeys.ACTIVITY_WORKER_TASKLIST);
        worker = new ActivityWorker(swfService, domain, taskList);
        worker.setDomainRetentionPeriodInDays(domainRetentionPeriodInDays);
        // Registers the SWF domain on startup if it does not already exist.
        worker.setRegisterDomain(true);
        worker.addActivitiesImplementation(bookingActivitiesImpl);
        worker.start();
        System.out.println("Worker Started for Activity Task List: " + taskList);
    }

    /**
     * Stops the worker, waiting up to 10 seconds for in-flight tasks.
     */
    private void stopWorker() throws InterruptedException {
        System.out.println("Stopping Worker...");
        worker.shutdownAndAwaitTermination(10, TimeUnit.SECONDS);
        System.out.println("Worker Stopped...");
    }

    /**
     * Loads the sample configuration and initializes the SWF client, domain
     * name, and retention period shared by the worker.
     */
    static ConfigHelper loadConfig() throws IllegalArgumentException, IOException{
        ConfigHelper configHelper = ConfigHelper.createConfig();
        swfService = configHelper.createSWFClient();
        domain = configHelper.getDomain();
        domainRetentionPeriodInDays = configHelper.getDomainRetentionPeriodInDays();
        return configHelper;
    }

    /**
     * Registers a JVM shutdown hook that stops the activity worker.
     */
    static void addShutDownHook(){
        Runtime.getRuntime().addShutdownHook(new Thread(new Runnable() {
            public void run() {
                try {
                    getActivityHost().stopWorker();
                }
                catch (InterruptedException e) {
                    e.printStackTrace();
                }
            }
        }));
    }

    /**
     * Returns the local host name.
     * NOTE(review): not referenced within this class — presumably kept for
     * parity with other sample hosts; confirm before removing.
     */
    static String getHostName() {
        try {
            InetAddress addr = InetAddress.getLocalHost();
            return addr.getHostName();
        }
        catch (UnknownHostException e) {
            throw new Error(e);
        }
    }
}
| 3,367 |
0 | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples/booking/BookingWorklfowTest.java | /**
* Copyright 2012-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.simpleworkflow.flow.examples.booking;
import java.util.ArrayList;
import java.util.List;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import com.amazonaws.services.simpleworkflow.flow.core.Promise;
import com.amazonaws.services.simpleworkflow.flow.junit.AsyncAssert;
import com.amazonaws.services.simpleworkflow.flow.junit.FlowBlockJUnit4ClassRunner;
import com.amazonaws.services.simpleworkflow.flow.junit.WorkflowTest;
// Runs each @Test method inside the Flow framework's deterministic test
// harness so asynchronous workflow code executes to completion in-process.
@RunWith(FlowBlockJUnit4ClassRunner.class)
public class BookingWorklfowTest {
    // Provides an in-memory test environment for workflow and activity code.
    @Rule
    public WorkflowTest workflowTest = new WorkflowTest();

    // Records the order in which the stubbed activities were invoked.
    List<String> trace;

    private BookingWorkflowClientFactory workflowFactory = new BookingWorkflowClientFactoryImpl();

    @Before
    public void setUp() throws Exception {
        trace = new ArrayList<String>();
        // Register activity implementation to be used during test run
        // In real life some mocking framework to be used
        BookingActivities activities = new BookingActivities() {
            @Override
            public void sendConfirmationActivity(int customerId) {
                trace.add("sendConfirmation-" + customerId);
            }
            @Override
            public void reserveCar(int requestId) {
                trace.add("reserveCar-" + requestId);
            }
            @Override
            public void reserveAirline(int requestId) {
                trace.add("reserveAirline-" + requestId);
            }
        };
        workflowTest.addActivitiesImplementation(activities);
        workflowTest.addWorkflowImplementationType(BookingWorkflowImpl.class);
    }

    @After
    public void tearDown() throws Exception {
        trace = null;
    }

    // Both reservations requested: car, then air, then confirmation.
    @Test
    public void testReserveBoth() {
        BookingWorkflowClient workflow = workflowFactory.getClient();
        Promise<Void> booked = workflow.makeBooking(123, 345, true, true);
        List<String> expected = new ArrayList<String>();
        expected.add("reserveCar-123");
        expected.add("reserveAirline-123");
        expected.add("sendConfirmation-345");
        // Assertion is deferred until the workflow completes (booked ready).
        AsyncAssert.assertEqualsWaitFor(expected, trace, booked);
    }

    // Air only: no car reservation recorded in the trace.
    @Test
    public void testReserveAir() {
        BookingWorkflowClient workflow = workflowFactory.getClient();
        Promise<Void> booked = workflow.makeBooking(123, 345, true, false);
        List<String> expected = new ArrayList<String>();
        expected.add("reserveAirline-123");
        expected.add("sendConfirmation-345");
        AsyncAssert.assertEqualsWaitFor(expected, trace, booked);
    }

    // Car only: no airline reservation recorded in the trace.
    @Test
    public void testReserveCar() {
        BookingWorkflowClient workflow = workflowFactory.getClient();
        Promise<Void> booked = workflow.makeBooking(123, 345, false, true);
        List<String> expected = new ArrayList<String>();
        expected.add("reserveCar-123");
        expected.add("sendConfirmation-345");
        AsyncAssert.assertEqualsWaitFor(expected, trace, booked);
    }

    // Neither reservation requested: only the confirmation runs.
    @Test
    public void testReserveNone() {
        BookingWorkflowClient workflow = workflowFactory.getClient();
        Promise<Void> booked = workflow.makeBooking(123, 345, false, false);
        List<String> expected = new ArrayList<String>();
        expected.add("sendConfirmation-345");
        AsyncAssert.assertEqualsWaitFor(expected, trace, booked);
    }
}
| 3,368 |
0 | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples/booking/BookingWorkflowImpl.java | /**
* Copyright 2012-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.simpleworkflow.flow.examples.booking;
import com.amazonaws.services.simpleworkflow.flow.core.Promise;
/**
 * Booking workflow: kicks off the requested reservations in parallel and
 * sends the confirmation once every requested reservation has completed.
 */
public class BookingWorkflowImpl implements BookingWorkflow {

    private final BookingActivitiesClient client = new BookingActivitiesClientImpl();

    @Override
    public void makeBooking(int requestID, int customerID, boolean reserveAir, boolean reserveCar) {
        // A null Promise argument is treated as immediately ready, so a
        // skipped reservation does not delay the confirmation activity.
        Promise<Void> carReservation = reserveCar ? client.reserveCar(requestID) : null;
        Promise<Void> airReservation = reserveAir ? client.reserveAirline(requestID) : null;
        client.sendConfirmationActivity(customerID, carReservation, airReservation);
    }
}
| 3,369 |
0 | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples/booking/BookingWorkflow.java | /**
* Copyright 2012-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.simpleworkflow.flow.examples.booking;
import com.amazonaws.services.simpleworkflow.flow.annotations.Execute;
import com.amazonaws.services.simpleworkflow.flow.annotations.Workflow;
import com.amazonaws.services.simpleworkflow.flow.annotations.WorkflowRegistrationOptions;
/**
 * Workflow contract for the Booking sample.
 */
@Workflow
@WorkflowRegistrationOptions(defaultExecutionStartToCloseTimeoutSeconds = 600, defaultTaskStartToCloseTimeoutSeconds = 10)
public interface BookingWorkflow {
    // Entry point; registered as workflow type "MakeBooking" version 1.0.
    @Execute(name = "MakeBooking", version = "1.0")
    void makeBooking(int requestID, int customerID, boolean reserveAir, boolean reserveCar);
}
| 3,370 |
0 | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples/booking/WorkflowExecutionStarter.java | /**
* Copyright 2012-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.simpleworkflow.flow.examples.booking;
import com.amazonaws.services.simpleworkflow.AmazonSimpleWorkflow;
import com.amazonaws.services.simpleworkflow.flow.examples.common.ConfigHelper;
/**
 * Command-line starter that launches a single MakeBooking workflow execution
 * using the request and customer ids from the sample configuration.
 */
public class WorkflowExecutionStarter {

    public static void main(String[] args) throws Exception {
        // Load configuration and build the SWF client for the sample domain.
        ConfigHelper configHelper = ConfigHelper.createConfig();
        AmazonSimpleWorkflow swfService = configHelper.createSWFClient();
        String domain = configHelper.getDomain();

        // Workflow inputs come from the sample's configuration file.
        int requestId = Integer.parseInt(configHelper.getValueFromConfig(BookingConfigKeys.WORKFLOW_INPUT_REQUESTID_KEY));
        int customerId = Integer.parseInt(configHelper.getValueFromConfig(BookingConfigKeys.WORKFLOW_INPUT_CUSTOMERID_KEY));

        // Start the workflow execution, requesting both air and car.
        BookingWorkflowClientExternalFactory clientFactory = new BookingWorkflowClientExternalFactoryImpl(swfService, domain);
        BookingWorkflowClientExternal workflow = clientFactory.getClient();
        workflow.makeBooking(requestId, customerId, true, true);

        System.exit(0);
    }
}
| 3,371 |
0 | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples/booking/WorkflowHost.java | /**
* Copyright 2012-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.simpleworkflow.flow.examples.booking;
import java.io.IOException;
import java.util.concurrent.TimeUnit;
import com.amazonaws.services.simpleworkflow.AmazonSimpleWorkflow;
import com.amazonaws.services.simpleworkflow.flow.WorkflowWorker;
import com.amazonaws.services.simpleworkflow.flow.examples.common.ConfigHelper;
/**
 * Hosts the booking workflow decider. Creates a WorkflowWorker that polls the
 * configured decision task list, registers the workflow type, and runs until a
 * key is pressed or the JVM shuts down.
 */
public class WorkflowHost {

    private static AmazonSimpleWorkflow swfService;

    private static String domain;

    private static long domainRetentionPeriodInDays;

    private static WorkflowWorker worker;

    private static WorkflowHost host;

    /**
     * Factory method for the singleton Workflow Host.
     *
     * @return the lazily created host instance
     */
    public synchronized static WorkflowHost getWorkflowHost() {
        if (host == null) {
            host = new WorkflowHost();
        }
        return host;
    }

    public static void main(String[] args) throws Exception {
        ConfigHelper configHelper = loadConfiguration();
        getWorkflowHost().startWorkflowWorker(configHelper);
        // Add a Shutdown hook to close WorkflowWorker
        addShutDownHook();
        System.out.println("Please press any key to terminate service.");
        try {
            System.in.read();
        }
        catch (IOException e) {
            e.printStackTrace();
        }
        System.exit(0);
    }

    /**
     * Creates and starts the workflow worker on the task list named in the
     * configuration. Also registers the domain if it does not exist yet.
     */
    private void startWorkflowWorker(ConfigHelper configHelper) throws Exception {
        System.out.println("Starting Workflow Host Service...");
        String taskList = configHelper.getValueFromConfig(BookingConfigKeys.WORKFLOW_WORKER_TASKLIST);
        worker = new WorkflowWorker(swfService, domain, taskList);
        worker.setDomainRetentionPeriodInDays(domainRetentionPeriodInDays);
        worker.setRegisterDomain(true);
        worker.addWorkflowImplementationType(BookingWorkflowImpl.class);
        // Start the worker threads
        worker.start();
        System.out.println("Workflow Host Service Started...");
    }

    /**
     * Stops the worker and releases the SWF client.
     */
    private void stopHost() throws InterruptedException {
        System.out.println("Stopping Workflow Host Service...");
        worker.shutdownNow();
        // Fix: await worker termination BEFORE shutting down the SWF client —
        // the original order closed the client while worker threads could
        // still be using it for in-flight calls.
        worker.awaitTermination(Long.MAX_VALUE, TimeUnit.DAYS);
        swfService.shutdown();
        System.out.println("Workflow Host Service Stopped...");
    }

    /**
     * Loads the sample configuration and initializes the static SWF client,
     * domain name and retention period used by the worker.
     */
    static ConfigHelper loadConfiguration() throws IllegalArgumentException, IOException {
        ConfigHelper configHelper = ConfigHelper.createConfig();
        // Create the client for Simple Workflow Service and S3 Service
        swfService = configHelper.createSWFClient();
        domain = configHelper.getDomain();
        domainRetentionPeriodInDays = configHelper.getDomainRetentionPeriodInDays();
        configHelper.createS3Client();
        return configHelper;
    }

    /** Registers a JVM shutdown hook that stops the host cleanly. */
    static void addShutDownHook() {
        Runtime.getRuntime().addShutdownHook(new Thread(new Runnable() {

            public void run() {
                try {
                    getWorkflowHost().stopHost();
                }
                catch (InterruptedException e) {
                    e.printStackTrace();
                }
            }
        }));
    }
}
| 3,372 |
0 | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples/booking/BookingActivitiesImpl.java | /**
* Copyright 2012-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.simpleworkflow.flow.examples.booking;
/**
 * Console-only implementation of the booking activities: each activity simply
 * reports what it would do. A real application would call reservation and
 * notification services here.
 */
public class BookingActivitiesImpl implements BookingActivities {

    @Override
    public void reserveCar(int requestId) {
        // Placeholder for a real car reservation call.
        System.out.print(String.format("Reserving car for Request ID: %d...\n", requestId));
    }

    @Override
    public void reserveAirline(int requestId) {
        // Placeholder for a real airline reservation call.
        System.out.print(String.format("Reserving airline for Request ID: %d...\n", requestId));
    }

    @Override
    public void sendConfirmationActivity(int customerId) {
        // Placeholder for a real customer notification call.
        System.out.print(String.format("Sending notification to Customer '%d'...\n", customerId));
    }
}
| 3,373 |
0 | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples/periodicworkflow/PeriodicWorkflowOptions.java | /**
* Copyright 2012-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.simpleworkflow.flow.examples.periodicworkflow;
/**
 * Schedule settings for the periodic workflow: how often to execute the
 * activity, whether executions are serialized, how often to continue as a new
 * run, and when to complete for good.
 */
public class PeriodicWorkflowOptions {

    // Seconds between consecutive activity executions.
    private long periodSeconds;

    // When true, the next execution is not scheduled until the previous activity completes.
    private boolean serializeExecutions;

    // Seconds after which the workflow continues as a new run (history reset).
    private long historyResetSeconds;

    // Seconds after which the workflow completes without creating another run.
    private long stopAfterSeconds;

    /** @return the interval between activity executions, in seconds */
    public long getExecutionPeriodSeconds() {
        return periodSeconds;
    }

    /**
     * Interval between activity executions.
     */
    public void setExecutionPeriodSeconds(long executionPeriodSeconds) {
        periodSeconds = executionPeriodSeconds;
    }

    /** @return whether a new execution waits for the previous one to complete */
    public boolean isWaitForActivityCompletion() {
        return serializeExecutions;
    }

    /**
     * Should the new activity execution be delayed until the previous one
     * completes?
     */
    public void setWaitForActivityCompletion(boolean waitForActivityCompletion) {
        serializeExecutions = waitForActivityCompletion;
    }

    /** @return seconds between continue-as-new transitions */
    public long getContinueAsNewAfterSeconds() {
        return historyResetSeconds;
    }

    /**
     * How frequently a new run of the workflow (with an empty history) should
     * be created.
     */
    public void setContinueAsNewAfterSeconds(long continueAsNewAfterSeconds) {
        historyResetSeconds = continueAsNewAfterSeconds;
    }

    /** @return seconds after which the workflow completes without a new run */
    public long getCompleteAfterSeconds() {
        return stopAfterSeconds;
    }

    /**
     * Complete the workflow without creating a new run after the specified
     * interval.
     */
    public void setCompleteAfterSeconds(long completeAfterSeconds) {
        stopAfterSeconds = completeAfterSeconds;
    }
}
| 3,374 |
0 | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples/periodicworkflow/ErrorReportingActivitiesImpl.java | /**
* Copyright 2012-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.simpleworkflow.flow.examples.periodicworkflow;
import com.amazonaws.services.simpleworkflow.flow.ActivityExecutionContextProvider;
import com.amazonaws.services.simpleworkflow.flow.ActivityExecutionContextProviderImpl;
/**
 * Reports periodic-task failures by printing them to standard error together
 * with the run id of the workflow execution that scheduled the activity.
 */
public class ErrorReportingActivitiesImpl implements ErrorReportingActivities {

    private final ActivityExecutionContextProvider contextProvider;

    /** Creates an instance backed by the default execution context provider. */
    public ErrorReportingActivitiesImpl() {
        this(new ActivityExecutionContextProviderImpl());
    }

    /**
     * For unit testing or IoC.
     */
    public ErrorReportingActivitiesImpl(ActivityExecutionContextProvider contextProvider) {
        this.contextProvider = contextProvider;
    }

    @Override
    public void reportFailure(Throwable e) {
        String runId = contextProvider.getActivityExecutionContext()
                .getTask()
                .getWorkflowExecution()
                .getRunId();
        System.err.println("Run Id: " + runId + ", Failure in periodic task:");
        e.printStackTrace();
    }
}
| 3,375 |
0 | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples/periodicworkflow/PeriodicWorkflow.java | /**
* Copyright 2012-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.simpleworkflow.flow.examples.periodicworkflow;
import com.amazonaws.services.simpleworkflow.flow.annotations.Execute;
import com.amazonaws.services.simpleworkflow.flow.annotations.Workflow;
import com.amazonaws.services.simpleworkflow.flow.annotations.WorkflowRegistrationOptions;
import com.amazonaws.services.simpleworkflow.model.ActivityType;
/**
 * Workflow that repeatedly schedules a dynamically specified activity
 * according to the supplied schedule options.
 */
@Workflow
@WorkflowRegistrationOptions(defaultExecutionStartToCloseTimeoutSeconds = 300, defaultTaskStartToCloseTimeoutSeconds = 10)
public interface PeriodicWorkflow {

    /**
     * Start workflow that executes activity according to options.
     *
     * @param activity
     *            activity type to execute
     * @param activityArguments
     *            arguments passed to each invocation of the activity
     * @param options
     *            define the schedule of the execution.
     */
    @Execute(name = "PeriodicWorkflow", version = "1.0")
    void startPeriodicWorkflow(ActivityType activity, Object[] activityArguments, PeriodicWorkflowOptions options);
}
| 3,376 |
0 | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples/periodicworkflow/PeriodicWorkflowActivities.java | /**
* Copyright 2012-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.simpleworkflow.flow.examples.periodicworkflow;
import com.amazonaws.services.simpleworkflow.flow.annotations.Activities;
import com.amazonaws.services.simpleworkflow.flow.annotations.ActivityRegistrationOptions;
/**
 * Activity contract executed repeatedly by the periodic workflow sample.
 */
@Activities(version="1.0")
@ActivityRegistrationOptions(
        defaultTaskScheduleToStartTimeoutSeconds = 100, 
        defaultTaskStartToCloseTimeoutSeconds = 10)
public interface PeriodicWorkflowActivities {

    /**
     * Performs one unit of periodic work.
     *
     * @param parameter opaque argument supplied by the workflow starter
     */
    void doSomeWork(String parameter);
}
| 3,377 |
0 | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples/periodicworkflow/ActivityHost.java | /**
* Copyright 2012-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.simpleworkflow.flow.examples.periodicworkflow;
import java.io.IOException;
import java.util.concurrent.TimeUnit;
import com.amazonaws.services.simpleworkflow.AmazonSimpleWorkflow;
import com.amazonaws.services.simpleworkflow.flow.ActivityWorker;
import com.amazonaws.services.simpleworkflow.flow.examples.common.ConfigHelper;
/**
* This is the process which hosts all Activities in this sample
*/
/**
 * This is the process which hosts all Activities in this sample.
 */
public class ActivityHost {

    private static final String ACTIVITIES_TASK_LIST = "Periodic";

    public static void main(String[] args) throws Exception {
        ConfigHelper config = ConfigHelper.createConfig();
        AmazonSimpleWorkflow swfClient = config.createSWFClient();

        // One worker polls the sample task list for activity tasks.
        final ActivityWorker activityWorker = new ActivityWorker(swfClient, config.getDomain(), ACTIVITIES_TASK_LIST);
        activityWorker.addActivitiesImplementation(new PeriodicWorkflowActivitiesImpl());
        activityWorker.start();
        System.out.println("Activity Worker Started for Task List: " + activityWorker.getTaskListToPoll());

        // Give the worker up to a minute to drain when the JVM shuts down.
        Runtime.getRuntime().addShutdownHook(new Thread(new Runnable() {

            public void run() {
                try {
                    activityWorker.shutdownAndAwaitTermination(1, TimeUnit.MINUTES);
                    System.out.println("Activity Worker Exited.");
                }
                catch (InterruptedException e) {
                    e.printStackTrace();
                }
            }
        }));

        System.out.println("Please press any key to terminate service.");
        try {
            System.in.read();
        }
        catch (IOException e) {
            e.printStackTrace();
        }
        System.exit(0);
    }
}
| 3,378 |
0 | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples/periodicworkflow/ErrorReportingActivities.java | /**
* Copyright 2012-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.simpleworkflow.flow.examples.periodicworkflow;
import com.amazonaws.services.simpleworkflow.flow.annotations.Activities;
import com.amazonaws.services.simpleworkflow.flow.annotations.ActivityRegistrationOptions;
/**
 * Activity used by the periodic workflow to surface failures of the periodic
 * task (caught by the workflow's TryCatch block) without failing the workflow.
 */
@Activities(version="1.0")
@ActivityRegistrationOptions(
        defaultTaskScheduleToStartTimeoutSeconds = 100, 
        defaultTaskStartToCloseTimeoutSeconds = 10)
public interface ErrorReportingActivities {

    /**
     * Records the given failure.
     *
     * @param e the throwable to report
     */
    void reportFailure(Throwable e);
}
| 3,379 |
0 | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples/periodicworkflow/PeriodicWorkflowTest.java | /**
* Copyright 2012-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.simpleworkflow.flow.examples.periodicworkflow;
import junit.framework.Assert;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import com.amazonaws.services.simpleworkflow.flow.ActivityExecutionContext;
import com.amazonaws.services.simpleworkflow.flow.ActivityExecutionContextProvider;
import com.amazonaws.services.simpleworkflow.flow.ActivityExecutionContextProviderImpl;
import com.amazonaws.services.simpleworkflow.flow.core.TryFinally;
import com.amazonaws.services.simpleworkflow.flow.junit.FlowBlockJUnit4ClassRunner;
import com.amazonaws.services.simpleworkflow.flow.junit.WorkflowTest;
import com.amazonaws.services.simpleworkflow.model.ActivityType;
import com.amazonaws.services.simpleworkflow.model.WorkflowExecution;
// Have to use Flow JUnit runner if timeout is specified on any @Test
// annotation.
@RunWith(FlowBlockJUnit4ClassRunner.class)
/**
 * Unit test for {@link PeriodicWorkflowImpl} using the Flow JUnit test
 * framework. Runs the workflow against in-memory activity implementations and
 * an accelerated workflow clock, then verifies execution counts.
 */
public class PeriodicWorkflowTest {

    /**
     * In-memory stand-in for the periodic activity. Counts total executions
     * and executions within the current run; the per-run counter is reset
     * whenever the run id changes (i.e. after a continue-as-new transition).
     */
    private final class TestPeriodicWorkflowActivities implements PeriodicWorkflowActivities {

        private final ActivityExecutionContextProvider contextProvider = new ActivityExecutionContextProviderImpl();

        // Total number of doSomeWork invocations across all runs.
        private int workCount;

        // Number of invocations seen in the current (latest) run only.
        private int currentRunWorkCount;

        // Run id observed on the previous invocation; null before the first call.
        private String runId;

        @Override
        public void doSomeWork(String parameter) {
            // Reset counter on the new run which changes when workflow continues as new
            ActivityExecutionContext activityExecutionContext = contextProvider.getActivityExecutionContext();
            WorkflowExecution workflowExecution = activityExecutionContext.getWorkflowExecution();
            String runId = workflowExecution.getRunId();
            if (this.runId != null && !runId.equals(this.runId)) {
                currentRunWorkCount = 0;
            }
            this.runId = runId;
            workCount++;
            currentRunWorkCount++;
        }

        public int getWorkCount() {
            return workCount;
        }

        public int getCurrentRunWorkCount() {
            return currentRunWorkCount;
        }
    }

    /**
     * Records the failure passed to reportFailure so the test can assert
     * that no failure was reported.
     */
    private final class TestErrorReportingActivities implements ErrorReportingActivities {

        private Throwable failure;

        @Override
        public void reportFailure(Throwable e) {
            failure = e;
        }

        public Throwable getFailure() {
            return failure;
        }
    }

    // Provides the in-memory SWF test environment for each test method.
    @Rule
    public WorkflowTest workflowTest = new WorkflowTest();

    private PeriodicWorkflowClientFactory workflowClientFactory = new PeriodicWorkflowClientFactoryImpl();

    private TestPeriodicWorkflowActivities periodicActivitiesImplementation;

    private TestErrorReportingActivities errorReportingActivitiesImplementation;

    @Before
    public void setUp() throws Exception {
        // Register test activity implementations and the workflow type with
        // the test environment before each test.
        periodicActivitiesImplementation = new TestPeriodicWorkflowActivities();
        workflowTest.addActivitiesImplementation(periodicActivitiesImplementation);
        errorReportingActivitiesImplementation = new TestErrorReportingActivities();
        workflowTest.addActivitiesImplementation(errorReportingActivitiesImplementation);
        workflowTest.addWorkflowImplementationType(PeriodicWorkflowImpl.class);
        workflowTest.setDisableOutstandingTasksCheck(true);
    }

    @After
    public void tearDown() throws Exception {
    }

    @Test(timeout = 2000)
    public void testPeriodic() {
        // Accelerate the workflow clock 100x so the 120-second schedule
        // finishes well inside the 2-second test timeout.
        workflowTest.setClockAccelerationCoefficient(100);
        workflowTest.setDisableOutstandingTasksCheck(true);
        final PeriodicWorkflowClient workflow = workflowClientFactory.getClient();
        final PeriodicWorkflowOptions options = new PeriodicWorkflowOptions();
        options.setExecutionPeriodSeconds(10);
        options.setContinueAsNewAfterSeconds(30);
        options.setCompleteAfterSeconds(120);
        options.setWaitForActivityCompletion(true);
        final ActivityType activityType = new ActivityType();
        activityType.setName("PeriodicWorkflowActivities.doSomeWork");
        activityType.setVersion("1.0");
        final Object[] parameters = new Object[] { "parameter1" };
        // TryFinally runs asynchronously inside the test environment: doTry
        // starts the workflow, doFinally asserts after it completes.
        new TryFinally() {

            @Override
            protected void doTry() throws Throwable {
                workflow.startPeriodicWorkflow(activityType, parameters, options);
            }

            @Override
            protected void doFinally() throws Throwable {
                // 120s schedule / 10s period = 12 total executions;
                // 30s per run / 10s period = 3 executions in the final run.
                Assert.assertEquals(120 / 10, periodicActivitiesImplementation.getWorkCount());
                Assert.assertEquals(30 / 10, periodicActivitiesImplementation.getCurrentRunWorkCount());
                Assert.assertNull(errorReportingActivitiesImplementation.getFailure());
            }
        };
    }
}
| 3,380 |
0 | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples/periodicworkflow/WorkflowExecutionStarter.java | /**
* Copyright 2012-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.simpleworkflow.flow.examples.periodicworkflow;
import com.amazonaws.services.simpleworkflow.AmazonSimpleWorkflow;
import com.amazonaws.services.simpleworkflow.flow.examples.common.ConfigHelper;
import com.amazonaws.services.simpleworkflow.model.ActivityType;
import com.amazonaws.services.simpleworkflow.model.WorkflowExecution;
import com.amazonaws.services.simpleworkflow.model.WorkflowExecutionAlreadyStartedException;
public class WorkflowExecutionStarter {
private static AmazonSimpleWorkflow swfService;
private static String domain;
public static void main(String[] args) throws Exception {
// Load configuration
ConfigHelper configHelper = ConfigHelper.createConfig();
// Create the client for Simple Workflow Service
swfService = configHelper.createSWFClient();
domain = configHelper.getDomain();
// Start Workflow execution
PeriodicWorkflowClientExternalFactory clientFactory = new PeriodicWorkflowClientExternalFactoryImpl(swfService, domain);
// Passing instance id to ensure that only one periodic workflow can be active at a time.
// Use different id for each schedule.
PeriodicWorkflowClientExternal workflow = clientFactory.getClient("periodic1");
// Execute activity every two 10 seconds, wait for it to complete before starting the new one,
// create new run every 30 seconds and stop the workflow after two minutes.
// Obviously these periods are so low to make example run fast enough to not be boring.
// In production case there is no need to create new runs so frequently.
PeriodicWorkflowOptions options = new PeriodicWorkflowOptions();
options.setExecutionPeriodSeconds(10);
options.setContinueAsNewAfterSeconds(30);
options.setCompleteAfterSeconds(120);
options.setWaitForActivityCompletion(true);
ActivityType activityType = new ActivityType();
activityType.setName("PeriodicWorkflowActivities.doSomeWork");
activityType.setVersion("1.0");
Object[] parameters = new Object[] { "parameter1" };
try {
workflow.startPeriodicWorkflow(activityType, parameters, options);
// WorkflowExecution is available after workflow creation
WorkflowExecution workflowExecution = workflow.getWorkflowExecution();
System.out.println("Started periodic workflow with workflowId=\"" + workflowExecution.getWorkflowId()
+ "\" and runId=\"" + workflowExecution.getRunId() + "\"");
}
catch (WorkflowExecutionAlreadyStartedException e) {
// It is expected to get this exception if start is called before workflow run is completed.
System.out.println("Periodic workflow with workflowId=\"" + workflow.getWorkflowExecution().getWorkflowId()
+ " is already running");
}
System.exit(0);
}
} | 3,381 |
0 | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples/periodicworkflow/PeriodicWorkflowActivitiesImpl.java | /**
* Copyright 2012-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.simpleworkflow.flow.examples.periodicworkflow;
import java.util.Random;
import com.amazonaws.services.simpleworkflow.flow.ActivityExecutionContextProvider;
import com.amazonaws.services.simpleworkflow.flow.ActivityExecutionContextProviderImpl;
/**
 * Default implementation of the periodic activity. Sleeps for a random time
 * (under 3 seconds) to simulate work, so the effect of the workflow's
 * waitForActivityCompletion option is observable.
 */
public class PeriodicWorkflowActivitiesImpl implements PeriodicWorkflowActivities {

    private final ActivityExecutionContextProvider contextProvider;

    public PeriodicWorkflowActivitiesImpl() {
        this.contextProvider = new ActivityExecutionContextProviderImpl();
    }

    /**
     * For unit testing or IoC
     */
    public PeriodicWorkflowActivitiesImpl(ActivityExecutionContextProvider contextProvider) {
        this.contextProvider = contextProvider;
    }

    @Override
    // Activity takes random time to execute to show that it is waited if workflow parameter waitForActivityCompletion
    // is set to true
    public void doSomeWork(String parameter) {
        String runId = contextProvider.getActivityExecutionContext().getTask().getWorkflowExecution().getRunId();
        Random r = new Random();
        long delay = r.nextInt(3000);
        System.out.println("Run Id: " + runId + ", Do some periodic task here for " + delay + " milliseconds with parameter="
                + parameter);
        try {
            Thread.sleep(delay);
        }
        catch (InterruptedException e) {
            // Fix: restore the interrupt status instead of swallowing it, so a
            // shutting-down activity worker can observe the interruption.
            Thread.currentThread().interrupt();
        }
    }
}
| 3,382 |
0 | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples/periodicworkflow/WorkflowHost.java | /**
* Copyright 2012-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.simpleworkflow.flow.examples.periodicworkflow;
import java.io.IOException;
import java.util.concurrent.TimeUnit;
import com.amazonaws.services.simpleworkflow.AmazonSimpleWorkflow;
import com.amazonaws.services.simpleworkflow.flow.WorkflowWorker;
import com.amazonaws.services.simpleworkflow.flow.examples.common.ConfigHelper;
/**
 * Hosts the periodic workflow decider: a single WorkflowWorker polling the
 * "PeriodicWorkflow" decision task list.
 */
public class WorkflowHost {

    private static final String DECISION_TASK_LIST = "PeriodicWorkflow";

    public static void main(String[] args) throws Exception {
        ConfigHelper config = ConfigHelper.createConfig();
        AmazonSimpleWorkflow swfClient = config.createSWFClient();

        final WorkflowWorker decisionWorker = new WorkflowWorker(swfClient, config.getDomain(), DECISION_TASK_LIST);
        decisionWorker.addWorkflowImplementationType(PeriodicWorkflowImpl.class);
        decisionWorker.start();
        System.out.println("Workflow Host Service Started...");

        // Give the worker up to a minute to drain when the JVM shuts down.
        Runtime.getRuntime().addShutdownHook(new Thread(new Runnable() {

            public void run() {
                try {
                    decisionWorker.shutdownAndAwaitTermination(1, TimeUnit.MINUTES);
                    System.out.println("Workflow Host Service Terminated...");
                }
                catch (InterruptedException e) {
                    e.printStackTrace();
                }
            }
        }));

        System.out.println("Please press any key to terminate service.");
        try {
            System.in.read();
        }
        catch (IOException e) {
            e.printStackTrace();
        }
        System.exit(0);
    }
}
| 3,383 |
0 | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples/periodicworkflow/PeriodicWorkflowImpl.java | /**
* Copyright 2012-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.simpleworkflow.flow.examples.periodicworkflow;
import com.amazonaws.services.simpleworkflow.flow.DecisionContextProvider;
import com.amazonaws.services.simpleworkflow.flow.DecisionContextProviderImpl;
import com.amazonaws.services.simpleworkflow.flow.DynamicActivitiesClient;
import com.amazonaws.services.simpleworkflow.flow.DynamicActivitiesClientImpl;
import com.amazonaws.services.simpleworkflow.flow.WorkflowClock;
import com.amazonaws.services.simpleworkflow.flow.annotations.Asynchronous;
import com.amazonaws.services.simpleworkflow.flow.core.Promise;
import com.amazonaws.services.simpleworkflow.flow.core.TryCatchFinally;
import com.amazonaws.services.simpleworkflow.model.ActivityType;
public class PeriodicWorkflowImpl implements PeriodicWorkflow {
private static final int SECOND = 1000;
/**
* This is needed to keep the decider logic deterministic as using
* System.currentTimeMillis() in your decider logic is not.
* WorkflowClock.currentTimeMillis() should be used instead.
*/
private final WorkflowClock clock;
private final DynamicActivitiesClient activities;
private final ErrorReportingActivitiesClient errorReporting;
/**
* Used to create new run of the periodic workflow to reset history. This
* allows "infinite" workflows.
*/
private final PeriodicWorkflowSelfClient selfClient;
private ActivityType activityType;
private PeriodicWorkflowOptions options;
private Object[] activityArguments;
    /**
     * No-arg constructor used by the framework: obtains the replay-safe
     * workflow clock from the current decision context and instantiates the
     * generated activity and self clients.
     */
    public PeriodicWorkflowImpl() {
        DecisionContextProvider contextProvider = new DecisionContextProviderImpl();
        clock = contextProvider.getDecisionContext().getWorkflowClock();
        activities = new DynamicActivitiesClientImpl();
        errorReporting = new ErrorReportingActivitiesClientImpl();
        selfClient = new PeriodicWorkflowSelfClientImpl();
    }
    /**
     * Constructor used for unit testing or configuration through IOC container.
     *
     * @param clock replay-safe clock (System.currentTimeMillis() must not be used in a decider)
     * @param activities client used to schedule the dynamically typed activity
     * @param errorReporting client used to report failures of the periodic task
     * @param selfClient client used to continue this workflow as a new run
     */
    public PeriodicWorkflowImpl(WorkflowClock clock, DynamicActivitiesClient activities,
            ErrorReportingActivitiesClient errorReporting, PeriodicWorkflowSelfClient selfClient) {
        this.clock = clock;
        this.activities = activities;
        this.errorReporting = errorReporting;
        this.selfClient = selfClient;
    }
    @Override
    public void startPeriodicWorkflow(final ActivityType activity, final Object[] activityArguments,
            final PeriodicWorkflowOptions options) {
        // Workflow start time; read in doFinally to compute how much of the
        // overall completeAfterSeconds budget is left for the next run.
        final long startTime = clock.currentTimeMillis();
        this.activityType = activity;
        this.activityArguments = activityArguments;
        this.options = options;
        // Use TryCatch to ensure that workflow is not going to fail as it causes new run not being created
        new TryCatchFinally() {

            @Override
            protected void doTry() throws Throwable {
                // NOTE(review): this local shadows the outer startTime; both are
                // read from the same workflow clock at workflow start — confirm
                // the shadow is intentional before removing.
                long startTime = clock.currentTimeMillis();
                callPeriodicActivity(startTime, Promise.Void(), Promise.Void());
            }

            @Override
            protected void doCatch(Throwable e) throws Throwable {
                // Report the failure via an activity instead of failing the
                // workflow, so doFinally still gets to schedule the next run.
                errorReporting.reportFailure(e);
            }

            @Override
            protected void doFinally() throws Throwable {
                // Seconds remaining of the overall schedule; carried into the next run.
                long secondsLeft = options.getCompleteAfterSeconds() - (clock.currentTimeMillis() - startTime) / SECOND;
                PeriodicWorkflowOptions nextRunOptions = new PeriodicWorkflowOptions();
                nextRunOptions.setCompleteAfterSeconds(secondsLeft);
                nextRunOptions.setContinueAsNewAfterSeconds(options.getContinueAsNewAfterSeconds());
                nextRunOptions.setExecutionPeriodSeconds(options.getExecutionPeriodSeconds());
                nextRunOptions.setWaitForActivityCompletion(options.isWaitForActivityCompletion());
                // NOTE(review): mutating the input options appears redundant since
                // nextRunOptions already carries secondsLeft — verify before removing.
                options.setCompleteAfterSeconds(secondsLeft);
                if (secondsLeft > 0) {
                    // Request new run of the current workflow instance.
                    selfClient.startPeriodicWorkflow(activity, activityArguments, nextRunOptions);
                }
            }
        };
    }
@Asynchronous
public void callPeriodicActivity(long startTime, Promise<?>... waitFor) {
long currentTime = clock.currentTimeMillis();
if ((currentTime - startTime) < options.getContinueAsNewAfterSeconds() * SECOND) {
// Call activity using dynamic client. Return type is specified as Void as it is not used, but activity that
// returns some other type can be called this way.
Promise<Void> activityCompletion = activities.scheduleActivity(activityType, activityArguments, null, Void.class);
if (!options.isWaitForActivityCompletion()) {
// Promise.Void() returns already ready promise of type Void
activityCompletion = Promise.Void();
}
// Create a timer to re-run your periodic activity after activity completion,
// but not earlier then after delay of executionPeriodSeconds.
// However in real cron workflows, delay should be calculated everytime to run activity at
// a predefined time.
Promise<Void> timer = clock.createTimer(options.getExecutionPeriodSeconds());
callPeriodicActivity(startTime, timer, activityCompletion);
}
}
} | 3,384 |
0 | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples/fileprocessing/FileProcessingActivitiesZipImpl.java | /**
* Copyright 2012-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.simpleworkflow.flow.examples.fileprocessing;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;
/**
 * This implementation of FileProcessingActivities zips the file.
 */
public class FileProcessingActivitiesZipImpl implements FileProcessingActivities {

    /** Copy buffer size in bytes. */
    private static final int BUFFER = 1024;

    // Directory that both the input file and the produced zip live in.
    private final String localDirectory;

    public FileProcessingActivitiesZipImpl(String localDirectory) {
        this.localDirectory = localDirectory;
    }

    /**
     * Activity implementation that compresses a single file into a zip archive
     * stored next to it in the local directory.
     *
     * @param fileName
     *            name of the file to zip, relative to the local directory
     * @param zipFileName
     *            name of the zip file to create, relative to the local directory
     * @throws Exception
     *             if the source file cannot be read or the archive cannot be written
     */
    @Override
    public void processFile(String fileName, String zipFileName) throws Exception {
        String fileNameFullPath = localDirectory + fileName;
        String zipFileNameFullPath = localDirectory + zipFileName;
        System.out.println("processFile activity begin. fileName= " + fileNameFullPath + ", zipFileName= " + zipFileNameFullPath);
        // Open the input first: the original code created the output file before
        // opening the input, so a missing source left an empty/corrupt zip behind.
        // try-with-resources guarantees both streams are closed on every path.
        try (BufferedInputStream origin =
                     new BufferedInputStream(new FileInputStream(fileNameFullPath), BUFFER);
             ZipOutputStream out =
                     new ZipOutputStream(new BufferedOutputStream(new FileOutputStream(zipFileNameFullPath)))) {
            out.putNextEntry(new ZipEntry(fileName));
            byte[] data = new byte[BUFFER];
            int count;
            while ((count = origin.read(data, 0, BUFFER)) != -1) {
                out.write(data, 0, count);
            }
            // Explicitly finish the entry before the stream is closed.
            out.closeEntry();
        }
        System.out.println("zipFileActivity done.");
    }
}
| 3,385 |
0 | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples/fileprocessing/FileProcessingConfigKeys.java | /**
* Copyright 2012-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.simpleworkflow.flow.examples.fileprocessing;
/**
 * Property-file keys used by the file processing sample: the activity worker's
 * local staging folder plus the source/target bucket and file names that form
 * the workflow input. Constants holder only — not instantiable.
 */
public final class FileProcessingConfigKeys {

    /** Local folder the activity worker stages downloads/uploads in. */
    public static final String ACTIVITY_WORKER_LOCALFOLDER = "Activity.Worker.LocalFolder";

    /** Name of the file to download from the source bucket. */
    public static final String WORKFLOW_INPUT_SOURCEFILENAME_KEY = "Workflow.Input.SourceFileName";

    /** S3 bucket the source file is downloaded from. */
    public static final String WORKFLOW_INPUT_SOURCEBUCKETNAME_KEY = "Workflow.Input.SourceBucketName";

    /** Name to upload the processed file under. */
    public static final String WORKFLOW_INPUT_TARGETFILENAME_KEY = "Workflow.Input.TargetFileName";

    /** S3 bucket the processed file is uploaded to. */
    public static final String WORKFLOW_INPUT_TARGETBUCKETNAME_KEY = "Workflow.Input.TargetBucketName";

    // Utility class: prevent instantiation.
    private FileProcessingConfigKeys() {
    }
}
| 3,386 |
0 | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples/fileprocessing/FileProcessingActivities.java | /**
* Copyright 2012-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.simpleworkflow.flow.examples.fileprocessing;
import com.amazonaws.services.simpleworkflow.flow.annotations.Activities;
import com.amazonaws.services.simpleworkflow.flow.annotations.ActivityRegistrationOptions;
/**
 * Contract for file processing activities. The sample implementation
 * ({@link FileProcessingActivitiesZipImpl}) compresses the input file into a
 * zip archive.
 */
@Activities(version="1.0")
@ActivityRegistrationOptions(defaultTaskScheduleToStartTimeoutSeconds = 60, defaultTaskStartToCloseTimeoutSeconds = 60)
public interface FileProcessingActivities {

    /**
     * Transforms inputFileName into outputFileName.
     *
     * @param inputFileName
     *            name of the file to process, relative to the worker's local folder
     * @param outputFileName
     *            name of the processed output file, relative to the worker's local folder
     * @throws Exception
     *             if processing fails
     */
    public void processFile(String inputFileName, String outputFileName) throws Exception;
}
| 3,387 |
0 | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples/fileprocessing/ActivityHost.java | /**
* Copyright 2012-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.simpleworkflow.flow.examples.fileprocessing;
import java.io.IOException;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.concurrent.TimeUnit;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.simpleworkflow.AmazonSimpleWorkflow;
import com.amazonaws.services.simpleworkflow.flow.ActivityWorker;
import com.amazonaws.services.simpleworkflow.flow.examples.common.ConfigHelper;
/**
 * Process that hosts every activity implementation used by the file processing
 * sample. It polls two task lists: the shared "FileProcessing" list and a
 * host-specific list (named after this machine) used for activities that must
 * run on the box holding the downloaded file.
 */
public class ActivityHost {

    private static final String ACTIVITIES_TASK_LIST = "FileProcessing";

    public static void main(String[] args) throws Exception {
        ConfigHelper configHelper = ConfigHelper.createConfig();
        AmazonSimpleWorkflow swfService = configHelper.createSWFClient();
        AmazonS3 s3Client = configHelper.createS3Client();
        String domain = configHelper.getDomain();
        String localFolder = configHelper.getValueFromConfig(FileProcessingConfigKeys.ACTIVITY_WORKER_LOCALFOLDER);
        String hostTaskList = getHostName();

        // Worker polling the task list shared by all hosts.
        final ActivityWorker commonWorker = new ActivityWorker(swfService, domain, ACTIVITIES_TASK_LIST);
        SimpleStoreActivitiesS3Impl storeActivities = new SimpleStoreActivitiesS3Impl(s3Client, localFolder, hostTaskList);
        commonWorker.addActivitiesImplementation(storeActivities);
        commonWorker.start();
        System.out.println("Host Service Started for Task List: " + ACTIVITIES_TASK_LIST);

        // Worker polling this machine's private task list, so host-affine
        // activities (process, upload, delete) land on the right box.
        final ActivityWorker hostWorker = new ActivityWorker(swfService, domain, hostTaskList);
        hostWorker.addActivitiesImplementation(storeActivities);
        FileProcessingActivitiesZipImpl zipActivities = new FileProcessingActivitiesZipImpl(localFolder);
        hostWorker.addActivitiesImplementation(zipActivities);
        hostWorker.start();
        System.out.println("Worker Started for Activity Task List: " + getHostName());

        // Drain both workers gracefully when the JVM shuts down.
        Runtime.getRuntime().addShutdownHook(new Thread() {

            public void run() {
                try {
                    commonWorker.shutdown();
                    hostWorker.shutdown();
                    commonWorker.awaitTermination(1, TimeUnit.MINUTES);
                    hostWorker.awaitTermination(1, TimeUnit.MINUTES);
                    System.out.println("Activity Workers Exited.");
                }
                catch (InterruptedException e) {
                    e.printStackTrace();
                }
            }
        });

        System.out.println("Please press any key to terminate service.");
        try {
            System.in.read();
        }
        catch (IOException e) {
            e.printStackTrace();
        }
        System.exit(0);
    }

    /** Resolves this machine's hostname, used as its private task list name. */
    static String getHostName() {
        try {
            return InetAddress.getLocalHost().getHostName();
        }
        catch (UnknownHostException e) {
            throw new Error(e);
        }
    }
}
| 3,388 |
0 | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples/fileprocessing/SimpleStoreActivitiesS3Impl.java | /**
* Copyright 2012-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.simpleworkflow.flow.examples.fileprocessing;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.model.S3Object;
import com.amazonaws.services.simpleworkflow.flow.ActivityExecutionContext;
import com.amazonaws.services.simpleworkflow.flow.ActivityExecutionContextProvider;
import com.amazonaws.services.simpleworkflow.flow.ActivityExecutionContextProviderImpl;
/**
 * S3-backed implementation of {@link SimpleStoreActivities} providing
 * activities to download/upload files from S3 and to clean up local copies.
 */
public class SimpleStoreActivitiesS3Impl implements SimpleStoreActivities {

    // Minimum time between recorded heartbeats, in milliseconds (one minute).
    // Heartbeating too frequently is wasteful: every heartbeat adds an event
    // to the workflow history.
    private static final int HEARTBEAT_INTERVAL = 60000;

    private final ActivityExecutionContextProvider contextProvider = new ActivityExecutionContextProviderImpl();

    private final AmazonS3 s3Client;

    // Local staging directory for uploads and downloads.
    private final String localDirectory;

    // Task list identifying this host; returned from download() so follow-up
    // activities can be routed to the machine that holds the file.
    private final String hostSpecificTaskList;

    public SimpleStoreActivitiesS3Impl(AmazonS3 s3Client, String localDirectory, String taskList) {
        this.s3Client = s3Client;
        this.localDirectory = localDirectory;
        this.hostSpecificTaskList = taskList;
    }

    @Override
    public void upload(String bucketName, String localName, String targetName) {
        uploadFileToS3(bucketName, localDirectory + localName, targetName);
    }

    /**
     * Uploads a local file to S3.
     *
     * @param bucket
     *            name of the S3 bucket
     * @param localName
     *            full path of the local file to upload
     * @param remoteName
     *            key to store the uploaded object under
     */
    private void uploadFileToS3(String bucket, String localName, String remoteName) {
        System.out.println("uploadToS3 begin remoteName=" + remoteName + ", localName=" + localName);
        File f = new File(localName);
        s3Client.putObject(bucket, remoteName, f);
        System.out.println("uploadToS3 done");
    }

    @Override
    public String download(String bucketName, String remoteName, String localName) throws Exception {
        return downloadFileFromS3(bucketName, remoteName, localDirectory + localName);
    }

    /**
     * Downloads an S3 object to a local file, heartbeating with a progress
     * percentage while the transfer is in flight.
     *
     * @param bucketName
     *            name of the S3 bucket
     * @param remoteName
     *            key of the object to download
     * @param localName
     *            full path of the local file to write
     * @return the host specific task list so callers can schedule follow-up
     *         activities on the machine that now holds the file
     * @throws IOException
     *             on any transfer or file system failure
     */
    private String downloadFileFromS3(String bucketName, String remoteName, String localName) throws IOException {
        System.out.println("downloadFileFromS3 begin remoteName=" + remoteName + ", localName=" + localName);
        FileOutputStream f = new FileOutputStream(localName);
        try {
            S3Object obj = s3Client.getObject(bucketName, remoteName);
            InputStream inputStream = obj.getObjectContent();
            long totalSize = obj.getObjectMetadata().getContentLength();
            try {
                long totalRead = 0;
                int read = 0;
                byte[] bytes = new byte[1024];
                long lastHeartbeatTime = System.currentTimeMillis();
                while ((read = inputStream.read(bytes)) != -1) {
                    totalRead += read;
                    f.write(bytes, 0, read);
                    // Multiply before dividing: the previous expression divided
                    // the two longs first, which truncated progress to 0 for any
                    // partial download. Also guard against zero-length objects.
                    int progress = totalSize > 0 ? (int) (totalRead * 100 / totalSize) : 100;
                    lastHeartbeatTime = heartbeat(lastHeartbeatTime, progress);
                }
            }
            finally {
                inputStream.close();
            }
        }
        finally {
            f.close();
        }
        // Return hostname file was downloaded to
        System.out.println("downloadFileFromS3 done");
        return hostSpecificTaskList;
    }

    @Override
    public void deleteLocalFile(String fileName) {
        deleteLocalFiles(localDirectory + fileName);
    }

    /**
     * Deletes a file from the local staging directory. Best effort: the return
     * value of File.delete() is intentionally ignored, matching the original.
     *
     * @param fileName
     *            full path of the file to delete
     */
    private void deleteLocalFiles(String fileName) {
        System.out.println("deleteLocalActivity begin fileName=" + fileName);
        File f = new File(fileName);
        f.delete();
        System.out.println("deleteLocalActivity done");
    }

    /**
     * Records a heartbeat at most once per {@link #HEARTBEAT_INTERVAL}
     * milliseconds (the constant is one minute; an earlier comment incorrectly
     * said five). It is not a good idea to heartbeat too frequently as each
     * progress report ends up consuming history events.
     *
     * @param lastHeartbeatTime
     *            time the previous heartbeat was recorded
     * @param progress
     *            download progress percentage reported as heartbeat details
     * @return time of the last heartbeat
     */
    private long heartbeat(long lastHeartbeatTime, int progress) {
        if (System.currentTimeMillis() - lastHeartbeatTime > HEARTBEAT_INTERVAL) {
            ActivityExecutionContext context = contextProvider.getActivityExecutionContext();
            context.recordActivityHeartbeat(Integer.toString(progress));
            lastHeartbeatTime = System.currentTimeMillis();
        }
        return lastHeartbeatTime;
    }
}
| 3,389 |
0 | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples/fileprocessing/SimpleStoreActivities.java | /**
* Copyright 2012-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.simpleworkflow.flow.examples.fileprocessing;
import com.amazonaws.services.simpleworkflow.flow.annotations.Activities;
import com.amazonaws.services.simpleworkflow.flow.annotations.ActivityRegistrationOptions;
import com.amazonaws.services.simpleworkflow.flow.annotations.ExponentialRetry;
@Activities(version = "1.0")
@ActivityRegistrationOptions(defaultTaskScheduleToStartTimeoutSeconds = 60, defaultTaskStartToCloseTimeoutSeconds = 120)
public interface SimpleStoreActivities {

    /**
     * Uploads a local file to an S3 bucket.
     *
     * @param bucketName
     *            name of the S3 bucket to upload into
     * @param localName
     *            name of the local file to upload
     * @param targetName
     *            key to store the uploaded object under
     */
    @ExponentialRetry(initialRetryIntervalSeconds = 10, maximumAttempts = 10)
    public void upload(String bucketName, String localName, String targetName);

    /**
     * Downloads an object from an S3 bucket to a local file.
     *
     * @param bucketName
     *            name of the S3 bucket to download from
     * @param remoteName
     *            key of the S3 object to download
     * @param localName
     *            name of the local file to write after download
     * @return the task list specific to the host that performed the download,
     *         so follow-up activities can be routed to the same machine
     */
    @ExponentialRetry(initialRetryIntervalSeconds = 10, maximumAttempts = 10)
    public String download(String bucketName, String remoteName, String localName) throws Exception;

    /**
     * Deletes a file from the local temporary folder.
     *
     * @param fileName
     *            name of the file to delete from the temporary folder
     */
    @ExponentialRetry(initialRetryIntervalSeconds=10)
    public void deleteLocalFile(String fileName);
}
| 3,390 |
0 | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples/fileprocessing/FileProcessingWorkflowZipImpl.java | /**
* Copyright 2012-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.simpleworkflow.flow.examples.fileprocessing;
import java.io.File;
import java.io.IOException;
import com.amazonaws.services.simpleworkflow.flow.ActivitySchedulingOptions;
import com.amazonaws.services.simpleworkflow.flow.DecisionContextProviderImpl;
import com.amazonaws.services.simpleworkflow.flow.WorkflowContext;
import com.amazonaws.services.simpleworkflow.flow.annotations.Asynchronous;
import com.amazonaws.services.simpleworkflow.flow.core.Promise;
import com.amazonaws.services.simpleworkflow.flow.core.Settable;
import com.amazonaws.services.simpleworkflow.flow.core.TryCatchFinally;
/**
 * This implementation of FileProcessingWorkflow downloads the file, zips it
 * and uploads it back to S3. Download, processing, upload and cleanup are all
 * pinned to the same host via the worker-specific task list returned by the
 * download activity.
 */
public class FileProcessingWorkflowZipImpl implements FileProcessingWorkflow {

    private final SimpleStoreActivitiesClient store;

    private final FileProcessingActivitiesClient processor;

    private final WorkflowContext workflowContext;

    // Human-readable status exposed through getState().
    private String state = "Started";

    public FileProcessingWorkflowZipImpl() {
        // Create activity clients
        this.store = new SimpleStoreActivitiesClientImpl();
        processor = new FileProcessingActivitiesClientImpl();
        workflowContext = (new DecisionContextProviderImpl()).getDecisionContext().getWorkflowContext();
    }

    /**
     * Constructor used for unit testing or when Spring is used to configure
     * workflow
     */
    public FileProcessingWorkflowZipImpl(SimpleStoreActivitiesClient store, FileProcessingActivitiesClient processor,
            WorkflowContext workflowContext) {
        this.store = store;
        this.processor = processor;
        this.workflowContext = workflowContext;
    }

    /**
     * Orchestrates download -> process (zip) -> upload, with best-effort
     * deletion of the local files in doFinally regardless of outcome.
     */
    @Override
    public void processFile(final String sourceBucketName, final String sourceFilename, final String targetBucketName,
            final String targetFilename) throws IOException {
        // Settable to store the worker specific task list returned by the activity
        final Settable<String> taskList = new Settable<String>();
        // Use runId as a way to ensure that downloaded files do not get name collisions
        String workflowRunId = workflowContext.getWorkflowExecution().getRunId();
        File localSource = new File(sourceFilename);
        final String localSourceFilename = workflowRunId + "_" + localSource.getName();
        File localTarget = new File(targetFilename);
        final String localTargetFilename = workflowRunId + "_" + localTarget.getName();
        new TryCatchFinally() {

            @Override
            protected void doTry() throws Throwable {
                Promise<String> activityWorkerTaskList = store.download(sourceBucketName, sourceFilename, localSourceFilename);
                // chaining is a way for one promise get assigned value of another
                taskList.chain(activityWorkerTaskList);
                // Call processFile activity to zip the file
                Promise<Void> fileProcessed = processFileOnHost(localSourceFilename, localTargetFilename, activityWorkerTaskList);
                // Call upload activity to upload zipped file
                upload(targetBucketName, targetFilename, localTargetFilename, taskList, fileProcessed);
            }

            @Override
            protected void doCatch(Throwable e) throws Throwable {
                state = "Failed: " + e.getMessage();
                throw e;
            }

            @Override
            protected void doFinally() throws Throwable {
                if (taskList.isReady()) { // File was downloaded
                    // Set option to schedule activity in worker specific task list
                    ActivitySchedulingOptions options = new ActivitySchedulingOptions().withTaskList(taskList.get());
                    // Call deleteLocalFile activity using the host specific task list
                    store.deleteLocalFile(localSourceFilename, options);
                    store.deleteLocalFile(localTargetFilename, options);
                }
                if (!state.startsWith("Failed:")) {
                    state = "Completed";
                }
            }
        };
    }

    /**
     * Runs the processing activity on the same host that downloaded the file.
     * Executes only once the taskList promise (download result) is ready.
     */
    @Asynchronous
    private Promise<Void> processFileOnHost(String fileToProcess, String fileToUpload, Promise<String> taskList) {
        state = "Downloaded to " + taskList.get();
        // Call the activity to process the file using worker specific task list
        ActivitySchedulingOptions options = new ActivitySchedulingOptions().withTaskList(taskList.get());
        return processor.processFile(fileToProcess, fileToUpload, options);
    }

    /**
     * Uploads the processed file from the same host. Waits for both the
     * taskList promise and the processing-complete promise before running.
     */
    @Asynchronous
    private void upload(final String targetBucketName, final String targetFilename, final String localTargetFilename,
            Promise<String> taskList, Promise<Void> fileProcessed) {
        state = "Processed at " + taskList.get();
        ActivitySchedulingOptions options = new ActivitySchedulingOptions().withTaskList(taskList.get());
        store.upload(targetBucketName, localTargetFilename, targetFilename, options);
    }

    @Override
    public String getState() {
        return state;
    }
}
| 3,391 |
0 | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples/fileprocessing/WorkflowExecutionStarter.java | /**
* Copyright 2012-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.simpleworkflow.flow.examples.fileprocessing;
import com.amazonaws.services.simpleworkflow.AmazonSimpleWorkflow;
import com.amazonaws.services.simpleworkflow.flow.examples.common.ConfigHelper;
import com.amazonaws.services.simpleworkflow.model.WorkflowExecution;
/**
 * Launches a single execution of the file processing workflow using the
 * bucket/file names configured in the sample's properties file.
 */
public class WorkflowExecutionStarter {

    public static void main(String[] args) throws Exception {
        // Load configuration
        ConfigHelper configHelper = ConfigHelper.createConfig();
        // Create the client for Simple Workflow Service
        AmazonSimpleWorkflow swfService = configHelper.createSWFClient();
        String domain = configHelper.getDomain();
        // Workflow input: where to fetch the source file from and where to
        // put the processed result.
        String sourceBucketName = configHelper.getValueFromConfig(FileProcessingConfigKeys.WORKFLOW_INPUT_SOURCEBUCKETNAME_KEY);
        String sourceFilename = configHelper.getValueFromConfig(FileProcessingConfigKeys.WORKFLOW_INPUT_SOURCEFILENAME_KEY);
        String targetBucketName = configHelper.getValueFromConfig(FileProcessingConfigKeys.WORKFLOW_INPUT_TARGETBUCKETNAME_KEY);
        String targetFilename = configHelper.getValueFromConfig(FileProcessingConfigKeys.WORKFLOW_INPUT_TARGETFILENAME_KEY);
        // Start workflow execution through the generated external client.
        FileProcessingWorkflowClientExternalFactory clientFactory = new FileProcessingWorkflowClientExternalFactoryImpl(swfService, domain);
        FileProcessingWorkflowClientExternal workflow = clientFactory.getClient();
        workflow.processFile(sourceBucketName, sourceFilename, targetBucketName, targetFilename);
        // WorkflowExecution is available after workflow creation
        WorkflowExecution workflowExecution = workflow.getWorkflowExecution();
        // Fixed message: this sample starts the file processing workflow, not a
        // periodic one (the old text was copy-pasted from the periodic sample).
        System.out.println("Started file processing workflow with workflowId=\"" + workflowExecution.getWorkflowId()
                + "\" and runId=\"" + workflowExecution.getRunId() + "\"");
        System.exit(0);
    }
}
| 3,392 |
0 | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples/fileprocessing/FileProcessingWorkflow.java | /**
* Copyright 2012-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.simpleworkflow.flow.examples.fileprocessing;
import java.io.IOException;
import com.amazonaws.services.simpleworkflow.flow.annotations.Execute;
import com.amazonaws.services.simpleworkflow.flow.annotations.GetState;
import com.amazonaws.services.simpleworkflow.flow.annotations.Workflow;
import com.amazonaws.services.simpleworkflow.flow.annotations.WorkflowRegistrationOptions;
/**
 * Contract for file processing workflow
 */
@Workflow
@WorkflowRegistrationOptions(defaultExecutionStartToCloseTimeoutSeconds = 300, defaultTaskStartToCloseTimeoutSeconds = 10)
public interface FileProcessingWorkflow {

    /**
     * Downloads sourceFilename from sourceBucketName, processes it, and uploads
     * the result to targetBucketName under targetFilename.
     *
     * @param sourceBucketName S3 bucket containing the input file
     * @param sourceFilename key of the input file
     * @param targetBucketName S3 bucket to upload the processed file to
     * @param targetFilename key for the processed file
     * @throws IOException if file transfer or processing fails
     */
    @Execute(name = "ProcessFile", version = "1.0")
    public void processFile(String sourceBucketName, String sourceFilename, String targetBucketName, String targetFilename)
            throws IOException;

    /**
     * Returns the latest human-readable status string of this execution
     * (e.g. "Started", "Completed", "Failed: ..." in the sample implementation).
     */
    @GetState
    public String getState();
}
| 3,393 |
0 | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples/fileprocessing/WorkflowHost.java | /**
* Copyright 2012-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.simpleworkflow.flow.examples.fileprocessing;
import java.io.IOException;
import java.util.concurrent.TimeUnit;
import com.amazonaws.services.simpleworkflow.AmazonSimpleWorkflow;
import com.amazonaws.services.simpleworkflow.flow.WorkflowWorker;
import com.amazonaws.services.simpleworkflow.flow.examples.common.ConfigHelper;
/**
 * Process that hosts the SWF decider for the file processing sample workflow.
 */
public class WorkflowHost {

    private static final String DECISION_TASK_LIST = "FileProcessing";

    public static void main(String[] args) throws Exception {
        ConfigHelper config = ConfigHelper.createConfig();
        AmazonSimpleWorkflow swf = config.createSWFClient();
        String domain = config.getDomain();

        // Single decider polling the sample's decision task list.
        final WorkflowWorker decider = new WorkflowWorker(swf, domain, DECISION_TASK_LIST);
        decider.addWorkflowImplementationType(FileProcessingWorkflowZipImpl.class);
        decider.start();
        System.out.println("Workflow Host Service Started...");

        // Drain the decider gracefully when the JVM shuts down.
        Runtime.getRuntime().addShutdownHook(new Thread() {

            public void run() {
                try {
                    decider.shutdownAndAwaitTermination(1, TimeUnit.MINUTES);
                    System.out.println("Workflow Host Service Terminated...");
                }
                catch (InterruptedException e) {
                    e.printStackTrace();
                }
            }
        });

        System.out.println("Please press any key to terminate service.");
        try {
            System.in.read();
        }
        catch (IOException e) {
            e.printStackTrace();
        }
        System.exit(0);
    }
}
| 3,394 |
0 | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples/common/WorkflowExecutionReplayer.java | /**
* Copyright 2012-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.simpleworkflow.flow.examples.common;
import com.amazonaws.services.simpleworkflow.AmazonSimpleWorkflow;
import com.amazonaws.services.simpleworkflow.flow.WorkflowReplayer;
import com.amazonaws.services.simpleworkflow.model.WorkflowExecution;
/**
* Simple example utility to pretty print workflow execution history.
*
* @author fateev
*/
/**
 * Command line utility that re-creates the state of a workflow implementation
 * object by replaying the recorded history of an existing workflow execution,
 * then prints the resulting implementation object.
 *
 * <p>Arguments: {@code <workflow implementation class> <workflowId> <runId>}.
 *
 * @author fateev
 */
public class WorkflowExecutionReplayer {

    public static void main(String[] args) throws Exception {
        if (args.length < 3) {
            // Leading space before "<workflow..." keeps the usage text from
            // running directly into the class name.
            System.err.println("Usage: java " + WorkflowExecutionReplayer.class.getName()
                    + " <workflow implementation class> <workflowId> <runId>");
            System.exit(1);
        }
        // SWF client, endpoint and domain come from the sample's config file.
        ConfigHelper configHelper = ConfigHelper.createConfig();
        AmazonSimpleWorkflow swfService = configHelper.createSWFClient();
        String domain = configHelper.getDomain();

        WorkflowExecution workflowExecution = new WorkflowExecution();
        String workflowId = args[1];
        workflowExecution.setWorkflowId(workflowId);
        String runId = args[2];
        workflowExecution.setRunId(runId);

        String implementationTypeName = args[0];
        @SuppressWarnings("unchecked")
        Class<Object> workflowImplementationType = (Class<Object>) Class.forName(implementationTypeName);

        WorkflowReplayer<Object> replayer = new WorkflowReplayer<Object>(swfService, domain, workflowExecution,
                workflowImplementationType);

        System.out.println("Beginning workflow replay for " + workflowExecution);
        Object workflow = replayer.loadWorkflow();
        System.out.println("Workflow implementation object:");
        System.out.println(workflow);
        System.out.println("Done workflow replay for " + workflowExecution);
    }
}
| 3,395 |
0 | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples/common/WorkflowExecutionFlowThreadDumper.java | /**
* Copyright 2012-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.simpleworkflow.flow.examples.common;
import com.amazonaws.services.simpleworkflow.AmazonSimpleWorkflow;
import com.amazonaws.services.simpleworkflow.flow.WorkflowException;
import com.amazonaws.services.simpleworkflow.flow.WorkflowReplayer;
import com.amazonaws.services.simpleworkflow.model.WorkflowExecution;
/**
* Simple example utility to pretty print workflow execution history.
*
* @author fateev
*/
/**
 * Command line utility that replays a workflow execution's history and prints
 * the resulting asynchronous (flow) thread dump, which shows where each
 * asynchronous task in the workflow is blocked.
 *
 * <p>Arguments: {@code <workflow implementation class> <workflowId> <runId>}.
 *
 * @author fateev
 */
public class WorkflowExecutionFlowThreadDumper {

    public static void main(String[] args) throws Exception {
        if (args.length < 3) {
            // Leading space before "<workflow..." keeps the usage text from
            // running directly into the class name.
            System.err.println("Usage: java " + WorkflowExecutionFlowThreadDumper.class.getName()
                    + " <workflow implementation class> <workflowId> <runId>");
            System.exit(1);
        }
        // SWF client, endpoint and domain come from the sample's config file.
        ConfigHelper configHelper = ConfigHelper.createConfig();
        AmazonSimpleWorkflow swfService = configHelper.createSWFClient();
        String domain = configHelper.getDomain();

        WorkflowExecution workflowExecution = new WorkflowExecution();
        String workflowId = args[1];
        workflowExecution.setWorkflowId(workflowId);
        String runId = args[2];
        workflowExecution.setRunId(runId);

        String implementationTypeName = args[0];
        @SuppressWarnings("unchecked")
        Class<Object> workflowImplementationType = (Class<Object>) Class.forName(implementationTypeName);

        WorkflowReplayer<Object> replayer = new WorkflowReplayer<Object>(swfService, domain, workflowExecution,
                workflowImplementationType);

        System.out.println("Beginning workflow replay for " + workflowExecution);
        try {
            String flowThreadDump = replayer.getAsynchronousThreadDumpAsString();
            System.out.println("Workflow asynchronous thread dump:");
            System.out.println(flowThreadDump);
        }
        catch (WorkflowException e) {
            // A failed workflow has no live asynchronous tasks to dump.
            System.out.println("No asynchronous thread dump available as workflow has failed: " + e);
        }
    }
}
| 3,396 |
0 | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples/common/RegistrationDefaults.java | /**
* Copyright 2012-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.simpleworkflow.flow.examples.common;
import com.amazonaws.services.simpleworkflow.flow.common.FlowConstants;
/**
 * Default type-registration timeouts (all values in seconds) shared by the
 * sample activity and workflow workers.
 *
 * <p>Values set to {@code FlowConstants.NONE} presumably leave the
 * corresponding timeout unset — TODO confirm against the Flow Framework docs.
 */
public final class RegistrationDefaults {

    private static final long SECONDS_IN_MINUTE = 60;

    /** Default activity schedule-to-start timeout. */
    public static final long ACTIVITY_REGISTRATION_OPTIONS_DEFAULT_TASK_SCHEDULE_TO_START_TIMEOUT_SECONDS = FlowConstants.NONE;

    /** Default activity heartbeat timeout. */
    public static final long ACTIVITY_REGISTRATION_OPTIONS_DEFAULT_TASK_HEARTBEAT_TIMEOUT_SECONDS = FlowConstants.NONE;

    /** Default activity schedule-to-close timeout. */
    public static final long ACTIVITY_REGISTRATION_OPTIONS_DEFAULT_TASK_SCHEDULE_TO_CLOSE_TIMEOUT_SECONDS = 5 * SECONDS_IN_MINUTE;

    /** Default activity start-to-close timeout. */
    public static final long ACTIVITY_REGISTRATION_OPTIONS_DEFAULT_TASK_START_TO_CLOSE_TIMEOUT_SECONDS = 5 * SECONDS_IN_MINUTE;

    /** Default workflow execution start-to-close timeout. */
    public static final long WORKFLOW_REGISTRATION_OPTIONS_DEFAULT_EXECUTION_START_TO_CLOSE_TIMEOUT_SECONDS = 10 * SECONDS_IN_MINUTE;

    /** Default workflow decision task start-to-close timeout. */
    public static final long WORKFLOW_REGISTRATION_OPTIONS_DEFAULT_TASK_START_TO_CLOSE_TIMEOUT_SECONDS = 1 * SECONDS_IN_MINUTE;

    private RegistrationDefaults() {
        // Constants holder; not meant to be instantiated.
    }
}
| 3,397 |
0 | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples/common/ConfigKeys.java | /**
* Copyright 2012-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.simpleworkflow.flow.examples.common;
/**
 * Property names used by the samples' configuration file — presumably the
 * file loaded by {@code ConfigHelper.createConfig()}; verify against that
 * class.
 */
public final class ConfigKeys {

    /** Endpoint URL of the Simple Workflow service. */
    public static final String SWF_SERVICE_URL_KEY = "service.url";

    /** AWS credentials used for the SWF client. */
    public static final String SWF_ACCESS_ID_KEY = "AWS.Access.ID";
    public static final String SWF_SECRET_KEY_KEY = "AWS.Secret.Key";

    /** Lambda integration settings for the Lambda sample. */
    public static final String SWF_LAMBDA_ROLE_ARN = "SWF.LambdaRole.ARN";
    public static final String SWF_LAMBDA_FUNCTION = "SWF.LambdaFunction.Name";
    public static final String SWF_LAMBDA_FUNCTION_INPUT = "SWF.LambdaFunction.Input";

    /** Separate credentials used for the S3 client in the file processing sample. */
    public static final String S3_ACCESS_ID_KEY = "S3.Access.ID";
    public static final String S3_SECRET_KEY_KEY = "S3.Secret.Key";

    /** SWF domain name and its history retention period. */
    public static final String DOMAIN_KEY = "domain";
    public static final String DOMAIN_RETENTION_PERIOD_KEY = "domainRetentionPeriodInDays";

    private ConfigKeys() {
        // Constants holder; not meant to be instantiated.
    }
}
| 3,398 |
0 | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples | Create_ds/aws-swf-flow-library/src/samples/AwsFlowFramework/src/com/amazonaws/services/simpleworkflow/flow/examples/common/WorkflowExecutionHistoryPrinter.java | /**
* Copyright 2012-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.simpleworkflow.flow.examples.common;
import com.amazonaws.services.simpleworkflow.AmazonSimpleWorkflow;
import com.amazonaws.services.simpleworkflow.flow.common.WorkflowExecutionUtils;
import com.amazonaws.services.simpleworkflow.model.WorkflowExecution;
/**
* Simple example utility to pretty print workflow execution history.
*
* @author fateev
*/
/**
 * Command line tool that pretty prints the event history of a single workflow
 * execution, identified by workflow id and run id, to standard output.
 *
 * @author fateev
 */
public class WorkflowExecutionHistoryPrinter {

    public static void main(String[] args) throws Exception {
        if (args.length < 2) {
            System.err.println("Usage: java " + WorkflowExecutionHistoryPrinter.class.getName() + " <workflowId> <runId>");
            System.exit(1);
        }
        // SWF client and domain come from the sample's config file.
        ConfigHelper config = ConfigHelper.createConfig();
        AmazonSimpleWorkflow swf = config.createSWFClient();

        WorkflowExecution execution = new WorkflowExecution();
        execution.setWorkflowId(args[0]);
        execution.setRunId(args[1]);

        // Trailing boolean flag is passed through unchanged — presumably
        // toggles verbose output; see WorkflowExecutionUtils.prettyPrintHistory.
        System.out.println(WorkflowExecutionUtils.prettyPrintHistory(swf, config.getDomain(), execution, true));
    }
}
| 3,399 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.