gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.falcon.catalog;
import org.apache.falcon.FalconException;
import org.apache.falcon.resource.TestContext;
import org.apache.falcon.security.CurrentUser;
import org.apache.hadoop.conf.Configuration;
import org.apache.hive.hcatalog.api.HCatAddPartitionDesc;
import org.apache.hive.hcatalog.api.HCatClient;
import org.apache.hive.hcatalog.api.HCatCreateDBDesc;
import org.apache.hive.hcatalog.api.HCatCreateTableDesc;
import org.apache.hive.hcatalog.api.HCatPartition;
import org.apache.hive.hcatalog.data.schema.HCatFieldSchema;
import org.apache.hadoop.hive.metastore.api.AlreadyExistsException;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
/**
* Tests Hive Meta Store service.
*/
public class HiveCatalogServiceIT {

    private static final String METASTORE_URL = "thrift://localhost:49083";
    private static final String DATABASE_NAME = "falcon_db";
    private static final String TABLE_NAME = "falcon_table";
    private static final String EXTERNAL_TABLE_NAME = "falcon_external";
    private static final String EXTERNAL_TABLE_LOCATION = "jail://global:00/falcon/staging/falcon_external";

    private final Configuration conf = new Configuration(false);
    private HiveCatalogService hiveCatalogService;
    private HCatClient client;

    /**
     * Authenticates a test user and creates the database plus one managed and
     * one external table used by all test methods in this class.
     */
    @BeforeClass
    public void setUp() throws Exception {
        // setup a logged in user
        CurrentUser.authenticate(TestContext.REMOTE_USER);

        hiveCatalogService = new HiveCatalogService();
        client = TestContext.getHCatClient(METASTORE_URL);

        createDatabase();
        createTable();
        createExternalTable();
    }

    private void createDatabase() throws Exception {
        HCatCreateDBDesc dbDesc = HCatCreateDBDesc.create(DATABASE_NAME)
                .ifNotExists(true).build();
        client.createDatabase(dbDesc);
    }

    /**
     * Creates the managed test table with (id, value) columns partitioned
     * by (ds, region).
     */
    public void createTable() throws Exception {
        ArrayList<HCatFieldSchema> cols = new ArrayList<HCatFieldSchema>();
        cols.add(new HCatFieldSchema("id", HCatFieldSchema.Type.INT, "id comment"));
        cols.add(new HCatFieldSchema("value", HCatFieldSchema.Type.STRING, "value comment"));

        List<HCatFieldSchema> partitionSchema = Arrays.asList(
                new HCatFieldSchema("ds", HCatFieldSchema.Type.STRING, ""),
                new HCatFieldSchema("region", HCatFieldSchema.Type.STRING, "")
        );

        HCatCreateTableDesc tableDesc = HCatCreateTableDesc
                .create(DATABASE_NAME, TABLE_NAME, cols)
                .fileFormat("rcfile")
                .ifNotExists(true)
                .comments("falcon integration test")
                .partCols(new ArrayList<HCatFieldSchema>(partitionSchema))
                .build();
        client.createTable(tableDesc);
    }

    /**
     * Creates the external test table — same schema as the managed table but
     * marked external with an explicit location.
     */
    public void createExternalTable() throws Exception {
        ArrayList<HCatFieldSchema> cols = new ArrayList<HCatFieldSchema>();
        cols.add(new HCatFieldSchema("id", HCatFieldSchema.Type.INT, "id comment"));
        cols.add(new HCatFieldSchema("value", HCatFieldSchema.Type.STRING, "value comment"));

        List<HCatFieldSchema> partitionSchema = Arrays.asList(
                new HCatFieldSchema("ds", HCatFieldSchema.Type.STRING, ""),
                new HCatFieldSchema("region", HCatFieldSchema.Type.STRING, "")
        );

        HCatCreateTableDesc tableDesc = HCatCreateTableDesc
                .create(DATABASE_NAME, EXTERNAL_TABLE_NAME, cols)
                .fileFormat("rcfile")
                .ifNotExists(true)
                .comments("falcon integration test")
                .partCols(new ArrayList<HCatFieldSchema>(partitionSchema))
                .isTableExternal(true)
                .location(EXTERNAL_TABLE_LOCATION)
                .build();
        client.createTable(tableDesc);
    }

    @AfterClass
    public void tearDown() throws Exception {
        dropTable(EXTERNAL_TABLE_NAME);
        dropTable(TABLE_NAME);
        dropDatabase();
    }

    private void dropTable(String tableName) throws Exception {
        client.dropTable(DATABASE_NAME, tableName, true);
    }

    private void dropDatabase() throws Exception {
        client.dropDatabase(DATABASE_NAME, true, HCatClient.DropDBMode.CASCADE);
    }

    /**
     * Adds the three baseline partitions each test method sees:
     * (20130903, us), (20130903, in) and (20130902, in).
     */
    @BeforeMethod
    private void addPartitions() throws Exception {
        Map<String, String> firstPtn = new HashMap<String, String>();
        firstPtn.put("ds", "20130903"); //yyyyMMDD
        firstPtn.put("region", "us");
        HCatAddPartitionDesc addPtn = HCatAddPartitionDesc.create(
                DATABASE_NAME, TABLE_NAME, null, firstPtn).build();
        client.addPartition(addPtn);

        Map<String, String> secondPtn = new HashMap<String, String>();
        secondPtn.put("ds", "20130903");
        secondPtn.put("region", "in");
        HCatAddPartitionDesc addPtn2 = HCatAddPartitionDesc.create(
                DATABASE_NAME, TABLE_NAME, null, secondPtn).build();
        client.addPartition(addPtn2);

        Map<String, String> thirdPtn = new HashMap<String, String>();
        thirdPtn.put("ds", "20130902");
        thirdPtn.put("region", "in");
        HCatAddPartitionDesc addPtn3 = HCatAddPartitionDesc.create(
                DATABASE_NAME, TABLE_NAME, null, thirdPtn).build();
        client.addPartition(addPtn3);
    }

    /**
     * Drops all partitions (both ds values) so each test starts clean.
     */
    @AfterMethod
    private void dropPartitions() throws Exception {
        Map<String, String> partitionSpec = new HashMap<String, String>();
        partitionSpec.put("ds", "20130903");
        client.dropPartitions(DATABASE_NAME, TABLE_NAME, partitionSpec, true);

        partitionSpec = new HashMap<String, String>();
        partitionSpec.put("ds", "20130902");
        client.dropPartitions(DATABASE_NAME, TABLE_NAME, partitionSpec, true);
    }

    @Test
    public void testIsAlive() throws Exception {
        Assert.assertTrue(hiveCatalogService.isAlive(conf, METASTORE_URL));
    }

    @Test (expectedExceptions = Exception.class)
    public void testIsAliveNegative() throws Exception {
        hiveCatalogService.isAlive(conf, "thrift://localhost:9999");
    }

    @Test
    public void testTableExistsNegative() throws Exception {
        Assert.assertFalse(hiveCatalogService.tableExists(conf, METASTORE_URL, DATABASE_NAME, "blah"));
    }

    @Test
    public void testTableExists() throws Exception {
        Assert.assertTrue(hiveCatalogService.tableExists(
                conf, METASTORE_URL, DATABASE_NAME, TABLE_NAME));
    }

    @Test
    public void testIsTableExternalFalse() throws Exception {
        Assert.assertFalse(hiveCatalogService.isTableExternal(conf, METASTORE_URL, DATABASE_NAME, TABLE_NAME));
    }

    @Test
    public void testIsTableExternalTrue() throws Exception {
        Assert.assertTrue(hiveCatalogService.isTableExternal(conf, METASTORE_URL, DATABASE_NAME, EXTERNAL_TABLE_NAME));
    }

    @Test
    public void testListPartitionsByFilterNull() throws Exception {
        // A null filter should return every partition added in addPartitions()
        List<CatalogPartition> filteredPartitions = hiveCatalogService.listPartitionsByFilter(
                conf, METASTORE_URL, DATABASE_NAME, TABLE_NAME, null);
        Assert.assertEquals(filteredPartitions.size(), 3);
    }

    @DataProvider (name = "lessThanFilter")
    public Object[][] createLessThanFilter() {
        return new Object[][] {
            {"ds < \"20130905\"", 3},
            {"ds < \"20130904\"", 3},
            {"ds < \"20130903\"", 1},
            {"ds < \"20130902\"", 0},
        };
    }

    @Test (dataProvider = "lessThanFilter")
    public void testListPartitionsByFilterLessThan(String lessThanFilter, int expectedPartitionCount)
        throws Exception {

        List<CatalogPartition> filteredPartitions = hiveCatalogService.listPartitionsByFilter(
                conf, METASTORE_URL, DATABASE_NAME, TABLE_NAME, lessThanFilter);
        Assert.assertEquals(filteredPartitions.size(), expectedPartitionCount);
    }

    @DataProvider (name = "greaterThanFilter")
    public Object[][] createGreaterThanFilter() {
        return new Object[][] {
            {"ds > \"20130831\"", 3},
            {"ds > \"20130905\"", 0},
            {"ds > \"20130904\"", 0},
            {"ds > \"20130903\"", 0},
            {"ds > \"20130902\"", 2},
        };
    }

    @Test (dataProvider = "greaterThanFilter")
    public void testListPartitionsByFilterGreaterThan(String greaterThanFilter, int expectedPartitionCount)
        throws Exception {

        List<CatalogPartition> filteredPartitions = hiveCatalogService.listPartitionsByFilter(
                conf, METASTORE_URL, DATABASE_NAME, TABLE_NAME, greaterThanFilter);
        Assert.assertEquals(filteredPartitions.size(), expectedPartitionCount);
    }

    @Test
    public void testListPartititions() throws FalconException {
        // Filters are positional: first value matches ds, second matches region
        List<String> filters = new ArrayList<String>();
        filters.add("20130903");
        List<CatalogPartition> partitions = hiveCatalogService.listPartitions(
                conf, METASTORE_URL, DATABASE_NAME, TABLE_NAME, filters);
        Assert.assertEquals(partitions.size(), 2);

        filters.add("us");
        partitions = hiveCatalogService.listPartitions(
                conf, METASTORE_URL, DATABASE_NAME, TABLE_NAME, filters);
        Assert.assertEquals(partitions.size(), 1);
    }

    @Test
    public void testGetPartitionsFullSpec() throws Exception {
        Map<String, String> partitionSpec = new HashMap<String, String>();
        partitionSpec.put("ds", "20130902");
        partitionSpec.put("region", "in");

        HCatPartition ptn = client.getPartition(DATABASE_NAME, TABLE_NAME, partitionSpec);
        Assert.assertNotNull(ptn);
    }

    @Test
    public void testGetPartitionsPartialSpec() throws Exception {
        Map<String, String> partialPartitionSpec = new HashMap<String, String>();
        partialPartitionSpec.put("ds", "20130903");

        List<HCatPartition> partitions = client.getPartitions(DATABASE_NAME, TABLE_NAME, partialPartitionSpec);
        Assert.assertEquals(partitions.size(), 2);
    }

    @Test
    public void testDropPartition() throws Exception {
        hiveCatalogService.dropPartition(
                conf, METASTORE_URL, DATABASE_NAME, TABLE_NAME, Arrays.asList("20130902", "in"), true);

        List<HCatPartition> partitions = client.getPartitions(DATABASE_NAME, TABLE_NAME);
        Assert.assertEquals(partitions.size(), 2, "Unexpected number of partitions");
        // TestNG convention: actual first, expected second
        Assert.assertEquals(partitions.get(0).getValues().toArray(),
                new String[]{"20130903", "in"}, "Mismatched partition");

        hiveCatalogService.dropPartitions(
                conf, METASTORE_URL, DATABASE_NAME, TABLE_NAME, Arrays.asList("20130903"), true);
        partitions = client.getPartitions(DATABASE_NAME, TABLE_NAME);
        Assert.assertEquals(partitions.size(), 0, "Unexpected number of partitions");
    }

    @Test
    public void testGetPartition() throws Exception {
        CatalogPartition partition = CatalogServiceFactory.getCatalogService().getPartition(
                conf, METASTORE_URL, DATABASE_NAME, TABLE_NAME, Arrays.asList("20130902", "in"));
        Assert.assertNotNull(partition);

        long createTime = partition.getCreateTime();
        Assert.assertTrue(createTime > 0);
    }

    /**
     * Verifies that dropping and re-adding the same partition spec yields a
     * strictly newer create time (the sleeps guarantee clock separation).
     */
    @Test
    public void testReInstatePartition() throws Exception {
        Map<String, String> partitionSpec = new LinkedHashMap<String, String>();
        partitionSpec.put("ds", "20130918");
        partitionSpec.put("region", "blah");

        HCatAddPartitionDesc first = HCatAddPartitionDesc.create(
                DATABASE_NAME, TABLE_NAME, null, partitionSpec).build();
        client.addPartition(first);

        CatalogPartition partition = CatalogServiceFactory.getCatalogService().getPartition(
                conf, METASTORE_URL, DATABASE_NAME, TABLE_NAME, new ArrayList<String>(partitionSpec.values()));
        Assert.assertNotNull(partition);
        final long originalCreateTime = partition.getCreateTime();

        Thread.sleep(1000); // sleep before deletion
        client.dropPartitions(DATABASE_NAME, TABLE_NAME, partitionSpec, true);
        Thread.sleep(1000); // sleep so the next add is delayed a bit

        HCatAddPartitionDesc second = HCatAddPartitionDesc.create(
                DATABASE_NAME, TABLE_NAME, null, partitionSpec).build();
        client.addPartition(second);

        CatalogPartition reInstatedPartition = CatalogServiceFactory.getCatalogService().getPartition(
                conf, METASTORE_URL, DATABASE_NAME, TABLE_NAME, new ArrayList<String>(partitionSpec.values()));
        Assert.assertNotNull(reInstatedPartition);
        final long reInstatedCreateTime = reInstatedPartition.getCreateTime();

        Assert.assertTrue(reInstatedCreateTime > originalCreateTime);
    }

    @DataProvider (name = "tableName")
    public Object[][] createTableName() {
        return new Object[][] {
            {TABLE_NAME},
            {EXTERNAL_TABLE_NAME},
        };
    }

    @Test
    public void testGetPartitionColumns() throws FalconException {
        AbstractCatalogService catalogService = CatalogServiceFactory.getCatalogService();
        List<String> columns = catalogService.getPartitionColumns(conf, METASTORE_URL, DATABASE_NAME, TABLE_NAME);
        Assert.assertEquals(columns, Arrays.asList("ds", "region"));
    }

    @Test
    public void testAddPartition() throws FalconException {
        AbstractCatalogService catalogService = CatalogServiceFactory.getCatalogService();
        List<String> partitionValues = Arrays.asList("20130902", "us");
        String location = EXTERNAL_TABLE_LOCATION + "/20130902";

        catalogService.addPartition(conf, METASTORE_URL, DATABASE_NAME, TABLE_NAME, partitionValues, location);
        CatalogPartition partition =
                catalogService.getPartition(conf, METASTORE_URL, DATABASE_NAME, TABLE_NAME, partitionValues);
        Assert.assertEquals(partition.getLocation(), location);

        // A duplicate add must be rejected with AlreadyExistsException as the cause.
        // The explicit fail() ensures the test does not silently pass when no
        // exception is thrown at all.
        try {
            catalogService.addPartition(conf, METASTORE_URL, DATABASE_NAME, TABLE_NAME, partitionValues, location);
            Assert.fail("Expected FalconException for duplicate partition");
        } catch (FalconException e) {
            if (!(e.getCause() instanceof AlreadyExistsException)) {
                Assert.fail("Expected FalconException(AlreadyExistsException)");
            }
        }
    }

    @Test
    public void testUpdatePartition() throws FalconException {
        AbstractCatalogService catalogService = CatalogServiceFactory.getCatalogService();
        List<String> partitionValues = Arrays.asList("20130902", "us");
        String location = EXTERNAL_TABLE_LOCATION + "/20130902";

        catalogService.addPartition(conf, METASTORE_URL, DATABASE_NAME, TABLE_NAME, partitionValues, location);
        CatalogPartition partition =
                catalogService.getPartition(conf, METASTORE_URL, DATABASE_NAME, TABLE_NAME, partitionValues);
        Assert.assertEquals(partition.getLocation(), location);

        String location2 = location + "updated";
        catalogService.updatePartition(conf, METASTORE_URL, DATABASE_NAME, TABLE_NAME, partitionValues, location2);
        partition = catalogService.getPartition(conf, METASTORE_URL, DATABASE_NAME, TABLE_NAME, partitionValues);
        Assert.assertEquals(partition.getLocation(), location2);
    }
}
| |
package com.oneops.gslb;
import static org.apache.commons.lang.StringUtils.isNotEmpty;
import com.google.gson.Gson;
import com.google.gson.JsonArray;
import com.google.gson.JsonElement;
import com.google.gson.JsonParser;
import com.oneops.cms.simple.domain.CmsCISimple;
import com.oneops.cms.simple.domain.CmsRfcCISimple;
import com.oneops.cms.simple.domain.CmsWorkOrderSimple;
import com.oneops.infoblox.InfobloxClient;
import com.oneops.infoblox.model.a.ARec;
import com.oneops.infoblox.model.cname.CNAME;
import java.io.IOException;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
@Component
public class DnsHandler {

    @Autowired
    WoHelper woHelper;

    @Autowired
    JsonParser jsonParser;

    @Autowired
    Gson gson;

    @Autowired
    InfobloxClientProvider infobloxClientProvider;

    private static final Logger logger = Logger.getLogger(DnsHandler.class);

    private static final String ATTRIBUTE_ALIAS = "aliases";
    private static final String ATTRIBUTE_FULL_ALIAS = "full_aliases";
    private static final String ATTRIBUTE_ZONE = "zone";
    private static final String ATTRIBUTE_HOST = "host";
    private static final String ATTRIBUTE_USER_NAME = "username";
    private static final String ATTRIBUTE_PASSWORD = "password";

    // Single source for the (previously duplicated) client-init failure message.
    private static final String CLIENT_INIT_ERROR =
        "Infoblox client could not be initialized. check cloud service configuration";

    /**
     * Builds an Infoblox client from the dns cloud-service attributes already
     * resolved into the context by {@link #setDnsAttributes}.
     *
     * @throws ExecutionException if host or username is blank
     */
    private InfobloxClient getInfoBloxClient(Context context) throws ExecutionException {
        Map<String, String> attributes = context.getDnsAttrs();
        String host = attributes.get(ATTRIBUTE_HOST);
        String user = attributes.get(ATTRIBUTE_USER_NAME);
        String pwd = attributes.get(ATTRIBUTE_PASSWORD);
        InfobloxClient client;
        if (StringUtils.isNotBlank(host) && StringUtils.isNotBlank(user)) {
            client = infobloxClientProvider.getInfobloxClient(host, user, pwd);
        }
        else {
            throw new ExecutionException(CLIENT_INIT_ERROR);
        }
        return client;
    }

    /**
     * Entry point: creates/updates (or, on delete actions, removes) the cname
     * and cloud A-record entries for the platform described by the work order.
     * Failures are reported through {@code woHelper.failWo}.
     */
    public void setupDnsEntries(CmsWorkOrderSimple wo, Context context) {
        logger.info(context.getLogKey() + "setting up cnames");
        InfobloxClient infoBloxClient;
        try {
            setDnsAttributes(wo, context);
            infoBloxClient = getInfoBloxClient(context);
        } catch (ExecutionException e) {
            woHelper.failWo(wo, context.getLogKey(), "Failed while initializing infoblox client", e);
            return;
        }

        CmsRfcCISimple rfc = wo.getRfcCi();
        Set<String> currentAliases = new HashSet<>();
        Map<String, String> ciAttributes = rfc.getCiAttributes();
        // The platform name itself is always an alias; short aliases get the
        // subdomain+zone suffix appended, full aliases are used verbatim.
        String defaultAlias = getFullAlias(context.getPlatform(), context);
        currentAliases.add(defaultAlias);
        addAlias(ciAttributes.get(ATTRIBUTE_ALIAS), currentAliases, t -> (getFullAlias(t, context)));
        addAlias(ciAttributes.get(ATTRIBUTE_FULL_ALIAS), currentAliases, Function.identity());

        if (woHelper.isDeleteAction(wo)) {
            if (context.isPlatformDisabled()) {
                logger.info(context.getLogKey() + "deleting all cnames as platform is getting disabled");
                deleteCNames(wo, context, currentAliases, infoBloxClient);
            }
            else {
                logger.info(context.getLogKey() + "platform is not disabled, deleting only cloud cname");
            }
            deleteCloudEntry(wo, context, infoBloxClient);
        }
        else {
            // Remove cnames for aliases that were dropped since the last deployment.
            Set<String> oldAliases = new HashSet<>();
            Map<String, String> ciBaseAttributes = rfc.getCiBaseAttributes();
            addAlias(ciBaseAttributes.get(ATTRIBUTE_ALIAS), oldAliases, t -> (getFullAlias(t, context)));
            addAlias(ciBaseAttributes.get(ATTRIBUTE_FULL_ALIAS), oldAliases, Function.identity());
            List<String> aliasesToRemove = oldAliases.stream().filter(a -> !currentAliases.contains(a)).collect(Collectors.toList());
            deleteCNames(wo, context, aliasesToRemove, infoBloxClient);

            Map<String, String> entriesMap = new HashMap<>();
            addCnames(wo, context, currentAliases, infoBloxClient, entriesMap);
            addCloudEntry(wo, context, infoBloxClient, entriesMap);
            if (!woHelper.isFailed(wo)) {
                updateWoResult(wo, entriesMap, context);
            }
        }
    }

    private void deleteCloudEntry(CmsWorkOrderSimple wo, Context context, InfobloxClient infobloxClient) {
        String cloudEntry = getCloudDnsEntry(context);
        logger.info(context.getLogKey() + "deleting cloud dns entry " + cloudEntry);
        try {
            infobloxClient.deleteARec(cloudEntry);
        } catch(Exception e) {
            woHelper.failWo(wo, context.getLogKey(),"Exception while deleting cloud dns entry ", e);
        }
    }

    /**
     * Ensures an A record exists mapping the per-cloud dns entry to the lb vip.
     * An existing matching record is left alone; a mismatched one is deleted
     * and recreated.
     */
    private void addCloudEntry(CmsWorkOrderSimple wo, Context context,
        InfobloxClient infobloxClient, Map<String, String> entriesMap) {
        String cloudEntry = getCloudDnsEntry(context);
        CmsRfcCISimple lb = woHelper.getLbFromDependsOn(wo);
        String lbVip = lb.getCiAttributes().get(MtdHandler.ATTRIBUTE_DNS_RECORD);
        logger.info(context.getLogKey() + "cloud dns entry " + cloudEntry + " lbVip " + lbVip);
        if (StringUtils.isNotBlank(lbVip)) {
            entriesMap.put(cloudEntry, lbVip);
            try {
                List<ARec> records = infobloxClient.getARec(cloudEntry);
                if (records != null && records.size() == 1) {
                    if (lbVip.equals(records.get(0).ipv4Addr())) {
                        logger.info(context.getLogKey() + "cloud dns entry is already set, not doing anything");
                        return;
                    }
                    else {
                        logger.info(context.getLogKey() + "cloud dns entry already exists, but not matching");
                    }
                }
                logger.info(context.getLogKey() + "cloud dns entry: " + cloudEntry + ", deleting the current entry and recreating it");
                List<String> list = infobloxClient.deleteARec(cloudEntry);
                logger.info(context.getLogKey() + "infoblox deleted cloud entries count " + list.size());
                logger.info(context.getLogKey() + "creating cloud dns entry " + cloudEntry);
                ARec aRecord = infobloxClient.createARec(cloudEntry, lbVip);
                logger.info(context.getLogKey() + "arecord created " + aRecord);
            } catch (IOException e) {
                woHelper.failWo(wo, context.getLogKey(),"Exception while setting up cloud dns entry ", e);
            }
        }
    }

    /**
     * Records all dns entries made (plus the mtd host mapping) into the work
     * order result attributes as a json map.
     */
    private void updateWoResult(CmsWorkOrderSimple wo, Map<String, String> entriesMap, Context context) {
        Map<String, String> resultAttrs = woHelper.getResultCiAttributes(wo);
        String domainName = context.getPlatform() + context.getMtdBaseHost();
        entriesMap.put(domainName, context.getPrimaryTargets() != null ? context.getPrimaryTargets().toString() : "");
        resultAttrs.put("entries", gson.toJson(entriesMap));
    }

    private String getFullAlias(String alias, Context context) {
        return String.join(".", alias, context.getSubdomain(), context.getDnsAttrs().get(ATTRIBUTE_ZONE));
    }

    private String getCloudDnsEntry(Context context) {
        return String.join(".", context.getPlatform(), context.getSubdomain(),
            context.getCloud(), context.getDnsAttrs().get(ATTRIBUTE_ZONE)).toLowerCase();
    }

    /**
     * Creates a cname per alias pointing at the platform's mtd base host.
     * Already-correct cnames are skipped; a conflicting cname fails the wo,
     * except an IBDataConflictError on create, which is treated as benign.
     */
    private void addCnames(CmsWorkOrderSimple wo, Context context,
        Collection<String> aliases, InfobloxClient infoBloxClient, Map<String, String> entriesMap) {
        String cname = (context.getPlatform() + context.getMtdBaseHost()).toLowerCase();
        List<String> aliasList = aliases.stream().map(String::toLowerCase).collect(Collectors.toList());
        logger.info(context.getLogKey() + "aliases to be added/updated " + aliasList + ", cname : " + cname);
        for (String alias : aliasList) {
            try {
                entriesMap.put(alias, cname);
                List<CNAME> existingCnames = infoBloxClient.getCNameRec(alias);
                if (existingCnames != null && !existingCnames.isEmpty()) {
                    if (cname.equals(existingCnames.get(0).canonical())) {
                        //cname matches, no need to do anything
                        logger.info(context.getLogKey() + "cname already exists, no change needed " + alias);
                    }
                    else {
                        woHelper.failWo(wo, context.getLogKey(), "alias " + alias + " exists already with a different cname", null);
                    }
                    continue;
                }
                else {
                    logger.info(context.getLogKey() + "cname not found, trying to add " + alias);
                    try {
                        CNAME newCname = infoBloxClient.createCNameRec(alias, cname);
                        if (newCname == null || !cname.equals(newCname.canonical())) {
                            woHelper.failWo(wo, context.getLogKey(), "Failed to create cname ", null);
                        }
                        else {
                            logger.info(context.getLogKey() + "cname added successfully " + alias);
                        }
                    } catch (IOException e) {
                        logger.error(context.getLogKey() + "cname [" + alias + "] creation failed with " + e.getMessage());
                        if (e.getMessage() != null && e.getMessage().contains("IBDataConflictError")) {
                            logger.info(context.getLogKey() + "ignoring add cname error");
                        }
                        else {
                            // log with throwable so the stack trace is preserved
                            logger.error(context.getLogKey() + "cname creation error", e);
                            woHelper.failWo(wo, context.getLogKey(), "Failed while adding cname " + cname, e);
                        }
                    }
                }
            } catch (IOException e) {
                woHelper.failWo(wo, context.getLogKey(), "Failed while adding/updating cnames ", e);
            }
        }
    }

    /**
     * Deletes the cname for each alias; a not-found error from infoblox is
     * ignored, any other error fails the work order.
     */
    private void deleteCNames(CmsWorkOrderSimple wo, Context context, Collection<String> aliases, InfobloxClient infoBloxClient) {
        List<String> aliasList = aliases.stream().map(String::toLowerCase).collect(Collectors.toList());
        logger.info(context.getLogKey() + "delete cnames " + aliasList);
        aliasList.stream().forEach(
            a -> {
                try {
                    infoBloxClient.deleteCNameRec(a);
                } catch(Exception e) {
                    if (e.getCause() != null && e.getCause().getMessage() != null
                        && e.getCause().getMessage().contains("AdmConDataNotFoundError")) {
                        logger.info(context.getLogKey() + "delete failed with no data found for " + a + ", ignore and continue");
                    }
                    else {
                        woHelper.failWo(wo, context.getLogKey(), "Failed while deleting cname " + a, e);
                    }
                }
            }
        );
    }

    /**
     * Parses a json-array attribute value and adds each element (mapped through
     * {@code mapper}) to the alias set. Blank/empty attribute values are ignored.
     */
    private void addAlias(String attrValue, Set<String> aliases, Function<String, String> mapper) {
        if (isNotEmpty(attrValue)) {
            JsonArray aliasArray = (JsonArray) jsonParser.parse(attrValue);
            for (JsonElement alias : aliasArray) {
                aliases.add(mapper.apply(alias.getAsString()));
            }
        }
    }

    /**
     * Resolves the dns cloud-service attributes for this cloud into the context.
     *
     * @throws ExecutionException if no dns service is configured for the cloud
     */
    private void setDnsAttributes(CmsWorkOrderSimple wo, Context context) throws ExecutionException {
        Map<String, CmsCISimple> dnsServices = wo.getServices().get("dns");
        if (dnsServices != null) {
            CmsCISimple dns = dnsServices.get(context.getCloud());
            if (dns != null) {
                Map<String, String> attributes = dns.getCiAttributes();
                context.setDnsAttrs(attributes);
            }
        }
        if (context.getDnsAttrs() == null) {
            throw new ExecutionException(CLIENT_INIT_ERROR);
        }
    }
}
| |
package org.multibit.hd.ui.views.components.enter_pin;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import com.google.common.base.Strings;
import net.miginfocom.swing.MigLayout;
import org.multibit.hd.core.config.Configurations;
import org.multibit.hd.ui.MultiBitUI;
import org.multibit.hd.ui.audio.Sounds;
import org.multibit.hd.ui.languages.Languages;
import org.multibit.hd.ui.languages.MessageKey;
import org.multibit.hd.ui.views.components.*;
import org.multibit.hd.ui.views.fonts.AwesomeDecorator;
import org.multibit.hd.ui.views.fonts.AwesomeIcon;
import org.multibit.hd.ui.views.fonts.TitleFontDecorator;
import javax.swing.*;
import java.awt.event.ActionEvent;
/**
* <p>View to provide the following to UI:</p>
* <ul>
* <li>Presentation of pin entry</li>
* </ul>
*
* @since 0.0.1
*/
public class EnterPinView extends AbstractComponentView<EnterPinModel> {

    /**
     * A read only indicator of the number of pin characters entered
     */
    private JTextField pinText;

    /**
     * A status indicator used to tell the user if PIN is incorrect
     */
    private JLabel pinStatus;

    // Field initializers run after super(model), so getModel().get() is safe here.
    // Each button's FEST/accessibility name is derived from the panel name.
    private final JButton button7 = Buttons.newPinMatrixButton(getPinMatrixButtonAction(7), getModel().get().getPanelName() + ".button_7");
    private final JButton button8 = Buttons.newPinMatrixButton(getPinMatrixButtonAction(8), getModel().get().getPanelName() + ".button_8");
    private final JButton button9 = Buttons.newPinMatrixButton(getPinMatrixButtonAction(9), getModel().get().getPanelName() + ".button_9");
    private final JButton button4 = Buttons.newPinMatrixButton(getPinMatrixButtonAction(4), getModel().get().getPanelName() + ".button_4");
    private final JButton button5 = Buttons.newPinMatrixButton(getPinMatrixButtonAction(5), getModel().get().getPanelName() + ".button_5");
    private final JButton button6 = Buttons.newPinMatrixButton(getPinMatrixButtonAction(6), getModel().get().getPanelName() + ".button_6");
    private final JButton button1 = Buttons.newPinMatrixButton(getPinMatrixButtonAction(1), getModel().get().getPanelName() + ".button_1");
    private final JButton button2 = Buttons.newPinMatrixButton(getPinMatrixButtonAction(2), getModel().get().getPanelName() + ".button_2");
    private final JButton button3 = Buttons.newPinMatrixButton(getPinMatrixButtonAction(3), getModel().get().getPanelName() + ".button_3");
    private final JButton backspaceDeleteButton = Buttons.newBackspaceDeleteButton(getRemoveLastButtonPressedAction());

    /**
     * @param model The model backing this view
     */
    public EnterPinView(EnterPinModel model) {
        super(model);
    }

    @Override
    public JPanel newComponentPanel() {

        // Outer panel to align the inner panels
        panel = Panels.newPanel(
            new MigLayout(
                Panels.migXLayout(), // Layout
                "[]", // Columns
                "[][]" // Rows
            ));

        // PIN matrix display
        JPanel pinMatrixPanel = Panels.newPanel(
            new MigLayout(
                Panels.migXLayout(), // Layout
                "[]12[]12[]", // Columns
                "[][][]" // Rows
            ));

        // PIN display
        JPanel pinDisplayPanel = Panels.newPanel(
            new MigLayout(
                Panels.migXLayout(), // Layout
                "[]", // Columns
                "[][]" // Rows
            ));

        // Arrange PIN matrix buttons to mimic a numeric keypad (1 bottom left, 9 top right)
        pinMatrixPanel.add(button7, MultiBitUI.SMALL_BUTTON_MIG);
        pinMatrixPanel.add(button8, MultiBitUI.SMALL_BUTTON_MIG);
        pinMatrixPanel.add(button9, MultiBitUI.SMALL_BUTTON_MIG + ", wrap");
        pinMatrixPanel.add(button4, MultiBitUI.SMALL_BUTTON_MIG);
        pinMatrixPanel.add(button5, MultiBitUI.SMALL_BUTTON_MIG);
        pinMatrixPanel.add(button6, MultiBitUI.SMALL_BUTTON_MIG + ", wrap");
        pinMatrixPanel.add(button1, MultiBitUI.SMALL_BUTTON_MIG);
        pinMatrixPanel.add(button2, MultiBitUI.SMALL_BUTTON_MIG);
        pinMatrixPanel.add(button3, MultiBitUI.SMALL_BUTTON_MIG + ", wrap");

        // Read-only text field mirrors the number of PIN digits entered as '*'s
        pinText = TextBoxes.newReadOnlyTextField(10, MessageKey.ENTER_CURRENT_PIN, MessageKey.ENTER_CURRENT_PIN);
        pinText.setName(getModel().get().getPanelName() + ".textbox");
        TitleFontDecorator.apply(pinText, (float) (MultiBitUI.BALANCE_HEADER_LARGE_FONT_SIZE * 0.6));

        pinStatus = Labels.newStatusLabel(Optional.<MessageKey>absent(), null, Optional.<Boolean>absent());

        // Provide a display of numbers entered so far with delete button
        pinDisplayPanel.add(pinText, "wmax 150,hmax 35");
        pinDisplayPanel.add(backspaceDeleteButton, "wrap");
        pinDisplayPanel.add(pinStatus, "wrap");

        panel.add(pinMatrixPanel, "align center,wrap");
        panel.add(pinDisplayPanel, "align center,wrap");

        // Hide the status display initially so the panel presents correctly
        setPinStatus(false, false);

        return panel;
    }

    @Override
    public void updateModelFromView() {
        // The view is driven from the model
    }

    @Override
    public void updateViewFromModel() {

        // Update the PIN indicator with the length of the entered PIN
        CharSequence pin = getModel().get().getValue();
        pinText.setText(Strings.repeat("*", pin.length()));

        // Ensure we hide the status display (entering new values)
        setPinStatus(true, false);
    }

    /**
     * @return An action that updates the underlying model to remove the last button pressed
     */
    private AbstractAction getRemoveLastButtonPressedAction() {
        return new AbstractAction() {
            @Override
            public void actionPerformed(ActionEvent e) {
                getModel().get().removeLastButtonPressed();
                updateViewFromModel();
            }
        };
    }

    /**
     * @param position The button position
     *
     * @return An action that updates the underlying model with the given position
     */
    private Action getPinMatrixButtonAction(final int position) {
        return new AbstractAction() {
            @Override
            public void actionPerformed(ActionEvent e) {
                getModel().get().addButtonPressed(position);
                updateViewFromModel();
            }
        };
    }

    @Override
    public void requestInitialFocus() {
        // No-op: no component takes initial focus for PIN entry
    }

    /**
     * Updates the PIN status indicator. On failure (visible + !status) a beep
     * is played and the entered PIN is cleared from both model and view.
     *
     * @param status True if successful (check mark), false for failure (cross)
     * @param visible True if the PIN status should be visible
     */
    public void setPinStatus(boolean status, boolean visible) {
        Preconditions.checkState(SwingUtilities.isEventDispatchThread(), "Must be on EDT");

        // Check if we had to provide a PIN
        if (pinStatus == null) {
            return;
        }

        pinStatus.setVisible(visible);

        if (status) {
            // Success
            pinStatus.setText(Languages.safeText(MessageKey.PIN_SUCCESS));
            AwesomeDecorator.applyIcon(AwesomeIcon.CHECK, pinStatus, true, MultiBitUI.NORMAL_ICON_SIZE);
        } else {
            if (visible) {
                // Failure rather than default hide
                Sounds.playBeep(Configurations.currentConfiguration.getSound());
            }
            pinStatus.setText(Languages.safeText(MessageKey.PIN_FAILURE));
            AwesomeDecorator.applyIcon(AwesomeIcon.TIMES, pinStatus, true, MultiBitUI.NORMAL_ICON_SIZE);

            // Clear any previously entered PIN
            getModel().get().setValue("");
            pinText.setText("");
        }
    }

    /**
     * @param enabled True if all buttons should be enabled
     */
    public void setEnabled(final boolean enabled) {
        button1.setEnabled(enabled);
        button2.setEnabled(enabled);
        button3.setEnabled(enabled);
        button4.setEnabled(enabled);
        button5.setEnabled(enabled);
        button6.setEnabled(enabled);
        button7.setEnabled(enabled);
        button8.setEnabled(enabled);
        button9.setEnabled(enabled);
        backspaceDeleteButton.setEnabled(enabled);
    }
}
| |
package com.itracker.android.ui.fragment;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.AsyncQueryHandler;
import android.content.Context;
import android.content.Loader;
import android.database.Cursor;
import android.graphics.Typeface;
import android.net.Uri;
import android.os.Bundle;
import android.support.v4.content.ContextCompat;
import android.support.v7.widget.DefaultItemAnimator;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.PopupMenu;
import android.support.v7.widget.RecyclerView;
import android.text.SpannableString;
import android.text.TextUtils;
import android.text.style.ForegroundColorSpan;
import android.text.style.StyleSpan;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
import android.widget.Toast;
import com.itracker.android.Application;
import com.itracker.android.Config;
import com.itracker.android.R;
import com.itracker.android.data.FileDownloadManager;
import com.itracker.android.data.model.MediaDownload;
import com.itracker.android.provider.TrackerContract;
import com.itracker.android.provider.TrackerContract.DownloadStatus;
import com.itracker.android.provider.TrackerContract.FileDownloads;
import com.itracker.android.service.download.FileDownloadRequest;
import com.itracker.android.ui.adapter.MediaDownloadAdapter;
import com.itracker.android.ui.listener.DownloadStateChangedListener;
import com.itracker.android.ui.listener.MediaPlaybackDelegate;
import com.itracker.android.ui.widget.DownloadedMediaPropertiesDialog;
import com.itracker.android.utils.AppQueryHandler;
import com.itracker.android.utils.ConnectivityUtils;
import com.itracker.android.utils.ThrottledContentObserver;
import java.io.File;
import java.util.ArrayList;
import static com.itracker.android.utils.LogUtils.LOGD;
import static com.itracker.android.utils.LogUtils.makeLogTag;
/**
 * Fragment displaying the list of media downloads backed by the
 * {@link TrackerContract.FileDownloads} content provider.
 *
 * <p>It observes the provider through a {@link ThrottledContentObserver},
 * reloads its cursor loader on changes, receives live download progress via
 * {@link DownloadStateChangedListener}, and offers a per-item popup menu
 * (open/delete/start/pause/cancel/properties). Item taps and long-presses are
 * routed back here through {@link View.OnClickListener} and
 * {@link View.OnLongClickListener} registered on the adapter.
 *
 * <p>The host activity must implement {@link MediaPlaybackDelegate} so
 * completed downloads can be handed off for playback.
 */
public class MediaDownloadFragment extends TrackerFragment implements
        DownloadStateChangedListener,
        View.OnClickListener,
        View.OnLongClickListener {

    private static final String TAG = makeLogTag(MediaDownloadFragment.class);

    private MediaDownloadAdapter mMediaDownloadAdapter;
    private RecyclerView mMediaDownloadView;
    private TextView mEmptyView;

    // Coalesces bursts of provider notifications into a single loader reload.
    private ThrottledContentObserver mMediaDownloadsObserver;

    // Host activity callback used to hand a downloaded file off for playback.
    private MediaPlaybackDelegate mMediaPlaybackDelegate;

    public MediaDownloadFragment() {
        // Required empty public constructor
    }

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // This fragment acts as both the click and long-click listener for rows.
        mMediaDownloadAdapter = new MediaDownloadAdapter(getActivity(), this, this);
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        // Inflate the layout for this fragment
        return inflater.inflate(R.layout.fragment_media_download, container, false);
    }

    @Override
    public void onViewCreated(View view, Bundle savedInstanceState) {
        super.onViewCreated(view, savedInstanceState);
        mEmptyView = (TextView) view.findViewById(R.id.empty_view);
        mMediaDownloadView = (RecyclerView) view.findViewById(R.id.media_download_view);
        mMediaDownloadView.setLayoutManager(new LinearLayoutManager(getActivity()));
        mMediaDownloadView.setItemAnimator(new DefaultItemAnimator());
        mMediaDownloadView.setAdapter(mMediaDownloadAdapter);
    }

    @Override
    public void onAttach(Activity activity) {
        super.onAttach(activity);
        // The host activity is required to implement MediaPlaybackDelegate;
        // the cast throws ClassCastException otherwise, surfacing the contract early.
        mMediaPlaybackDelegate = (MediaPlaybackDelegate) activity;
        mMediaDownloadsObserver = new ThrottledContentObserver(() -> {
            LOGD(TAG, "ThrottledContentObserver fired (file downloads). Content changed.");
            if (isAdded()) {
                LOGD(TAG, "Requesting file downloads cursor reload as a result of ContentObserver firing.");
                reloadMediaDownloads(getLoaderManager(), MediaDownloadFragment.this);
            }
        });
        mMediaDownloadsObserver.setThrottleDelay(100);
        activity.getContentResolver().registerContentObserver(TrackerContract.FileDownloads.CONTENT_URI, true, mMediaDownloadsObserver);
    }

    @Override
    public void onDetach() {
        super.onDetach();
        // Unregister to avoid leaking the fragment through the resolver.
        getActivity().getContentResolver().unregisterContentObserver(mMediaDownloadsObserver);
        mMediaPlaybackDelegate = null;
    }

    @Override
    public void onStart() {
        super.onStart();
        Application.getInstance().addUIListener(DownloadStateChangedListener.class, this);
        // Refresh the list in case downloads changed while the fragment was stopped.
        reloadMediaDownloads(getLoaderManager(), MediaDownloadFragment.this);
    }

    @Override
    public void onStop() {
        super.onStop();
        Application.getInstance().removeUIListener(DownloadStateChangedListener.class, this);
    }

    @Override
    public void onLoadFinished(Loader<Cursor> loader, Cursor data) {
        if (!isAdded()) {
            return;
        }
        switch (loader.getId()) {
            case MediaDownloadsQuery.TOKEN_NORMAL: {
                MediaDownload[] downloads = MediaDownload.downloadsFromCursor(data);
                if (downloads != null && downloads.length > 0) {
                    mMediaDownloadAdapter.updateDownloads(downloads);
                    mEmptyView.setVisibility(View.GONE);
                } else {
                    // No rows: clear the adapter and show the empty-state view.
                    mMediaDownloadAdapter.updateDownloads(new ArrayList<>());
                    mEmptyView.setVisibility(View.VISIBLE);
                }
                break;
            }
        }
    }

    /**
     * Finds the currently-bound view holder for a download request, matching the
     * request id against the file id stored as each child view's tag.
     *
     * @param requestId the file download request id
     * @return the matching view holder, or {@code null} if the row is off-screen
     */
    private MediaDownloadAdapter.ViewHolder findViewHolderByRequestId(String requestId) {
        for (int i = 0; i < mMediaDownloadView.getChildCount(); ++i) {
            View childView = mMediaDownloadView.getChildAt(i);
            String fileId = (String) childView.getTag();
            if (TextUtils.equals(fileId, requestId)) {
                return (MediaDownloadAdapter.ViewHolder) mMediaDownloadView.getChildViewHolder(childView);
            }
        }
        return null;
    }

    @Override
    public void onPreparing(FileDownloadRequest request, Bundle extra) {
        // No UI update needed; the row state is driven by the content provider.
    }

    @Override
    public void onPaused(FileDownloadRequest request, Bundle extra) {
        // No UI update needed; the row state is driven by the content provider.
    }

    @Override
    public void onDownloading(FileDownloadRequest request, long currentFileSize, long totalFileSize,
                              long downloadSpeed, Bundle extra) {
        // Push live speed/progress straight into the visible row, if any.
        MediaDownloadAdapter.ViewHolder viewHolder = findViewHolderByRequestId(request.getId());
        if (viewHolder != null) {
            viewHolder.updateDownloadSpeed(downloadSpeed);
            viewHolder.updateDownloadProgress(currentFileSize, totalFileSize);
        }
    }

    @Override
    public void onCanceled(FileDownloadRequest request, Bundle extra) {
        // Remove the canceled download's row from the provider asynchronously.
        AsyncQueryHandler handler = new AppQueryHandler(getActivity().getContentResolver());
        handler.startDelete(0, null, FileDownloads.CONTENT_URI, FileDownloads.FILE_ID + " = ?", new String[]{request.getId()});
    }

    @Override
    public void onCompleted(FileDownloadRequest request, Uri downloadedFileUri, Bundle extra) {
        // No UI update needed; the row state is driven by the content provider.
    }

    @Override
    public void onFailed(FileDownloadRequest request, String reason, Bundle extra) {
        // No UI update needed; the row state is driven by the content provider.
    }

    /**
     * Starts playback of a completed download when its row is tapped; shows a
     * toast when the local file is missing.
     */
    private void onDownloadItemClicked(MediaDownload download) {
        if (download.getStatus() == DownloadStatus.COMPLETED) {
            File downloadedFile = new File(download.local_location);
            if (downloadedFile.exists() && downloadedFile.isFile()) {
                if (mMediaPlaybackDelegate != null) {
                    mMediaPlaybackDelegate.startMediaPlayback(Uri.fromFile(downloadedFile), download.title);
                }
            } else {
                Toast.makeText(getActivity(), R.string.playback_downloaded_file_failed, Toast.LENGTH_LONG).show();
            }
        }
    }

    /**
     * Populates and shows the per-item popup menu, revealing only the actions
     * valid for the download's current status.
     */
    private void onDownloadItemLongClicked(final MediaDownload download, PopupMenu popupMenu) {
        Menu menu = popupMenu.getMenu();
        popupMenu.getMenuInflater().inflate(R.menu.popup_menu_download, menu);

        DownloadStatus status = DownloadStatus.valueOf(download.status.toUpperCase());
        if (status == DownloadStatus.PENDING || status == DownloadStatus.PREPARING ||
                status == DownloadStatus.CONNECTING || status == DownloadStatus.DOWNLOADING) {
            // In-flight: allow pausing or canceling.
            menu.findItem(R.id.action_pause_download).setVisible(true);
            menu.findItem(R.id.action_cancel_download).setVisible(true);
        } else if (status == DownloadStatus.PAUSED || status == DownloadStatus.FAILED) {
            // Stalled: allow resuming or canceling.
            menu.findItem(R.id.action_start_download).setVisible(true);
            menu.findItem(R.id.action_cancel_download).setVisible(true);
        } else if (status == DownloadStatus.COMPLETED) {
            // Finished: allow opening, deleting, or inspecting the file.
            menu.findItem(R.id.action_open_file).setVisible(true);
            menu.findItem(R.id.action_delete_file).setVisible(true);
            menu.findItem(R.id.action_show_property).setVisible(true);
        }

        popupMenu.setOnMenuItemClickListener(item -> {
            switch (item.getItemId()) {
                case R.id.action_open_file: {
                    onActionOpenFile(download);
                    break;
                }
                case R.id.action_delete_file: {
                    onActionDeleteFile(download);
                    break;
                }
                case R.id.action_start_download: {
                    onActionStartDownload(download);
                    break;
                }
                case R.id.action_pause_download: {
                    onActionPauseDownload(download);
                    break;
                }
                case R.id.action_cancel_download: {
                    onActionCancelDownload(download);
                    break;
                }
                case R.id.action_show_property: {
                    onActionShowProperty(download);
                    break;
                }
            }
            return true;
        });
        popupMenu.show();
    }

    // Hands the downloaded file to the playback delegate.
    private void onActionOpenFile(MediaDownload download) {
        if (!TextUtils.isEmpty(download.local_location)) {
            if (mMediaPlaybackDelegate != null) {
                mMediaPlaybackDelegate.startMediaPlayback(Uri.fromFile(new File(download.local_location)), download.title);
            }
        }
    }

    /**
     * Confirms, then deletes the provider row and — once the row delete
     * completes — the downloaded file itself (path passed as the query cookie).
     */
    private void onActionDeleteFile(final MediaDownload download) {
        AlertDialog alertDialog = new AlertDialog.Builder(getActivity())
                // Set Dialog Icon
                .setIcon(R.mipmap.ic_launcher)
                // Set Dialog Title
                .setTitle("Delete file")
                // Set Dialog Message
                .setMessage(R.string.delete_file_prompt)
                // Positive button
                .setPositiveButton("Yes", (dialog, which) -> {
                    AsyncQueryHandler handler = new AppQueryHandler(getActivity().getContentResolver()) {
                        @Override
                        protected void onDeleteComplete(int token, Object cookie, int result) {
                            deleteDownloadedFile((String) cookie);
                        }
                    };
                    handler.startDelete(0, download.local_location, FileDownloads.CONTENT_URI, FileDownloads.FILE_ID + " = ?", new String[]{download.identifier});
                })
                // Negative Button
                .setNegativeButton("No", (dialog, which) -> {})
                .create();
        alertDialog.show();
    }

    // Shows the media properties dialog, avoiding duplicates by tag lookup.
    private void onActionShowProperty(MediaDownload download) {
        DownloadedMediaPropertiesDialog dialog = DownloadedMediaPropertiesDialog.getInstance(download);
        if (getFragmentManager().findFragmentByTag(DownloadedMediaPropertiesDialog.TAG) == null) {
            dialog.show(getFragmentManager(), DownloadedMediaPropertiesDialog.TAG);
        }
    }

    // Starts (or resumes) a download; downloads are restricted to wifi.
    private void onActionStartDownload(MediaDownload download) {
        Context context = getActivity().getApplicationContext();
        if (!ConnectivityUtils.isWifiConnected(context)) {
            Toast.makeText(getActivity(), R.string.download_allowed_when_wifi_connected, Toast.LENGTH_LONG).show();
        } else {
            FileDownloadManager.getInstance().startDownload(download.identifier);
        }
    }

    private void onActionPauseDownload(MediaDownload download) {
        FileDownloadManager.getInstance().pauseDownload(download.identifier);
    }

    /**
     * Cancels a download. Active downloads get a confirmation dialog (data will
     * be discarded); failed/paused ones are removed immediately, deleting the
     * partially-downloaded temp file once the provider row delete completes.
     */
    private void onActionCancelDownload(final MediaDownload download) {
        Context context = Application.getInstance();
        final String tmpFileLocation = Config.FILE_DOWNLOAD_DIR_PATH + download.identifier; // In the future if no default location is used, the download object should also contain the destination location

        // Render the "data will be removed" warning in red italics below the prompt.
        SpannableString statusText = new SpannableString(
                getString(R.string.cancel_download_prompt) + "\n" + getString(R.string.remove_downloaded_data_warning));
        statusText.setSpan(new ForegroundColorSpan(
                        ContextCompat.getColor(context, android.R.color.holo_red_light)),
                getString(R.string.cancel_download_prompt).length(),
                statusText.length(),
                0);
        statusText.setSpan(new StyleSpan(
                        Typeface.ITALIC),
                getString(R.string.cancel_download_prompt).length(),
                statusText.length(),
                0);

        AlertDialog alertDialog = new AlertDialog.Builder(getActivity())
                // Set Dialog Icon
                .setIcon(R.mipmap.ic_launcher)
                // Set Dialog Title
                .setTitle("Cancel download")
                // Set Dialog Message
                .setMessage(statusText)
                // Positive button
                .setPositiveButton("Yes", (dialog, which) ->
                        FileDownloadManager.getInstance().cancelDownload(download.identifier))
                // Negative Button
                .setNegativeButton("No", (dialog, which) -> {})
                .create();

        DownloadStatus status = download.getStatus();
        if (status == DownloadStatus.PENDING || status == DownloadStatus.PREPARING ||
                status == DownloadStatus.CONNECTING || status == DownloadStatus.DOWNLOADING) {
            alertDialog.show();
        } else if (status == DownloadStatus.FAILED || status == DownloadStatus.PAUSED) {
            AsyncQueryHandler handler = new AppQueryHandler(getActivity().getContentResolver()) {
                @Override
                protected void onDeleteComplete(int token, Object cookie, int result) {
                    deleteDownloadedFile((String) cookie);
                }
            };
            handler.startDelete(0, tmpFileLocation, FileDownloads.CONTENT_URI, FileDownloads.FILE_ID + " = ?", new String[]{download.identifier});
        }
    }

    // Best-effort removal of a downloaded file; silently ignores missing paths.
    private void deleteDownloadedFile(String fileLocation) {
        if (!TextUtils.isEmpty(fileLocation)) {
            File downloadedFile = new File(fileLocation);
            if (downloadedFile.exists() && downloadedFile.isFile()) {
                downloadedFile.delete();
            }
        }
    }

    @Override
    public void onClick(View itemView) {
        // The adapter stores the file id as the row view's tag.
        MediaDownload download = mMediaDownloadAdapter.getItem((String) itemView.getTag());
        onDownloadItemClicked(download);
    }

    @Override
    public boolean onLongClick(View itemView) {
        // Anchor the popup on the file-size label inside the row.
        TextView downloadFileSize = (TextView) itemView.findViewById(R.id.media_file_size);
        MediaDownload download = mMediaDownloadAdapter.getItem((String) itemView.getTag());
        PopupMenu popupMenu = new PopupMenu(getActivity(), downloadFileSize);
        onDownloadItemLongClicked(download, popupMenu);
        return true;
    }
}
| |
/*
* Copyright 2012 International Business Machines Corp.
*
* See the NOTICE file distributed with this work for additional information
* regarding copyright ownership. Licensed under the Apache License,
* Version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.ibm.jbatch.container.impl;
import java.io.ByteArrayInputStream;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.BlockingQueue;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.batch.api.chunk.CheckpointAlgorithm;
import javax.batch.runtime.BatchStatus;
import com.ibm.jbatch.container.artifact.proxy.CheckpointAlgorithmProxy;
import com.ibm.jbatch.container.artifact.proxy.ChunkListenerProxy;
import com.ibm.jbatch.container.artifact.proxy.InjectionReferences;
import com.ibm.jbatch.container.artifact.proxy.ItemProcessListenerProxy;
import com.ibm.jbatch.container.artifact.proxy.ItemProcessorProxy;
import com.ibm.jbatch.container.artifact.proxy.ItemReadListenerProxy;
import com.ibm.jbatch.container.artifact.proxy.ItemReaderProxy;
import com.ibm.jbatch.container.artifact.proxy.ItemWriteListenerProxy;
import com.ibm.jbatch.container.artifact.proxy.ItemWriterProxy;
import com.ibm.jbatch.container.artifact.proxy.ProxyFactory;
import com.ibm.jbatch.container.artifact.proxy.RetryProcessListenerProxy;
import com.ibm.jbatch.container.artifact.proxy.RetryReadListenerProxy;
import com.ibm.jbatch.container.artifact.proxy.RetryWriteListenerProxy;
import com.ibm.jbatch.container.artifact.proxy.SkipProcessListenerProxy;
import com.ibm.jbatch.container.artifact.proxy.SkipReadListenerProxy;
import com.ibm.jbatch.container.artifact.proxy.SkipWriteListenerProxy;
import com.ibm.jbatch.container.context.impl.MetricImpl;
import com.ibm.jbatch.container.context.impl.StepContextImpl;
import com.ibm.jbatch.container.exception.BatchContainerRuntimeException;
import com.ibm.jbatch.container.exception.BatchContainerServiceException;
import com.ibm.jbatch.container.jobinstance.RuntimeJobExecution;
import com.ibm.jbatch.container.persistence.CheckpointAlgorithmFactory;
import com.ibm.jbatch.container.persistence.CheckpointData;
import com.ibm.jbatch.container.persistence.CheckpointDataKey;
import com.ibm.jbatch.container.persistence.CheckpointManager;
import com.ibm.jbatch.container.persistence.ItemCheckpointAlgorithm;
import com.ibm.jbatch.container.services.IPersistenceManagerService;
import com.ibm.jbatch.container.servicesmanager.ServicesManager;
import com.ibm.jbatch.container.servicesmanager.ServicesManagerImpl;
import com.ibm.jbatch.container.util.PartitionDataWrapper;
import com.ibm.jbatch.container.util.TCCLObjectInputStream;
import com.ibm.jbatch.container.validation.ArtifactValidationException;
import com.ibm.jbatch.jsl.model.Chunk;
import com.ibm.jbatch.jsl.model.ItemProcessor;
import com.ibm.jbatch.jsl.model.ItemReader;
import com.ibm.jbatch.jsl.model.ItemWriter;
import com.ibm.jbatch.jsl.model.Property;
import com.ibm.jbatch.jsl.model.Step;
public class ChunkStepControllerImpl extends SingleThreadedStepControllerImpl {
private final static String sourceClass = ChunkStepControllerImpl.class.getName();
private final static Logger logger = Logger.getLogger(sourceClass);
// The chunk-type step model element from the JSL; assigned in invokeCoreStep().
private Chunk chunk = null;
// Artifact proxies wrapping the application's reader/processor/writer;
// created in initializeChunkArtifacts().
private ItemReaderProxy readerProxy = null;
private ItemProcessorProxy processorProxy = null;
private ItemWriterProxy writerProxy = null;
// Checkpoint algorithm proxy; its getCheckpointType() distinguishes
// "item" (built-in) from "custom" policies.
private CheckpointAlgorithmProxy checkpointProxy = null;
private CheckpointAlgorithm chkptAlg = null;
private CheckpointManager checkpointManager;
private ServicesManager servicesManager = ServicesManagerImpl.getInstance();
private IPersistenceManagerService _persistenceManagerService = null;
private SkipHandler skipHandler = null;
// Keys and data for persisted reader/writer checkpoint state.
CheckpointDataKey readerChkptDK, writerChkptDK = null;
CheckpointData readerChkptData = null;
CheckpointData writerChkptData = null;
// Listener proxy lists; populated during artifact initialization and iterated
// around each read/process/write call.
List<ChunkListenerProxy> chunkListeners = null;
List<SkipProcessListenerProxy> skipProcessListeners = null;
List<SkipReadListenerProxy> skipReadListeners = null;
List<SkipWriteListenerProxy> skipWriteListeners = null;
List<RetryProcessListenerProxy> retryProcessListeners = null;
List<RetryReadListenerProxy> retryReadListeners = null;
List<RetryWriteListenerProxy> retryWriteListeners = null;
List<ItemReadListenerProxy> itemReadListeners = null;
List<ItemProcessListenerProxy> itemProcessListeners = null;
List<ItemWriteListenerProxy> itemWriteListeners = null;
private RetryHandler retryHandler;
// metrics
long readCount = 0;
long writeCount = 0;
long readSkipCount = 0;
long processSkipCount = 0;
long writeSkipCount = 0;
// True while recovering from a retry-with-rollback; switches the
// read/process/write error handling into its "retry" branches.
boolean rollbackRetry = false;
/**
 * Constructs a chunk step controller, delegating all initialization to the
 * single-threaded step controller base class.
 *
 * @param jobExecutionImpl the runtime job execution this step belongs to
 * @param step the JSL step model element
 * @param stepContext the step context for metrics and status
 * @param rootJobExecutionId execution id of the top-level (root) job
 * @param analyzerStatusQueue queue used to report partition data to the analyzer
 */
public ChunkStepControllerImpl(RuntimeJobExecution jobExecutionImpl, Step step, StepContextImpl stepContext, long rootJobExecutionId, BlockingQueue<PartitionDataWrapper> analyzerStatusQueue) {
super(jobExecutionImpl, step, stepContext, rootJobExecutionId, analyzerStatusQueue);
}
/**
 * Utility class holding the per-item status flags raised at each level of the
 * read-process-write loop: skipped, filtered, end-of-input, checkpoint due,
 * retry requested, and rollback requested.
 */
private class ItemStatus {

    // All flags start false and are set by the loop as events occur.
    private boolean skipped = false;
    private boolean filtered = false;
    private boolean finished = false;
    private boolean checkPointed = false;
    private boolean retry = false;
    private boolean rollback = false;

    /** @return true if the current item was skipped under the skip policy */
    public boolean isSkipped() {
        return this.skipped;
    }

    public void setSkipped(boolean skipped) {
        this.skipped = skipped;
    }

    /** @return true if the processor filtered the item out (returned null) */
    public boolean isFiltered() {
        return this.filtered;
    }

    public void setFiltered(boolean filtered) {
        this.filtered = filtered;
    }

    /** @return true if the checkpoint policy fired for the current chunk */
    public boolean isCheckPointed() {
        return this.checkPointed;
    }

    public void setCheckPointed(boolean checkPointed) {
        this.checkPointed = checkPointed;
    }

    /** @return true once the reader has no more input */
    public boolean isFinished() {
        return this.finished;
    }

    public void setFinished(boolean finished) {
        this.finished = finished;
    }

    /** @return true if a retry of the current operation was requested */
    public boolean isRetry() {
        return this.retry;
    }

    public void setRetry(boolean retry) {
        this.retry = retry;
    }

    /** @return true if a transaction rollback was requested */
    public boolean isRollback() {
        return this.rollback;
    }

    public void setRollback(boolean rollback) {
        this.rollback = rollback;
    }
}
/**
 * We read and process one item at a time but write in chunks (group of
 * items). Loops until we either reach the end of the reader (no more items
 * to read), the write buffer is full, or a checkpoint is triggered. A
 * rollback flagged by the reader or processor exits the loop immediately.
 *
 * @param chunkSize
 *            write buffer size (only enforced when the checkpoint policy is
 *            not "custom")
 * @param theStatus
 *            out-parameter; flags when the read-process reached the last
 *            record, a checkpoint is required, or a rollback was requested
 * @return an array list of objects to write
 */
private List<Object> readAndProcess(int chunkSize, ItemStatus theStatus) {
    logger.entering(sourceClass, "readAndProcess", new Object[] { chunkSize, theStatus });
    List<Object> chunkToWrite = new ArrayList<Object>();
    Object itemRead = null;
    Object itemProcessed = null;
    int readProcessedCount = 0;

    while (true) {
        ItemStatus status = new ItemStatus();
        itemRead = readItem(status);

        if (status.isRollback()) {
            theStatus.setRollback(true);
            // inc rollbackCount
            stepContext.getMetric(MetricImpl.MetricType.ROLLBACK_COUNT).incValue();
            break;
        }

        if (!status.isSkipped() && !status.isFinished()) {
            itemProcessed = processItem(itemRead, status);

            if (status.isRollback()) {
                theStatus.setRollback(true);
                // inc rollbackCount
                stepContext.getMetric(MetricImpl.MetricType.ROLLBACK_COUNT).incValue();
                break;
            }

            // Skipped or filtered items are never buffered for writing.
            if (!status.isSkipped() && !status.isFiltered()) {
                chunkToWrite.add(itemProcessed);
                readProcessedCount++;
            }
        }

        theStatus.setFinished(status.isFinished());
        theStatus.setCheckPointed(checkpointManager.ApplyCheckPointPolicy());

        // This will force the current item to finish processing on a stop
        // request
        if (stepContext.getBatchStatus().equals(BatchStatus.STOPPING)) {
            theStatus.setFinished(true);
        }

        // write buffer size reached (not applicable under a custom checkpoint policy).
        // BUG FIX: the original compared the checkpoint type with the != operator,
        // which tests reference identity on Strings; use equals() for content equality.
        if ((readProcessedCount == chunkSize) && !"custom".equals(checkpointProxy.getCheckpointType())) {
            break;
        }

        // checkpoint reached
        if (theStatus.isCheckPointed()) {
            break;
        }

        // last record in readerProxy reached
        if (theStatus.isFinished()) {
            break;
        }
    }
    logger.exiting(sourceClass, "readAndProcess", chunkToWrite);
    return chunkToWrite;
}
/**
 * Reads a single item from the reader, invoking ItemReadListener callbacks
 * before and after the read. On exception, consults the retry and skip
 * handlers: a retryable non-rollback exception triggers a recursive re-read
 * of the current position, a rollback exception flags a transaction
 * rollback, and a skippable exception marks the item skipped. The
 * {@code rollbackRetry} field selects the post-rollback handling branch, in
 * which skip takes precedence over retry.
 *
 * @param status
 *            out-parameter; flags finished/skipped/rollback for this read
 * @return the item read, or null at end of input or when the read failed
 *         with a skipped/rolled-back exception
 */
private Object readItem(ItemStatus status) {
logger.entering(sourceClass, "readItem", status);
Object itemRead = null;
try {
// call read listeners before and after the actual read
for (ItemReadListenerProxy readListenerProxy : itemReadListeners) {
readListenerProxy.beforeRead();
}
itemRead = readerProxy.readItem();
for (ItemReadListenerProxy readListenerProxy : itemReadListeners) {
readListenerProxy.afterRead(itemRead);
}
// itemRead == null means we reached the end of
// the readerProxy "resultset"
status.setFinished(itemRead == null);
if (!status.isFinished()) {
stepContext.getMetric(MetricImpl.MetricType.READ_COUNT).incValue();
}
} catch (Exception e) {
stepContext.setException(e);
// Notify read listeners of the error before deciding retry/skip handling.
for (ItemReadListenerProxy readListenerProxy : itemReadListeners) {
readListenerProxy.onReadError(e);
}
if(!rollbackRetry) {
if (retryReadException(e)) {
// NOTE(review): onReadError was already invoked just above, so listeners
// receive it twice for retryable exceptions — confirm this is intentional.
for (ItemReadListenerProxy readListenerProxy : itemReadListeners) {
readListenerProxy.onReadError(e);
}
// if not a rollback exception, just retry the current item
if (!retryHandler.isRollbackException(e)) {
itemRead = readItem(status);
} else {
status.setRollback(true);
rollbackRetry = true;
// inc rollbackCount
stepContext.getMetric(MetricImpl.MetricType.ROLLBACK_COUNT).incValue();
}
}
else if(skipReadException(e)) {
status.setSkipped(true);
stepContext.getMetric(MetricImpl.MetricType.READ_SKIP_COUNT).incValue();
}
else {
// Neither retryable nor skippable: fail the step.
throw new BatchContainerRuntimeException(e);
}
}
else {
// coming from a rollback retry: skip is checked before retry here.
if(skipReadException(e)) {
status.setSkipped(true);
stepContext.getMetric(MetricImpl.MetricType.READ_SKIP_COUNT).incValue();
}
else if (retryReadException(e)) {
if (!retryHandler.isRollbackException(e)) {
itemRead = readItem(status);
}
else {
status.setRollback(true);
// inc rollbackCount
stepContext.getMetric(MetricImpl.MetricType.ROLLBACK_COUNT).incValue();
}
}
else {
throw new BatchContainerRuntimeException(e);
}
}
} catch (Throwable e) {
throw new BatchContainerRuntimeException(e);
}
logger.exiting(sourceClass, "readItem", itemRead==null ? "<null>" : itemRead);
return itemRead;
}
/**
 * Processes an item previously read by the reader, invoking
 * ItemProcessListener callbacks around the processor call. A null result
 * from the processor marks the item filtered (and increments FILTER_COUNT).
 * On exception, consults the retry and skip handlers: a retryable
 * non-rollback exception re-processes the same item recursively (with
 * listener callbacks re-fired), a rollback exception flags a transaction
 * rollback, and a skippable exception marks the item skipped. The
 * {@code rollbackRetry} field selects the post-rollback branch, in which
 * skip takes precedence over retry.
 *
 * @param itemRead
 *            the item read
 * @param status
 *            out-parameter; flags filtered/skipped/rollback for this item
 * @return the processed item, or itemRead unchanged when no processor is
 *         configured, or null when filtered/skipped/rolled back
 */
private Object processItem(Object itemRead, ItemStatus status) {
logger.entering(sourceClass, "processItem", new Object[] { itemRead, status });
Object processedItem = null;
// if no processor defined for this chunk
if (processorProxy == null){
return itemRead;
}
try {
// call process listeners before and after the actual process call
for (ItemProcessListenerProxy processListenerProxy : itemProcessListeners) {
processListenerProxy.beforeProcess(itemRead);
}
processedItem = processorProxy.processItem(itemRead);
if (processedItem == null) {
// inc filterCount
stepContext.getMetric(MetricImpl.MetricType.FILTER_COUNT).incValue();
status.setFiltered(true);
}
for (ItemProcessListenerProxy processListenerProxy : itemProcessListeners) {
processListenerProxy.afterProcess(itemRead, processedItem);
}
} catch (Exception e) {
for (ItemProcessListenerProxy processListenerProxy : itemProcessListeners) {
processListenerProxy.onProcessError(processedItem, e);
}
if(!rollbackRetry) {
if (retryProcessException(e, itemRead)) {
if (!retryHandler.isRollbackException(e)) {
// call process listeners before and after the actual
// process call
for (ItemProcessListenerProxy processListenerProxy : itemProcessListeners) {
processListenerProxy.beforeProcess(itemRead);
}
processedItem = processItem(itemRead, status);
if (processedItem == null) {
// inc filterCount
stepContext.getMetric(MetricImpl.MetricType.FILTER_COUNT).incValue();
status.setFiltered(true);
}
for (ItemProcessListenerProxy processListenerProxy : itemProcessListeners) {
processListenerProxy.afterProcess(itemRead, processedItem);
}
} else {
status.setRollback(true);
rollbackRetry = true;
// inc rollbackCount
stepContext.getMetric(MetricImpl.MetricType.ROLLBACK_COUNT).incValue();
}
}
else if (skipProcessException(e, itemRead)) {
status.setSkipped(true);
stepContext.getMetric(MetricImpl.MetricType.PROCESS_SKIP_COUNT).incValue();
}
else {
// Neither retryable nor skippable: fail the step.
throw new BatchContainerRuntimeException(e);
}
}
else {
// coming from a rollback retry: skip is checked before retry here.
if (skipProcessException(e, itemRead)) {
status.setSkipped(true);
stepContext.getMetric(MetricImpl.MetricType.PROCESS_SKIP_COUNT).incValue();
} else if (retryProcessException(e, itemRead)) {
if (!retryHandler.isRollbackException(e)) {
// call process listeners before and after the actual
// process call
for (ItemProcessListenerProxy processListenerProxy : itemProcessListeners) {
processListenerProxy.beforeProcess(itemRead);
}
processedItem = processItem(itemRead, status);
if (processedItem == null) {
// inc filterCount
stepContext.getMetric(MetricImpl.MetricType.FILTER_COUNT).incValue();
status.setFiltered(true);
}
for (ItemProcessListenerProxy processListenerProxy : itemProcessListeners) {
processListenerProxy.afterProcess(itemRead, processedItem);
}
} else {
status.setRollback(true);
rollbackRetry = true;
// inc rollbackCount
stepContext.getMetric(MetricImpl.MetricType.ROLLBACK_COUNT).incValue();
}
} else {
throw new BatchContainerRuntimeException(e);
}
}
} catch (Throwable e) {
throw new BatchContainerRuntimeException(e);
}
logger.exiting(sourceClass, "processItem", processedItem==null ? "<null>" : processedItem);
return processedItem;
}
/**
 * Writes the buffered chunk of processed items via the writer proxy,
 * invoking ItemWriteListener callbacks around the write and incrementing
 * WRITE_COUNT by the chunk size on success. An empty chunk is a no-op. On
 * exception, consults the retry and skip handlers: a retryable non-rollback
 * exception re-writes the same chunk recursively, a rollback exception
 * flags a transaction rollback, and a skippable exception increments
 * WRITE_SKIP_COUNT. The {@code rollbackRetry} field selects the
 * post-rollback branch, in which skip takes precedence over retry.
 *
 * @param theChunk
 *            the array list with all items processed ready to be written
 * @param status
 *            out-parameter; flags retry/rollback for this write
 */
private void writeChunk(List<Object> theChunk, ItemStatus status) {
logger.entering(sourceClass, "writeChunk", theChunk);
if (!theChunk.isEmpty()) {
try {
// call write listeners before and after the actual write
for (ItemWriteListenerProxy writeListenerProxy : itemWriteListeners) {
writeListenerProxy.beforeWrite(theChunk);
}
writerProxy.writeItems(theChunk);
for (ItemWriteListenerProxy writeListenerProxy : itemWriteListeners) {
writeListenerProxy.afterWrite(theChunk);
}
stepContext.getMetric(MetricImpl.MetricType.WRITE_COUNT).incValueBy(theChunk.size());
} catch (Exception e) {
this.stepContext.setException(e);
for (ItemWriteListenerProxy writeListenerProxy : itemWriteListeners) {
writeListenerProxy.onWriteError(theChunk, e);
}
if(!rollbackRetry)
{
if (retryWriteException(e, theChunk)) {
if (!retryHandler.isRollbackException(e)) {
writeChunk(theChunk, status);
} else {
rollbackRetry = true;
status.setRollback(true);
// inc rollbackCount
stepContext.getMetric(MetricImpl.MetricType.ROLLBACK_COUNT).incValue();
}
} else if (skipWriteException(e, theChunk)) {
stepContext.getMetric(MetricImpl.MetricType.WRITE_SKIP_COUNT).incValueBy(1);
} else {
// Neither retryable nor skippable: fail the step.
throw new BatchContainerRuntimeException(e);
}
}
else {
// coming from a rollback retry: skip is checked before retry here.
if (skipWriteException(e, theChunk)) {
stepContext.getMetric(MetricImpl.MetricType.WRITE_SKIP_COUNT).incValueBy(1);
} else if (retryWriteException(e, theChunk)) {
if (!retryHandler.isRollbackException(e)) {
status.setRetry(true);
writeChunk(theChunk, status);
} else {
rollbackRetry = true;
status.setRollback(true);
// inc rollbackCount
stepContext.getMetric(MetricImpl.MetricType.ROLLBACK_COUNT).incValue();
}
} else {
throw new BatchContainerRuntimeException(e);
}
}
} catch (Throwable e) {
throw new BatchContainerRuntimeException(e);
}
}
logger.exiting(sourceClass, "writeChunk");
}
/**
 * Main Read-Process-Write loop. Each iteration begins a transaction (after
 * the first checkpoint commit or a rollback), reads and processes up to a
 * buffer's worth of items, writes them, and commits at checkpoint
 * boundaries. After a rollback, the reader and writer are repositioned at
 * the last checkpoint and items are retried one at a time (itemCount forced
 * to 1). The loop ends once the last record has been written and committed.
 *
 * @throws Exception any failure propagated from the chunk artifacts,
 *         listeners, or transaction manager
 */
private void invokeChunk() throws Exception {
    logger.entering(sourceClass, "invokeChunk2");

    int itemCount = ChunkHelper.getItemCount(chunk);
    int timeInterval = ChunkHelper.getTimeLimit(chunk);
    List<Object> chunkToWrite = new ArrayList<Object>();
    boolean checkPointed = true;
    boolean rollback = false;

    // begin new transaction at first iteration or after a checkpoint commit
    try {
        transactionManager.begin();
        this.openReaderAndWriter();
        transactionManager.commit();

        while (true) {
            if (checkPointed || rollback) {
                // BUG FIX: the original compared the checkpoint type with the ==
                // operator, which tests reference identity on Strings; use
                // equals() for content equality.
                if ("custom".equals(this.checkpointProxy.getCheckpointType())) {
                    int newtimeOut = this.checkpointManager.checkpointTimeout();
                    transactionManager.setTransactionTimeout(newtimeOut);
                }
                transactionManager.begin();
                for (ChunkListenerProxy chunkProxy : chunkListeners) {
                    chunkProxy.beforeChunk();
                }
                if (rollback) {
                    // Rewind to the last good checkpoint before retrying.
                    positionReaderAtCheckpoint();
                    positionWriterAtCheckpoint();
                    checkpointManager = new CheckpointManager(readerProxy, writerProxy,
                            getCheckpointAlgorithm(itemCount, timeInterval), jobExecutionImpl.getExecutionId(), jobExecutionImpl
                            .getJobInstance().getInstanceId(), step.getId());
                }
            }

            ItemStatus status = new ItemStatus();
            if (rollback) {
                rollback = false;
            }

            chunkToWrite = readAndProcess(itemCount, status);

            if (status.isRollback()) {
                // Retry one item at a time after a rollback.
                itemCount = 1;
                rollback = true;
                transactionManager.rollback();
                continue;
            }

            writeChunk(chunkToWrite, status);

            if (status.isRollback()) {
                itemCount = 1;
                rollback = true;
                transactionManager.rollback();
                continue;
            }
            checkPointed = status.isCheckPointed();

            // we could finish the chunk in 3 conditions: buffer is full,
            // checkpoint, not more input
            if (status.isCheckPointed() || status.isFinished()) {
                // TODO: missing before checkpoint listeners
                // 1.- check if spec list proper steps for before checkpoint
                // 2.- ask Andy about retry
                // 3.- when do we stop?
                checkpointManager.checkpoint();

                for (ChunkListenerProxy chunkProxy : chunkListeners) {
                    chunkProxy.afterChunk();
                }

                this.persistUserData();
                this.chkptAlg.beginCheckpoint();
                transactionManager.commit();
                this.chkptAlg.endCheckpoint();

                invokeCollectorIfPresent();

                // exit loop when last record is written
                if (status.isFinished()) {
                    transactionManager.begin();
                    readerProxy.close();
                    writerProxy.close();
                    transactionManager.commit();
                    // increment commitCount
                    stepContext.getMetric(MetricImpl.MetricType.COMMIT_COUNT).incValue();
                    break;
                } else {
                    // increment commitCount
                    stepContext.getMetric(MetricImpl.MetricType.COMMIT_COUNT).incValue();
                }
            }
        }
    } catch (Exception e) {
        // Give chunk listeners a chance to observe the error, then roll back.
        for (ChunkListenerProxy chunkProxy : chunkListeners) {
            chunkProxy.onError(e);
        }
        transactionManager.rollback();
        logger.warning("Caught exception in chunk processing");
        throw e;
    } catch (Throwable t) {
        logger.log(Level.SEVERE, "Failure in Read-Process-Write Loop", t);
        throw new BatchContainerRuntimeException("Failure in Read-Process-Write Loop", t);
    }
    logger.exiting(sourceClass, "invokeChunk");
}
/**
 * Entry point for executing this chunk-type step: captures the chunk model
 * from the step, initializes the reader/processor/writer artifacts and
 * listeners, then drives the read-process-write loop. Any exception from the
 * loop is wrapped in a BatchContainerServiceException.
 *
 * @throws BatchContainerServiceException if the chunk loop fails
 */
protected void invokeCoreStep() throws BatchContainerServiceException {
    chunk = step.getChunk();
    initializeChunkArtifacts();
    try {
        invokeChunk();
    } catch (Exception cause) {
        throw new BatchContainerServiceException(cause);
    }
}
/**
 * Selects the checkpoint algorithm for this chunk: the built-in item-count
 * algorithm (configured with the given thresholds) for the "item" policy,
 * otherwise the user-supplied custom checkpoint algorithm proxy.
 *
 * @param itemCount item-count threshold for the built-in algorithm
 * @param timeInterval time limit threshold for the built-in algorithm
 * @return the checkpoint algorithm to use
 */
private CheckpointAlgorithm getCheckpointAlgorithm(int itemCount, int timeInterval) {
    CheckpointAlgorithm alg = null;
    // BUG FIX: the original compared the checkpoint type with the == operator,
    // which tests reference identity on Strings; use equals() (literal-first,
    // so it is also null-safe) for content equality.
    if ("item".equals(checkpointProxy.getCheckpointType())) {
        alg = new ItemCheckpointAlgorithm();
        ((ItemCheckpointAlgorithm) alg).setThresholds(itemCount, timeInterval);
    } else { // custom chkpt alg
        alg = (CheckpointAlgorithm) checkpointProxy;
    }
    return alg;
}
/*
 * Initialize the item reader, processor, writer, checkpoint algorithm,
 * all chunk-related listeners, and the skip/retry handlers for this step.
 */
private void initializeChunkArtifacts() {
    String sourceMethod = "initializeChunkArtifacts";
    if (logger.isLoggable(Level.FINE)) {
        logger.entering(sourceClass, sourceMethod);
    }
    int itemCount = ChunkHelper.getItemCount(chunk);
    int timeInterval = ChunkHelper.getTimeLimit(chunk);
    String checkpointPolicy = ChunkHelper.getCheckpointPolicy(chunk);
    // Build the ItemReader proxy from the JSL <reader> element.
    ItemReader itemReader = chunk.getReader();
    List<Property> itemReaderProps = itemReader.getProperties() == null ? null : itemReader.getProperties().getPropertyList();
    try {
        InjectionReferences injectionRef = new InjectionReferences(jobExecutionImpl.getJobContext(), stepContext,
                itemReaderProps);
        readerProxy = ProxyFactory.createItemReaderProxy(itemReader.getRef(), injectionRef, stepContext);
        if (logger.isLoggable(Level.FINE)) {
            logger.fine("Created ItemReaderProxy for " + itemReader.getRef());
        }
    } catch (ArtifactValidationException e) {
        throw new BatchContainerServiceException("Cannot create the ItemReader [" + itemReader.getRef() + "]", e);
    }
    // The processor is optional in the JSL; only build a proxy when present.
    ItemProcessor itemProcessor = chunk.getProcessor();
    if (itemProcessor != null) {
        List<Property> itemProcessorProps = itemProcessor.getProperties() == null ? null : itemProcessor.getProperties().getPropertyList();
        try {
            InjectionReferences injectionRef = new InjectionReferences(jobExecutionImpl.getJobContext(), stepContext,
                    itemProcessorProps);
            processorProxy = ProxyFactory.createItemProcessorProxy(itemProcessor.getRef(), injectionRef, stepContext);
            if (logger.isLoggable(Level.FINE)) {
                logger.fine("Created ItemProcessorProxy for " + itemProcessor.getRef());
            }
        } catch (ArtifactValidationException e) {
            throw new BatchContainerServiceException("Cannot create the ItemProcessor [" + itemProcessor.getRef() + "]", e);
        }
    }
    // Build the ItemWriter proxy from the JSL <writer> element.
    ItemWriter itemWriter = chunk.getWriter();
    List<Property> itemWriterProps = itemWriter.getProperties() == null ? null : itemWriter.getProperties().getPropertyList();
    try {
        InjectionReferences injectionRef = new InjectionReferences(jobExecutionImpl.getJobContext(), stepContext,
                itemWriterProps);
        writerProxy = ProxyFactory.createItemWriterProxy(itemWriter.getRef(), injectionRef, stepContext);
        if (logger.isLoggable(Level.FINE)) {
            logger.fine("Created ItemWriterProxy for " + itemWriter.getRef());
        }
    } catch (ArtifactValidationException e) {
        throw new BatchContainerServiceException("Cannot create the ItemWriter [" + itemWriter.getRef() + "]", e);
    }
    // Resolve the checkpoint-algorithm proxy (built-in or custom).
    try {
        List<Property> propList = null;
        if (chunk.getCheckpointAlgorithm() != null) {
            propList = (chunk.getCheckpointAlgorithm().getProperties() == null) ? null : chunk.getCheckpointAlgorithm().getProperties()
                    .getPropertyList();
        }
        InjectionReferences injectionRef = new InjectionReferences(jobExecutionImpl.getJobContext(), stepContext,
                propList);
        checkpointProxy = CheckpointAlgorithmFactory.getCheckpointAlgorithmProxy(step, injectionRef, stepContext);
        if (logger.isLoggable(Level.FINE)) {
            logger.fine("Created CheckpointAlgorithmProxy for policy [" + checkpointPolicy + "]");
        }
    } catch (ArtifactValidationException e) {
        throw new BatchContainerServiceException("Cannot create the CheckpointAlgorithm for policy [" + chunk.getCheckpointPolicy()
                + "]", e);
    }
    // Listener proxies share one InjectionReferences with no artifact properties.
    InjectionReferences injectionRef = new InjectionReferences(jobExecutionImpl.getJobContext(), stepContext,
            null);
    this.chunkListeners = jobExecutionImpl.getListenerFactory().getChunkListeners(step, injectionRef, stepContext);
    this.skipProcessListeners = jobExecutionImpl.getListenerFactory().getSkipProcessListeners(step, injectionRef, stepContext);
    this.skipReadListeners = jobExecutionImpl.getListenerFactory().getSkipReadListeners(step, injectionRef, stepContext);
    this.skipWriteListeners = jobExecutionImpl.getListenerFactory().getSkipWriteListeners(step, injectionRef, stepContext);
    this.retryProcessListeners = jobExecutionImpl.getListenerFactory().getRetryProcessListeners(step, injectionRef, stepContext);
    this.retryReadListeners = jobExecutionImpl.getListenerFactory().getRetryReadListeners(step, injectionRef, stepContext);
    this.retryWriteListeners = jobExecutionImpl.getListenerFactory().getRetryWriteListeners(step, injectionRef, stepContext);
    this.itemReadListeners = jobExecutionImpl.getListenerFactory().getItemReadListeners(step, injectionRef, stepContext);
    this.itemProcessListeners = jobExecutionImpl.getListenerFactory().getItemProcessListeners(step, injectionRef, stepContext);
    this.itemWriteListeners = jobExecutionImpl.getListenerFactory().getItemWriteListeners(step, injectionRef, stepContext);
    // Delegate to the shared selection helper instead of duplicating the
    // "item" vs. custom checkpoint-algorithm logic inline (which also used
    // a broken '==' String comparison).
    chkptAlg = getCheckpointAlgorithm(itemCount, timeInterval);
    if (logger.isLoggable(Level.FINE)) {
        logger.fine("Setting contexts for chunk artifacts");
        // Both initialization messages now sit under the FINE guard; the
        // time-interval message was previously logged unconditionally
        // because the guard had no braces.
        logger.fine("Initialize checkpoint manager with item-count=" + itemCount);
        logger.fine("Initialize checkpoint manager with time-interval=" + timeInterval);
    }
    checkpointManager = new CheckpointManager(readerProxy, writerProxy, chkptAlg, jobExecutionImpl.getExecutionId(), jobExecutionImpl
            .getJobInstance().getInstanceId(), step.getId());
    skipHandler = new SkipHandler(chunk, jobExecutionImpl.getJobInstance().getInstanceId(), step.getId());
    skipHandler.addSkipProcessListener(skipProcessListeners);
    skipHandler.addSkipReadListener(skipReadListeners);
    skipHandler.addSkipWriteListener(skipWriteListeners);
    retryHandler = new RetryHandler(chunk, jobExecutionImpl.getJobInstance().getInstanceId(), step.getId());
    retryHandler.addRetryProcessListener(retryProcessListeners);
    retryHandler.addRetryReadListener(retryReadListeners);
    retryHandler.addRetryWriteListener(retryWriteListeners);
    if (logger.isLoggable(Level.FINE)) {
        logger.exiting(sourceClass, sourceMethod);
    }
}
/*
 * Opens the reader and writer proxies, restoring each from its persisted
 * checkpoint token when one exists in the backing store, or with a null
 * checkpoint otherwise (fresh start).
 */
private void openReaderAndWriter() {
    String sourceMethod = "openReaderAndWriter";
    if (logger.isLoggable(Level.FINE))
        logger.entering(sourceClass, sourceMethod);
    _persistenceManagerService = servicesManager.getPersistenceManagerService();
    // Look up the reader's checkpoint for this job instance + step.
    readerChkptDK = new CheckpointDataKey(jobExecutionImpl.getJobInstance().getInstanceId(), step.getId(), "READER");
    CheckpointData readerChkptData = _persistenceManagerService.getCheckpointData(readerChkptDK);
    try {
        // check for data in backing store
        if (readerChkptData != null) {
            // Deserialize the restart token and hand it to the reader's open().
            byte[] readertoken = readerChkptData.getRestartToken();
            ByteArrayInputStream readerChkptBA = new ByteArrayInputStream(readertoken);
            TCCLObjectInputStream readerOIS = null;
            try {
                readerOIS = new TCCLObjectInputStream(readerChkptBA);
                readerProxy.open((Serializable) readerOIS.readObject());
                readerOIS.close();
            } catch (Exception ex) {
                // is this what I should be throwing here?
                // NOTE(review): message says "persist" but this path is
                // restoring checkpoint data — consider rewording.
                throw new BatchContainerServiceException("Cannot persist the checkpoint data for [" + step.getId() + "]", ex);
            }
        } else {
            // no chkpt data exists in the backing store
            readerChkptData = null;
            readerProxy.open(null);
        }
    } catch (ClassCastException e) {
        // NOTE(review): "but found" is missing a trailing space before the value.
        logger.warning("Expected CheckpointData but found" + readerChkptData );
        throw new IllegalStateException("Expected CheckpointData but found" + readerChkptData );
    }
    // Same restore sequence for the writer's checkpoint.
    writerChkptDK = new CheckpointDataKey(jobExecutionImpl.getJobInstance().getInstanceId(), step.getId(), "WRITER");
    CheckpointData writerChkptData = _persistenceManagerService.getCheckpointData(writerChkptDK);
    try {
        // check for data in backing store
        if (writerChkptData != null) {
            byte[] writertoken = writerChkptData.getRestartToken();
            ByteArrayInputStream writerChkptBA = new ByteArrayInputStream(writertoken);
            TCCLObjectInputStream writerOIS = null;
            try {
                writerOIS = new TCCLObjectInputStream(writerChkptBA);
                writerProxy.open((Serializable) writerOIS.readObject());
                writerOIS.close();
            } catch (Exception ex) {
                // is this what I should be throwing here?
                throw new BatchContainerServiceException("Cannot persist the checkpoint data for [" + step.getId() + "]", ex);
            }
        } else {
            // no chkpt data exists in the backing store
            writerChkptData = null;
            writerProxy.open(null);
        }
    } catch (ClassCastException e) {
        logger.warning("Expected Checkpoint but found" + writerChkptData);
        throw new IllegalStateException("Expected Checkpoint but found" + writerChkptData);
    }
    // set up metrics
    // stepContext.addMetric(MetricImpl.Counter.valueOf("READ_COUNT"), 0);
    // stepContext.addMetric(MetricImpl.Counter.valueOf("WRITE_COUNT"), 0);
    // stepContext.addMetric(MetricImpl.Counter.valueOf("READ_SKIP_COUNT"),
    // 0);
    // stepContext.addMetric(MetricImpl.Counter.valueOf("PROCESS_SKIP_COUNT"),
    // 0);
    // stepContext.addMetric(MetricImpl.Counter.valueOf("WRITE_SKIP_COUNT"),
    // 0);
    if (logger.isLoggable(Level.FINE))
        logger.exiting(sourceClass, sourceMethod);
}
@Override
public void stop() {
    // Mark the step as stopping; the chunk loop checks step status between
    // items, so no direct interruption of the artifacts is required.
    stepContext.setBatchStatus(BatchStatus.STOPPING);
    // we don't need to call stop on the chunk implementation here since a
    // chunk always returns control to
    // the batch container after every item.
}
/**
 * Asks the skip handler whether a read failure may be skipped.
 *
 * @param e the exception thrown by the reader
 * @return {@code true} if the exception was accepted as skippable,
 *         {@code false} if the handler rejected it
 */
boolean skipReadException(Exception e) {
    try {
        skipHandler.handleExceptionRead(e);
        return true;
    } catch (BatchContainerRuntimeException notSkippable) {
        return false;
    }
}
/**
 * Asks the retry handler whether a read failure may be retried.
 *
 * @param e the exception thrown by the reader
 * @return {@code true} if the exception was accepted as retryable,
 *         {@code false} if the handler rejected it
 */
boolean retryReadException(Exception e) {
    try {
        retryHandler.handleExceptionRead(e);
        return true;
    } catch (BatchContainerRuntimeException notRetryable) {
        return false;
    }
}
/**
 * Asks the skip handler whether a processing failure for the given record
 * may be skipped.
 *
 * @param e the exception thrown by the processor
 * @param record the item that was being processed
 * @return {@code true} if skippable, {@code false} otherwise
 */
boolean skipProcessException(Exception e, Object record) {
    try {
        skipHandler.handleExceptionWithRecordProcess(e, record);
        return true;
    } catch (BatchContainerRuntimeException notSkippable) {
        return false;
    }
}
/**
 * Asks the retry handler whether a processing failure for the given record
 * may be retried.
 *
 * @param e the exception thrown by the processor
 * @param record the item that was being processed
 * @return {@code true} if retryable, {@code false} otherwise
 */
boolean retryProcessException(Exception e, Object record) {
    try {
        retryHandler.handleExceptionProcess(e, record);
        return true;
    } catch (BatchContainerRuntimeException notRetryable) {
        return false;
    }
}
/**
 * Asks the skip handler whether a write failure for the given chunk of
 * items may be skipped.
 *
 * @param e the exception thrown by the writer
 * @param chunkToWrite the items that were being written
 * @return {@code true} if skippable, {@code false} otherwise
 */
boolean skipWriteException(Exception e, List<Object> chunkToWrite) {
    try {
        skipHandler.handleExceptionWithRecordListWrite(e, chunkToWrite);
        return true;
    } catch (BatchContainerRuntimeException notSkippable) {
        return false;
    }
}
/**
 * Asks the retry handler whether a write failure for the given chunk of
 * items may be retried.
 *
 * @param e the exception thrown by the writer
 * @param chunkToWrite the items that were being written
 * @return {@code true} if retryable, {@code false} otherwise
 */
boolean retryWriteException(Exception e, List<Object> chunkToWrite) {
    try {
        retryHandler.handleExceptionWrite(e, chunkToWrite);
        return true;
    } catch (BatchContainerRuntimeException notRetryable) {
        return false;
    }
}
/*
 * Re-opens the reader at its last persisted checkpoint (restart case), or
 * with a null checkpoint when no data exists in the backing store.
 * NOTE(review): largely duplicates the reader half of openReaderAndWriter().
 */
private void positionReaderAtCheckpoint() {
    _persistenceManagerService = servicesManager.getPersistenceManagerService();
    readerChkptDK = new CheckpointDataKey(jobExecutionImpl.getJobInstance().getInstanceId(), step.getId(), "READER");
    CheckpointData readerData = _persistenceManagerService.getCheckpointData(readerChkptDK);
    try {
        // check for data in backing store
        if (readerData != null) {
            // Deserialize the restart token and pass it to the reader's open().
            byte[] readertoken = readerData.getRestartToken();
            ByteArrayInputStream readerChkptBA = new ByteArrayInputStream(readertoken);
            TCCLObjectInputStream readerOIS = null;
            try {
                readerOIS = new TCCLObjectInputStream(readerChkptBA);
                readerProxy.open((Serializable) readerOIS.readObject());
                readerOIS.close();
            } catch (Exception ex) {
                // is this what I should be throwing here?
                // NOTE(review): message says "persist" but this path is
                // restoring checkpoint data — consider rewording.
                throw new BatchContainerServiceException("Cannot persist the checkpoint data for [" + step.getId() + "]", ex);
            }
        } else {
            // no chkpt data exists in the backing store
            readerData = null;
            readerProxy.open(null);
        }
    } catch (ClassCastException e) {
        throw new IllegalStateException("Expected CheckpointData but found" + readerData);
    }
}
/*
 * Re-opens the writer at its last persisted checkpoint (restart case), or
 * with a null checkpoint when no data exists in the backing store.
 * NOTE(review): largely duplicates the writer half of openReaderAndWriter().
 */
private void positionWriterAtCheckpoint() {
    _persistenceManagerService = servicesManager.getPersistenceManagerService();
    writerChkptDK = new CheckpointDataKey(jobExecutionImpl.getJobInstance().getInstanceId(), step.getId(), "WRITER");
    CheckpointData writerData = _persistenceManagerService.getCheckpointData(writerChkptDK);
    try {
        // check for data in backing store
        if (writerData != null) {
            // Deserialize the restart token and pass it to the writer's open().
            byte[] writertoken = writerData.getRestartToken();
            ByteArrayInputStream writerChkptBA = new ByteArrayInputStream(writertoken);
            TCCLObjectInputStream writerOIS = null;
            try {
                writerOIS = new TCCLObjectInputStream(writerChkptBA);
                writerProxy.open((Serializable) writerOIS.readObject());
                writerOIS.close();
            } catch (Exception ex) {
                // is this what I should be throwing here?
                // NOTE(review): message says "persist" but this path is
                // restoring checkpoint data — consider rewording.
                throw new BatchContainerServiceException("Cannot persist the checkpoint data for [" + step.getId() + "]", ex);
            }
        } else {
            // no chkpt data exists in the backing store
            writerData = null;
            writerProxy.open(null);
        }
    } catch (ClassCastException e) {
        throw new IllegalStateException("Expected CheckpointData but found" + writerData);
    }
}
}
| |
/*
* Copyright (C) 2016 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.strata.data.scenario;
import static com.opengamma.strata.collect.Guavate.toImmutableList;
import static com.opengamma.strata.collect.TestHelper.coverBeanEquals;
import static com.opengamma.strata.collect.TestHelper.coverImmutableBean;
import static com.opengamma.strata.collect.TestHelper.date;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatExceptionOfType;
import static org.assertj.core.api.Assertions.assertThatIllegalArgumentException;
import java.time.LocalDate;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Stream;
import org.junit.jupiter.api.Test;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.opengamma.strata.collect.array.DoubleArray;
import com.opengamma.strata.collect.timeseries.LocalDateDoubleTimeSeries;
import com.opengamma.strata.data.MarketData;
import com.opengamma.strata.data.MarketDataId;
import com.opengamma.strata.data.MarketDataName;
import com.opengamma.strata.data.MarketDataNotFoundException;
import com.opengamma.strata.data.ObservableId;
/**
 * Test {@link ScenarioMarketData} and {@link ImmutableScenarioMarketData}.
 * <p>
 * Covers the static factories, the empty instance, the interface's default
 * methods, per-scenario views, and the scenario-value boxing helpers.
 */
public class ScenarioMarketDataTest {

  private static final LocalDate VAL_DATE = date(2015, 6, 30);
  private static final TestObservableId ID1 = TestObservableId.of("1");
  private static final TestObservableId ID2 = TestObservableId.of("2");
  private static final double VAL1 = 1d;
  private static final double VAL2 = 2d;
  private static final double VAL3 = 3d;
  // BOX1 holds two scenario values; BOX2 holds one.
  private static final MarketDataBox<Double> BOX1 = MarketDataBox.ofScenarioValues(VAL1, VAL2);
  private static final MarketDataBox<Double> BOX2 = MarketDataBox.ofScenarioValues(VAL3);
  private static final LocalDateDoubleTimeSeries TIME_SERIES = LocalDateDoubleTimeSeries.builder()
      .put(date(2011, 3, 8), 1.1)
      .put(date(2011, 3, 10), 1.2)
      .build();

  //-------------------------------------------------------------------------
  // Basic factory: values and time-series are retrievable, unknown ids fail.
  @Test
  public void test_of() {
    Map<MarketDataId<?>, MarketDataBox<?>> dataMap = ImmutableMap.of(ID1, BOX1);
    Map<ObservableId, LocalDateDoubleTimeSeries> tsMap = ImmutableMap.of(ID1, TIME_SERIES);
    ScenarioMarketData test = ScenarioMarketData.of(2, VAL_DATE, dataMap, tsMap);
    assertThat(test.getValuationDate()).isEqualTo(MarketDataBox.ofSingleValue(VAL_DATE));
    assertThat(test.containsValue(ID1)).isTrue();
    assertThat(test.containsValue(ID2)).isFalse();
    assertThat(test.getValue(ID1)).isEqualTo(BOX1);
    assertThatExceptionOfType(MarketDataNotFoundException.class).isThrownBy(() -> test.getValue(ID2));
    assertThat(test.findValue(ID1)).hasValue(BOX1);
    assertThat(test.findValue(ID2)).isEmpty();
    assertThat(test.getIds()).isEqualTo(ImmutableSet.of(ID1));
    assertThat(test.getTimeSeries(ID1)).isEqualTo(TIME_SERIES);
    assertThat(test.getTimeSeries(ID2)).isEqualTo(LocalDateDoubleTimeSeries.empty());
  }

  // Zero-scenario data with an empty box is still queryable.
  @Test
  public void test_of_noScenarios() {
    Map<MarketDataId<?>, MarketDataBox<?>> dataMap = ImmutableMap.of(ID1, MarketDataBox.empty());
    ScenarioMarketData test = ScenarioMarketData.of(0, VAL_DATE, dataMap, ImmutableMap.of());
    assertThat(test.getValuationDate()).isEqualTo(MarketDataBox.ofSingleValue(VAL_DATE));
    assertThat(test.containsValue(ID1)).isTrue();
    assertThat(test.containsValue(ID2)).isFalse();
    assertThat(test.getValue(ID1)).isEqualTo(MarketDataBox.empty());
    assertThatExceptionOfType(MarketDataNotFoundException.class).isThrownBy(() -> test.getValue(ID2));
    assertThat(test.findValue(ID1)).hasValue(MarketDataBox.empty());
    assertThat(test.findValue(ID2)).isEmpty();
    assertThat(test.getIds()).isEqualTo(ImmutableSet.of(ID1));
    assertThat(test.getTimeSeries(ID1)).isEqualTo(LocalDateDoubleTimeSeries.empty());
    assertThat(test.getTimeSeries(ID2)).isEqualTo(LocalDateDoubleTimeSeries.empty());
  }

  // Single-scenario factory wrapping a plain MarketData instance.
  @Test
  public void test_of_repeated() {
    ScenarioMarketData test = ScenarioMarketData.of(1, MarketData.of(VAL_DATE, ImmutableMap.of(ID1, VAL1)));
    assertThat(test.getValuationDate()).isEqualTo(MarketDataBox.ofSingleValue(VAL_DATE));
    assertThat(test.getValue(ID1)).isEqualTo(MarketDataBox.ofSingleValue(VAL1));
  }

  // The empty instance contains no values, ids, or time-series.
  @Test
  public void test_empty() {
    ScenarioMarketData test = ScenarioMarketData.empty();
    assertThat(test.getValuationDate()).isEqualTo(MarketDataBox.empty());
    assertThat(test.containsValue(ID1)).isFalse();
    assertThat(test.containsValue(ID2)).isFalse();
    assertThatExceptionOfType(MarketDataNotFoundException.class).isThrownBy(() -> test.getValue(ID1));
    assertThatExceptionOfType(MarketDataNotFoundException.class).isThrownBy(() -> test.getValue(ID2));
    assertThat(test.findValue(ID1)).isEmpty();
    assertThat(test.findValue(ID2)).isEmpty();
    assertThat(test.getIds()).isEqualTo(ImmutableSet.of());
    assertThat(test.getTimeSeries(ID1)).isEqualTo(LocalDateDoubleTimeSeries.empty());
    assertThat(test.getTimeSeries(ID2)).isEqualTo(LocalDateDoubleTimeSeries.empty());
  }

  // Null map values are rejected at construction.
  @Test
  public void of_null() {
    Map<MarketDataId<?>, MarketDataBox<?>> dataMap = new HashMap<>();
    dataMap.put(ID1, null);
    Map<ObservableId, LocalDateDoubleTimeSeries> tsMap = ImmutableMap.of(ID1, TIME_SERIES);
    assertThatIllegalArgumentException().isThrownBy(() -> ScenarioMarketData.of(2, VAL_DATE, dataMap, tsMap));
  }

  // A box whose element type does not match the id's type is rejected.
  @Test
  public void of_badType() {
    Map<MarketDataId<?>, MarketDataBox<?>> dataMap = ImmutableMap.of(ID1, MarketDataBox.ofScenarioValues("", ""));
    Map<ObservableId, LocalDateDoubleTimeSeries> tsMap = ImmutableMap.of(ID1, TIME_SERIES);
    assertThatExceptionOfType(ClassCastException.class)
        .isThrownBy(() -> ScenarioMarketData.of(2, VAL_DATE, dataMap, tsMap));
  }

  // A box whose scenario count disagrees with the declared count is rejected.
  @Test
  public void of_badScenarios() {
    Map<MarketDataId<?>, MarketDataBox<?>> dataMap = ImmutableMap.of(ID1, MarketDataBox.ofScenarioValues(VAL1));
    Map<ObservableId, LocalDateDoubleTimeSeries> tsMap = ImmutableMap.of(ID1, TIME_SERIES);
    assertThatIllegalArgumentException().isThrownBy(() -> ScenarioMarketData.of(2, VAL_DATE, dataMap, tsMap));
  }

  //-------------------------------------------------------------------------
  // Exercises the interface's default methods via a minimal anonymous impl
  // that only overrides the abstract members.
  @Test
  public void test_defaultMethods() {
    ScenarioMarketData test = new ScenarioMarketData() {

      @Override
      public MarketDataBox<LocalDate> getValuationDate() {
        return MarketDataBox.ofSingleValue(VAL_DATE);
      }

      @Override
      public LocalDateDoubleTimeSeries getTimeSeries(ObservableId id) {
        return LocalDateDoubleTimeSeries.empty();
      }

      @Override
      public int getScenarioCount() {
        return 2;
      }

      @Override
      @SuppressWarnings("unchecked")
      public <T> Optional<MarketDataBox<T>> findValue(MarketDataId<T> id) {
        // Only ID1 resolves; everything else is absent.
        return id.equals(ID1) ? Optional.of((MarketDataBox<T>) BOX1) : Optional.empty();
      }

      @Override
      public Set<MarketDataId<?>> getIds() {
        return ImmutableSet.of();
      }

      @Override
      public <T> Set<MarketDataId<T>> findIds(MarketDataName<T> name) {
        return ImmutableSet.of();
      }

      @Override
      public Set<ObservableId> getTimeSeriesIds() {
        return ImmutableSet.of();
      }
    };
    assertThat(test.getValuationDate()).isEqualTo(MarketDataBox.ofSingleValue(VAL_DATE));
    assertThat(test.containsValue(ID1)).isTrue();
    assertThat(test.containsValue(ID2)).isFalse();
    assertThat(test.getValue(ID1)).isEqualTo(BOX1);
    assertThatExceptionOfType(MarketDataNotFoundException.class).isThrownBy(() -> test.getValue(ID2));
    assertThat(test.findValue(ID1)).hasValue(BOX1);
    assertThat(test.findValue(ID2)).isEmpty();
  }

  //-------------------------------------------------------------------------
  // Per-scenario views return the value for that scenario index.
  @Test
  public void test_scenarios() {
    Map<MarketDataId<?>, MarketDataBox<?>> dataMap = ImmutableMap.of(ID1, BOX1);
    Map<ObservableId, LocalDateDoubleTimeSeries> tsMap = ImmutableMap.of(ID1, TIME_SERIES);
    ScenarioMarketData test = ScenarioMarketData.of(2, VAL_DATE, dataMap, tsMap);
    MarketData scenario0 = test.scenario(0);
    MarketData scenario1 = test.scenario(1);
    assertThat(scenario0.getValue(ID1)).isEqualTo(BOX1.getValue(0));
    assertThat(scenario1.getValue(ID1)).isEqualTo(BOX1.getValue(1));
    List<Double> list = test.scenarios().map(s -> s.getValue(ID1)).collect(toImmutableList());
    assertThat(list.get(0)).isEqualTo(BOX1.getValue(0));
    assertThat(list.get(1)).isEqualTo(BOX1.getValue(1));
  }

  //-------------------------------------------------------------------------
  // Joda-Beans coverage of the immutable implementation.
  @Test
  public void coverage() {
    Map<MarketDataId<?>, MarketDataBox<?>> dataMap = ImmutableMap.of(ID1, BOX1);
    Map<ObservableId, LocalDateDoubleTimeSeries> tsMap = ImmutableMap.of(ID1, TIME_SERIES);
    ImmutableScenarioMarketData test = ImmutableScenarioMarketData.of(2, VAL_DATE, dataMap, tsMap);
    coverImmutableBean(test);
    Map<MarketDataId<?>, MarketDataBox<?>> dataMap2 = ImmutableMap.of(ID2, BOX2);
    Map<ObservableId, LocalDateDoubleTimeSeries> tsMap2 = ImmutableMap.of(ID2, TIME_SERIES);
    ImmutableScenarioMarketData test2 = ImmutableScenarioMarketData.of(1, VAL_DATE.plusDays(1), dataMap2, tsMap2);
    coverBeanEquals(test, test2);
  }

  //-------------------------------------------------------------------------
  // getScenarioValue: a single-value box is replicated across all scenarios.
  @Test
  public void getScenarioValueFromSingleValue() {
    MarketDataBox<Double> box = MarketDataBox.ofSingleValue(9d);
    TestMarketData marketData = new TestMarketData(box);
    TestArrayKey key = new TestArrayKey();
    TestDoubleArray array = marketData.getScenarioValue(key);
    assertThat(array.values).isEqualTo(DoubleArray.of(9, 9, 9));
  }

  // getScenarioValue: a box already holding the requested type is returned as-is.
  @Test
  public void getScenarioValueFromRequestedScenarioValue() {
    MarketDataBox<Double> box = MarketDataBox.ofScenarioValue(new TestDoubleArray(DoubleArray.of(9d, 9d, 9d)));
    TestMarketData marketData = new TestMarketData(box);
    TestArrayKey key = new TestArrayKey();
    TestDoubleArray array = marketData.getScenarioValue(key);
    assertThat(array.values).isEqualTo(DoubleArray.of(9, 9, 9));
  }

  // getScenarioValue: a per-scenario box of a different type is converted via the key.
  @Test
  public void getScenarioValueFromOtherScenarioValue() {
    MarketDataBox<Double> box = MarketDataBox.ofScenarioValues(9d, 9d, 9d);
    TestMarketData marketData = new TestMarketData(box);
    TestArrayKey key = new TestArrayKey();
    TestDoubleArray array = marketData.getScenarioValue(key);
    assertThat(array.values).isEqualTo(DoubleArray.of(9, 9, 9));
  }

  //--------------------------------------------------------------------------------------------------
  // Minimal ScenarioArray backed by a DoubleArray, used as the target scenario type.
  private static final class TestDoubleArray implements ScenarioArray<Double> {

    private final DoubleArray values;

    private TestDoubleArray(DoubleArray values) {
      this.values = values;
    }

    @Override
    public Double get(int scenarioIndex) {
      return values.get(scenarioIndex);
    }

    @Override
    public int getScenarioCount() {
      return values.size();
    }

    @Override
    public Stream<Double> stream() {
      return values.stream().boxed();
    }
  }

  //--------------------------------------------------------------------------------------------------
  // Trivial id for a Double market-data value.
  private static final class TestId implements MarketDataId<Double> {

    @Override
    public Class<Double> getMarketDataType() {
      return Double.class;
    }
  }

  //--------------------------------------------------------------------------------------------------
  // Scenario key that converts a box of Doubles into a TestDoubleArray.
  private static final class TestArrayKey implements ScenarioMarketDataId<Double, TestDoubleArray> {

    @Override
    public MarketDataId<Double> getMarketDataId() {
      return new TestId();
    }

    @Override
    public Class<TestDoubleArray> getScenarioMarketDataType() {
      return TestDoubleArray.class;
    }

    @Override
    public TestDoubleArray createScenarioValue(MarketDataBox<Double> marketDataBox, int scenarioCount) {
      return new TestDoubleArray(DoubleArray.of(scenarioCount, i -> marketDataBox.getValue(i)));
    }
  }

  //--------------------------------------------------------------------------------------------------
  // Stub ScenarioMarketData that serves a fixed box for every id; only the
  // members needed by the getScenarioValue tests are implemented.
  private static final class TestMarketData implements ScenarioMarketData {

    private final MarketDataBox<?> value;

    private TestMarketData(MarketDataBox<?> value) {
      this.value = value;
    }

    @Override
    public MarketDataBox<LocalDate> getValuationDate() {
      throw new UnsupportedOperationException("getValuationDate() not implemented");
    }

    @Override
    public int getScenarioCount() {
      return 3;
    }

    @Override
    public Stream<MarketData> scenarios() {
      throw new UnsupportedOperationException("scenarios() not implemented");
    }

    @Override
    public MarketData scenario(int scenarioIndex) {
      throw new UnsupportedOperationException("scenario(int) not implemented");
    }

    @Override
    public boolean containsValue(MarketDataId<?> id) {
      throw new UnsupportedOperationException("containsValue(MarketDataKey) not implemented");
    }

    @Override
    @SuppressWarnings("unchecked")
    public <T> MarketDataBox<T> getValue(MarketDataId<T> id) {
      return (MarketDataBox<T>) value;
    }

    @Override
    public <T> Optional<MarketDataBox<T>> findValue(MarketDataId<T> id) {
      throw new UnsupportedOperationException("findValue not implemented");
    }

    @Override
    public Set<MarketDataId<?>> getIds() {
      return ImmutableSet.of();
    }

    @Override
    public <T> Set<MarketDataId<T>> findIds(MarketDataName<T> name) {
      return ImmutableSet.of();
    }

    @Override
    public LocalDateDoubleTimeSeries getTimeSeries(ObservableId id) {
      throw new UnsupportedOperationException("getTimeSeries(ObservableKey) not implemented");
    }

    @Override
    public Set<ObservableId> getTimeSeriesIds() {
      return ImmutableSet.of();
    }
  }
}
| |
/*
* Copyright 2002-2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.jdbc.core.simple;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.sql.DataSource;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.dao.DataAccessException;
import org.springframework.dao.DataIntegrityViolationException;
import org.springframework.dao.InvalidDataAccessApiUsageException;
import org.springframework.dao.InvalidDataAccessResourceUsageException;
import org.springframework.jdbc.core.BatchPreparedStatementSetter;
import org.springframework.jdbc.core.ConnectionCallback;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.core.PreparedStatementCreator;
import org.springframework.jdbc.core.SqlTypeValue;
import org.springframework.jdbc.core.StatementCreatorUtils;
import org.springframework.jdbc.core.metadata.TableMetaDataContext;
import org.springframework.jdbc.core.namedparam.SqlParameterSource;
import org.springframework.jdbc.support.GeneratedKeyHolder;
import org.springframework.jdbc.support.JdbcUtils;
import org.springframework.jdbc.support.KeyHolder;
import org.springframework.jdbc.support.nativejdbc.NativeJdbcExtractor;
import org.springframework.util.Assert;
/**
* Abstract class to provide base functionality for easy inserts
* based on configuration options and database metadata.
* This class provides the base SPI for {@link SimpleJdbcInsert}.
*
* @author Thomas Risberg
* @author Juergen Hoeller
* @since 2.5
*/
public abstract class AbstractJdbcInsert {
/** Logger available to subclasses. */
protected final Log logger = LogFactory.getLog(getClass());

/** Lower-level class used to execute SQL. */
private final JdbcTemplate jdbcTemplate;

/** Context used to retrieve and manage database metadata. */
private final TableMetaDataContext tableMetaDataContext = new TableMetaDataContext();

/** List of column names to be used in the insert statement. */
private final List<String> declaredColumns = new ArrayList<String>();

/** The names of the columns holding the generated key. */
private String[] generatedKeyNames = new String[0];

/**
 * Has this operation been compiled? Compilation means at least checking
 * that a DataSource or JdbcTemplate has been provided.
 */
private boolean compiled = false;

/** The generated string used for the insert statement; built during compilation. */
private String insertString;

/** The SQL type information for the insert columns; built during compilation. */
private int[] insertTypes;
/**
 * Constructor to be used when initializing using a {@link DataSource}.
 * @param dataSource the DataSource to be used (must not be {@code null})
 */
protected AbstractJdbcInsert(DataSource dataSource) {
	// Fail fast with a clear message, consistent with the JdbcTemplate-based
	// constructor's explicit null check.
	Assert.notNull(dataSource, "DataSource must not be null");
	this.jdbcTemplate = new JdbcTemplate(dataSource);
}
/**
 * Constructor to be used when initializing using a {@link JdbcTemplate}.
 * @param jdbcTemplate the JdbcTemplate to use
 */
protected AbstractJdbcInsert(JdbcTemplate jdbcTemplate) {
	Assert.notNull(jdbcTemplate, "JdbcTemplate must not be null");
	this.jdbcTemplate = jdbcTemplate;
	// NOTE(review): calls an overridable public method from the constructor;
	// a subclass override would run before its own state is initialized.
	setNativeJdbcExtractor(jdbcTemplate.getNativeJdbcExtractor());
}
//-------------------------------------------------------------------------
// Methods dealing with configuration properties
//-------------------------------------------------------------------------

/**
 * Get the configured {@link JdbcTemplate}.
 * @return the JdbcTemplate used to execute SQL
 */
public JdbcTemplate getJdbcTemplate() {
	return this.jdbcTemplate;
}

/**
 * Set the name of the table for this insert.
 * @param tableName the table name
 */
public void setTableName(String tableName) {
	checkIfConfigurationModificationIsAllowed();
	this.tableMetaDataContext.setTableName(tableName);
}

/**
 * Get the name of the table for this insert.
 * @return the configured table name
 */
public String getTableName() {
	return this.tableMetaDataContext.getTableName();
}

/**
 * Set the name of the schema for this insert.
 * @param schemaName the schema name
 */
public void setSchemaName(String schemaName) {
	checkIfConfigurationModificationIsAllowed();
	this.tableMetaDataContext.setSchemaName(schemaName);
}

/**
 * Get the name of the schema for this insert.
 * @return the configured schema name
 */
public String getSchemaName() {
	return this.tableMetaDataContext.getSchemaName();
}

/**
 * Set the name of the catalog for this insert.
 * @param catalogName the catalog name
 */
public void setCatalogName(String catalogName) {
	checkIfConfigurationModificationIsAllowed();
	this.tableMetaDataContext.setCatalogName(catalogName);
}

/**
 * Get the name of the catalog for this insert.
 * @return the configured catalog name
 */
public String getCatalogName() {
	return this.tableMetaDataContext.getCatalogName();
}

/**
 * Set the names of the columns to be used.
 * @param columnNames the column names; replaces any previously declared columns
 */
public void setColumnNames(List<String> columnNames) {
	checkIfConfigurationModificationIsAllowed();
	this.declaredColumns.clear();
	this.declaredColumns.addAll(columnNames);
}

/**
 * Get the names of the columns used.
 * @return an unmodifiable view of the declared column names
 */
public List<String> getColumnNames() {
	return Collections.unmodifiableList(this.declaredColumns);
}

/**
 * Specify the name of a single generated key column.
 * @param generatedKeyName the generated key column name
 */
public void setGeneratedKeyName(String generatedKeyName) {
	checkIfConfigurationModificationIsAllowed();
	this.generatedKeyNames = new String[] {generatedKeyName};
}

/**
 * Set the names of any generated keys.
 * @param generatedKeyNames the generated key column names
 */
public void setGeneratedKeyNames(String... generatedKeyNames) {
	checkIfConfigurationModificationIsAllowed();
	this.generatedKeyNames = generatedKeyNames;
}

/**
 * Get the names of any generated keys.
 * @return the generated key column names (never {@code null})
 */
public String[] getGeneratedKeyNames() {
	return this.generatedKeyNames;
}

/**
 * Specify whether the parameter metadata for the call should be used.
 * The default is {@code true}.
 * @param accessTableColumnMetaData whether to access table column metadata
 */
public void setAccessTableColumnMetaData(boolean accessTableColumnMetaData) {
	this.tableMetaDataContext.setAccessTableColumnMetaData(accessTableColumnMetaData);
}

/**
 * Specify whether the default for including synonyms should be changed.
 * The default is {@code false}.
 * @param override whether to override the include-synonyms default
 */
public void setOverrideIncludeSynonymsDefault(boolean override) {
	this.tableMetaDataContext.setOverrideIncludeSynonymsDefault(override);
}

/**
 * Set the {@link NativeJdbcExtractor} to use to retrieve the native connection if necessary.
 * @param nativeJdbcExtractor the extractor to use (may be {@code null})
 */
public void setNativeJdbcExtractor(NativeJdbcExtractor nativeJdbcExtractor) {
	this.tableMetaDataContext.setNativeJdbcExtractor(nativeJdbcExtractor);
}

/**
 * Get the insert string to be used.
 * @return the generated insert SQL; {@code null} until compiled
 */
public String getInsertString() {
	return this.insertString;
}

/**
 * Get the array of {@link java.sql.Types} to be used for insert.
 * @return the SQL types for the insert columns; {@code null} until compiled
 */
public int[] getInsertTypes() {
	return this.insertTypes;
}
//-------------------------------------------------------------------------
// Methods handling compilation issues
//-------------------------------------------------------------------------
/**
 * Compile this JdbcInsert using provided parameters and meta data plus other settings.
 * This finalizes the configuration for this object and subsequent attempts to compile are
 * ignored. This will be implicitly called the first time an un-compiled insert is executed.
 * @throws InvalidDataAccessApiUsageException if the object hasn't been correctly initialized,
 * for example if no DataSource has been provided
 */
public synchronized final void compile() throws InvalidDataAccessApiUsageException {
    if (!isCompiled()) {
        if (getTableName() == null) {
            throw new InvalidDataAccessApiUsageException("Table name is required");
        }
        try {
            this.jdbcTemplate.afterPropertiesSet();
        }
        catch (IllegalArgumentException ex) {
            // Preserve the original exception as the cause instead of discarding it,
            // so callers can see what actually failed during template initialization.
            throw new InvalidDataAccessApiUsageException(ex.getMessage(), ex);
        }
        compileInternal();
        this.compiled = true;
        if (logger.isDebugEnabled()) {
            logger.debug("JdbcInsert for table [" + getTableName() + "] compiled");
        }
    }
}
/**
 * Delegate method to perform the actual compilation.
 * <p>Subclasses can override this template method to perform their own compilation.
 * Invoked after this base class's compilation is complete.
 */
protected void compileInternal() {
// Order matters: meta-data must be processed before the insert string and
// the insert types can be derived from it.
this.tableMetaDataContext.processMetaData(
getJdbcTemplate().getDataSource(), getColumnNames(), getGeneratedKeyNames());
this.insertString = this.tableMetaDataContext.createInsertString(getGeneratedKeyNames());
this.insertTypes = this.tableMetaDataContext.createInsertTypes();
if (logger.isDebugEnabled()) {
logger.debug("Compiled insert object: insert string is [" + getInsertString() + "]");
}
// Give subclasses a chance to react to the completed compilation.
onCompileInternal();
}
/**
 * Hook method that subclasses may override to react to compilation.
 * <p>This implementation is empty.
 */
protected void onCompileInternal() {
}
/**
 * Is this operation "compiled"?
 * @return whether this operation is compiled, and ready to use.
 */
public boolean isCompiled() {
    return this.compiled;
}

/**
 * Check whether this operation has been compiled already;
 * lazily compile it if not already compiled.
 * <p>Automatically called by {@code validateParameters}.
 */
protected void checkCompiled() {
    if (isCompiled()) {
        return;
    }
    logger.debug("JdbcInsert not compiled before execution - invoking compile");
    compile();
}

/**
 * Method to check whether we are allowed to make any configuration changes at this time.
 * If the class has been compiled, then no further changes to the configuration are allowed.
 */
protected void checkIfConfigurationModificationIsAllowed() {
    if (!isCompiled()) {
        return;
    }
    throw new InvalidDataAccessApiUsageException(
            "Configuration can't be altered once the class has been compiled or used");
}
//-------------------------------------------------------------------------
// Methods handling execution
//-------------------------------------------------------------------------
/**
 * Delegate method that executes the insert using the passed-in Map of parameters.
 * @param args Map with parameter names and values to be used in insert
 * @return the number of rows affected
 */
protected int doExecute(Map<String, ?> args) {
    checkCompiled();
    return executeInsertInternal(matchInParameterValuesWithInsertColumns(args));
}

/**
 * Delegate method that executes the insert using the passed-in {@link SqlParameterSource}.
 * @param parameterSource parameter names and values to be used in insert
 * @return the number of rows affected
 */
protected int doExecute(SqlParameterSource parameterSource) {
    checkCompiled();
    return executeInsertInternal(matchInParameterValuesWithInsertColumns(parameterSource));
}
/**
 * Delegate method to execute the insert.
 * @param values parameter values in declared-column order
 * @return the number of rows affected
 */
private int executeInsertInternal(List<?> values) {
    if (logger.isDebugEnabled()) {
        logger.debug("The following parameters are used for insert " + getInsertString() + " with: " + values);
    }
    Object[] params = values.toArray();
    return getJdbcTemplate().update(getInsertString(), params, getInsertTypes());
}

/**
 * Method that provides execution of the insert using the passed-in
 * Map of parameters and returning a generated key.
 * @param args Map with parameter names and values to be used in insert
 * @return the key generated by the insert
 */
protected Number doExecuteAndReturnKey(Map<String, ?> args) {
    checkCompiled();
    return executeInsertAndReturnKeyInternal(matchInParameterValuesWithInsertColumns(args));
}

/**
 * Method that provides execution of the insert using the passed-in
 * {@link SqlParameterSource} and returning a generated key.
 * @param parameterSource parameter names and values to be used in insert
 * @return the key generated by the insert
 */
protected Number doExecuteAndReturnKey(SqlParameterSource parameterSource) {
    checkCompiled();
    return executeInsertAndReturnKeyInternal(matchInParameterValuesWithInsertColumns(parameterSource));
}

/**
 * Method that provides execution of the insert using the passed-in
 * Map of parameters and returning all generated keys.
 * @param args Map with parameter names and values to be used in insert
 * @return the KeyHolder containing keys generated by the insert
 */
protected KeyHolder doExecuteAndReturnKeyHolder(Map<String, ?> args) {
    checkCompiled();
    return executeInsertAndReturnKeyHolderInternal(matchInParameterValuesWithInsertColumns(args));
}

/**
 * Method that provides execution of the insert using the passed-in
 * {@link SqlParameterSource} and returning all generated keys.
 * @param parameterSource parameter names and values to be used in insert
 * @return the KeyHolder containing keys generated by the insert
 */
protected KeyHolder doExecuteAndReturnKeyHolder(SqlParameterSource parameterSource) {
    checkCompiled();
    return executeInsertAndReturnKeyHolderInternal(matchInParameterValuesWithInsertColumns(parameterSource));
}
/**
 * Delegate method to execute the insert, generating a single key.
 * @param values parameter values in declared-column order
 * @return the single generated key
 * @throws DataIntegrityViolationException if no generated key could be retrieved
 */
private Number executeInsertAndReturnKeyInternal(final List<?> values) {
    KeyHolder keyHolder = executeInsertAndReturnKeyHolderInternal(values);
    if (keyHolder == null || keyHolder.getKey() == null) {
        throw new DataIntegrityViolationException(
                "Unable to retrieve the generated key for the insert: " + getInsertString());
    }
    return keyHolder.getKey();
}
/**
 * Delegate method to execute the insert, generating any number of keys.
 * <p>Uses the JDBC getGeneratedKeys feature when the database supports it;
 * otherwise falls back to a database-specific simulation. Exactly one
 * generated key column must be specified for the simulated path.
 * @param values parameter values in declared-column order
 * @return a KeyHolder populated with any generated key values
 */
private KeyHolder executeInsertAndReturnKeyHolderInternal(final List<?> values) {
    if (logger.isDebugEnabled()) {
        logger.debug("The following parameters are used for call " + getInsertString() + " with: " + values);
    }
    final KeyHolder keyHolder = new GeneratedKeyHolder();
    if (this.tableMetaDataContext.isGetGeneratedKeysSupported()) {
        getJdbcTemplate().update(
                new PreparedStatementCreator() {
                    @Override
                    public PreparedStatement createPreparedStatement(Connection con) throws SQLException {
                        PreparedStatement ps = prepareStatementForGeneratedKeys(con);
                        setParameterValues(ps, values, getInsertTypes());
                        return ps;
                    }
                },
                keyHolder);
    }
    else {
        if (!this.tableMetaDataContext.isGetGeneratedKeysSimulated()) {
            throw new InvalidDataAccessResourceUsageException(
                    "The getGeneratedKeys feature is not supported by this database");
        }
        if (getGeneratedKeyNames().length < 1) {
            throw new InvalidDataAccessApiUsageException("Generated Key Name(s) not specified. " +
                    "Using the generated keys features requires specifying the name(s) of the generated column(s)");
        }
        if (getGeneratedKeyNames().length > 1) {
            throw new InvalidDataAccessApiUsageException(
                    "Current database only supports retrieving the key for a single column. There are " +
                    getGeneratedKeyNames().length + " columns specified: " + Arrays.asList(getGeneratedKeyNames()));
        }
        // This is a hack to be able to get the generated key from a database that doesn't support
        // get generated keys feature. HSQL is one, PostgreSQL is another. Postgres uses a RETURNING
        // clause while HSQL uses a second query that has to be executed with the same connection.
        final String keyQuery = this.tableMetaDataContext.getSimulationQueryForGetGeneratedKey(
                this.tableMetaDataContext.getTableName(), getGeneratedKeyNames()[0]);
        Assert.notNull(keyQuery, "Query for simulating get generated keys can't be null");
        // Use a locale-independent, case-insensitive prefix check. The previous
        // keyQuery.toUpperCase().startsWith("RETURNING") is locale-sensitive (e.g. the
        // Turkish dotless 'i') and could fail to detect a RETURNING clause.
        if (keyQuery.regionMatches(true, 0, "RETURNING", 0, "RETURNING".length())) {
            // RETURNING clause: run insert + key retrieval as a single statement.
            Long key = getJdbcTemplate().queryForObject(getInsertString() + " " + keyQuery,
                    values.toArray(new Object[values.size()]), Long.class);
            Map<String, Object> keys = new HashMap<String, Object>(1);
            keys.put(getGeneratedKeyNames()[0], key);
            keyHolder.getKeyList().add(keys);
        }
        else {
            // Follow-up query style (HSQL): the key query must run on the same connection
            // as the insert itself.
            getJdbcTemplate().execute(new ConnectionCallback<Object>() {
                @Override
                public Object doInConnection(Connection con) throws SQLException, DataAccessException {
                    // Do the insert
                    PreparedStatement ps = null;
                    try {
                        ps = con.prepareStatement(getInsertString());
                        setParameterValues(ps, values, getInsertTypes());
                        ps.executeUpdate();
                    }
                    finally {
                        JdbcUtils.closeStatement(ps);
                    }
                    //Get the key
                    Statement keyStmt = null;
                    ResultSet rs = null;
                    Map<String, Object> keys = new HashMap<String, Object>(1);
                    try {
                        keyStmt = con.createStatement();
                        rs = keyStmt.executeQuery(keyQuery);
                        if (rs.next()) {
                            long key = rs.getLong(1);
                            keys.put(getGeneratedKeyNames()[0], key);
                            keyHolder.getKeyList().add(keys);
                        }
                    }
                    finally {
                        JdbcUtils.closeResultSet(rs);
                        JdbcUtils.closeStatement(keyStmt);
                    }
                    return null;
                }
            });
        }
        return keyHolder;
    }
    return keyHolder;
}
/**
 * Create a PreparedStatement to be used for an insert operation with generated keys.
 * @param con the Connection to use
 * @return the PreparedStatement
 */
private PreparedStatement prepareStatementForGeneratedKeys(Connection con) throws SQLException {
    if (getGeneratedKeyNames().length < 1) {
        throw new InvalidDataAccessApiUsageException("Generated Key Name(s) not specified. " +
                "Using the generated keys features requires specifying the name(s) of the generated column(s).");
    }
    boolean useColumnNames = this.tableMetaDataContext.isGeneratedKeysColumnNameArraySupported();
    if (logger.isDebugEnabled()) {
        logger.debug(useColumnNames ?
                "Using generated keys support with array of column names." :
                "Using generated keys support with Statement.RETURN_GENERATED_KEYS.");
    }
    return useColumnNames ?
            con.prepareStatement(getInsertString(), getGeneratedKeyNames()) :
            con.prepareStatement(getInsertString(), Statement.RETURN_GENERATED_KEYS);
}
/**
 * Delegate method that executes a batch insert using the passed-in Maps of parameters.
 * @param batch array of Maps with parameter names and values to be used in batch insert
 * @return array of number of rows affected
 */
protected int[] doExecuteBatch(Map<String, ?>... batch) {
    checkCompiled();
    List<List<Object>> batchValues = new ArrayList<List<Object>>(batch.length);
    for (int i = 0; i < batch.length; i++) {
        batchValues.add(matchInParameterValuesWithInsertColumns(batch[i]));
    }
    return executeBatchInternal(batchValues);
}

/**
 * Delegate method that executes a batch insert using the passed-in {@link SqlParameterSource}s.
 * @param batch array of SqlParameterSource with parameter names and values to be used in insert
 * @return array of number of rows affected
 */
protected int[] doExecuteBatch(SqlParameterSource... batch) {
    checkCompiled();
    List<List<Object>> batchValues = new ArrayList<List<Object>>(batch.length);
    for (int i = 0; i < batch.length; i++) {
        batchValues.add(matchInParameterValuesWithInsertColumns(batch[i]));
    }
    return executeBatchInternal(batchValues);
}
/**
 * Delegate method to execute the batch insert.
 * @param batchValues one List of column-ordered values per batch entry
 * @return array with the number of rows affected per batch entry
 */
private int[] executeBatchInternal(final List<List<Object>> batchValues) {
if (logger.isDebugEnabled()) {
logger.debug("Executing statement " + getInsertString() + " with batch of size: " + batchValues.size());
}
// The anonymous setter is called back once per batch entry by the template;
// it binds the i-th value list using the compiled insert types.
return getJdbcTemplate().batchUpdate(getInsertString(),
new BatchPreparedStatementSetter() {
@Override
public void setValues(PreparedStatement ps, int i) throws SQLException {
setParameterValues(ps, batchValues.get(i), getInsertTypes());
}
@Override
public int getBatchSize() {
return batchValues.size();
}
});
}
/**
 * Internal implementation for setting parameter values.
 * @param preparedStatement the PreparedStatement
 * @param values the values to be set
 * @param columnTypes SQL types for each column; entries beyond its length
 * (or a null array) fall back to {@code SqlTypeValue.TYPE_UNKNOWN}
 */
private void setParameterValues(PreparedStatement preparedStatement, List<?> values, int... columnTypes)
        throws SQLException {
    int paramIndex = 1;
    for (Object value : values) {
        if (columnTypes != null && paramIndex <= columnTypes.length) {
            StatementCreatorUtils.setParameterValue(preparedStatement, paramIndex, columnTypes[paramIndex - 1], value);
        }
        else {
            StatementCreatorUtils.setParameterValue(preparedStatement, paramIndex, SqlTypeValue.TYPE_UNKNOWN, value);
        }
        paramIndex++;
    }
}
/**
 * Match the provided in parameter values with registered parameters and parameters
 * defined via metadata processing.
 * @param parameterSource the parameter values provided as a {@link SqlParameterSource}
 * @return a List of parameter values, ordered to match the insert statement's columns
 */
protected List<Object> matchInParameterValuesWithInsertColumns(SqlParameterSource parameterSource) {
return this.tableMetaDataContext.matchInParameterValuesWithInsertColumns(parameterSource);
}
/**
 * Match the provided in parameter values with registered parameters and parameters
 * defined via metadata processing.
 * @param args the parameter values provided in a Map
 * @return a List of parameter values, ordered to match the insert statement's columns
 */
protected List<Object> matchInParameterValuesWithInsertColumns(Map<String, ?> args) {
return this.tableMetaDataContext.matchInParameterValuesWithInsertColumns(args);
}
}
| |
package org.marketcetera.metrics;
import org.marketcetera.util.misc.ClassVersion;
import org.marketcetera.util.misc.NamedThreadFactory;
import org.marketcetera.util.test.RegExAssert;
import org.marketcetera.core.LoggerConfiguration;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.After;
import static org.junit.Assert.*;
import static org.junit.Assert.assertEquals;
import org.hamcrest.Matchers;
import javax.management.MBeanServer;
import javax.management.JMX;
import java.util.concurrent.*;
import java.util.LinkedList;
import java.util.List;
import java.util.ArrayList;
import java.io.*;
import java.lang.management.ManagementFactory;
/* $License$ */
/**
* Tests {@link ThreadedMetric}.
*
* @author anshul@marketcetera.com
* @version $Id: ThreadedMetricTest.java 16154 2012-07-14 16:34:05Z colin $
* @since 2.0.0
*/
@ClassVersion("$Id: ThreadedMetricTest.java 16154 2012-07-14 16:34:05Z colin $")
public class ThreadedMetricTest {
@BeforeClass
public static void logSetup() {
// Configure logging once and force metrics collection on for all tests in this class.
LoggerConfiguration.logSetup();
ThreadedMetric.setEnabled(true);
}
/**
 * Verifies the summary produced by a single recorded iteration with three
 * checkpoints. Numeric expectations are lower bounds (see assertOutput()):
 * the begin timestamp must be at least currentTime and each recorded
 * interval at least sleepInterval.
 */
@Test
public void single() throws Exception {
final long currentTime = System.nanoTime();
String firstID = "first";
String secondID = "second";
String thirdID = "third";
//Run one iteration
oneIteration(TRUE, firstID, secondID, thirdID);
//Verify the summary
String[][][] sets = processCSV(summarize());
// One metric set (one thread) is expected.
assertEquals(1, sets.length);
String[][] rows = sets[0];
// Header row followed by one data row for the single recorded iteration.
assertOutput(new Object[][]{
{ThreadedMetric.BEGIN_IDENTIFIER, firstID, secondID, thirdID,
ThreadedMetric.END_IDENTIFIER, ThreadedMetric.ITERATIONS_HEADER},
{currentTime, sleepInterval, sleepInterval, sleepInterval, sleepInterval, 1}
}, rows);
}
/**
 * Verifies that an iteration whose end-condition throws is dropped from the
 * summary while later iterations are still recorded.
 */
@Test
public void ignoreExceptionInCondition() throws Exception {
final long currentTime = System.nanoTime();
String firstID = "first";
String secondID = "second";
//throw an exception, this should cause this metric to be ignored
oneIteration(new Callable<Boolean>() {
@Override
public Boolean call() throws Exception {
throw new IllegalArgumentException();
}
}, firstID, secondID);
//Do another iteration
oneIteration(TRUE, firstID, secondID);
String[][][] sets = processCSV(summarize());
assertEquals(1, sets.length);
String [][]rows = sets[0];
// Header plus a single data row: the failed iteration was not recorded,
// but the iteration counter still advanced to 2.
assertEquals(2, rows.length);
assertOutput(new Object[][]{
{ThreadedMetric.BEGIN_IDENTIFIER, firstID, secondID,
ThreadedMetric.END_IDENTIFIER, ThreadedMetric.ITERATIONS_HEADER},
{currentTime, sleepInterval, sleepInterval, sleepInterval, 2}
}, rows);
}
/**
 * Verifies summaries when iterations skip some of the checkpoints: skipped
 * checkpoints show up as empty cells in their column.
 */
@Test
public void missingCheckpoints() throws Exception {
final long currentTime = System.nanoTime();
String firstID = "first";
String secondID = "second";
String thirdID = "third";
//have an iteration with all the checkpoints
oneIteration(TRUE, firstID, secondID, thirdID);
//now have iterations with couple checkpoints missing
oneIteration(TRUE, secondID, thirdID);
oneIteration(TRUE, firstID, thirdID);
oneIteration(TRUE, firstID, secondID);
oneIteration(TRUE, firstID);
oneIteration(TRUE, secondID);
oneIteration(TRUE, thirdID);
//and one final iteration with all the checkpoints
oneIteration(TRUE, firstID, secondID, thirdID);
String[][][] sets = processCSV(summarize());
assertEquals(1, sets.length);
String[][] rows = sets[0];
// Header plus one row per iteration; "" cells mark skipped checkpoints.
assertEquals(9, rows.length);
assertOutput(new Object[][]{
{ThreadedMetric.BEGIN_IDENTIFIER, firstID, secondID, thirdID, ThreadedMetric.END_IDENTIFIER, ThreadedMetric.ITERATIONS_HEADER},
{currentTime, sleepInterval, sleepInterval, sleepInterval, sleepInterval, 1},
{currentTime, "", sleepInterval, sleepInterval, sleepInterval, 2},
{currentTime, sleepInterval, "", sleepInterval, sleepInterval, 3},
{currentTime, sleepInterval, sleepInterval, "", sleepInterval, 4},
{currentTime, sleepInterval, "", "", sleepInterval, 5},
{currentTime, "", sleepInterval, "", sleepInterval, 6},
{currentTime, "", "", sleepInterval, sleepInterval, 7},
{currentTime, sleepInterval, sleepInterval, sleepInterval, sleepInterval, 8},
},rows);
}
/**
 * Verifies summaries when iterations contain checkpoints beyond the first
 * iteration's set: extras are reported in a trailing [name=interval:...] cell,
 * matched here as a regex (NPTN matches a nanosecond interval).
 */
@Test
public void extraCheckpoints() throws Exception {
final long currentTime = System.nanoTime();
String firstID = "first";
String secondID = "second";
String thirdID = "third";
String fourthID = "fourth";
//have an iteration with all the checkpoints
oneIteration(TRUE, firstID, thirdID);
//now have iterations with couple extra checkpoints
oneIteration(TRUE, firstID, firstID, thirdID);
oneIteration(TRUE, firstID, secondID, thirdID);
oneIteration(TRUE, firstID, thirdID, fourthID);
oneIteration(TRUE, firstID, firstID, secondID, thirdID);
oneIteration(TRUE, firstID, secondID, firstID, thirdID);
oneIteration(TRUE, firstID, secondID, secondID, thirdID, firstID, fourthID, firstID);
//and one final iteration with all the checkpoints
oneIteration(TRUE, firstID, thirdID);
String[][][] sets = processCSV(summarize());
assertEquals(1, sets.length);
String[][] rows = sets[0];
assertEquals(9, rows.length);
assertOutput(new Object[][]{
{ThreadedMetric.BEGIN_IDENTIFIER, firstID, thirdID, ThreadedMetric.END_IDENTIFIER, ThreadedMetric.ITERATIONS_HEADER},
{currentTime, sleepInterval, sleepInterval, sleepInterval, 1},
{currentTime, sleepInterval, sleepInterval, sleepInterval, 2, "\\[first=" + NPTN + "\\]"},
{currentTime, sleepInterval, sleepInterval, sleepInterval, 3, "\\[second=" + NPTN + "\\]"},
{currentTime, sleepInterval, sleepInterval, sleepInterval, 4, "\\[fourth=" + NPTN + "\\]"},
{currentTime, sleepInterval, sleepInterval, sleepInterval, 5, "\\[first=" + NPTN + ":second=" + NPTN + "\\]"},
{currentTime, sleepInterval, sleepInterval, sleepInterval, 6, "\\[second=" + NPTN + ":first=" + NPTN + "\\]"},
{currentTime, sleepInterval, sleepInterval, sleepInterval, 7, "\\[second=" + NPTN + ":second=" + NPTN + ":first=" + NPTN + ":fourth=" + NPTN + ":first=" + NPTN + "\\]"},
{currentTime, sleepInterval, sleepInterval, sleepInterval, 8}
},rows);
}
/**
 * Verifies that extra detail objects supplied to begin()/event()/end() are
 * rendered in a trailing {checkpoint=[details]} cell, matched as a regex.
 */
@Test
public void extraDetails() throws Exception {
final long currentTime = System.nanoTime();
String firstID = "first";
String secondID = "second";
//Simple cases
//detail in begin.
ThreadedMetric.begin("detail1", 2);
events(firstID, secondID);
ThreadedMetric.end(TRUE);
//detail in events
ThreadedMetric.begin();
sleep();
ThreadedMetric.event(firstID, 3, "detail4");
events(secondID);
ThreadedMetric.end(TRUE);
//detail in end
ThreadedMetric.begin();
events(firstID, secondID);
ThreadedMetric.end(TRUE, "detail5", true);
//detail for each checkpoint
ThreadedMetric.begin("detail1");
sleep();
ThreadedMetric.event(firstID, 43);
sleep();
ThreadedMetric.event(secondID, "detail6");
sleep();
ThreadedMetric.end(TRUE, "detail7");
//no details
oneIteration(TRUE, firstID, secondID);
String[][][] sets = processCSV(summarize());
assertEquals(1, sets.length);
String[][] rows = sets[0];
assertEquals(6, rows.length);
assertOutput(new Object[][]{
{ThreadedMetric.BEGIN_IDENTIFIER, firstID, secondID, ThreadedMetric.END_IDENTIFIER, ThreadedMetric.ITERATIONS_HEADER},
{currentTime, sleepInterval, sleepInterval, sleepInterval, 1, "\\{BEGIN=\\[detail1;2\\]\\}"},
{currentTime, sleepInterval, sleepInterval, sleepInterval, 2, "\\{first=\\[3;detail4\\]\\}"},
{currentTime, sleepInterval, sleepInterval, sleepInterval, 3, "\\{END=\\[detail5;true\\]\\}"},
{currentTime, sleepInterval, sleepInterval, sleepInterval, 4, "\\{BEGIN=\\[detail1\\]:first=\\[43\\]:second=\\[detail6\\]:END=\\[detail7\\]\\}"},
{currentTime, sleepInterval, sleepInterval, sleepInterval, 5},
},rows);
}
/**
 * Verifies that the summary is empty when nothing has been recorded.
 */
@Test
public void empty() throws Exception {
assertEmptySummary();
}
/**
 * Verifies per-thread summaries when multiple threads record iterations
 * concurrently, with a sampling condition that records every 10th iteration.
 * Each of the 10 threads runs 100 iterations, so each thread's set contains
 * 10 sampled rows (iterations 10, 20, ..., 100).
 */
@Test(timeout = 60000)
public void multiple() throws Exception {
    ExecutorService exec = Executors.newCachedThreadPool(
            new NamedThreadFactory("ThreadedMetricTest-"));
    final long currentTime = System.nanoTime();
    final String firstID = "first";
    final String secondID = "second";
    final String thirdID = "third";
    // Record only every 10th iteration.
    final Callable<Boolean> condition = ConditionsFactory.createSamplingCondition(10, null);
    List<Future<?>> futures = new ArrayList<Future<?>>();
    //Start off 10 threads with 100 iterations each
    for (int i = 0; i < 10; i++) {
        futures.add(exec.submit(new Callable<Object>(){
            public Object call() throws Exception {
                // Distinct loop variable: the previous code shadowed the outer
                // thread-count index 'i' and iterated 109 times, contradicting
                // the documented 100 iterations (the assertions expect samples
                // 10..100 only).
                for (int iteration = 0; iteration < 100; iteration++) {
                    oneIteration(condition, firstID,secondID,thirdID);
                }
                return null;
            }
        }));
    }
    //Wait for all the tasks to complete
    for(Future<?> future:futures) {
        future.get();
    }
    // All work is done; release the pool threads.
    exec.shutdown();
    String[][][] sets = processCSV(summarize());
    // One metric set per thread.
    assertEquals(10,sets.length);
    for (String[][]rows: sets) {
        assertOutput(new Object[][]{
            {ThreadedMetric.BEGIN_IDENTIFIER, firstID, secondID,
            thirdID, ThreadedMetric.END_IDENTIFIER, ThreadedMetric.ITERATIONS_HEADER},
            {currentTime, sleepInterval, sleepInterval, sleepInterval, sleepInterval, 10},
            {currentTime, sleepInterval, sleepInterval, sleepInterval, sleepInterval, 20},
            {currentTime, sleepInterval, sleepInterval, sleepInterval, sleepInterval, 30},
            {currentTime, sleepInterval, sleepInterval, sleepInterval, sleepInterval, 40},
            {currentTime, sleepInterval, sleepInterval, sleepInterval, sleepInterval, 50},
            {currentTime, sleepInterval, sleepInterval, sleepInterval, sleepInterval, 60},
            {currentTime, sleepInterval, sleepInterval, sleepInterval, sleepInterval, 70},
            {currentTime, sleepInterval, sleepInterval, sleepInterval, sleepInterval, 80},
            {currentTime, sleepInterval, sleepInterval, sleepInterval, sleepInterval, 90},
            {currentTime, sleepInterval, sleepInterval, sleepInterval, sleepInterval, 100}
        }, rows);
    }
}
/**
 * Verifies recording of nested begin/event calls: inner checkpoints repeat,
 * so repeats appear in the extra-checkpoints cell and details accumulate.
 */
@Test
public void nested() throws Exception {
final long currentTime = System.nanoTime();
nestedCall1(2,2);
nestedCall1(1,1);
String[][][] sets = processCSV(summarize());
assertEquals(1, sets.length);
String[][] rows = sets[0];
// Header plus one row per top-level nestedCall1 invocation.
assertEquals(3, rows.length);
assertOutput(new Object[][]{
{ThreadedMetric.BEGIN_IDENTIFIER, "beforeNest1", "beforeNest2", "nest3First", "nest3Second", "afterNest2", "afterNest1", ThreadedMetric.END_IDENTIFIER, ThreadedMetric.ITERATIONS_HEADER},
{currentTime, sleepInterval, sleepInterval, sleepInterval, sleepInterval, sleepInterval, sleepInterval, sleepInterval, 1, "\\[nest3First=" + NPTN + ":nest3Second=" + NPTN + ":beforeNest2=" + NPTN + ":nest3First=" + NPTN + ":nest3Second=" + NPTN + ":nest3First=" + NPTN + ":nest3Second=" + NPTN + ":afterNest2=" + NPTN + "\\]", "\\{beforeNest1=\\[2\\]:beforeNest2=\\[2\\]:beforeNest2=\\[2\\]\\}"},
{currentTime, sleepInterval, sleepInterval, sleepInterval, sleepInterval, sleepInterval, sleepInterval, sleepInterval, 2, "\\{beforeNest1=\\[1\\]:beforeNest2=\\[1\\]\\}"},
},rows);
}
/**
 * Verifies that no metrics are recorded while instrumentation is disabled,
 * re-enabling it afterwards even if the check fails.
 */
@Test
public void disable() throws Exception {
verifyEnabled();
//Clear the summary
ThreadedMetric.clear();
assertEmptySummary();
ThreadedMetric.setEnabled(false);
try {
verifyDisabled();
} finally {
ThreadedMetric.setEnabled(true);
}
}
// Runs one iteration and asserts that nothing was recorded.
private void verifyDisabled() throws Exception {
oneIteration(TRUE, "first", "second", "third");
assertEmptySummary();
}
// Asserts that summarize() yields no metric sets at all.
private static void assertEmptySummary() throws IOException {
String[][][] sets = processCSV(summarize());
assertEquals(0, sets.length);
}
// Reuses the single() scenario to prove recording is active.
private void verifyEnabled() throws Exception {
single();
}
/**
 * Verifies the JMX management interface: registration under the default
 * ObjectName, the enabled flag round-trip, clear/summarize operations, the
 * reported configuration properties, and unregistration.
 */
@Test
public void jmx() throws Exception {
assertEquals("org.marketcetera.metrics:name=ThreadedMetric",JmxUtils.DEFAULT_NAME.toString());
MBeanServer server = ManagementFactory.getPlatformMBeanServer();
assertFalse(server.isRegistered(JmxUtils.DEFAULT_NAME));
JmxUtils.registerMgmtInterface(server);
assertTrue(server.isRegistered(JmxUtils.DEFAULT_NAME));
//Test various jmx methods.
ThreadedMetricMXBean metric = JMX.newMXBeanProxy(server,
JmxUtils.DEFAULT_NAME, ThreadedMetricMXBean.class);
//verify that the summary is empty
assertEmptySummary();
//verify default value (as set by this unit test)
assertEquals(true, metric.isEnabled());
//now disable instrumentation.
metric.setEnabled(false);
assertEquals(false, metric.isEnabled());
assertEquals(false, ThreadedMetric.isEnabled());
//verify that the metric is disabled.
verifyDisabled();
//now enable it
metric.setEnabled(true);
//and verify that it is enabled.
assertEquals(true, metric.isEnabled());
assertEquals(true, ThreadedMetric.isEnabled());
verifyEnabled();
//Verify the clear operation
metric.clear();
assertEmptySummary();
//Run the summarize operations to test that they do not fail.
metric.summarize(true);
metric.summarize(false);
//verify the reported configured properties.
assertThat(metric.getConfiguredProperties(),
Matchers.allOf(Matchers.hasEntry("metc.metrics.enable","false"),
Matchers.hasEntry("metc.metrics.jmx.enable","false")));
//unregister the management interface
JmxUtils.unregisterMgmtInterface(server);
//And verify that it did get unregistered.
assertFalse(server.isRegistered(JmxUtils.DEFAULT_NAME));
}
// Outer nesting level: records begin/beforeNest1/afterNest1/end around
// inNumLoop1 invocations of nestedCall2, passing the loop counts as details.
private static void nestedCall1(int inNumLoop1, int inNumLoop2) throws Exception {
ThreadedMetric.begin();
sleep();
ThreadedMetric.event("beforeNest1",inNumLoop1);
for(int i = 0; i < inNumLoop1; i++) {
nestedCall2(inNumLoop2);
}
ThreadedMetric.event("afterNest1");
sleep();
ThreadedMetric.end(TRUE);
}
// Middle nesting level: records beforeNest2/afterNest2 around inNumLoop2
// invocations of nestedCall3.
private static void nestedCall2(int inNumLoop2) throws Exception {
sleep();
ThreadedMetric.event("beforeNest2",inNumLoop2);
for(int i = 0; i < inNumLoop2; i++) {
nestedCall3();
}
ThreadedMetric.event("afterNest2");
sleep();
}
// Innermost nesting level: records two plain checkpoints.
private static void nestedCall3() throws Exception {
events("nest3First", "nest3Second");
}
// Reset recorded metrics after every test so tests do not interfere.
@After
public void clearMetrics() {
ThreadedMetric.clear();
}
// Sleeps for sleepInterval ms so recorded intervals have a known lower bound.
static void sleep() throws InterruptedException {
Thread.sleep(sleepInterval);
}
// End-condition that always accepts the iteration for recording.
private static final Callable<Boolean> TRUE = new Callable<Boolean>() {
public Boolean call() throws Exception {
return Boolean.TRUE;
}
};
// Runs a full begin -> checkpoints -> end cycle with the supplied condition.
private static void oneIteration(Callable<Boolean> inCondition,
String... inEventIDs) throws Exception {
ThreadedMetric.begin();
events(inEventIDs);
ThreadedMetric.end(inCondition);
}
// Records one checkpoint per ID, sleeping before each and once at the end.
private static void events(String ... inEventIDs) throws Exception {
for(String eventID: inEventIDs) {
sleep();
ThreadedMetric.event(eventID);
}
sleep();
}
/**
 * Parses each raw CSV summary document into a matrix of row/column values.
 * @param inputs one CSV document per metric set
 * @return one String[rows][columns] matrix per input document
 * @throws IOException if reading an in-memory document fails
 */
private static String[][][] processCSV(byte[][] inputs) throws IOException {
    String[][][] parsed = new String[inputs.length][][];
    for (int i = 0; i < inputs.length; i++) {
        parsed[i] = processCSV(inputs[i]);
    }
    return parsed;
}
/**
 * Parses a single CSV document: one array entry per line, split on commas.
 */
private static String[][] processCSV(byte[] input) throws IOException {
    BufferedReader reader = new BufferedReader(
            new InputStreamReader(new ByteArrayInputStream(input)));
    List<String[]> rows = new ArrayList<String[]>();
    for (String line = reader.readLine(); line != null; line = reader.readLine()) {
        rows.add(line.split(","));
    }
    return rows.toArray(new String[rows.size()][]);
}
// Captures ThreadedMetric's summary output in memory: one byte[] per stream
// that the summarizer opened (one stream per recorded metric set).
private static byte[][] summarize() throws IOException {
final List<ByteArrayOutputStream> streams = new LinkedList<ByteArrayOutputStream>();
ThreadedMetric.summarizeResults(new PrintStreamFactory(){
@Override
public PrintStream getStream(String inName) throws IOException {
// Hand out an in-memory stream and remember it for later collection.
ByteArrayOutputStream baos = new ByteArrayOutputStream();
streams.add(baos);
return new PrintStream(baos);
}
@Override
public void done(PrintStream inStream) throws IOException {
// Flush and release the stream once the summarizer is finished with it.
inStream.close();
}
});
//Uncomment the following line to observe summary generated by the tests.
//ThreadedMetric.summarizeResults(StdErrFactory.INSTANCE);
byte[][]value = new byte[streams.size()][];
int idx = 0;
for(ByteArrayOutputStream baos: streams) {
value[idx++] = baos.toByteArray();
}
return value;
}
/**
 * Verifies that the supplied matrix of strings matches the expectation.
 *
 * @param inExpected the matrix of expectations. Numeric expectations
 * expect the supplied string to be numeric and have a value greater than or
 * equal to the expectation (they are lower bounds for times/intervals).
 * Non-empty String expectations are treated as regex
 * patterns and the actual string is matched against the regex. String
 * expectations of length 0 expect the actual value to be the empty string.
 * Any other expectation types are expected to be equal to the actual value.
 * @param inActual the matrix of actual values.
 *
 * @throws Exception if there were any unexpected errors.
 */
static void assertOutput(Object[][] inExpected, String [][]inActual) throws Exception {
assertEquals(inExpected.length, inActual.length);
for(int i = 0; i < inExpected.length; i++) {
Object [] expected = inExpected[i];
String [] actual = inActual[i];
assertEquals("iteration:" + i,expected.length, actual.length);
for(int j = 0; j < expected.length; j++) {
Object expect = expected[j];
final String errMsg = "iteration:" + i + "," + j;
if(expect instanceof Comparable && expect instanceof Number) {
// Numeric expectation: parse the actual cell as the same numeric type
// and require actual >= expected (a lower bound).
assertThat(errMsg,
parseNumber(actual[j], ((Comparable)expect).getClass()),
Matchers.greaterThanOrEqualTo((Comparable)expect));
} else if(expect instanceof String){
String exp = (String) expect;
if(exp.length() > 0) {
//assume regex
RegExAssert.assertMatches(errMsg, exp, actual[j]);
} else {
// Empty expectation: the cell must be exactly the empty string.
assertEquals(errMsg, expect, actual[j]);
}
} else {
assertEquals(errMsg, expect, actual[j]);
}
}
}
}
/**
 * Parses the supplied text as a long value.
 * @param inValue the text to parse
 * @return the parsed value, or null if the text is not a valid long
 */
static Long parseLong(String inValue) {
    try {
        return Long.valueOf(inValue);
    } catch (NumberFormatException e) {
        return null;
    }
}
/**
 * Builds a number of the requested type from its string form, invoking the
 * type's single-String-argument constructor reflectively.
 */
static <T extends Comparable> T parseNumber(String inString, Class<T> inClass) throws Exception {
    return inClass.getConstructor(String.class).newInstance(inString);
}
// Sleep duration in milliseconds between checkpoints; every recorded interval
// must therefore be at least this long (used as a lower bound in assertions).
static final long sleepInterval = 100;
/**
 * A regex pattern that matches the number of nanoseconds of time interval
 * corresponding to sleepInterval above. Due to timing inaccuracies
 * the actual time might be slightly less than the sleepInterval above.
 * And this pattern should account for that.
 */
static final String NPTN = "\\d{8,9}";
}
| |
/*******************************************************************************
* Copyright (c) cortical.io GmbH. All rights reserved.
*
* This software is confidential and proprietary information.
* You shall use it only in accordance with the terms of the
* license agreement you entered into with cortical.io GmbH.
******************************************************************************/
package io.cortical.services.api.client.api;
import io.cortical.rest.model.Context;
import io.cortical.rest.model.Fingerprint;
import io.cortical.rest.model.Term;
import io.cortical.services.api.client.ApiException;
import io.cortical.services.api.client.ApiInvoker;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/** Generated. **/
public class ExpressionsApi {
// Default endpoint of the cortical.io REST API; override via setBasePath(...).
private String basePath = "http://api.cortical.io/rest";
// Placeholder API key; replaced with the real key by the constructor argument.
private String key = "replaceWithRetinaAPIKey";
// Invoker used to issue all HTTP requests for this client.
private ApiInvoker apiInvoker;
/**
 * Creates an ExpressionsApi client authenticated with the given API key.
 * The key is sent with every request via the "api-key" default header.
 * @param apiKey the retina API key to authenticate with
 */
public ExpressionsApi(String apiKey) {
    this.key = apiKey;
    apiInvoker = ApiInvoker.getInstance();
    apiInvoker.addDefaultHeader("api-key", apiKey);
}
/** Generated.
*@return {@link ApiInvoker}
**/
public ApiInvoker getInvoker() {
return apiInvoker;
}
/** Generated.
*@param basePath the path to set
**/
public void setBasePath(String basePath) {
this.basePath = basePath;
}
/** Generated.
*@return String
**/
public String getBasePath() {
return basePath;
}
/** Generated.
*@throws ApiException if an error occurs during querying of the API.
*@return Fingerprint **/
public Fingerprint resolveExpression (String body, String retina_name, Double sparsity) throws ApiException {
// verify required params are set
if(retina_name == null || body == null ) {
throw new ApiException(400, "missing required params");
}
// create path and map variables
String path = "/expressions".replaceAll("\\{format\\}","json");
// query params
Map<String, String> queryParams = new HashMap<String, String>();
Map<String, String> headerParams = new HashMap<String, String>();
if(!"null".equals(String.valueOf(retina_name)))
queryParams.put("retina_name", String.valueOf(retina_name));
if(!"null".equals(String.valueOf(sparsity)))
queryParams.put("sparsity", String.valueOf(sparsity));
String contentType = "application/json";
try {
Object response = apiInvoker.invokeAPI(basePath, path, "POST", queryParams
, body, headerParams, contentType);
if(response != null) {
if (response instanceof String) {
@SuppressWarnings("unchecked")
Fingerprint result = (Fingerprint) ApiInvoker.deserialize( (String) response, ""
, Fingerprint.class, null);return result;
}
else if (response instanceof java.io.ByteArrayInputStream) {
@SuppressWarnings("unchecked")
Fingerprint result = (Fingerprint) response;return result;
}
}
else {
return null;
}
} catch (ApiException ex) {
if(ex.getCode() == 404) {
return null;
}
else {
throw ex;
}
}
return null;}
/** Generated.
*@throws ApiException if an error occurs during querying of the API.
*@return List<Context> **/
public List<Context> getContextsForExpression (String body, Boolean get_fingerprint, String retina_name, Integer start_index, Integer max_results, Double sparsity) throws ApiException {
// verify required params are set
if(retina_name == null || body == null ) {
throw new ApiException(400, "missing required params");
}
// create path and map variables
String path = "/expressions/contexts".replaceAll("\\{format\\}","json");
// query params
Map<String, String> queryParams = new HashMap<String, String>();
Map<String, String> headerParams = new HashMap<String, String>();
if(!"null".equals(String.valueOf(retina_name)))
queryParams.put("retina_name", String.valueOf(retina_name));
if(!"null".equals(String.valueOf(start_index)))
queryParams.put("start_index", String.valueOf(start_index));
if(!"null".equals(String.valueOf(max_results)))
queryParams.put("max_results", String.valueOf(max_results));
if(!"null".equals(String.valueOf(sparsity)))
queryParams.put("sparsity", String.valueOf(sparsity));
if(!"null".equals(String.valueOf(get_fingerprint)))
queryParams.put("get_fingerprint", String.valueOf(get_fingerprint));
String contentType = "application/json";
try {
Object response = apiInvoker.invokeAPI(basePath, path, "POST", queryParams
, body, headerParams, contentType);
if(response != null) {
if (response instanceof String) {
@SuppressWarnings("unchecked")
List<Context> result = (List<Context>) ApiInvoker.deserialize( (String) response, "Array"
, Context.class, null);return result;
}
else if (response instanceof java.io.ByteArrayInputStream) {
@SuppressWarnings("unchecked")
List<Context> result = (List<Context>) response;return result;
}
}
else {
return null;
}
} catch (ApiException ex) {
if(ex.getCode() == 404) {
return null;
}
else {
throw ex;
}
}
return null;}
/** Generated.
*@throws ApiException if an error occurs during querying of the API.
*@return List<Term> **/
public List<Term> getSimilarTermsForExpressionContext (String body, Integer context_id, String pos_type, Boolean get_fingerprint, String retina_name, Integer start_index, Integer max_results, Double sparsity) throws ApiException {
// verify required params are set
if(retina_name == null || body == null ) {
throw new ApiException(400, "missing required params");
}
// create path and map variables
String path = "/expressions/similar_terms".replaceAll("\\{format\\}","json");
// query params
Map<String, String> queryParams = new HashMap<String, String>();
Map<String, String> headerParams = new HashMap<String, String>();
if(!"null".equals(String.valueOf(retina_name)))
queryParams.put("retina_name", String.valueOf(retina_name));
if(!"null".equals(String.valueOf(context_id)))
queryParams.put("context_id", String.valueOf(context_id));
if(!"null".equals(String.valueOf(start_index)))
queryParams.put("start_index", String.valueOf(start_index));
if(!"null".equals(String.valueOf(max_results)))
queryParams.put("max_results", String.valueOf(max_results));
if(!"null".equals(String.valueOf(pos_type)))
queryParams.put("pos_type", String.valueOf(pos_type));
if(!"null".equals(String.valueOf(sparsity)))
queryParams.put("sparsity", String.valueOf(sparsity));
if(!"null".equals(String.valueOf(get_fingerprint)))
queryParams.put("get_fingerprint", String.valueOf(get_fingerprint));
String contentType = "application/json";
try {
Object response = apiInvoker.invokeAPI(basePath, path, "POST", queryParams
, body, headerParams, contentType);
if(response != null) {
if (response instanceof String) {
@SuppressWarnings("unchecked")
List<Term> result = (List<Term>) ApiInvoker.deserialize( (String) response, "Array"
, Term.class, null);return result;
}
else if (response instanceof java.io.ByteArrayInputStream) {
@SuppressWarnings("unchecked")
List<Term> result = (List<Term>) response;return result;
}
}
else {
return null;
}
} catch (ApiException ex) {
if(ex.getCode() == 404) {
return null;
}
else {
throw ex;
}
}
return null;}
/** Generated.
*@throws ApiException if an error occurs during querying of the API.
*@return List<Fingerprint> **/
public List<Fingerprint> resolveBulkExpression (String body, String retina_name, Double sparsity) throws ApiException {
// verify required params are set
if(retina_name == null || body == null ) {
throw new ApiException(400, "missing required params");
}
// create path and map variables
String path = "/expressions/bulk".replaceAll("\\{format\\}","json");
// query params
Map<String, String> queryParams = new HashMap<String, String>();
Map<String, String> headerParams = new HashMap<String, String>();
if(!"null".equals(String.valueOf(retina_name)))
queryParams.put("retina_name", String.valueOf(retina_name));
if(!"null".equals(String.valueOf(sparsity)))
queryParams.put("sparsity", String.valueOf(sparsity));
String contentType = "application/json";
try {
Object response = apiInvoker.invokeAPI(basePath, path, "POST", queryParams
, body, headerParams, contentType);
if(response != null) {
if (response instanceof String) {
@SuppressWarnings("unchecked")
List<Fingerprint> result = (List<Fingerprint>) ApiInvoker.deserialize( (String) response, "Array"
, Fingerprint.class, null);return result;
}
else if (response instanceof java.io.ByteArrayInputStream) {
@SuppressWarnings("unchecked")
List<Fingerprint> result = (List<Fingerprint>) response;return result;
}
}
else {
return null;
}
} catch (ApiException ex) {
if(ex.getCode() == 404) {
return null;
}
else {
throw ex;
}
}
return null;}
/** Generated.
*@throws ApiException if an error occurs during querying of the API.
*@return List<List<Context>> **/
public List<List<Context>> getContextsForBulkExpression (String body, Boolean get_fingerprint, String retina_name, Integer start_index, Integer max_results, Double sparsity) throws ApiException {
// verify required params are set
if(retina_name == null || body == null ) {
throw new ApiException(400, "missing required params");
}
// create path and map variables
String path = "/expressions/contexts/bulk".replaceAll("\\{format\\}","json");
// query params
Map<String, String> queryParams = new HashMap<String, String>();
Map<String, String> headerParams = new HashMap<String, String>();
if(!"null".equals(String.valueOf(retina_name)))
queryParams.put("retina_name", String.valueOf(retina_name));
if(!"null".equals(String.valueOf(start_index)))
queryParams.put("start_index", String.valueOf(start_index));
if(!"null".equals(String.valueOf(max_results)))
queryParams.put("max_results", String.valueOf(max_results));
if(!"null".equals(String.valueOf(sparsity)))
queryParams.put("sparsity", String.valueOf(sparsity));
if(!"null".equals(String.valueOf(get_fingerprint)))
queryParams.put("get_fingerprint", String.valueOf(get_fingerprint));
String contentType = "application/json";
try {
Object response = apiInvoker.invokeAPI(basePath, path, "POST", queryParams
, body, headerParams, contentType);
if(response != null) {
if (response instanceof String) {
@SuppressWarnings("unchecked")
List<List<Context>> result = (List<List<Context>>) ApiInvoker.deserialize( (String) response, "Array"
, java.lang.Object.class, ApiInvoker.NestedContent.CONTEXT);return result;
}
else if (response instanceof java.io.ByteArrayInputStream) {
@SuppressWarnings("unchecked")
List<List<Context>> result = (List<List<Context>>) response;return result;
}
}
else {
return null;
}
} catch (ApiException ex) {
if(ex.getCode() == 404) {
return null;
}
else {
throw ex;
}
}
return null;}
/** Generated.
*@throws ApiException if an error occurs during querying of the API.
*@return List<List<Term>> **/
public List<List<Term>> getSimilarTermsForBulkExpressionContext (String body, Integer context_id, String pos_type, Boolean get_fingerprint, String retina_name, Integer start_index, Integer max_results, Double sparsity) throws ApiException {
// verify required params are set
if(retina_name == null || body == null ) {
throw new ApiException(400, "missing required params");
}
// create path and map variables
String path = "/expressions/similar_terms/bulk".replaceAll("\\{format\\}","json");
// query params
Map<String, String> queryParams = new HashMap<String, String>();
Map<String, String> headerParams = new HashMap<String, String>();
if(!"null".equals(String.valueOf(retina_name)))
queryParams.put("retina_name", String.valueOf(retina_name));
if(!"null".equals(String.valueOf(context_id)))
queryParams.put("context_id", String.valueOf(context_id));
if(!"null".equals(String.valueOf(start_index)))
queryParams.put("start_index", String.valueOf(start_index));
if(!"null".equals(String.valueOf(max_results)))
queryParams.put("max_results", String.valueOf(max_results));
if(!"null".equals(String.valueOf(pos_type)))
queryParams.put("pos_type", String.valueOf(pos_type));
if(!"null".equals(String.valueOf(sparsity)))
queryParams.put("sparsity", String.valueOf(sparsity));
if(!"null".equals(String.valueOf(get_fingerprint)))
queryParams.put("get_fingerprint", String.valueOf(get_fingerprint));
String contentType = "application/json";
try {
Object response = apiInvoker.invokeAPI(basePath, path, "POST", queryParams
, body, headerParams, contentType);
if(response != null) {
if (response instanceof String) {
@SuppressWarnings("unchecked")
List<List<Term>> result = (List<List<Term>>) ApiInvoker.deserialize( (String) response, "Array"
, java.lang.Object.class, ApiInvoker.NestedContent.TERM);return result;
}
else if (response instanceof java.io.ByteArrayInputStream) {
@SuppressWarnings("unchecked")
List<List<Term>> result = (List<List<Term>>) response;return result;
}
}
else {
return null;
}
} catch (ApiException ex) {
if(ex.getCode() == 404) {
return null;
}
else {
throw ex;
}
}
return null;}
}
| |
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package docking.widgets.table.threaded;
import static org.hamcrest.CoreMatchers.*;
import static org.junit.Assert.*;
import java.awt.BorderLayout;
import java.awt.Rectangle;
import java.awt.event.MouseEvent;
import java.util.List;
import javax.swing.JFrame;
import javax.swing.JScrollPane;
import javax.swing.table.JTableHeader;
import org.junit.*;
import docking.DockingUtils;
import docking.test.AbstractDockingTest;
import docking.widgets.table.*;
/**
 * Base class for tests that exercise a {@link GThreadedTablePanel} backed by a
 * {@link TestDataKeyModel}. setUp() builds a visible frame around the table and
 * wires a {@link TestThreadedTableModelListener}; helpers here add/remove rows,
 * trigger filtering, click column headers to sort, and block until the threaded
 * model finishes its background work.
 */
public abstract class AbstractThreadedTableTest extends AbstractDockingTest {

    protected TestDataKeyModel model;
    protected GTable table;
    protected JTableHeader header;
    protected JFrame frame;
    protected TestThreadedTableModelListener testTableModelListener;
    protected GThreadedTablePanel<Long> threadedTablePanel;
    // set before teardown so subclasses can ignore model events fired while disposing
    protected volatile boolean isDisposing = false;

    @Before
    public void setUp() throws Exception {
        model = createTestModel();
        testTableModelListener = createListener();
        model.addThreadedTableModelListener(testTableModelListener);
        // do this in swing, as some of the table column setup can trigger concurrent modifications
        // due to the swing and the test working on the widgets at the same time
        runSwing(() -> {
            threadedTablePanel = new GThreadedTablePanel<>(model);
            table = threadedTablePanel.getTable();
            header = table.getTableHeader();
            buildFrame(threadedTablePanel);
        });
    }

    /** Creates the model under test; called once from setUp(). */
    protected abstract TestDataKeyModel createTestModel();

    /** Creates the busy/done tracking listener; subclasses may override. */
    protected TestThreadedTableModelListener createListener() {
        return new TestThreadedTableModelListener(model);
    }

    @After
    public void tearDown() throws Exception {
        isDisposing = true;
        dispose();
    }

    /** Shows the given table panel inside a new frame (on the swing thread). */
    protected void buildFrame(GThreadedTablePanel<Long> tablePanel) {
        runSwing(() -> {
            frame = new JFrame("Threaded Table Test");
            frame.getContentPane().setLayout(new BorderLayout());
            frame.getContentPane().add(new JScrollPane(tablePanel));
            frame.pack();
            frame.setVisible(true);
        });
    }

    /** Closes the frame and disposes the table panel on the swing thread. */
    protected void dispose() {
        close(frame);
        runSwing(threadedTablePanel::dispose);
    }

    /** Adds the value to the model and blocks until the threaded update completes. */
    protected void addItemToModel(long value) {
        model.addObject(Long.valueOf(value));
        waitForTableModel(model);
    }

    /** Removes the value from the model and blocks until the threaded update completes. */
    protected void removeItemFromModel(int value) {
        model.removeObject(Long.valueOf(value));
        waitForTableModel(model);
    }

    /** Re-applies the model's filter and blocks until the update completes. */
    protected void triggerModelFilter() {
        model.reFilter();
        waitForTableModel(model);
    }

    /**
     * Clicks the given column header twice, verifying the sort direction after
     * each click (the second click flips the direction).
     */
    protected void doTestSorting(int columnIndex) throws Exception {
        sortByNormalClicking(columnIndex);
        SortedTableModel sortedModel = (SortedTableModel) table.getModel();
        verifySortDirection(columnIndex, sortedModel);
        sortByNormalClicking(columnIndex);
        verifySortDirection(columnIndex, sortedModel);
    }

    /**
     * Asserts the given column is part of the current sort and that every adjacent
     * pair of visible values is ordered per the column's current direction.
     */
    @SuppressWarnings("rawtypes")
    protected void verifySortDirection(int columnIndex, SortedTableModel sortedModel) {
        TableSortState sortState = getSortState(sortedModel);
        ColumnSortState columnSortState = sortState.getColumnSortState(columnIndex);
        if (columnSortState == null) {
            System.err.println("Actual sorted column(s): " + sortState);
            Assert.fail("Expected column not sorted! - Expected: " + columnIndex);
        }
        for (int i = 0; i < table.getRowCount() - 1; ++i) {
            Comparable comp1 = (Comparable) table.getValueAt(i + 0, columnIndex);
            Comparable comp2 = (Comparable) table.getValueAt(i + 1, columnIndex);
            if (columnSortState.isAscending()) {
                int compareResult = compareValues(comp1, comp2);
                boolean lessThanOrEqual = compareResult <= 0;
                assertTrue("\"" + comp1 + "\"" + " is not <= " + "\"" + comp2 + "\"",
                    lessThanOrEqual);
            }
            else {
                int compareResult = compareValues(comp1, comp2);
                boolean greaterThanOrEqual = compareResult >= 0;
                assertTrue("\"" + comp1 + "\"" + " is not >= " + "\"" + comp2 + "\"",
                    greaterThanOrEqual);
            }
        }
    }

    /**
     * Compares two cell values; strings are compared case-insensitively to match
     * the table's sorting behavior, everything else by natural order.
     */
    @SuppressWarnings({ "unchecked", "rawtypes" })
    protected int compareValues(Comparable comp1, Comparable comp2) {
        if ((comp1 instanceof String) && (comp2 instanceof String)) {
            String string1 = (String) comp1;
            String string2 = (String) comp2;
            return string1.compareToIgnoreCase(string2);
        }
        return comp1.compareTo(comp2);
    }

    /** Sorts by clicking the header with no modifier keys pressed. */
    protected void sortByNormalClicking(int columnToClick) throws Exception {
        sortByClick(columnToClick, 0);
    }

    /**
     * Clicks the given column header with the given mouse modifiers, waits for the
     * threaded model to finish, and verifies the column ended up sorted. When the
     * same column was already the sole sorted column, also verifies the click
     * flipped the sort direction.
     */
    protected void sortByClick(int columnToClick, int modifiers) throws Exception {
        SortedTableModel sortedModel = (SortedTableModel) table.getModel();
        TableSortState sortState = getSortState(sortedModel);
        record("sortByClick() - initial sort state: " + sortState);
        int currentSortColunn = -1;
        boolean isAscending = true;
        boolean checkSortDirection = false;
        if (!sortState.isUnsorted()) {
            // check to see if the test is clicking the same column twice (to change the
            // sort direction)
            ColumnSortState originalColumnSortState = sortState.iterator().next();
            currentSortColunn = originalColumnSortState.getColumnModelIndex();
            checkSortDirection = (columnToClick == currentSortColunn);
            isAscending = originalColumnSortState.isAscending();
        }
        testTableModelListener.reset(model);
        Rectangle rect = header.getHeaderRect(columnToClick);
        if (!header.isShowing()) {
            // let pending swing work (e.g. frame realization) finish before clicking
            waitForPostedSwingRunnables();
        }
        record("Clicking table at column " + columnToClick);
        // +10/+10 targets a point safely inside the header cell, away from resize edges
        clickMouse(header, MouseEvent.BUTTON1, rect.x + 10, rect.y + 10, 1, modifiers);
        waitForNotBusy();
        record("\tafter click; table not busy");
        sortState = getSortState(sortedModel);
        record("Updated sort state: " + sortState);
        ColumnSortState columnSortState = sortState.iterator().next();
        int sortedIndex = columnSortState.getColumnModelIndex();
        verifyColumnSorted(sortedIndex, sortState);
        if (checkSortDirection) {
            boolean newDirection = columnSortState.isAscending();
            if (isAscending == newDirection) {
                fail("Not sorted in the expected direction");
            }
        }
    }

    /** Reads the sort state on the swing thread to avoid racing table updates. */
    protected TableSortState getSortState(SortedTableModel sortedModel) {
        return runSwing(() -> sortedModel.getTableSortState());
    }

    /**
     * Control-clicks the given column header to remove it from the sort and
     * asserts it is no longer sorted.
     */
    protected void removeSortByClicking(int columnToClick) throws Exception {
        SortedTableModel sortedModel = (SortedTableModel) table.getModel();
        testTableModelListener.reset(model);
        Rectangle rect = header.getHeaderRect(columnToClick);
        clickMouse(header, MouseEvent.BUTTON1, rect.x + 10, rect.y + 10, 1,
            DockingUtils.CONTROL_KEY_MODIFIER_MASK);
        waitForNotBusy();
        TableSortState sortState = getSortState(sortedModel);
        assertNull(sortState.getColumnSortState(columnToClick));
    }

    /** Asserts the given column index appears in the given sort state. */
    protected void verifyColumnSorted(int sortedIndex, TableSortState sortState) {
        ColumnSortState columnSortState = sortState.getColumnSortState(sortedIndex);
        assertNotNull(columnSortState);
    }

    /** Resets the listener so the next waitForNotBusy() waits for fresh work. */
    protected void resetBusyListener() {
        testTableModelListener.reset(model);
    }

    /**
     * Blocks until the model listener reports its work is done, then lets swing
     * settle. The initial sleep gives the background update a chance to start.
     */
    protected void waitForNotBusy() {
        sleep(50);
        waitForCondition(() -> testTableModelListener.doneWork(),
            "Timed-out waiting for table model to update.");
        waitForSwing();
    }

    /** Adds the value to the model on the swing thread (no wait). */
    protected void addLong(final long value) {
        runSwing(() -> model.addObject(Long.valueOf(value)));
    }

    /** @return the filtered (visible) row count, read on the swing thread */
    protected int getRowCount() {
        return runSwing(() -> model.getRowCount());
    }

    /** @return the unfiltered row count, read on the swing thread */
    protected int getUnfilteredRowCount() {
        return runSwing(() -> model.getUnfilteredRowCount());
    }

    /** @return the model's current (filtered) data, read on the swing thread */
    protected List<Long> getModelData() {
        return runSwing(() -> model.getModelData());
    }

    /** Debug hook; no-op here — subclasses know how to record debug output. */
    protected void record(String message) {
        // no-op for base class; subclasses know how to record debug
    }

    /** Asserts the filtered row count equals the expected value. */
    protected void assertRowCount(int expectedCount) {
        int rowCount = model.getRowCount();
        assertThat("Have different number of table rows than expected after filtering", rowCount,
            is(expectedCount));
    }

    /** Asserts the current (possibly filtered) data contains every backing row. */
    protected void assertNoRowsFilteredOut() {
        List<Long> allData = model.getAllData();
        TableData<Long> currentData = model.getCurrentTableData();
        assertThat("Table has been filtered", currentData.size(), is(allData.size()));
    }
}
| |
/*
* Copyright (C) 2004 Felipe Gustavo de Almeida
* Copyright (C) 2010-2016 The MPDroid Project
*
* All Rights Reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice,this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
* THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.anpmech.mpd.connection;
import com.anpmech.mpd.Log;
import com.anpmech.mpd.concurrent.MPDExecutor;
import com.anpmech.mpd.exception.MPDException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
/**
 * This class is a {@link MPDConnection} status tracker.
 */
public abstract class MPDConnectionStatus {

    /**
     * This flag enables or disables debug log output.
     */
    private static final boolean DEBUG = false;

    /**
     * The class log identifier.
     */
    private static final String TAG = "ConnectionStatus";

    /**
     * This stores a priority listener to be called for the connection instance.
     */
    private final MPDConnectionListener mConnectionListener;

    /**
     * The callbacks to inform of changes.
     *
     * <p>A {@link CopyOnWriteArrayList} is used here because the callback-complete methods
     * iterate over this collection while {@link #addListener(MPDConnectionListener)} and
     * {@link #removeListener(MPDConnectionListener)} may be called from other threads; a
     * plain ArrayList would risk a {@code ConcurrentModificationException}.</p>
     */
    private final Collection<MPDConnectionListener> mConnectionListeners =
            new CopyOnWriteArrayList<>();

    /**
     * The connection status binary semaphore.
     *
     * <p>This Semaphore starts off with no available permits, denoting a lack of connection until
     * set otherwise.</p>
     */
    private final Semaphore mConnectionStatus = new Semaphore(0);

    /**
     * This boolean tracks whether the connection was cancelled by the client.
     */
    private volatile boolean mIsCancelled;

    /**
     * The 'connecting' connection status tracking field.
     */
    private volatile boolean mIsConnecting;

    /**
     * This field stores the last time the status of this connection changed.
     */
    private long mLastChangeTime = -1L;

    /**
     * The sole constructor.
     *
     * @param listener The connected {@link MPDConnection} instance.
     */
    MPDConnectionStatus(final MPDConnectionListener listener) {
        mConnectionListener = listener;
    }

    /**
     * This method outputs the {@code line} parameter to a {@link Log#debug(String, String)} if
     * {@link #DEBUG} is set to {@code true}.
     *
     * @param line The {@link String} to output to the log.
     */
    protected static void debug(final String line) {
        if (DEBUG) {
            Log.debug(TAG, line);
        }
    }

    /**
     * Adds a listener for the connection status.
     *
     * <p>Duplicate registrations are ignored.</p>
     *
     * @param listener A listener for connection status.
     */
    public void addListener(final MPDConnectionListener listener) {
        if (!mConnectionListeners.contains(listener)) {
            mConnectionListeners.add(listener);
        }
    }

    /**
     * This is called at the end of the connection class
     * {@link MPDConnectionListener#connectionConnected(int)} callback.
     *
     * <p>This is called from the actual connection class, to prevent calling prior to something
     * that needs to be taken care of by the connection prior to child callbacks.</p>
     *
     * @param commandErrorCode If this number is non-zero, this corresponds to a
     *                         {@link MPDException} error code.
     */
    void connectedCallbackComplete(final int commandErrorCode) {
        for (final MPDConnectionListener listener : mConnectionListeners) {
            MPDExecutor.submitCallback(new Runnable() {
                @Override
                public void run() {
                    listener.connectionConnected(commandErrorCode);
                }
            });
        }
    }

    /**
     * This is called at the end of the connection class
     * {@link MPDConnectionListener#connectionConnecting()} callback.
     *
     * <p>This is called from the actual connection class, to prevent calling prior to something
     * that needs to be taken care of by the connection prior to child callbacks.</p>
     */
    void connectingCallbackComplete() {
        for (final MPDConnectionListener listener : mConnectionListeners) {
            MPDExecutor.submitCallback(new Runnable() {
                @Override
                public void run() {
                    listener.connectionConnecting();
                }
            });
        }
    }

    /**
     * This is called at the end of the connection class
     * {@link MPDConnectionListener#connectionDisconnected(String)} callback.
     *
     * <p>This is called from the actual connection class, to prevent calling prior to something
     * that needs to be taken care of by the connection prior to child callbacks.</p>
     *
     * @param reason The reason for the disconnection.
     */
    void disconnectedCallbackComplete(final String reason) {
        for (final MPDConnectionListener listener : mConnectionListeners) {
            MPDExecutor.submitCallback(new Runnable() {
                @Override
                public void run() {
                    listener.connectionDisconnected(reason);
                }
            });
        }
    }

    /**
     * Returns the last time a status change occurred.
     *
     * @return Returns the last time the connection status was changed in milliseconds since epoch.
     * If this connection has never had a status change, {@code -1L} will be returned.
     */
    public long getChangeTime() {
        return mLastChangeTime;
    }

    /**
     * This should implement the current status of the connection blocking.
     *
     * @return This should return true if the connection has potential to block and is blocking,
     * false otherwise.
     */
    abstract boolean isBlocked();

    /**
     * Whether the connection was cancelled by the client.
     *
     * @return True if cancelled by the local client, false otherwise.
     */
    public boolean isCancelled() {
        return mIsCancelled;
    }

    /**
     * Checks this connection for connected status.
     *
     * <p>A permit can only be acquired while connected; it is immediately released so the
     * semaphore's state is unchanged by this probe.</p>
     *
     * @return True if this connection is connected, false otherwise.
     */
    public boolean isConnected() {
        boolean isConnected = false;
        try {
            isConnected = mConnectionStatus.tryAcquire();
        } finally {
            if (isConnected) {
                mConnectionStatus.release();
            }
        }
        return isConnected;
    }

    /**
     * Checks this connection for connecting status.
     *
     * @return True if this connection is connecting, false otherwise.
     */
    public boolean isConnecting() {
        return mIsConnecting;
    }

    /**
     * Remove a listener from this connection.
     *
     * @param listener The listener to remove from this connection.
     */
    public void removeListener(final MPDConnectionListener listener) {
        if (mConnectionListeners.contains(listener)) {
            mConnectionListeners.remove(listener);
        }
    }

    /**
     * This should implement what occurs when a connection is blocking, if it has potential to
     * block.
     */
    abstract void setBlocked();

    /**
     * This should implement what occurs when a connection is not blocking, if it has potential to
     * block.
     */
    abstract void setNotBlocked();

    /**
     * This is called when called by the disconnection timer.
     *
     * @param reason The reason for the connection cancellation.
     */
    void statusChangeCancelled(final String reason) {
        mIsCancelled = true;
        statusChangeDisconnected(reason);
    }

    /**
     * This is called when the connection is dropped by the client, itself.
     */
    void statusChangeCancelled() {
        statusChangeCancelled("Cancelled by client.");
    }

    /**
     * Changes the status of the connection to connected.
     *
     * @see #statusChangeDisconnected(String)
     */
    void statusChangeConnected() {
        try {
            // tryAcquire() failing means no permit was available, i.e. we were disconnected;
            // only then is this a real state change worth notifying about
            if (!mConnectionStatus.tryAcquire()) {
                debug("Status changed to connected.");
                mIsCancelled = false;
                mIsConnecting = false;
                mLastChangeTime = System.currentTimeMillis();
                MPDExecutor.submitCallback(new Runnable() {
                    @Override
                    public void run() {
                        mConnectionListener.connectionConnected(0);
                    }
                });
            }
        } finally {
            // either restores the permit taken above or grants the 'connected' permit
            mConnectionStatus.release();
        }
    }

    /**
     * Changes the status of this connection to a transient 'Connecting' status.
     */
    void statusChangeConnecting() {
        if (!mIsConnecting) {
            mLastChangeTime = System.currentTimeMillis();
            debug("Status changed to connecting");
            mIsCancelled = false;
            mIsConnecting = true;
            /**
             * Acquire a permit, if available. This signifies that we're disconnected, which is
             * implied by connecting.
             */
            mConnectionStatus.tryAcquire();
            MPDExecutor.submitCallback(new Runnable() {
                @Override
                public void run() {
                    mConnectionListener.connectionConnecting();
                }
            });
        }
    }

    /**
     * Changes the status of the connection to disconnected.
     *
     * @param reason The reason for the disconnection.
     * @see #statusChangeConnected()
     */
    void statusChangeDisconnected(final String reason) {
        // tryAcquire() succeeds only while connected; also notify when mid-connection attempt
        if (mConnectionStatus.tryAcquire() || mIsConnecting) {
            debug("Status changed to disconnected: " + reason);
            mIsConnecting = false;
            mLastChangeTime = System.currentTimeMillis();
            MPDExecutor.submitCallback(new Runnable() {
                @Override
                public void run() {
                    mConnectionListener.connectionDisconnected(reason);
                }
            });
        }
    }

    @Override
    public String toString() {
        return "MPDConnectionStatus{" +
                "mConnectionListeners=" + mConnectionListeners +
                ", mConnectionStatus=" + mConnectionStatus +
                ", mConnectionListener=" + mConnectionListener +
                ", mIsCancelled=" + mIsCancelled +
                ", mIsConnecting=" + mIsConnecting +
                ", mLastChangeTime=" + mLastChangeTime +
                '}';
    }

    /**
     * This unsets the cancelled connection status, allowing new connections to initiate.
     */
    void unsetCancelled() {
        mIsCancelled = false;
    }

    /**
     * This method blocks indefinitely, when not connected, until connection, unless interrupted.
     *
     * @throws InterruptedException If the current thread is interrupted.
     * @see #waitForConnection(long, TimeUnit)
     */
    public void waitForConnection() throws InterruptedException {
        try {
            mConnectionStatus.acquire();
        } finally {
            mConnectionStatus.release();
        }
    }

    /**
     * This method blocks when not connected until connected, or timeout.
     *
     * @param timeout The maximum time to wait for a connection.
     * @param unit    The time unit of the {@code timeout} argument.
     * @return True if the connected within time limit, false otherwise.
     * @throws InterruptedException If the current thread is interrupted.
     * @see #waitForConnection()
     */
    @SuppressWarnings("BooleanMethodNameMustStartWithQuestion")
    public boolean waitForConnection(final long timeout, final TimeUnit unit)
            throws InterruptedException {
        boolean connectionAcquired = false;
        try {
            connectionAcquired = mConnectionStatus.tryAcquire(timeout, unit);
        } finally {
            if (connectionAcquired) {
                mConnectionStatus.release();
            }
        }
        return connectionAcquired;
    }
}
| |
package org.apache.hadoop.tools.posum.data.monitor.cluster;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapred.lib.IdentityMapper;
import org.apache.hadoop.mapred.lib.IdentityReducer;
import org.apache.hadoop.mapreduce.MRJobConfig;
import org.apache.hadoop.mapreduce.split.JobSplit;
import org.apache.hadoop.mapreduce.v2.api.records.JobId;
import org.apache.hadoop.mapreduce.v2.util.MRBuilderUtils;
import org.apache.hadoop.tools.posum.client.data.Database;
import org.apache.hadoop.tools.posum.common.records.call.FindByIdCall;
import org.apache.hadoop.tools.posum.common.records.call.JobForAppCall;
import org.apache.hadoop.tools.posum.common.records.dataentity.AppProfile;
import org.apache.hadoop.tools.posum.common.records.dataentity.CountersProxy;
import org.apache.hadoop.tools.posum.common.records.dataentity.DataEntityCollection;
import org.apache.hadoop.tools.posum.common.records.dataentity.ExternalDeadline;
import org.apache.hadoop.tools.posum.common.records.dataentity.JobConfProxy;
import org.apache.hadoop.tools.posum.common.records.dataentity.JobProfile;
import org.apache.hadoop.tools.posum.common.records.payload.SingleEntityPayload;
import org.apache.hadoop.tools.posum.common.util.PosumException;
import org.apache.hadoop.tools.posum.common.util.cluster.ClusterUtils;
import org.apache.hadoop.tools.posum.common.util.communication.RestClient;
import org.apache.hadoop.tools.posum.common.util.conf.PosumConfiguration;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.util.Records;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import static org.apache.hadoop.tools.posum.common.util.conf.PosumConfiguration.DATABASE_DEADLINES;
import static org.apache.hadoop.tools.posum.common.util.conf.PosumConfiguration.DATABASE_DEADLINES_DEFAULT;
class JobInfoCollector {
  private static Log logger = LogFactory.getLog(JobInfoCollector.class);

  // Legacy (MRv1) configuration keys, used as a fallback when the
  // corresponding MRJobConfig keys are not present in the job conf.
  private static final String OLD_MAP_CLASS_ATTR = "mapred.mapper.class";
  private static final String OLD_REDUCE_CLASS_ATTR = "mapred.reducer.class";

  private HadoopAPIClient api;
  private Database db;
  private HdfsReader hdfsReader;
  // when true, deadlines are resolved from the DEADLINE collection in the
  // database instead of the job configuration
  private boolean databaseDeadlines;

  JobInfoCollector() {
    // leaves collaborators unset; presumably only used for testing/mocking -- TODO confirm
  }

  JobInfoCollector(Configuration conf, Database db) {
    this.api = new HadoopAPIClient();
    this.db = db;
    try {
      this.hdfsReader = new HdfsReader(conf);
    } catch (IOException e) {
      // FIX: message previously read "Cannot not access HDFS" (double negative)
      throw new PosumException("Cannot access HDFS ", e);
    }
    this.databaseDeadlines = conf.getBoolean(DATABASE_DEADLINES, DATABASE_DEADLINES_DEFAULT);
  }

  /**
   * Builds the job information for a running application, merging any stored
   * profile with the latest data from the AM REST API.
   * Returns null when the job can no longer be tracked as running (e.g. it
   * finished between calls).
   */
  JobInfo getRunningJobInfo(AppProfile app) {
    JobInfo info = new JobInfo();
    JobProfile profile = getCurrentProfileForApp(app);
    if (profile == null) {
      // if not found, force the reading of the configuration
      try {
        logger.debug("Forcing fetch of job info from conf for " + app.getId());
        info = getSubmittedJobInfo(app.getId(), app.getUser());
        if (info.getProfile() != null)
          profile = info.getProfile();
      } catch (Exception e) {
        logger.debug("Could not retrieve job info for app " + app.getId(), e);
      }
    }
    if (RestClient.TrackingUI.AM.equals(app.getTrackingUI())) {
      JobProfile newProfile = api.getRunningJobInfo(app.getId(), app.getQueue(), profile);
      if (newProfile == null)
        // job might have finished; return
        return null;
      profile = newProfile;
      if (!api.addRunningAttemptInfo(profile)) {
        return null;
      }
      // get counters
      CountersProxy counters = api.getRunningJobCounters(app.getId(), profile.getId());
      if (counters == null)
        // job might have finished; return
        return null;
      info.setJobCounters(counters);
    }
    if (profile == null)
      return null;
    if (profile.getSubmitTime() == null)
      // fall back to the application start time when the AM did not report one
      profile.setSubmitTime(app.getStartTime());
    if (databaseDeadlines && profile.getDeadline() == null) {
      setDatabaseDeadline(profile);
    }
    info.setProfile(profile);
    return info;
  }

  /**
   * Builds the job information for a finished application from the history
   * server, including attempts, conf, deadline and counters.
   * Returns null when no history record could be located.
   */
  JobInfo getFinishedJobInfo(AppProfile app) {
    JobInfo info = new JobInfo();
    JobProfile profile = getFinishedJobProfile(app);
    info.setProfile(profile);
    if (profile == null) {
      logger.warn("No finished job info for " + app.getId());
      return null;
    }
    api.addFinishedAttemptInfo(profile);
    JobConfProxy jobConf = api.getFinishedJobConf(profile.getId());
    setClassNames(profile, jobConf);
    if (profile.getDeadline() == null) {
      if (databaseDeadlines)
        setDatabaseDeadline(profile);
      else
        setDeadlineFromConf(profile, jobConf);
    }
    info.setConf(jobConf);
    CountersProxy counters = api.getFinishedJobCounters(profile.getId());
    info.setJobCounters(counters);
    ClusterUtils.updateJobStatisticsFromCounters(profile, counters);
    return info;
  }

  // Fetches the history-server profile for the app, merging it with the
  // running-time record when one exists in the database.
  private JobProfile getFinishedJobProfile(AppProfile app) {
    String appId = app.getId();
    JobProfile job = getCurrentProfileForApp(app);
    if (job == null) {
      // there is no running record of the job
      job = api.getFinishedJobInfo(appId);
    } else {
      // merge the running info with the history info
      job = api.getFinishedJobInfo(appId, job.getId(), job);
    }
    return job;
  }

  // Looks up the job profile currently stored in the database for this app;
  // null when the app has no job record yet.
  private JobProfile getCurrentProfileForApp(AppProfile app) {
    SingleEntityPayload ret = db.execute(JobForAppCall.newInstance(app.getId()));
    if (ret == null)
      return null;
    return ret.getEntity();
  }

  /**
   * Reconstructs job information for a just-submitted application directly
   * from its staged configuration and split metadata on HDFS.
   *
   * @throws IOException when the staged conf or split info cannot be read
   */
  JobInfo getSubmittedJobInfo(String appId,
                              String user) throws IOException {
    ApplicationId realAppId = ClusterUtils.parseApplicationId(appId);
    JobId jobId = MRBuilderUtils.newJobId(realAppId, realAppId.getId());
    final JobConfProxy confProxy = hdfsReader.getSubmittedConf(jobId, user);
    return getJobInfoFromConf(jobId, confProxy);
  }

  // Populates a fresh JobProfile from the submitted configuration and the
  // split meta info (map count, split sizes/locations, reduce count).
  private JobInfo getJobInfoFromConf(JobId jobId, JobConfProxy jobConfProxy) throws IOException {
    JobProfile job = Records.newRecord(JobProfile.class);
    job.setId(jobId.toString());
    job.setAppId(jobId.getAppId().toString());
    job.setName(jobConfProxy.getEntry(MRJobConfig.JOB_NAME));
    job.setUser(jobConfProxy.getEntry(MRJobConfig.USER_NAME));
    job.setQueue(jobConfProxy.getEntry(MRJobConfig.QUEUE_NAME));
    setClassNames(job, jobConfProxy);
    if (!databaseDeadlines)
      setDeadlineFromConf(job, jobConfProxy);
    // read split info
    JobSplit.TaskSplitMetaInfo[] taskSplitMetaInfo = hdfsReader.getSplitMetaInfo(jobId, jobConfProxy);
    job.setTotalMapTasks(taskSplitMetaInfo.length);
    // add one map task stub per split
    long inputLength = 0;
    List<List<String>> splitLocations = new ArrayList<>(taskSplitMetaInfo.length);
    List<Long> splitSizes = new ArrayList<>(taskSplitMetaInfo.length);
    for (JobSplit.TaskSplitMetaInfo aTaskSplitMetaInfo : taskSplitMetaInfo) {
      long splitSize = aTaskSplitMetaInfo.getInputDataLength();
      inputLength += splitSize;
      splitSizes.add(splitSize);
      splitLocations.add(Arrays.asList(aTaskSplitMetaInfo.getLocations()));
    }
    job.setTotalSplitSize(inputLength);
    job.setSplitLocations(splitLocations);
    job.setSplitSizes(splitSizes);
    // add reduce task stubs according to configuration
    int reduces = 0;
    String reducesString = jobConfProxy.getEntry(MRJobConfig.NUM_REDUCES);
    if (reducesString != null && reducesString.length() > 0)
      reduces = Integer.parseInt(reducesString);
    job.setTotalReduceTasks(reduces);
    return new JobInfo(job, jobConfProxy);
  }

  // Reads the POSUM deadline from the job conf; 0 means "no deadline".
  private void setDeadlineFromConf(JobProfile job, JobConfProxy confProxy) {
    String deadlineString = confProxy.getEntry(PosumConfiguration.APP_DEADLINE);
    if (deadlineString != null)
      job.setDeadline(Long.valueOf(deadlineString));
    else
      job.setDeadline(0L);
  }

  // Reads the externally-registered deadline for this job from the database,
  // leaving the profile untouched when none is registered.
  private void setDatabaseDeadline(JobProfile job) {
    FindByIdCall findDeadline = FindByIdCall.newInstance(DataEntityCollection.DEADLINE, job.getId());
    ExternalDeadline deadline = db.execute(findDeadline).getEntity();
    if (deadline != null)
      job.setDeadline(deadline.getDeadline());
  }

  /**
   * Resolves the mapper and reducer class names for the job: tries the MRv2
   * key, falls back to the legacy mapred key, and finally defaults to the
   * identity implementations.
   */
  private void setClassNames(JobProfile profile, JobConfProxy conf) {
    String mapClass = conf.getEntry(MRJobConfig.MAP_CLASS_ATTR);
    if (mapClass == null)
      mapClass = conf.getEntry(OLD_MAP_CLASS_ATTR);
    if (mapClass == null)
      // FIX: previously called setReducerClass(IdentityMapper...) here (wrong
      // setter) and then overwrote the mapper class with null below
      mapClass = IdentityMapper.class.getName();
    profile.setMapperClass(mapClass);
    String reduceClass = conf.getEntry(MRJobConfig.REDUCE_CLASS_ATTR);
    if (reduceClass == null)
      reduceClass = conf.getEntry(OLD_REDUCE_CLASS_ATTR);
    if (reduceClass == null)
      // FIX: previously the IdentityReducer default was immediately
      // overwritten with setReducerClass(null)
      reduceClass = IdentityReducer.class.getName();
    profile.setReducerClass(reduceClass);
  }
}
| |
/*
* Copyright 2010 The Apache Software Foundation
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.rest.client;
import java.io.IOException;
import org.apache.commons.httpclient.Header;
import org.apache.commons.httpclient.HttpClient;
import org.apache.commons.httpclient.HttpMethod;
import org.apache.commons.httpclient.HttpVersion;
import org.apache.commons.httpclient.MultiThreadedHttpConnectionManager;
import org.apache.commons.httpclient.URI;
import org.apache.commons.httpclient.methods.ByteArrayRequestEntity;
import org.apache.commons.httpclient.methods.DeleteMethod;
import org.apache.commons.httpclient.methods.GetMethod;
import org.apache.commons.httpclient.methods.HeadMethod;
import org.apache.commons.httpclient.methods.PostMethod;
import org.apache.commons.httpclient.methods.PutMethod;
import org.apache.commons.httpclient.params.HttpClientParams;
import org.apache.commons.httpclient.params.HttpConnectionManagerParams;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
/**
 * A wrapper around HttpClient which provides some useful function and
 * semantics for interacting with the REST gateway.
 */
public class Client {
  public static final Header[] EMPTY_HEADER_ARRAY = new Header[0];

  private static final Log LOG = LogFactory.getLog(Client.class);

  private HttpClient httpClient;
  private Cluster cluster;

  /**
   * Default Constructor
   */
  public Client() {
    this(null);
  }

  /**
   * Constructor
   * @param cluster the cluster definition
   */
  public Client(Cluster cluster) {
    this.cluster = cluster;
    MultiThreadedHttpConnectionManager manager =
      new MultiThreadedHttpConnectionManager();
    HttpConnectionManagerParams managerParams = manager.getParams();
    managerParams.setConnectionTimeout(2000); // 2 s
    managerParams.setDefaultMaxConnectionsPerHost(10);
    managerParams.setMaxTotalConnections(100);
    this.httpClient = new HttpClient(manager);
    HttpClientParams clientParams = httpClient.getParams();
    clientParams.setVersion(HttpVersion.HTTP_1_1);
  }

  /**
   * Shut down the client. Close any open persistent connections.
   */
  public void shutdown() {
    MultiThreadedHttpConnectionManager manager =
      (MultiThreadedHttpConnectionManager) httpClient.getHttpConnectionManager();
    manager.shutdown();
  }

  /**
   * Execute a transaction method given only the path. Will select at random
   * one of the members of the supplied cluster definition and iterate through
   * the list until a transaction can be successfully completed. The
   * definition of success here is a complete HTTP transaction, irrespective
   * of result code.
   * @param cluster the cluster definition
   * @param method the transaction method
   * @param headers HTTP header values to send
   * @param path the path
   * @return the HTTP response code
   * @throws IOException
   */
  @SuppressWarnings("deprecation")
  public int executePathOnly(Cluster cluster, HttpMethod method,
      Header[] headers, String path) throws IOException {
    IOException lastException;
    if (cluster.nodes.size() < 1) {
      throw new IOException("Cluster is empty");
    }
    int start = (int)Math.round((cluster.nodes.size() - 1) * Math.random());
    int i = start;
    do {
      cluster.lastHost = cluster.nodes.get(i);
      try {
        StringBuilder sb = new StringBuilder();
        sb.append("http://");
        sb.append(cluster.lastHost);
        sb.append(path);
        URI uri = new URI(sb.toString());
        return executeURI(method, headers, uri.toString());
      } catch (IOException e) {
        lastException = e;
      }
      // FIX: wrap around so every node gets a chance regardless of the random
      // starting index (previously nodes before the start index were skipped)
      i = (i + 1) % cluster.nodes.size();
    } while (i != start);
    // all nodes failed; surface the last failure
    throw lastException;
  }

  /**
   * Execute a transaction method given a complete URI.
   * @param method the transaction method
   * @param headers HTTP header values to send
   * @param uri the URI
   * @return the HTTP response code
   * @throws IOException
   */
  @SuppressWarnings("deprecation")
  public int executeURI(HttpMethod method, Header[] headers, String uri)
      throws IOException {
    method.setURI(new URI(uri));
    if (headers != null) {
      for (Header header: headers) {
        method.addRequestHeader(header);
      }
    }
    long startTime = System.currentTimeMillis();
    int code = httpClient.executeMethod(method);
    long endTime = System.currentTimeMillis();
    if (LOG.isDebugEnabled()) {
      LOG.debug(method.getName() + " " + uri + " " + code + " " +
        method.getStatusText() + " in " + (endTime - startTime) + " ms");
    }
    return code;
  }

  /**
   * Execute a transaction method. Will call either <tt>executePathOnly</tt>
   * or <tt>executeURI</tt> depending on whether a path only is supplied in
   * 'path', or if a complete URI is passed instead, respectively.
   * @param cluster the cluster definition
   * @param method the HTTP method
   * @param headers HTTP header values to send
   * @param path the path or URI
   * @return the HTTP response code
   * @throws IOException
   */
  public int execute(Cluster cluster, HttpMethod method, Header[] headers,
      String path) throws IOException {
    if (path.startsWith("/")) {
      return executePathOnly(cluster, method, headers, path);
    }
    return executeURI(method, headers, path);
  }

  /**
   * @return the cluster definition
   */
  public Cluster getCluster() {
    return cluster;
  }

  /**
   * @param cluster the cluster definition
   */
  public void setCluster(Cluster cluster) {
    this.cluster = cluster;
  }

  /**
   * Send a HEAD request
   * @param path the path or URI
   * @return a Response object with response detail
   * @throws IOException
   */
  public Response head(String path) throws IOException {
    return head(cluster, path, null);
  }

  /**
   * Send a HEAD request
   * @param cluster the cluster definition
   * @param path the path or URI
   * @param headers the HTTP headers to include in the request
   * @return a Response object with response detail
   * @throws IOException
   */
  public Response head(Cluster cluster, String path, Header[] headers)
      throws IOException {
    HeadMethod method = new HeadMethod();
    try {
      // FIX: forward the caller-supplied headers (previously null was passed,
      // silently dropping them despite the documented contract)
      int code = execute(cluster, method, headers, path);
      headers = method.getResponseHeaders();
      return new Response(code, headers, null);
    } finally {
      method.releaseConnection();
    }
  }

  /**
   * Send a GET request
   * @param path the path or URI
   * @return a Response object with response detail
   * @throws IOException
   */
  public Response get(String path) throws IOException {
    return get(cluster, path);
  }

  /**
   * Send a GET request
   * @param cluster the cluster definition
   * @param path the path or URI
   * @return a Response object with response detail
   * @throws IOException
   */
  public Response get(Cluster cluster, String path) throws IOException {
    return get(cluster, path, EMPTY_HEADER_ARRAY);
  }

  /**
   * Send a GET request
   * @param path the path or URI
   * @param accept Accept header value
   * @return a Response object with response detail
   * @throws IOException
   */
  public Response get(String path, String accept) throws IOException {
    return get(cluster, path, accept);
  }

  /**
   * Send a GET request
   * @param cluster the cluster definition
   * @param path the path or URI
   * @param accept Accept header value
   * @return a Response object with response detail
   * @throws IOException
   */
  public Response get(Cluster cluster, String path, String accept)
      throws IOException {
    Header[] headers = new Header[1];
    headers[0] = new Header("Accept", accept);
    return get(cluster, path, headers);
  }

  /**
   * Send a GET request
   * @param path the path or URI
   * @param headers the HTTP headers to include in the request,
   * <tt>Accept</tt> must be supplied
   * @return a Response object with response detail
   * @throws IOException
   */
  public Response get(String path, Header[] headers) throws IOException {
    return get(cluster, path, headers);
  }

  /**
   * Send a GET request
   * @param c the cluster definition
   * @param path the path or URI
   * @param headers the HTTP headers to include in the request
   * @return a Response object with response detail
   * @throws IOException
   */
  public Response get(Cluster c, String path, Header[] headers)
      throws IOException {
    GetMethod method = new GetMethod();
    try {
      int code = execute(c, method, headers, path);
      headers = method.getResponseHeaders();
      byte[] body = method.getResponseBody();
      return new Response(code, headers, body);
    } finally {
      method.releaseConnection();
    }
  }

  /**
   * Send a PUT request
   * @param path the path or URI
   * @param contentType the content MIME type
   * @param content the content bytes
   * @return a Response object with response detail
   * @throws IOException
   */
  public Response put(String path, String contentType, byte[] content)
      throws IOException {
    return put(cluster, path, contentType, content);
  }

  /**
   * Send a PUT request
   * @param cluster the cluster definition
   * @param path the path or URI
   * @param contentType the content MIME type
   * @param content the content bytes
   * @return a Response object with response detail
   * @throws IOException
   */
  public Response put(Cluster cluster, String path, String contentType,
      byte[] content) throws IOException {
    Header[] headers = new Header[1];
    headers[0] = new Header("Content-Type", contentType);
    return put(cluster, path, headers, content);
  }

  /**
   * Send a PUT request
   * @param path the path or URI
   * @param headers the HTTP headers to include, <tt>Content-Type</tt> must be
   * supplied
   * @param content the content bytes
   * @return a Response object with response detail
   * @throws IOException
   */
  public Response put(String path, Header[] headers, byte[] content)
      throws IOException {
    return put(cluster, path, headers, content);
  }

  /**
   * Send a PUT request
   * @param cluster the cluster definition
   * @param path the path or URI
   * @param headers the HTTP headers to include, <tt>Content-Type</tt> must be
   * supplied
   * @param content the content bytes
   * @return a Response object with response detail
   * @throws IOException
   */
  public Response put(Cluster cluster, String path, Header[] headers,
      byte[] content) throws IOException {
    PutMethod method = new PutMethod();
    try {
      method.setRequestEntity(new ByteArrayRequestEntity(content));
      int code = execute(cluster, method, headers, path);
      headers = method.getResponseHeaders();
      content = method.getResponseBody();
      return new Response(code, headers, content);
    } finally {
      method.releaseConnection();
    }
  }

  /**
   * Send a POST request
   * @param path the path or URI
   * @param contentType the content MIME type
   * @param content the content bytes
   * @return a Response object with response detail
   * @throws IOException
   */
  public Response post(String path, String contentType, byte[] content)
      throws IOException {
    return post(cluster, path, contentType, content);
  }

  /**
   * Send a POST request
   * @param cluster the cluster definition
   * @param path the path or URI
   * @param contentType the content MIME type
   * @param content the content bytes
   * @return a Response object with response detail
   * @throws IOException
   */
  public Response post(Cluster cluster, String path, String contentType,
      byte[] content) throws IOException {
    Header[] headers = new Header[1];
    headers[0] = new Header("Content-Type", contentType);
    return post(cluster, path, headers, content);
  }

  /**
   * Send a POST request
   * @param path the path or URI
   * @param headers the HTTP headers to include, <tt>Content-Type</tt> must be
   * supplied
   * @param content the content bytes
   * @return a Response object with response detail
   * @throws IOException
   */
  public Response post(String path, Header[] headers, byte[] content)
      throws IOException {
    return post(cluster, path, headers, content);
  }

  /**
   * Send a POST request
   * @param cluster the cluster definition
   * @param path the path or URI
   * @param headers the HTTP headers to include, <tt>Content-Type</tt> must be
   * supplied
   * @param content the content bytes
   * @return a Response object with response detail
   * @throws IOException
   */
  public Response post(Cluster cluster, String path, Header[] headers,
      byte[] content) throws IOException {
    PostMethod method = new PostMethod();
    try {
      method.setRequestEntity(new ByteArrayRequestEntity(content));
      int code = execute(cluster, method, headers, path);
      headers = method.getResponseHeaders();
      content = method.getResponseBody();
      return new Response(code, headers, content);
    } finally {
      method.releaseConnection();
    }
  }

  /**
   * Send a DELETE request
   * @param path the path or URI
   * @return a Response object with response detail
   * @throws IOException
   */
  public Response delete(String path) throws IOException {
    return delete(cluster, path);
  }

  /**
   * Send a DELETE request
   * @param cluster the cluster definition
   * @param path the path or URI
   * @return a Response object with response detail
   * @throws IOException
   */
  public Response delete(Cluster cluster, String path) throws IOException {
    DeleteMethod method = new DeleteMethod();
    try {
      int code = execute(cluster, method, null, path);
      Header[] headers = method.getResponseHeaders();
      byte[] content = method.getResponseBody();
      return new Response(code, headers, content);
    } finally {
      method.releaseConnection();
    }
  }
}
| |
/*
* Copyright (c) 2008-2018, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.spi.impl.operationservice.impl;
import com.hazelcast.client.impl.protocol.task.MessageTask;
import com.hazelcast.cluster.ClusterState;
import com.hazelcast.core.HazelcastException;
import com.hazelcast.core.HazelcastInstanceNotActiveException;
import com.hazelcast.instance.MemberImpl;
import com.hazelcast.instance.Node;
import com.hazelcast.instance.NodeState;
import com.hazelcast.instance.OutOfMemoryErrorDispatcher;
import com.hazelcast.internal.metrics.MetricsProvider;
import com.hazelcast.internal.metrics.MetricsRegistry;
import com.hazelcast.internal.metrics.Probe;
import com.hazelcast.internal.partition.InternalPartition;
import com.hazelcast.internal.serialization.impl.SerializationServiceV1;
import com.hazelcast.internal.util.counters.Counter;
import com.hazelcast.logging.ILogger;
import com.hazelcast.nio.Address;
import com.hazelcast.nio.Connection;
import com.hazelcast.nio.ObjectDataInput;
import com.hazelcast.nio.Packet;
import com.hazelcast.nio.serialization.Data;
import com.hazelcast.quorum.QuorumException;
import com.hazelcast.quorum.impl.QuorumServiceImpl;
import com.hazelcast.spi.BlockingOperation;
import com.hazelcast.spi.CallStatus;
import com.hazelcast.spi.Notifier;
import com.hazelcast.spi.Offload;
import com.hazelcast.spi.Operation;
import com.hazelcast.spi.OperationResponseHandler;
import com.hazelcast.spi.ReadonlyOperation;
import com.hazelcast.spi.exception.CallerNotMemberException;
import com.hazelcast.spi.exception.PartitionMigratingException;
import com.hazelcast.spi.exception.ResponseAlreadySentException;
import com.hazelcast.spi.exception.RetryableException;
import com.hazelcast.spi.exception.RetryableHazelcastException;
import com.hazelcast.spi.exception.WrongTargetException;
import com.hazelcast.spi.impl.AllowedDuringPassiveState;
import com.hazelcast.spi.impl.NodeEngineImpl;
import com.hazelcast.spi.impl.operationexecutor.OperationRunner;
import com.hazelcast.spi.impl.operationservice.impl.operations.Backup;
import com.hazelcast.spi.impl.operationservice.impl.responses.CallTimeoutResponse;
import com.hazelcast.spi.impl.operationservice.impl.responses.ErrorResponse;
import com.hazelcast.spi.impl.operationservice.impl.responses.NormalResponse;
import com.hazelcast.util.ExceptionUtil;
import java.io.IOException;
import java.util.logging.Level;
import static com.hazelcast.internal.metrics.ProbeLevel.DEBUG;
import static com.hazelcast.internal.util.counters.MwCounter.newMwCounter;
import static com.hazelcast.internal.util.counters.SwCounter.newSwCounter;
import static com.hazelcast.spi.CallStatus.DONE_RESPONSE_ORDINAL;
import static com.hazelcast.spi.CallStatus.DONE_VOID_ORDINAL;
import static com.hazelcast.spi.CallStatus.OFFLOAD_ORDINAL;
import static com.hazelcast.spi.CallStatus.WAIT_ORDINAL;
import static com.hazelcast.spi.OperationAccessor.setCallerAddress;
import static com.hazelcast.spi.OperationAccessor.setConnection;
import static com.hazelcast.spi.impl.OperationResponseHandlerFactory.createEmptyResponseHandler;
import static com.hazelcast.spi.impl.operationutil.Operations.isJoinOperation;
import static com.hazelcast.spi.impl.operationutil.Operations.isMigrationOperation;
import static com.hazelcast.spi.impl.operationutil.Operations.isWanReplicationOperation;
import static com.hazelcast.spi.properties.GroupProperty.DISABLE_STALE_READ_ON_PARTITION_MIGRATION;
import static java.util.logging.Level.FINEST;
import static java.util.logging.Level.SEVERE;
import static java.util.logging.Level.WARNING;
/**
* Responsible for processing an Operation.
*/
@SuppressWarnings("checkstyle:classfanoutcomplexity")
class OperationRunnerImpl extends OperationRunner implements MetricsProvider {
// sentinel partition id marking a shared, generic-only ("ad hoc") runner
static final int AD_HOC_PARTITION_ID = -2;
private final ILogger logger;
private final OperationServiceImpl operationService;
private final Node node;
private final NodeEngineImpl nodeEngine;
@Probe(level = DEBUG)
private final Counter executedOperationsCounter;
// address of the local member, cached for partition-ownership checks
private final Address thisAddress;
// when true, read-only operations may execute while their partition is migrating
private final boolean staleReadOnMigrationEnabled;
private final Counter failedBackupsCounter;
private final OperationBackupHandler backupHandler;
// has only meaning for metrics.
private final int genericId;
// This field doesn't need additional synchronization, since a partition-specific OperationRunner
// will never be called concurrently.
private InternalPartition internalPartition;
private final OutboundResponseHandler outboundResponseHandler;
// When partitionId >= 0, it is partition specific
// when partitionId = -1, it is generic
// when partitionId = -2, it is ad hoc
// an ad-hoc OperationRunner can only process generic operations, but it can be shared between threads
// and therefore the {@link OperationRunner#currentTask()} always returns null
OperationRunnerImpl(OperationServiceImpl operationService, int partitionId, int genericId, Counter failedBackupsCounter) {
  super(partitionId);
  this.genericId = genericId;
  this.operationService = operationService;
  this.logger = operationService.node.getLogger(OperationRunnerImpl.class);
  this.node = operationService.node;
  this.thisAddress = node.getThisAddress();
  this.nodeEngine = operationService.nodeEngine;
  this.outboundResponseHandler = operationService.outboundResponseHandler;
  this.staleReadOnMigrationEnabled = !node.getProperties().getBoolean(DISABLE_STALE_READ_ON_PARTITION_MIGRATION);
  this.failedBackupsCounter = failedBackupsCounter;
  this.backupHandler = operationService.backupHandler;
  // only an ad-hoc operation runner will be called concurrently, so only it needs a multi-writer counter
  this.executedOperationsCounter = partitionId == AD_HOC_PARTITION_ID ? newMwCounter() : newSwCounter();
}
@Override
// Number of operations executed by this runner so far (metrics only).
public long executedOperationsCount() {
  return executedOperationsCounter.get();
}
@Override
public void provideMetrics(MetricsRegistry registry) {
  // Pick the metrics namespace according to the runner flavor:
  // partition-specific, generic, or ad hoc (see constructor comments).
  final String namespace;
  if (partitionId >= 0) {
    namespace = "operation.partition[" + partitionId + "]";
  } else if (partitionId == -1) {
    namespace = "operation.generic[" + genericId + "]";
  } else {
    namespace = "operation.adhoc";
  }
  registry.scanAndRegister(this, namespace);
}
@Override
public void run(Runnable task) {
  if (!publishCurrentTask()) {
    // not eligible to expose a current task (ad-hoc runner or nested call)
    task.run();
    return;
  }
  // expose the task while it runs, clearing it afterwards
  currentTask = task;
  try {
    task.run();
  } finally {
    currentTask = null;
  }
}
private boolean publishCurrentTask() {
  // Ad-hoc runners are shared between threads and never expose a current task.
  if (getPartitionId() == AD_HOC_PARTITION_ID) {
    return false;
  }
  // Publish when nothing is tracked yet, or when the tracked task is the
  // client MessageTask that triggered this execution.
  return currentTask == null || currentTask instanceof MessageTask;
}
@Override
public void run(Operation op) {
  executedOperationsCounter.inc();
  final boolean publish = publishCurrentTask();
  if (publish) {
    currentTask = op;
  }
  try {
    // pre-flight checks: node state, call timeout, partition ownership, quorum
    checkNodeState(op);
    if (timeout(op)) {
      // a CallTimeoutResponse was already sent; nothing more to do
      return;
    }
    ensureNoPartitionProblems(op);
    ensureQuorumPresent(op);
    op.beforeRun();
    call(op);
  } catch (Throwable e) {
    handleOperationError(op, e);
  } finally {
    if (publish) {
      currentTask = null;
    }
  }
}
// Invokes the operation and dispatches on the resulting CallStatus:
// respond immediately, finish silently, offload, or park until notified.
private void call(Operation op) throws Exception {
  CallStatus callStatus = op.call();
  int ordinal = callStatus.ordinal();
  if (ordinal == DONE_RESPONSE_ORDINAL) {
    handleResponse(op);
    afterRun(op);
  } else if (ordinal == DONE_VOID_ORDINAL) {
    op.afterRun();
  } else if (ordinal == OFFLOAD_ORDINAL) {
    op.afterRun();
    Offload offload = (Offload) callStatus;
    offload.init(nodeEngine, operationService.asyncOperations);
    offload.start();
  } else if (ordinal == WAIT_ORDINAL) {
    // blocking operation: park until a Notifier wakes it up
    nodeEngine.getOperationParker().park((BlockingOperation) op);
  } else {
    throw new IllegalStateException();
  }
}
// Sends the operation's response back to the caller; when backups were sent,
// wraps it in a NormalResponse carrying the number of backup acks to await.
private void handleResponse(Operation op) throws Exception {
  int backupAcks = backupHandler.sendBackups(op);
  try {
    Object response = backupAcks > 0
        ? new NormalResponse(op.getResponse(), op.getCallId(), backupAcks, op.isUrgent())
        : op.getResponse();
    op.sendResponse(response);
  } catch (ResponseAlreadySentException e) {
    logOperationError(op, e);
  }
}
/**
 * Rejects the operation when this member or the cluster is not in a state to
 * execute it. Throws HazelcastInstanceNotActiveException, IllegalStateException
 * or RetryableHazelcastException depending on node and cluster state.
 */
private void checkNodeState(Operation op) {
  NodeState state = node.getState();
  if (state == NodeState.ACTIVE) {
    return;
  }
  Address localAddress = node.getThisAddress();
  if (state == NodeState.SHUT_DOWN) {
    throw new HazelcastInstanceNotActiveException("Member " + localAddress + " is shut down! Operation: " + op);
  }
  if (op instanceof AllowedDuringPassiveState) {
    return;
  }
  // Cluster is in passive state. There is no need to retry.
  if (nodeEngine.getClusterService().getClusterState() == ClusterState.PASSIVE) {
    throw new IllegalStateException("Cluster is in " + ClusterState.PASSIVE + " state! Operation: " + op);
  }
  // Operation has no partition ID, so it's sent to this node on purpose.
  // Operation will fail since node is shutting down or cluster is passive.
  if (op.getPartitionId() < 0) {
    throw new HazelcastInstanceNotActiveException("Member " + localAddress + " is currently passive! Operation: " + op);
  }
  // Cluster is not passive but this node is shutting down.
  // Since operation has a partition ID, it must be retried on another node.
  throw new RetryableHazelcastException("Member " + localAddress + " is currently shutting down! Operation: " + op);
}
/**
 * Ensures that the quorum is present if a quorum is configured and the
 * operation is quorum aware.
 *
 * @param op the operation for which the quorum must be checked for presence
 * @throws QuorumException if the operation requires a quorum and the quorum is not present
 */
private void ensureQuorumPresent(Operation op) {
  operationService.nodeEngine.getQuorumService().ensureQuorumPresent(op);
}
// Returns true (and answers with a CallTimeoutResponse) when the invocation's
// call timeout has already expired; false when the op should still run.
private boolean timeout(Operation op) {
  if (operationService.isCallTimedOut(op)) {
    op.sendResponse(new CallTimeoutResponse(op.getCallId(), op.isUrgent()));
    return true;
  }
  return false;
}
// Runs the operation's afterRun hook and, when the op is a Notifier that
// wants to notify, unparks any waiting blocking operations.
private void afterRun(Operation op) {
  try {
    op.afterRun();
    if (op instanceof Notifier && ((Notifier) op).shouldNotify()) {
      operationService.nodeEngine.getOperationParker().unpark((Notifier) op);
    }
  } catch (Throwable e) {
    // The response phase has already completed, so afterRun/notifier failures
    // can no longer be reported to the caller; just log them.
    logOperationError(op, e);
  }
}
/**
 * Verifies this runner may execute the partition operation: the partition id
 * must match this runner, the partition must not be migrating (unless the op
 * is allowed to retry during migration), and this member must own the
 * addressed replica.
 *
 * @throws IllegalStateException       when the op was routed to the wrong runner
 * @throws PartitionMigratingException when the partition is currently migrating
 * @throws WrongTargetException        when another member owns the replica
 */
private void ensureNoPartitionProblems(Operation op) {
  int partitionId = op.getPartitionId();
  if (partitionId < 0) {
    // generic operation; no partition checks apply
    return;
  }
  if (partitionId != getPartitionId()) {
    throw new IllegalStateException("wrong partition, expected: " + getPartitionId() + " but found:" + partitionId);
  }
  if (internalPartition == null) {
    // resolved lazily; safe since a partition-specific runner is never called concurrently
    internalPartition = nodeEngine.getPartitionService().getPartition(partitionId);
  }
  if (!isAllowedToRetryDuringMigration(op) && internalPartition.isMigrating()) {
    throw new PartitionMigratingException(thisAddress, partitionId,
        op.getClass().getName(), op.getServiceName());
  }
  Address owner = internalPartition.getReplicaAddress(op.getReplicaIndex());
  if (op.validatesTarget() && !thisAddress.equals(owner)) {
    throw new WrongTargetException(thisAddress, owner, partitionId, op.getReplicaIndex(),
        op.getClass().getName(), op.getServiceName());
  }
}
// Migration operations always run during migration; read-only operations may
// run too when stale reads during migration are enabled.
private boolean isAllowedToRetryDuringMigration(Operation op) {
  if (isMigrationOperation(op)) {
    return true;
  }
  return staleReadOnMigrationEnabled && op instanceof ReadonlyOperation;
}
/**
 * Central error handler for a failed operation: notifies the operation of its failure,
 * logs the error and — except for backups, which have no waiting caller — sends an
 * error response back to the caller.
 */
private void handleOperationError(Operation operation, Throwable e) {
    if (e instanceof OutOfMemoryError) {
        OutOfMemoryErrorDispatcher.onOutOfMemory((OutOfMemoryError) e);
    }
    try {
        // Give the operation a chance to react to its own failure; guard against the
        // callback itself throwing.
        operation.onExecutionFailure(e);
    } catch (Throwable t) {
        logger.warning("While calling 'operation.onFailure(e)'... op: " + operation + ", error: " + e, t);
    }
    operation.logError(e);
    if (operation instanceof Backup) {
        // Backups are fire-and-forget; just count the failure.
        failedBackupsCounter.inc();
        return;
    }
    // A response is sent regardless of the Operation.returnsResponse method because some operations do want to send
    // back a response, but they didn't want to send it yet but they ran into some kind of error. If on the receiving
    // side no invocation is waiting, the response is ignored.
    sendResponseAfterOperationError(operation, e);
}
/**
 * Sends an error response for the failed operation. When this node is already shut
 * down, only locally-executed operations are answered (with a not-active exception);
 * any failure while sending is logged, never propagated.
 */
private void sendResponseAfterOperationError(Operation operation, Throwable e) {
    try {
        boolean shutDown = node.getState() == NodeState.SHUT_DOWN;
        if (!shutDown) {
            operation.sendResponse(errorResponse(operation, e));
        } else if (operation.executedLocally()) {
            operation.sendResponse(errorResponse(operation, new HazelcastInstanceNotActiveException()));
        }
    } catch (Throwable t) {
        logger.warning("While sending op error... op: " + operation + ", error: " + e, t);
    }
}
/**
 * Logs an operation error that can no longer be reported to the caller, routing
 * out-of-memory errors through the dedicated dispatcher first.
 */
private void logOperationError(Operation op, Throwable e) {
    if (e instanceof OutOfMemoryError) {
        OutOfMemoryError oome = (OutOfMemoryError) e;
        OutOfMemoryErrorDispatcher.onOutOfMemory(oome);
    }
    op.logError(e);
}
/**
 * Deserializes an operation from the given packet, prepares it (caller address,
 * connection, UUID, response handler) and runs it.
 * <p>
 * If anything before execution fails — typically deserialization — the call ID is
 * extracted straight from the raw bytes so an {@link ErrorResponse} can still reach
 * the caller, after which the failure is rethrown.
 */
@Override
public void run(Packet packet) throws Exception {
    boolean publishCurrentTask = publishCurrentTask();
    if (publishCurrentTask) {
        currentTask = packet;
    }
    Connection connection = packet.getConn();
    Address caller = connection.getEndPoint();
    try {
        Object object = nodeEngine.toObject(packet);
        Operation op = (Operation) object;
        op.setNodeEngine(nodeEngine);
        setCallerAddress(op, caller);
        setConnection(op, connection);
        setCallerUuidIfNotSet(caller, op);
        setOperationResponseHandler(op);
        if (!ensureValidMember(op)) {
            // Caller is not a cluster member (and not a join/WAN operation);
            // the error response has already been sent.
            return;
        }
        if (publishCurrentTask) {
            // From here on the Operation itself becomes the current task, not the packet.
            currentTask = null;
        }
        run(op);
    } catch (Throwable throwable) {
        // If exception happens we need to extract the callId from the bytes directly!
        long callId = extractOperationCallId(packet);
        outboundResponseHandler.send(caller, new ErrorResponse(throwable, callId, packet.isUrgent()));
        logOperationDeserializationException(throwable, callId);
        throw ExceptionUtil.rethrow(throwable);
    } finally {
        if (publishCurrentTask) {
            currentTask = null;
        }
    }
}
/** Builds an {@link ErrorResponse} carrying the operation's call ID and urgency flag. */
private ErrorResponse errorResponse(Operation op, Throwable error) {
    long callId = op.getCallId();
    boolean urgent = op.isUrgent();
    return new ErrorResponse(error, callId, urgent);
}
/**
 * This method has a direct dependency on how objects are serialized.
 * If the stream format is changed, this extraction method must be changed as well.
 * <p>
 * It makes an assumption that the callId is the first long field in the serialized operation.
 */
private long extractOperationCallId(Data data) throws IOException {
    // Skip the DataSerializable header, then read the first long — assumed to be the call ID.
    ObjectDataInput input = ((SerializationServiceV1) node.getSerializationService())
            .initDataSerializableInputAndSkipTheHeader(data);
    return input.readLong();
}
/**
 * Installs the response handler on the operation: the outbound handler for operations
 * with a call ID, an empty handler otherwise. An operation that wants to return a
 * response but has no call ID is a programming error and is rejected.
 */
private void setOperationResponseHandler(Operation op) {
    if (op.getCallId() != 0) {
        op.setOperationResponseHandler(outboundResponseHandler);
        return;
    }
    if (op.returnsResponse()) {
        throw new HazelcastException(
                "Operation " + op + " wants to return a response, but doesn't have a call ID");
    }
    op.setOperationResponseHandler(createEmptyResponseHandler());
}
/**
 * Checks that the caller is still a cluster member (join and WAN-replication
 * operations are exempt). Returns {@code false} after sending a
 * {@link CallerNotMemberException} response when the caller is unknown.
 */
private boolean ensureValidMember(Operation op) {
    boolean knownMember = node.clusterService.getMember(op.getCallerAddress()) != null;
    if (knownMember || isJoinOperation(op) || isWanReplicationOperation(op)) {
        return true;
    }
    Exception error = new CallerNotMemberException(thisAddress, op.getCallerAddress(),
            op.getPartitionId(), op.getClass().getName(), op.getServiceName());
    handleOperationError(op, error);
    return false;
}
/**
 * Fills in the caller UUID from the cluster membership view, but only when the
 * operation does not already carry one and the caller is a known member.
 */
private void setCallerUuidIfNotSet(Address caller, Operation op) {
    if (op.getCallerUuid() == null) {
        MemberImpl callerMember = node.clusterService.getMember(caller);
        if (callerMember != null) {
            op.setCallerUuid(callerMember.getUuid());
        }
    }
}
/**
 * Logs a failure that occurred while deserializing/preparing an operation, choosing
 * the log level from the throwable type and whether a caller awaits a response
 * (call ID != 0 means a response was expected, so retryable errors stay at FINEST).
 */
private void logOperationDeserializationException(Throwable t, long callId) {
    boolean returnsResponse = callId != 0;
    if (t instanceof RetryableException) {
        final Level level = returnsResponse ? FINEST : WARNING;
        if (logger.isLoggable(level)) {
            logger.log(level, t.getClass().getName() + ": " + t.getMessage());
        }
    } else if (t instanceof OutOfMemoryError) {
        try {
            logger.severe(t.getMessage(), t);
        } catch (Throwable ignored) {
            // BUGFIX: the original re-invoked the logger here with ignored.getMessage()
            // paired with 't' — a mismatched message, and under OOME the retry is likely
            // to fail again. Best-effort logging: swallow the secondary failure.
        }
    } else {
        final Level level = nodeEngine.isRunning() ? SEVERE : FINEST;
        if (logger.isLoggable(level)) {
            logger.log(level, t.getMessage(), t);
        }
    }
}
}
| |
package me.onenrico.holoblock.object;
import java.lang.reflect.Constructor;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import org.bukkit.Bukkit;
import org.bukkit.Location;
import org.bukkit.Material;
import org.bukkit.OfflinePlayer;
import org.bukkit.SkullType;
import org.bukkit.World;
import org.bukkit.block.Block;
import org.bukkit.block.BlockFace;
import org.bukkit.block.BlockState;
import org.bukkit.block.Skull;
import org.bukkit.entity.Entity;
import org.bukkit.entity.Player;
import org.bukkit.inventory.ItemStack;
import org.bukkit.potion.PotionEffect;
import org.bukkit.potion.PotionEffectType;
import org.bukkit.scheduler.BukkitRunnable;
import org.bukkit.scheduler.BukkitTask;
import com.gmail.filoghost.holographicdisplays.api.Hologram;
import com.gmail.filoghost.holographicdisplays.api.line.ItemLine;
import com.gmail.filoghost.holographicdisplays.api.line.TextLine;
import com.mojang.authlib.GameProfile;
import me.clip.placeholderapi.PlaceholderAPI;
import me.onenrico.holoblock.api.HoloBlockAPI;
import me.onenrico.holoblock.database.Datamanager;
import me.onenrico.holoblock.main.Core;
import me.onenrico.holoblock.utils.HoloUT;
import me.onenrico.holoblock.utils.ItemUT;
import me.onenrico.holoblock.utils.MathUT;
import me.onenrico.holoblock.utils.MessageUT;
import me.onenrico.holoblock.utils.ParticleUT;
import me.onenrico.holoblock.utils.PermissionUT;
import me.onenrico.holoblock.utils.PlayerUT;
import me.onenrico.holoblock.utils.ReflectionUT;
public class HoloData {
private List<String> members;
private List<String> lines;
private String owner;
private String rawloc;
private BlockFace rotation;
private Location realloc;
private Hologram hologram;
private Location cloc;
private String skin;
private String particlename;
private BukkitTask particle;
private BukkitTask potiontask;
private List<PotionEffect> potioneffect = new ArrayList<>();
private double offset;
private boolean allowPlaceholders = false;
private boolean allowColor = false;
private boolean allowItemLine = false;
private boolean allowCustomSkin = false;
public HoloData(String loc) {
rawloc = loc;
realloc = Seriloc.Deserialize(loc);
owner = Datamanager.getDB().getOwner(loc);
lines = Datamanager.getDB().getLine(loc);
members = Datamanager.getDB().getMember(loc);
skin = Datamanager.getDB().getSkin(loc);
rotation = Datamanager.getDB().getRotation(loc);
particlename = Datamanager.getDB().getParticleName(loc);
if (particlename == null) {
particlename = "NONE";
}
particlename = particlename.toUpperCase();
if (members == null) {
members = new ArrayList<String>();
}
if (lines == null) {
lines = new ArrayList<>();
} else {
int index = 0;
for (String l : lines) {
if (l.equalsIgnoreCase("{#n}")) {
lines.set(index, "");
}
index++;
}
}
if (owner == null) {
owner = "Prepared";
}
if (skin == null) {
skin = Core.getThis().configplugin.getStr("holo.item.head", "SecurityCamera");
}
updatePerm();
updateSkin();
updateHolo();
float toffset = (float) (offset * -1) + .1f;
if (particlename.equalsIgnoreCase("NONE")) {
particle = ParticleUT.circleParticle(cloc, 0, toffset, toffset, 0f, "SPELL_MOB");
} else {
particle = ParticleUT.circleParticle(cloc, 0, toffset, toffset, 0f, particlename);
}
potiontask = new BukkitRunnable() {
@Override
public void run() {
Collection<Entity> ens = realloc.getWorld().getNearbyEntities(realloc, 15, 15, 15);
for (Entity e : ens) {
if (e instanceof Player) {
if (members.contains(e.getName()) || owner.equals(e.getName())) {
for (PotionEffect pef : potioneffect) {
((Player) e).addPotionEffect(pef, true);
}
}
}
}
}
}.runTaskTimer(Core.getThis(), 20, 200);
}
@SuppressWarnings("deprecation")
public void updatePerm() {
OfflinePlayer ofp = Bukkit.getOfflinePlayer(owner);
World world = realloc.getWorld();
allowColor = PermissionUT.has(ofp, "holoblock.use.color", world);
allowPlaceholders = PermissionUT.has(ofp, "holoblock.use.placeholders", world);
allowItemLine = PermissionUT.has(ofp, "holoblock.use.itemline", world);
allowCustomSkin = PermissionUT.has(ofp, "holoblock.use.customskin", world);
}
@SuppressWarnings("deprecation")
public void updateSkin() {
Block block = realloc.getBlock();
BlockState state = block.getState();
if (!(state instanceof Skull)) {
block.setType(Material.SKULL);
new BukkitRunnable() {
@Override
public void run() {
updateSkin();
}
}.runTaskLater(Core.getThis(), 1);
return;
}
Skull skull = (Skull) state;
if (rotation == null) {
rotation = skull.getRotation();
}
skull.getData().setData((byte) 1);
skull.setRotation(rotation);
skull.setSkullType(SkullType.PLAYER);
skull.update();
updateSkinOnly();
}
private static Method getWorldHandle;
private static Method getWorldTileEntity;
private static Method setGameProfile;
private static Class<?> blockposition;
private static Constructor<?> bpc;
@SuppressWarnings("deprecation")
public void updateSkinOnly() {
Block block = realloc.getBlock();
BlockState state = block.getState();
Skull skull = (Skull) state;
if (skin.startsWith("$CustomSkin:")) {
String nskin = skin.replace("$CustomSkin:", "");
CustomSkin cs = new CustomSkin(nskin);
potioneffect.clear();
for (String pefstr : cs.getPotioneffects()) {
try {
String pefe = pefstr.split(":")[0];
int mod = MathUT.strInt(pefstr.split(":")[1]);
PotionEffectType peft = PotionEffectType.getByName(pefe.toUpperCase());
potioneffect.add(new PotionEffect(peft, 240, mod));
} catch (Exception ex) {
MessageUT.cmessage("Holoblock Potion Effect: " + pefstr + " is Invalid");
continue;
}
}
String type = cs.getType();
GameProfile gp = null;
if (type.equalsIgnoreCase("name")) {
skull.setOwner(cs.getData());
skull.update(true);
return;
} else if (type.equalsIgnoreCase("url")) {
gp = PlayerUT.Skull.getProfile(cs.getData(), false);
} else {
gp = PlayerUT.Skull.getProfile(cs.getData(), true);
}
try {
if (bpc == null) {
blockposition = ReflectionUT.getNMSClass("BlockPosition");
bpc = blockposition.getConstructor(int.class, int.class, int.class);
getWorldHandle = ReflectionUT.getCraftBukkitClass("CraftWorld").getMethod("getHandle");
getWorldTileEntity = ReflectionUT.getNMSClass("WorldServer").getMethod("getTileEntity",
blockposition);
setGameProfile = ReflectionUT.getNMSClass("TileEntitySkull").getMethod("setGameProfile",
GameProfile.class);
}
Object bp = bpc.newInstance(block.getX(), block.getY(), block.getZ());
Object world = getWorldHandle.invoke(skull.getWorld());
Object tileSkull = getWorldTileEntity.invoke(world, bp);
setGameProfile.invoke(tileSkull, gp);
block.getState().update(true);
} catch (Exception ex) {
ex.printStackTrace();
}
// TileEntitySkull skullTile = (TileEntitySkull)
// ((CraftWorld) block.getWorld()).getHandle()
// .getTileEntity(new BlockPosition(block.getX(), block.getY(),
// block.getZ()));
// skullTile.setGameProfile(gp);
// block.getState().update(true);
return;
} else {
skull.setOwner(skin);
}
skull.update(true);
}
public void update() {
updateLines();
updateHolo();
}
public void updateLines() {
if (lines == null) {
lines = new ArrayList<>();
}
if (!lines.isEmpty()) {
lines = new ArrayList<>();
}
for (int x = 0; x < hologram.size(); x++) {
if (hologram == null) {
if (hologram.isDeleted()) {
return;
}
}
Object line = hologram.getLine(x);
if (line instanceof ItemLine) {
ItemLine item = (ItemLine) line;
String stack = "" + item.getItemStack().getType();
if (item.getItemStack().getDurability() > 0) {
stack += ":" + item.getItemStack().getDurability();
}
lines.add("$ItemStack:" + stack);
} else if (line instanceof TextLine) {
TextLine text = (TextLine) line;
String ltext = text.getText();
ltext = ltext.replace("{refresh:fastest}", "");
lines.add(ltext);
}
}
}
public void updateHolo() {
destroyHolo();
offset = Datamanager.getDB().getOffSet(getRawloc());
if (offset == -690) {
offset = HoloBlockAPI.getDefaultOffset();
}
cloc = Seriloc.centered(realloc.clone()).add(0, offset, 0);
hologram = HoloUT.createHologram(cloc, allowPlaceholders);
int index = 0;
if (lines.isEmpty()) {
return;
}
Boolean color = allowColor;
for (String line : lines) {
if (hologram == null) {
if (hologram.isDeleted()) {
break;
}
}
if (line.contains("$ItemStack:")) {
line = line.replace("$ItemStack:", "");
ItemStack item = ItemUT.getItem(line);
HoloUT.setLine(hologram, index, item);
} else {
if (Core.papi != null) {
line = line.replace("{refresh:fastest}", "");
if (isAllowPlaceholders()) {
if (PlaceholderAPI.containsBracketPlaceholders(line)) {
line = "{refresh:fastest}" + line;
}
}
}
if (color) {
HoloUT.setLine(hologram, index, MessageUT.t(line));
} else {
HoloUT.setLine(hologram, index, MessageUT.u(line));
}
}
index++;
}
}
public void addMember(String member) {
members.add(member);
}
public void removeMember(String member) {
members.remove(member);
}
public void setLine(int line, String data) {
if (hologram == null) {
if (hologram.isDeleted()) {
return;
}
}
if (data.contains("$ItemStack:")) {
data = data.replace("$ItemStack:", "");
ItemStack item = ItemUT.getItem(data);
HoloUT.setLine(hologram, line, item);
} else {
if (Core.papi != null) {
data = data.replace("{refresh:fastest}", "");
if (isAllowPlaceholders()) {
if (PlaceholderAPI.containsBracketPlaceholders(data)) {
data = "{refresh:fastest}" + data;
}
}
}
Boolean color = allowColor;
if (color) {
HoloUT.setLine(hologram, line, MessageUT.t(data));
} else {
HoloUT.setLine(hologram, line, MessageUT.u(data));
}
}
}
public void insertLine(int line, String data) {
if (hologram == null) {
if (hologram.isDeleted()) {
return;
}
}
if (data.contains("$ItemStack:")) {
data = data.replace("$ItemStack:", "");
ItemStack item = ItemUT.getItem(data);
HoloUT.insertLine(hologram, line, item);
} else {
if (Core.papi != null) {
data = data.replace("{refresh:fastest}", "");
if (isAllowPlaceholders()) {
if (PlaceholderAPI.containsBracketPlaceholders(data)) {
data = "{refresh:fastest}" + data;
}
}
}
Boolean color = allowColor;
if (color) {
HoloUT.insertLine(hologram, line, MessageUT.t(data));
} else {
HoloUT.insertLine(hologram, line, MessageUT.u(data));
}
}
}
public void removeLine(int line) {
if (hologram == null) {
if (hologram.isDeleted()) {
return;
}
}
try {
HoloUT.removeLine(hologram, line);
} catch (Exception ex) {
}
}
public void destroyHolo() {
if (particle != null) {
particle.cancel();
}
if (hologram == null) {
return;
}
if (hologram.isDeleted()) {
return;
}
hologram.delete();
}
public void destroy() {
destroyHolo();
realloc.getBlock().setType(Material.AIR, true);
realloc.getBlock().getState().update(true);
potiontask.cancel();
}
public void saveHolo(BukkitRunnable callback) {
new BukkitRunnable() {
String rawlines = "";
String rawmembers = "";
@Override
public void run() {
updateLines();
for (int x = 0; x < hologram.size(); x++) {
Object line = hologram.getLine(x);
if (line instanceof ItemLine) {
ItemLine item = (ItemLine) line;
String stack = "" + item.getItemStack().getType();
if (item.getItemStack().getDurability() > 0) {
stack += ":" + item.getItemStack().getDurability();
}
rawlines += "$ItemStack:" + stack;
} else if (line instanceof TextLine) {
TextLine text = (TextLine) line;
String teks = text.getText();
if (teks.isEmpty()) {
teks = "{#n}";
}
rawlines += teks.replace("{refresh:fastest}", "");
}
if (hologram.size() - x > 1) {
rawlines += "<#";
}
}
if (!members.isEmpty()) {
for (int x = 0; x < members.size(); x++) {
rawmembers += members.get(x);
if (members.size() - x > 1) {
rawmembers += "<#";
}
}
}
Datamanager.getDB().setHolo(owner, rawloc, rawlines, rawmembers, offset, skin, rotation, "NONE",
callback);
}
}.runTaskLater(Core.getThis(), 3);
}
public List<String> getMembers() {
return members;
}
public List<String> getLines() {
return lines;
}
public String getOwner() {
return owner;
}
public String getRawloc() {
return rawloc;
}
public Location getRealloc() {
return realloc;
}
public Hologram getHologram() {
return hologram;
}
public void setMembers(List<String> members) {
this.members = members;
}
public void setLines(List<String> lines) {
this.lines = lines;
}
public void setOwner(String owner) {
this.owner = owner;
updatePerm();
hologram.setAllowPlaceholders(allowPlaceholders);
}
public void setRawloc(String rawloc) {
this.rawloc = rawloc;
}
public void setRealloc(Location realloc) {
this.realloc = realloc;
}
public void setHologram(Hologram hologram) {
this.hologram = hologram;
}
public Location getCloc() {
return cloc;
}
public double getOffset() {
return offset;
}
public void setCloc(Location cloc) {
this.cloc = cloc;
}
public void setOffset(double offset) {
this.offset = offset;
}
public String getSkin() {
return skin;
}
public void setSkin(String skin) {
this.skin = skin;
updateSkinOnly();
}
public BlockFace getRotation() {
return rotation;
}
public void setRotation(BlockFace rotation) {
this.rotation = rotation;
}
public boolean isAllowPlaceholders() {
return allowPlaceholders;
}
public boolean isAllowColor() {
return allowColor;
}
public boolean isAllowItemLine() {
return allowItemLine;
}
public boolean isAllowCustomSkin() {
return allowCustomSkin;
}
public String getParticlename() {
return particlename;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.test.checkpointing;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.io.FilePathFilter;
import org.apache.flink.api.common.state.CheckpointListener;
import org.apache.flink.api.java.io.TextInputFormat;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.ConfigConstants;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.runtime.execution.SuppressRestartsException;
import org.apache.flink.streaming.api.checkpoint.ListCheckpointed;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;
import org.apache.flink.streaming.api.functions.source.ContinuousFileMonitoringFunction;
import org.apache.flink.streaming.api.functions.source.FileProcessingMode;
import org.apache.flink.test.util.SuccessException;
import org.apache.flink.util.Collector;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.junit.After;
import org.junit.Assert;
import org.junit.AssumptionViolatedException;
import org.junit.Before;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.Set;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
/** Test checkpointing while sourcing a continuous file processor. */
public class ContinuousFileProcessingCheckpointITCase extends StreamFaultToleranceTestBase {

    private static final int NO_OF_FILES = 5;
    private static final int LINES_PER_FILE = 150;
    // Interval (ms) at which the continuous file source re-scans the directory.
    private static final long INTERVAL = 100;

    private static File baseDir;
    private static org.apache.hadoop.fs.FileSystem localFs;
    private static String localFsURI;
    private FileCreator fc;

    // Content the sink collected over the whole (possibly restarted) job run;
    // written by TestingSinkFunction once all expected elements were seen.
    private static Map<Integer, Set<String>> actualCollectedContent = new HashMap<>();

    @Before
    public void createHDFS() throws IOException {
        // Despite the method name, this test runs against the local file system ("file:///").
        if (failoverStrategy.equals(FailoverStrategy.RestartPipelinedRegionFailoverStrategy)) {
            // TODO the 'NO_OF_RETRIES' is useless for current RestartPipelinedRegionStrategy,
            // for this ContinuousFileProcessingCheckpointITCase, using
            // RestartPipelinedRegionStrategy would result in endless running.
            throw new AssumptionViolatedException(
                    "ignored ContinuousFileProcessingCheckpointITCase when using RestartPipelinedRegionStrategy");
        }
        baseDir = new File("./target/localfs/fs_tests").getAbsoluteFile();
        FileUtil.fullyDelete(baseDir);
        org.apache.hadoop.conf.Configuration hdConf = new org.apache.hadoop.conf.Configuration();
        localFsURI = "file:///" + baseDir + "/";
        localFs = new org.apache.hadoop.fs.Path(localFsURI).getFileSystem(hdConf);
    }

    @After
    public void destroyHDFS() {
        if (baseDir != null) {
            FileUtil.fullyDelete(baseDir);
        }
    }

    /** Builds the job: monitor the directory continuously, read lines, sink with DOP 1. */
    @Override
    public void testProgram(StreamExecutionEnvironment env) {
        env.enableCheckpointing(10);

        // create and start the file creating thread.
        fc = new FileCreator();
        fc.start();

        // create the monitoring source along with the necessary readers.
        TextInputFormat format = new TextInputFormat(new org.apache.flink.core.fs.Path(localFsURI));
        format.setFilesFilter(FilePathFilter.createDefaultFilter());
        DataStream<String> inputStream =
                env.readFile(format, localFsURI, FileProcessingMode.PROCESS_CONTINUOUSLY, INTERVAL);
        TestingSinkFunction sink = new TestingSinkFunction();
        inputStream
                .flatMap(
                        new FlatMapFunction<String, String>() {
                            @Override
                            public void flatMap(String value, Collector<String> out)
                                    throws Exception {
                                out.collect(value);
                            }
                        })
                .addSink(sink)
                .setParallelism(1);
    }

    /** Verifies the collected content matches the created files exactly-once. */
    @Override
    public void postSubmit() throws Exception {
        // be sure that the file creating thread is done.
        fc.join();
        Map<Integer, Set<String>> collected = actualCollectedContent;
        // BUGFIX: JUnit's assertEquals takes (expected, actual); the arguments were
        // swapped, which produced misleading failure messages.
        Assert.assertEquals(fc.getFileContent().size(), collected.size());
        for (Integer fileIdx : fc.getFileContent().keySet()) {
            Assert.assertTrue(collected.keySet().contains(fileIdx));
            // Sort the collected lines by their trailing line number, then compare the
            // concatenation against the content originally written to the file.
            List<String> cntnt = new ArrayList<>(collected.get(fileIdx));
            Collections.sort(
                    cntnt,
                    new Comparator<String>() {
                        @Override
                        public int compare(String o1, String o2) {
                            return getLineNo(o1) - getLineNo(o2);
                        }
                    });
            StringBuilder cntntStr = new StringBuilder();
            for (String line : cntnt) {
                cntntStr.append(line);
            }
            Assert.assertEquals(fc.getFileContent().get(fileIdx), cntntStr.toString());
        }
        collected.clear();
        actualCollectedContent.clear();
        fc.clean();
    }

    /** Extracts the trailing line number from a generated line ("idx: ... N"). */
    private int getLineNo(String line) {
        String[] tkns = line.split("\\s");
        return Integer.parseInt(tkns[tkns.length - 1]);
    }

    // -------------------------- Task Sink ------------------------------

    /**
     * Sink that records every line, detects duplicates, and injects an artificial
     * task failure after at least two successful checkpoints so recovery is exercised.
     */
    private static class TestingSinkFunction extends RichSinkFunction<String>
            implements ListCheckpointed<Tuple2<Long, Map<Integer, Set<String>>>>,
                    CheckpointListener {

        private boolean hasRestoredAfterFailure;
        private volatile int successfulCheckpoints;
        private long elementsToFailure;
        private long elementCounter;
        private Map<Integer, Set<String>> actualContent = new HashMap<>();

        TestingSinkFunction() {
            hasRestoredAfterFailure = false;
            elementCounter = 0;
            successfulCheckpoints = 0;
        }

        @Override
        public void open(Configuration parameters) throws Exception {
            // this sink can only work with DOP 1
            assertEquals(1, getRuntimeContext().getNumberOfParallelSubtasks());
            // Pick a random failure point between 40% and 70% of a file's worth of lines.
            long failurePosMin = (long) (0.4 * LINES_PER_FILE);
            long failurePosMax = (long) (0.7 * LINES_PER_FILE);
            elementsToFailure =
                    (new Random().nextLong() % (failurePosMax - failurePosMin)) + failurePosMin;
        }

        @Override
        public void invoke(String value) throws Exception {
            int fileIdx = getFileIdx(value);
            Set<String> content = actualContent.get(fileIdx);
            if (content == null) {
                content = new HashSet<>();
                actualContent.put(fileIdx, content);
            }

            // detect duplicate lines.
            if (!content.add(value + "\n")) {
                // fail(...) throws AssertionError; the original also called
                // System.exit(0) here, which was unreachable and has been removed.
                fail("Duplicate line: " + value);
            }

            elementCounter++;

            // this is termination
            if (elementCounter >= NO_OF_FILES * LINES_PER_FILE) {
                actualCollectedContent = actualContent;
                throw new SuppressRestartsException(new SuccessException());
            }

            // add some latency so that we have at least two checkpoint in
            if (!hasRestoredAfterFailure && successfulCheckpoints < 2) {
                Thread.sleep(5);
            }

            // simulate a node failure
            if (!hasRestoredAfterFailure
                    && successfulCheckpoints >= 2
                    && elementCounter >= elementsToFailure) {
                throw new Exception(
                        "Task Failure @ elem: " + elementCounter + " / " + elementsToFailure);
            }
        }

        @Override
        public List<Tuple2<Long, Map<Integer, Set<String>>>> snapshotState(
                long checkpointId, long checkpointTimestamp) throws Exception {
            Tuple2<Long, Map<Integer, Set<String>>> state =
                    new Tuple2<>(elementCounter, actualContent);
            return Collections.singletonList(state);
        }

        @Override
        public void restoreState(List<Tuple2<Long, Map<Integer, Set<String>>>> state)
                throws Exception {
            Tuple2<Long, Map<Integer, Set<String>>> s = state.get(0);
            this.elementCounter = s.f0;
            this.actualContent = s.f1;
            this.hasRestoredAfterFailure =
                    this.elementCounter
                            != 0; // because now restore is also called at initialization
        }

        @Override
        public void notifyCheckpointComplete(long checkpointId) throws Exception {
            this.successfulCheckpoints++;
        }

        @Override
        public void notifyCheckpointAborted(long checkpointId) {}

        private int getFileIdx(String line) {
            String[] tkns = line.split(":");
            return Integer.parseInt(tkns[0]);
        }
    }

    // ------------------------- FILE CREATION -------------------------------

    /**
     * A separate thread creating {@link #NO_OF_FILES} files, one file every {@link #INTERVAL}
     * milliseconds. It serves for testing the file monitoring functionality of the {@link
     * ContinuousFileMonitoringFunction}. The files are filled with data by the {@link
     * #fillWithData(String, String, int, String)} method.
     */
    private class FileCreator extends Thread {

        private final Set<Path> filesCreated = new HashSet<>();
        private final Map<Integer, String> fileContents = new HashMap<>();

        /** The modification time of the last created file. */
        private long lastCreatedModTime = Long.MIN_VALUE;

        public void run() {
            try {
                for (int i = 0; i < NO_OF_FILES; i++) {
                    Tuple2<org.apache.hadoop.fs.Path, String> tmpFile;
                    long modTime;
                    do {
                        // give it some time so that the files have
                        // different modification timestamps.
                        Thread.sleep(50);
                        tmpFile = fillWithData(localFsURI, "file", i, "This is test line.");
                        modTime = localFs.getFileStatus(tmpFile.f0).getModificationTime();
                        if (modTime <= lastCreatedModTime) {
                            // delete the last created file to recreate it with a different
                            // timestamp
                            localFs.delete(tmpFile.f0, false);
                        }
                    } while (modTime <= lastCreatedModTime);
                    lastCreatedModTime = modTime;

                    // rename the hidden temp file into its final, monitored name.
                    org.apache.hadoop.fs.Path file =
                            new org.apache.hadoop.fs.Path(localFsURI + "/file" + i);
                    localFs.rename(tmpFile.f0, file);
                    Assert.assertTrue(localFs.exists(file));
                    filesCreated.add(file);
                    fileContents.put(i, tmpFile.f1);
                }
            } catch (IOException | InterruptedException e) {
                e.printStackTrace();
            }
        }

        void clean() throws IOException {
            assert (localFs != null);
            for (org.apache.hadoop.fs.Path path : filesCreated) {
                localFs.delete(path, false);
            }
            fileContents.clear();
        }

        Map<Integer, String> getFileContent() {
            return this.fileContents;
        }
    }

    /** Fill the file with content and put the content in the {@code hdPathContents} list. */
    private Tuple2<Path, String> fillWithData(
            String base, String fileName, int fileIdx, String sampleLine)
            throws IOException, InterruptedException {
        assert (localFs != null);
        // Write to a dot-prefixed (hidden) file first so the monitoring source ignores
        // it until the atomic rename in FileCreator.run().
        org.apache.hadoop.fs.Path tmp =
                new org.apache.hadoop.fs.Path(base + "/." + fileName + fileIdx);
        FSDataOutputStream stream = localFs.create(tmp);
        StringBuilder str = new StringBuilder();
        for (int i = 0; i < LINES_PER_FILE; i++) {
            String line = fileIdx + ": " + sampleLine + " " + i + "\n";
            str.append(line);
            stream.write(line.getBytes(ConfigConstants.DEFAULT_CHARSET));
        }
        stream.close();
        return new Tuple2<>(tmp, str.toString());
    }
}
| |
/*
* Copyright (c) 2015 mgm technology partners GmbH
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mgmtp.jfunk.data.source;
import java.io.IOException;
import java.io.InputStream;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import com.mgmtp.jfunk.common.JFunkConstants;
import com.mgmtp.jfunk.common.exception.JFunkException;
import com.mgmtp.jfunk.common.util.Configuration;
import com.mgmtp.jfunk.common.util.ExtendedProperties;
import com.mgmtp.jfunk.common.util.Predicates;
import com.mgmtp.jfunk.data.DataSet;
/**
* Abstract {@link DataSource} implementation with functionality for handling fixed values.
*
*/
public abstract class BaseDataSource implements DataSource {
protected final Logger log = LoggerFactory.getLogger(getClass());
protected final Map<String, Map<String, String>> fixedValues = Maps.newHashMap();
protected final Configuration configuration;
private Set<String> formDataKeys;
private final String name;
private final Map<String, DataSet> currentDataSets = Maps.newHashMap();
/**
 * Creates a new instance of the data source, reading fixed properties from the configuration.
 * The data source name is derived from the simple class name by stripping the
 * {@code "DataSource"} suffix and uncapitalizing the remainder
 * (e.g. {@code CsvDataSource} becomes {@code "csv"}).
 *
 * @param configuration
 *            Configuration for the data source.
 */
protected BaseDataSource(final Configuration configuration) {
    this.name = StringUtils.uncapitalize(StringUtils.substringBefore(getClass().getSimpleName(), "DataSource"));
    this.configuration = configuration;
}
/**
 * Collects the data keys configured under {@link JFunkConstants#FORM_DATA_PREFIX} and,
 * as a side effect, loads any fixed-properties file referenced by each entry into
 * {@link #fixedValues} via {@link #setFixedValue(String, String, String)}.
 * Note: the {@link #formDataKeys} field is rebuilt on every call.
 *
 * @return the set of form data keys found in the configuration
 */
protected Set<String> getFormDataKeys() {
    try {
        Map<String, String> view = Maps.filterKeys(configuration, Predicates.startsWith(JFunkConstants.FORM_DATA_PREFIX));
        formDataKeys = Sets.newHashSetWithExpectedSize(view.size());
        for (String key : view.keySet()) {
            String fixedPropsFilenameKey = key;
            int i = fixedPropsFilenameKey.lastIndexOf('.');
            // NOTE(review): 9 appears to hard-code the length of FORM_DATA_PREFIX so the
            // data key is the segment between the prefix and the last dot — TODO confirm
            // the constant's value and consider FORM_DATA_PREFIX.length() instead.
            String dataKey = fixedPropsFilenameKey.substring(9, i);
            formDataKeys.add(dataKey);
            // Load fixed properties
            String fixedPropsFilename = configuration.get(fixedPropsFilenameKey);
            if (StringUtils.isNotEmpty(fixedPropsFilename)) {
                InputStream is = null;
                try {
                    ExtendedProperties fixedProps = new ExtendedProperties();
                    is = configuration.openStream(fixedPropsFilename);
                    fixedProps.load(is);
                    for (Entry<String, String> entry : fixedProps.entrySet()) {
                        setFixedValue(dataKey, entry.getKey(), entry.getValue());
                    }
                } finally {
                    IOUtils.closeQuietly(is);
                }
            }
        }
    } catch (IOException ex) {
        throw new JFunkException("Error loading form data keys.", ex);
    }
    return formDataKeys;
}
/**
 * Returns the next {@link DataSet} for the specified key. Implementations must override this
 * method. Fixed values are applied by the caller ({@link #getNextDataSet(String)}), so
 * implementations need not handle them.
 *
 * @param key
 *            The key.
 * @return The {@link DataSet}, or {@code null}, if none is available.
 */
protected abstract DataSet getNextDataSetImpl(String key);
/**
 * Returns the next {@link DataSet} for the specified key, with fixed values applied,
 * and remembers it as the current data set for that key.
 *
 * @param key
 *            The key.
 * @return The {@link DataSet}, or null, if none is available.
 */
@Override
public DataSet getNextDataSet(final String key) {
    final DataSet next = getNextDataSetImpl(key);
    if (next == null) {
        return null;
    }
    copyFixedValues(key, next);
    currentDataSets.put(key, next);
    return next;
}
/**
 * Returns the data set most recently produced by {@link #getNextDataSet(String)} for
 * the given key, or {@code null} if none has been fetched yet.
 */
@Override
public DataSet getCurrentDataSet(final String key) {
    return currentDataSets.get(key);
}
/**
 * Returns the map of current data sets keyed by data set key.
 * Note: this exposes the internal mutable map directly; callers can modify it.
 */
@Override
public Map<String, DataSet> getCurrentDataSets() {
    return currentDataSets;
}
/**
 * Applies all fixed values registered under the given key to the supplied {@link DataSet}.
 * Does nothing if no fixed values are stored for the key.
 */
private void copyFixedValues(final String key, final DataSet data) {
    Map<String, String> fixed = fixedValues.get(key);
    if (fixed == null) {
        return;
    }
    for (Entry<String, String> e : fixed.entrySet()) {
        data.setFixedValue(e.getKey(), e.getValue());
    }
}
/**
 * Resets (= removes) a specific fixed value: it is removed both from the stored fixed-value
 * map and from the current {@link DataSet} for that key, if one exists. A warning is logged
 * if the entry was not present.
 *
 * @param dataSetKey
 *            The {@link DataSet} key.
 * @param entryKey
 *            The entry key.
 */
@Override
public void resetFixedValue(final String dataSetKey, final String entryKey) {
    Map<String, String> entries = fixedValues.get(dataSetKey);
    if (entries != null) {
        boolean removed = entries.remove(entryKey) != null;
        if (!removed) {
            log.warn("Entry " + dataSetKey + "." + entryKey + " could not be found in map of fixed values");
        }
        if (entries.isEmpty()) {
            // Drop the now-empty per-key map entirely.
            fixedValues.remove(dataSetKey);
        }
    }
    DataSet current = getCurrentDataSets().get(dataSetKey);
    if (current != null) {
        current.resetFixedValue(entryKey);
    }
}
/**
 * Resets (= removes) all fixed values stored under the specified {@link DataSet} key and
 * clears them from the current {@link DataSet} for that key, if present.
 *
 * @param dataSetKey
 *            The {@link DataSet} key.
 */
@Override
public void resetFixedValues(final String dataSetKey) {
    fixedValues.remove(dataSetKey);
    DataSet current = getCurrentDataSets().get(dataSetKey);
    if (current == null) {
        return;
    }
    current.resetFixedValues();
}
/**
 * Resets (= removes) every stored fixed value and clears fixed values from all current data
 * sets.
 */
@Override
public void resetFixedValues() {
    fixedValues.clear();
    for (DataSet current : getCurrentDataSets().values()) {
        current.resetFixedValues();
    }
}
/**
 * Sets a fixed value. The value is stored for future data sets and is also applied
 * immediately to the current {@link DataSet} for that key, if one exists.
 *
 * @param dataSetKey
 *            The {@link DataSet} key.
 * @param entryKey
 *            The entry key.
 * @param value
 *            The fixed value.
 */
@Override
public void setFixedValue(final String dataSetKey, final String entryKey, final String value) {
    Map<String, String> entries = fixedValues.get(dataSetKey);
    if (entries == null) {
        // First fixed value for this data set key; lazily create the per-key map.
        entries = Maps.newHashMap();
        fixedValues.put(dataSetKey, entries);
    }
    entries.put(entryKey, value);
    DataSet current = getCurrentDataSet(dataSetKey);
    if (current != null) {
        current.setFixedValue(entryKey, value);
    }
}
/**
 * Returns the data source name derived from the class name in the constructor.
 */
@Override
public String getName() {
    return name;
}
/**
 * Registers the current {@link DataSet} stored under {@code key} additionally under
 * {@code newKey}. Both keys then refer to the same {@link DataSet} instance. Does nothing if
 * no data set exists for {@code key}.
 */
@Override
public void copyDataSetKey(final String key, final String newKey) {
    if (!currentDataSets.containsKey(key)) {
        return;
    }
    currentDataSets.put(newKey, currentDataSets.get(key));
}
/**
 * Removes the current {@link DataSet} stored under the given key, if any.
 */
@Override
public void removeDataSet(final String key) {
    // Map.remove is a no-op for absent keys, so the former containsKey
    // pre-check was a redundant second lookup.
    currentDataSets.remove(key);
}
/**
 * Derived classes have to reset themselves. Called at the end of {@link #reset()} after the
 * shared state (form data keys, current data sets, fixed values) has been cleared.
 */
protected abstract void doReset();
/**
 * Clears all cached state — form data keys, current data sets, and fixed values — and then
 * delegates to {@link #doReset()} so subclasses can clear their own state.
 */
@Override
public void reset() {
    formDataKeys = null;
    currentDataSets.clear();
    fixedValues.clear();
    doReset(); // subclass-specific cleanup
    log.debug("Finished reset");
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package backtype.storm.task;
import backtype.storm.generated.GlobalStreamId;
import backtype.storm.generated.Grouping;
import backtype.storm.generated.StormTopology;
import backtype.storm.hooks.ITaskHook;
import backtype.storm.metric.api.CombinedMetric;
import backtype.storm.metric.api.ICombiner;
import backtype.storm.metric.api.IMetric;
import backtype.storm.metric.api.IReducer;
import backtype.storm.metric.api.ReducedMetric;
import backtype.storm.state.ISubscribedState;
import backtype.storm.tuple.Fields;
import backtype.storm.utils.Utils;
import com.alibaba.jstorm.cluster.StormClusterState;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.commons.lang.NotImplementedException;
import org.json.simple.JSONValue;
/**
 * A TopologyContext is given to bolts and spouts in their "prepare" and "open" methods, respectively.
 * This object provides information about the component's
 * place within the topology, such as task ids, inputs and outputs, etc.
 *
 * The TopologyContext is also used to declare ISubscribedState objects to synchronize state with StateSpouts
 * this object is subscribed to.
 */
public class TopologyContext extends WorkerTopologyContext implements IMetricsContext {
    /** Id of the task this context describes. */
    private Integer _taskId;
    /** Arbitrary per-task data, see {@link #setTaskData(String, Object)}. */
    private Map<String, Object> _taskData = new HashMap<>();
    /** Task hooks registered via {@link #addTaskHook(ITaskHook)}. */
    private List<ITaskHook> _hooks = new ArrayList<>();
    /** Data shared among tasks of the same executor; the map instance is owned by the caller. */
    private Map<String, Object> _executorData;
    /** Nested structure: timeBucketSizeInSecs -> taskId -> metric name -> metric. Shared with the metrics framework. */
    private Map<Integer, Map<Integer, Map<String, IMetric>>> _registeredMetrics;
    /** Flag (clojure Atom) the framework sets once IBolt::prepare()/ISpout::open() has run; gates metric registration. */
    private clojure.lang.Atom _openOrPrepareWasCalled;
    /** Handle to cluster state in ZooKeeper; used for task error reporting. */
    private StormClusterState _zkCluster;

    public TopologyContext(StormTopology topology, Map stormConf, Map<Integer, String> taskToComponent,
                           Map<String, List<Integer>> componentToSortedTasks,
                           Map<String, Map<String, Fields>> componentToStreamToFields,
                           String stormId, String codeDir, String workerId, Integer taskId, Integer workerPort,
                           List<Integer> workerTasks, Map<String, Object> defaultResources,
                           Map<String, Object> userResources, Map<String, Object> executorData,
                           Map registeredMetrics, clojure.lang.Atom openOrPrepareWasCalled, StormClusterState zkCluster) {
        super(topology, stormConf, taskToComponent, componentToSortedTasks, componentToStreamToFields,
                stormId, codeDir, workerId, workerPort, workerTasks,
                defaultResources, userResources);
        _taskId = taskId;
        _executorData = executorData;
        _registeredMetrics = registeredMetrics;
        _openOrPrepareWasCalled = openOrPrepareWasCalled;
        _zkCluster = zkCluster;
    }

    /**
     * All state from all subscribed state spouts streams will be synced with the provided object.
     *
     * It is recommended that your ISubscribedState object is kept as an instance variable of this object.
     * The recommended usage of this method is as follows:
     *
     * <pre>
     * _myState = context.setAllSubscribedState(new MyState());
     * </pre>
     *
     * @param obj Provided ISubscribedState implementation
     * @return Returns the ISubscribedState object provided
     */
    public <T extends ISubscribedState> T setAllSubscribedState(T obj) {
        // check that only subscribed to one component/stream for statespout setsubscribedstate appropriately
        // Not supported in this fork; always throws.
        throw new NotImplementedException();
    }

    /**
     * Synchronizes the default stream from the specified state spout component id with the provided ISubscribedState object.
     *
     * The recommended usage of this method is as follows:
     * <pre>
     * _myState = context.setSubscribedState(componentId, new MyState());
     * </pre>
     *
     * @param componentId the id of the StateSpout component to subscribe to
     * @param obj Provided ISubscribedState implementation
     * @return Returns the ISubscribedState object provided
     */
    public <T extends ISubscribedState> T setSubscribedState(String componentId, T obj) {
        return setSubscribedState(componentId, Utils.DEFAULT_STREAM_ID, obj);
    }

    /**
     * Synchronizes the specified stream from the specified state spout component id with the provided ISubscribedState object.
     *
     * The recommended usage of this method is as follows:
     * <pre>
     * _myState = context.setSubscribedState(componentId, streamId, new MyState());
     * </pre>
     *
     * @param componentId the id of the StateSpout component to subscribe to
     * @param streamId the stream to subscribe to
     * @param obj Provided ISubscribedState implementation
     * @return Returns the ISubscribedState object provided
     */
    public <T extends ISubscribedState> T setSubscribedState(String componentId, String streamId, T obj) {
        // Not supported in this fork; always throws.
        throw new NotImplementedException();
    }

    /**
     * Gets the task id of this task.
     *
     * @return the task id
     */
    public int getThisTaskId() {
        return _taskId;
    }

    /**
     * Gets the component id for this task. The component id maps to a component id specified for a Spout or Bolt
     * in the topology definition.
     */
    public String getThisComponentId() {
        return getComponentId(_taskId);
    }

    /**
     * Gets the declared output fields for the specified stream id for the component this task is a part of.
     */
    public Fields getThisOutputFields(String streamId) {
        return getComponentOutputFields(getThisComponentId(), streamId);
    }

    /**
     * Gets the declared output fields for every stream of the component this task is a part of,
     * as a map from stream id to the list of field names.
     */
    public Map<String, List<String>> getThisOutputFieldsForStreams() {
        Map<String, List<String>> streamToFields = new HashMap<>();
        for (String stream : this.getThisStreams()) {
            streamToFields.put(stream, this.getThisOutputFields(stream).toList());
        }
        return streamToFields;
    }

    /**
     * Gets the set of streams declared for the component of this task.
     */
    public Set<String> getThisStreams() {
        return getComponentStreams(getThisComponentId());
    }

    /**
     * Gets the index of this task id in getComponentTasks(getThisComponentId()).
     * An example use case for this method is determining which task accesses which
     * resource in a distributed resource to ensure an even distribution.
     */
    public int getThisTaskIndex() {
        List<Integer> tasks = new ArrayList<>(getComponentTasks(getThisComponentId()));
        Collections.sort(tasks);
        for (int i = 0; i < tasks.size(); i++) {
            // Safe comparison: getThisTaskId() returns int, so the boxed Integer is unboxed for ==.
            if (tasks.get(i) == getThisTaskId()) {
                return i;
            }
        }
        throw new RuntimeException("Fatal: could not find this task id in this component");
    }

    /**
     * Gets the declared inputs to this component.
     *
     * @return A map from subscribed component/stream to the grouping subscribed with.
     */
    public Map<GlobalStreamId, Grouping> getThisSources() {
        return getSources(getThisComponentId());
    }

    /**
     * Gets the task ids of every component this task consumes from, as a map from
     * source component id to its task ids.
     */
    public Map<String, List<Integer>> getThisSourceComponentTasks() {
        Map<String, List<Integer>> ret = new HashMap<>();
        Map<GlobalStreamId, Grouping> sources = getThisSources();
        Set<String> sourceComponents = new HashSet<>();
        if (sources != null) {
            // Collect the distinct component ids across all subscribed streams.
            for (GlobalStreamId globalStreamId : sources.keySet()) {
                sourceComponents.add(globalStreamId.get_componentId());
            }
        }
        for (String component : sourceComponents) {
            ret.put(component, getComponentTasks(component));
        }
        return ret;
    }

    /**
     * Gets information about who is consuming the outputs of this component, and how.
     *
     * @return Map from stream id to component id to the Grouping used.
     */
    public Map<String, Map<String, Grouping>> getThisTargets() {
        return getTargets(getThisComponentId());
    }

    /**
     * Gets the task ids of every component that consumes this task's output, as a map from
     * target component id to its task ids.
     */
    public Map<String, List<Integer>> getThisTargetComponentTasks() {
        Map<String, Map<String, Grouping>> outputGroupings = getThisTargets();
        Map<String, List<Integer>> ret = new HashMap<>();
        Set<String> targetComponents = new HashSet<>();
        // Flatten stream -> component groupings into the distinct set of target components.
        for (Map.Entry<String, Map<String, Grouping>> entry : outputGroupings.entrySet()) {
            Map<String, Grouping> componentGrouping = entry.getValue();
            targetComponents.addAll(componentGrouping.keySet());
        }
        for (String component : targetComponents) {
            ret.put(component, getComponentTasks(component));
        }
        return ret;
    }

    /** Stores arbitrary data under a name, scoped to this task. */
    public void setTaskData(String name, Object data) {
        _taskData.put(name, data);
    }

    /** Retrieves data previously stored via {@link #setTaskData(String, Object)}, or null. */
    public Object getTaskData(String name) {
        return _taskData.get(name);
    }

    /** Stores arbitrary data under a name, shared by all tasks of this executor. */
    public void setExecutorData(String name, Object data) {
        _executorData.put(name, data);
    }

    /** Retrieves data previously stored via {@link #setExecutorData(String, Object)}, or null. */
    public Object getExecutorData(String name) {
        return _executorData.get(name);
    }

    /** Registers a task hook; the hook's prepare() is invoked immediately with this context. */
    public void addTaskHook(ITaskHook hook) {
        hook.prepare(_stormConf, this);
        _hooks.add(hook);
    }

    /** Returns the live list of registered task hooks (not a copy). */
    public Collection<ITaskHook> getHooks() {
        return _hooks;
    }

    /** Converts a thrift Grouping into a plain map suitable for JSON serialization. */
    @SuppressWarnings("unchecked")
    private static Map<String, Object> groupingToJSONableMap(Grouping grouping) {
        Map groupingMap = new HashMap<>();
        groupingMap.put("type", grouping.getSetField().toString());
        if (grouping.is_set_fields()) {
            groupingMap.put("fields", grouping.get_fields());
        }
        return groupingMap;
    }

    /**
     * Serializes this context (task/component ids, streams, output fields, source and target
     * groupings) to a JSON string, e.g. for multi-lang components.
     */
    @SuppressWarnings("unchecked")
    @Override
    public String toJSONString() {
        Map obj = new HashMap();
        obj.put("task->component", this.getTaskToComponent());
        obj.put("taskid", this.getThisTaskId());
        obj.put("componentid", this.getThisComponentId());
        List<String> streamList = new ArrayList<>();
        streamList.addAll(this.getThisStreams());
        obj.put("streams", streamList);
        obj.put("stream->outputfields", this.getThisOutputFieldsForStreams());
        // Convert targets to a JSON serializable format
        Map<String, Map> stringTargets = new HashMap<>();
        for (Map.Entry<String, Map<String, Grouping>> entry : this.getThisTargets().entrySet()) {
            Map stringTargetMap = new HashMap<>();
            for (Map.Entry<String, Grouping> innerEntry : entry.getValue().entrySet()) {
                stringTargetMap.put(innerEntry.getKey(), groupingToJSONableMap(innerEntry.getValue()));
            }
            stringTargets.put(entry.getKey(), stringTargetMap);
        }
        obj.put("stream->target->grouping", stringTargets);
        // Convert sources to a JSON serializable format
        Map<String, Map<String, Object>> stringSources = new HashMap<>();
        for (Map.Entry<GlobalStreamId, Grouping> entry : this.getThisSources().entrySet()) {
            GlobalStreamId gid = entry.getKey();
            Map<String, Object> stringSourceMap = stringSources.get(gid.get_componentId());
            if (stringSourceMap == null) {
                stringSourceMap = new HashMap<>();
                stringSources.put(gid.get_componentId(), stringSourceMap);
            }
            stringSourceMap.put(gid.get_streamId(), groupingToJSONableMap(entry.getValue()));
        }
        obj.put("source->stream->grouping", stringSources);
        return JSONValue.toJSONString(obj);
    }

    /*
     * Register a IMetric instance. Storm will then call getValueAndReset on the metric every timeBucketSizeInSecs and the returned value is sent to all metrics
     * consumers. You must call this during IBolt::prepare or ISpout::open.
     *
     * @return The IMetric argument unchanged.
     */
    @SuppressWarnings("unchecked")
    public <T extends IMetric> T registerMetric(String name, T metric, int timeBucketSizeInSecs) {
        // Registration is only legal before prepare()/open() has completed.
        if ((Boolean) _openOrPrepareWasCalled.deref()) {
            throw new RuntimeException("TopologyContext.registerMetric can only be called from within overridden "
                    + "IBolt::prepare() or ISpout::open() method.");
        }
        if (metric == null) {
            throw new IllegalArgumentException("Cannot register a null metric");
        }
        if (timeBucketSizeInSecs <= 0) {
            throw new IllegalArgumentException("TopologyContext.registerMetric can only be called with timeBucketSizeInSecs "
                    + "greater than or equal to 1 second.");
        }
        // Reject duplicates across ALL time buckets for this task, not just the requested one.
        if (getRegisteredMetricByName(name) != null) {
            throw new RuntimeException("The same metric name `" + name + "` was registered twice.");
        }
        // Navigate/create the nested structure: timeBucket -> taskId -> name -> metric.
        Map m1 = _registeredMetrics;
        if (!m1.containsKey(timeBucketSizeInSecs)) {
            m1.put(timeBucketSizeInSecs, new HashMap());
        }
        Map m2 = (Map) m1.get(timeBucketSizeInSecs);
        if (!m2.containsKey(_taskId)) {
            m2.put(_taskId, new HashMap());
        }
        Map m3 = (Map) m2.get(_taskId);
        if (m3.containsKey(name)) {
            throw new RuntimeException("The same metric name `" + name + "` was registered twice.");
        } else {
            m3.put(name, metric);
        }
        return metric;
    }

    /**
     * Get component's metric from registered metrics by name. Notice: Normally,
     * one component can only register one metric name once. But now registerMetric
     * has a bug(https://issues.apache.org/jira/browse/STORM-254) cause the same metric name can register twice.
     * So we just return the first metric we meet.
     */
    public IMetric getRegisteredMetricByName(String name) {
        IMetric metric = null;
        // Scan every time bucket for a metric registered by this task under the given name.
        for (Map<Integer, Map<String, IMetric>> taskIdToNameToMetric : _registeredMetrics.values()) {
            Map<String, IMetric> nameToMetric = taskIdToNameToMetric.get(_taskId);
            if (nameToMetric != null) {
                metric = nameToMetric.get(name);
                if (metric != null) {
                    // we just return the first metric we meet
                    break;
                }
            }
        }
        return metric;
    }

    /*
     * helper method for registering ReducedMetric.
     */
    public ReducedMetric registerMetric(String name, IReducer reducer, int timeBucketSizeInSecs) {
        return registerMetric(name, new ReducedMetric(reducer), timeBucketSizeInSecs);
    }

    /*
     * helper method for registering CombinedMetric.
     */
    public CombinedMetric registerMetric(String name, ICombiner combiner, int timeBucketSizeInSecs) {
        return registerMetric(name, new CombinedMetric(combiner), timeBucketSizeInSecs);
    }

    /** Returns the ZooKeeper-backed cluster state handle. */
    public StormClusterState getZkCluster() {
        return _zkCluster;
    }

    /*
     * Task error report callback
     * */
    public void reportError(String errorMsg) throws Exception {
        _zkCluster.report_task_error(getTopologyId(), _taskId, errorMsg);
    }

    /**
     * Invokes {@code methodName(object)} reflectively on every registered hook.
     * NOTE(review): getDeclaredMethod only finds methods declared directly on the hook's
     * runtime class with a parameter type exactly matching object.getClass() — inherited
     * methods or supertype-parameter overloads will not be found; confirm all hooks declare
     * these callbacks directly.
     */
    public void applyHooks(String methodName, Object object) throws Exception {
        for (ITaskHook taskHook : _hooks) {
            Class clazz = taskHook.getClass();
            Method method = clazz.getDeclaredMethod(methodName, object.getClass());
            method.invoke(taskHook, object);
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.phoenix.compile;
import java.sql.ParameterMetaData;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue.Type;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.phoenix.compile.GroupByCompiler.GroupBy;
import org.apache.phoenix.compile.OrderByCompiler.OrderBy;
import org.apache.phoenix.expression.Determinism;
import org.apache.phoenix.expression.Expression;
import org.apache.phoenix.expression.LiteralExpression;
import org.apache.phoenix.expression.RowKeyColumnExpression;
import org.apache.phoenix.iterate.ResultIterator;
import org.apache.phoenix.jdbc.PhoenixConnection;
import org.apache.phoenix.jdbc.PhoenixParameterMetaData;
import org.apache.phoenix.jdbc.PhoenixStatement;
import org.apache.phoenix.metrics.MetricInfo;
import org.apache.phoenix.parse.FilterableStatement;
import org.apache.phoenix.parse.LiteralParseNode;
import org.apache.phoenix.parse.ParseNodeFactory;
import org.apache.phoenix.parse.TraceStatement;
import org.apache.phoenix.query.KeyRange;
import org.apache.phoenix.schema.PColumn;
import org.apache.phoenix.schema.PColumnImpl;
import org.apache.phoenix.schema.PNameFactory;
import org.apache.phoenix.schema.RowKeyValueAccessor;
import org.apache.phoenix.schema.SortOrder;
import org.apache.phoenix.schema.TableRef;
import org.apache.phoenix.schema.tuple.ResultTuple;
import org.apache.phoenix.schema.tuple.Tuple;
import org.apache.phoenix.schema.types.PLong;
import org.apache.phoenix.trace.util.Tracing;
import org.apache.phoenix.util.ByteUtil;
import org.apache.phoenix.util.SizedUtil;
import org.cloudera.htrace.Sampler;
import org.cloudera.htrace.TraceScope;
/**
 * {@link QueryPlan} backing the TRACE ON/OFF statement. It produces at most one row,
 * containing the trace id of the connection's trace scope, and toggles tracing on the
 * connection as a side effect of iterating.
 */
public class TraceQueryPlan implements QueryPlan {
    private TraceStatement traceStatement = null;
    private PhoenixStatement stmt = null;
    private StatementContext context = null;
    // Ensures the iterator yields its single row only once.
    private boolean first = true;
    // Single-column projector exposing the trace id as a BIGINT row-key column.
    private static final RowProjector TRACE_PROJECTOR;
    static {
        List<ExpressionProjector> projectedColumns = new ArrayList<ExpressionProjector>();
        PColumn column =
                new PColumnImpl(PNameFactory.newName(MetricInfo.TRACE.columnName), null,
                        PLong.INSTANCE, null, null, false, 0, SortOrder.getDefault(), 0, null,
                        false, null);
        List<PColumn> columns = new ArrayList<PColumn>();
        columns.add(column);
        Expression expression =
                new RowKeyColumnExpression(column, new RowKeyValueAccessor(columns, 0));
        projectedColumns.add(new ExpressionProjector(MetricInfo.TRACE.columnName, "", expression,
                true));
        int estimatedByteSize = SizedUtil.KEY_VALUE_SIZE + PLong.INSTANCE.getByteSize();
        TRACE_PROJECTOR = new RowProjector(projectedColumns, estimatedByteSize, false);
    }

    public TraceQueryPlan(TraceStatement traceStatement, PhoenixStatement stmt) {
        this.traceStatement = traceStatement;
        this.stmt = stmt;
        this.context = new StatementContext(stmt);
    }

    @Override
    public StatementContext getContext() {
        return this.context;
    }

    @Override
    public ParameterMetaData getParameterMetaData() {
        // TRACE takes no bind parameters.
        return PhoenixParameterMetaData.EMPTY_PARAMETER_META_DATA;
    }

    @Override
    public ExplainPlan getExplainPlan() throws SQLException {
        return ExplainPlan.EMPTY_PLAN;
    }

    /**
     * Returns an iterator that performs the actual TRACE side effects on first next():
     * enables/disables the connection's sampler and trace scope, then yields a single row
     * with the active trace id (or no row if tracing ends up off).
     */
    @Override
    public ResultIterator iterator() throws SQLException {
        final PhoenixConnection conn = stmt.getConnection();
        // TRACE OFF with no active scope: nothing to do, nothing to return.
        if (conn.getTraceScope() == null && !traceStatement.isTraceOn()) {
            return ResultIterator.EMPTY_ITERATOR;
        }
        return new ResultIterator() {
            @Override
            public void close() throws SQLException {
            }
            @Override
            public Tuple next() throws SQLException {
                // Only ever produce one row.
                if(!first) return null;
                TraceScope traceScope = conn.getTraceScope();
                if(traceStatement.isTraceOn()) {
                    // Turn sampling on before opening the span.
                    if(!conn.getSampler().equals(Sampler.ALWAYS)) {
                        conn.setSampler(Sampler.ALWAYS);
                    }
                    if (traceScope == null) {
                        traceScope = Tracing.startNewSpan(conn, "Enabling trace");
                        conn.setTraceScope(traceScope);
                    }
                } else {
                    // TRACE OFF: close any open scope, then disable sampling.
                    if (traceScope != null) {
                        conn.getTraceScope().close();
                        conn.setTraceScope(null);
                    }
                    conn.setSampler(Sampler.NEVER);
                }
                if(traceScope == null) return null;
                first = false;
                // Encode the trace id as a BIGINT row key and wrap it in a single-cell result.
                ImmutableBytesWritable ptr = new ImmutableBytesWritable();
                ParseNodeFactory factory = new ParseNodeFactory();
                LiteralParseNode literal =
                        factory.literal(traceScope.getSpan().getTraceId());
                LiteralExpression expression =
                        LiteralExpression.newConstant(literal.getValue(), PLong.INSTANCE,
                            Determinism.ALWAYS);
                expression.evaluate(null, ptr);
                byte[] rowKey = ByteUtil.copyKeyBytesIfNecessary(ptr);
                Cell cell =
                        CellUtil.createCell(rowKey, HConstants.EMPTY_BYTE_ARRAY,
                            HConstants.EMPTY_BYTE_ARRAY, System.currentTimeMillis(),
                            Type.Put.getCode(), HConstants.EMPTY_BYTE_ARRAY);
                List<Cell> cells = new ArrayList<Cell>(1);
                cells.add(cell);
                return new ResultTuple(Result.create(cells));
            }
            @Override
            public void explain(List<String> planSteps) {
            }
        };
    }

    @Override
    public long getEstimatedSize() {
        return PLong.INSTANCE.getByteSize();
    }

    @Override
    public TableRef getTableRef() {
        // No table is involved in a TRACE statement.
        return null;
    }

    @Override
    public RowProjector getProjector() {
        return TRACE_PROJECTOR;
    }

    @Override
    public Integer getLimit() {
        return null;
    }

    @Override
    public OrderBy getOrderBy() {
        return OrderBy.EMPTY_ORDER_BY;
    }

    @Override
    public GroupBy getGroupBy() {
        return GroupBy.EMPTY_GROUP_BY;
    }

    @Override
    public List<KeyRange> getSplits() {
        return Collections.emptyList();
    }

    @Override
    public List<List<Scan>> getScans() {
        return Collections.emptyList();
    }

    @Override
    public FilterableStatement getStatement() {
        return null;
    }

    @Override
    public boolean isDegenerate() {
        return false;
    }

    @Override
    public boolean isRowKeyOrdered() {
        return false;
    }
}
| |
// Copyright 2017 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.remote;
import static com.google.common.truth.Truth.assertThat;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.devtools.build.lib.actions.ActionInput;
import com.google.devtools.build.lib.actions.ActionInputFileCache;
import com.google.devtools.build.lib.actions.ActionInputHelper;
import com.google.devtools.build.lib.actions.Artifact;
import com.google.devtools.build.lib.actions.Artifact.ArtifactExpander;
import com.google.devtools.build.lib.actions.ResourceSet;
import com.google.devtools.build.lib.actions.SimpleSpawn;
import com.google.devtools.build.lib.actions.Spawn;
import com.google.devtools.build.lib.exec.SpawnInputExpander;
import com.google.devtools.build.lib.exec.SpawnResult;
import com.google.devtools.build.lib.exec.SpawnRunner.ProgressStatus;
import com.google.devtools.build.lib.exec.SpawnRunner.SpawnExecutionPolicy;
import com.google.devtools.build.lib.exec.util.FakeOwner;
import com.google.devtools.build.lib.remote.RemoteProtocol.ActionResult;
import com.google.devtools.build.lib.remote.RemoteProtocol.ContentDigest;
import com.google.devtools.build.lib.remote.RemoteProtocol.ExecuteReply;
import com.google.devtools.build.lib.remote.RemoteProtocol.ExecuteRequest;
import com.google.devtools.build.lib.remote.RemoteProtocol.ExecutionCacheReply;
import com.google.devtools.build.lib.remote.RemoteProtocol.ExecutionCacheRequest;
import com.google.devtools.build.lib.remote.RemoteProtocol.ExecutionCacheStatus;
import com.google.devtools.build.lib.remote.RemoteProtocol.ExecutionStatus;
import com.google.devtools.build.lib.util.io.FileOutErr;
import com.google.devtools.build.lib.vfs.FileSystem;
import com.google.devtools.build.lib.vfs.FileSystemUtils;
import com.google.devtools.build.lib.vfs.Path;
import com.google.devtools.build.lib.vfs.PathFragment;
import com.google.devtools.build.lib.vfs.inmemoryfs.InMemoryFileSystem;
import com.google.devtools.common.options.Options;
import com.google.protobuf.ByteString;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.Collection;
import java.util.SortedMap;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import org.mockito.Mockito;
/** Tests for {@link RemoteSpawnRunner} in combination with {@link GrpcRemoteExecutor}. */
@RunWith(JUnit4.class)
public class GrpcRemoteExecutionClientTest {
    /** Trivial expander: every artifact expands to itself (no tree artifacts in these tests). */
    private static final ArtifactExpander SIMPLE_ARTIFACT_EXPANDER = new ArtifactExpander() {
        @Override
        public void expand(Artifact artifact, Collection<? super Artifact> output) {
            output.add(artifact);
        }
    };
    private FileSystem fs;
    private Path execRoot;
    private SimpleSpawn simpleSpawn;
    private FakeActionInputFileCache fakeFileCache;
    private FileOutErr outErr;
    // Timeout passed to the runner via the policy; 0 in all tests here.
    private long timeoutMillis = 0;
    /**
     * Minimal execution policy: only the methods the remote runner actually uses are
     * implemented; the rest throw to catch unexpected calls.
     */
    private final SpawnExecutionPolicy simplePolicy = new SpawnExecutionPolicy() {
        @Override
        public boolean shouldPrefetchInputsForLocalExecution(Spawn spawn) {
            throw new UnsupportedOperationException();
        }
        @Override
        public void lockOutputFiles() throws InterruptedException {
            throw new UnsupportedOperationException();
        }
        @Override
        public ActionInputFileCache getActionInputFileCache() {
            return fakeFileCache;
        }
        @Override
        public long getTimeoutMillis() {
            return timeoutMillis;
        }
        @Override
        public FileOutErr getFileOutErr() {
            return outErr;
        }
        @Override
        public SortedMap<PathFragment, ActionInput> getInputMapping() throws IOException {
            return new SpawnInputExpander(/*strict*/false)
                .getInputMapping(simpleSpawn, SIMPLE_ARTIFACT_EXPANDER, fakeFileCache, "workspace");
        }
        @Override
        public void report(ProgressStatus state) {
            // TODO(ulfjack): Test that the right calls are made.
        }
    };

    /** Builds the in-memory exec root, a one-input echo spawn, and stdout/stderr capture files. */
    @Before
    public final void setUp() throws Exception {
        fs = new InMemoryFileSystem();
        execRoot = fs.getPath("/exec/root");
        FileSystemUtils.createDirectoryAndParents(execRoot);
        fakeFileCache = new FakeActionInputFileCache(execRoot);
        simpleSpawn = new SimpleSpawn(
            new FakeOwner("Mnemonic", "Progress Message"),
            ImmutableList.of("/bin/echo", "Hi!"),
            ImmutableMap.of("VARIABLE", "value"),
            /*executionInfo=*/ImmutableMap.<String, String>of(),
            /*inputs=*/ImmutableList.of(ActionInputHelper.fromPath("input")),
            /*outputs=*/ImmutableList.<ActionInput>of(),
            ResourceSet.ZERO
        );
        Path stdout = fs.getPath("/tmp/stdout");
        Path stderr = fs.getPath("/tmp/stderr");
        FileSystemUtils.createDirectoryAndParents(stdout.getParentDirectory());
        FileSystemUtils.createDirectoryAndParents(stderr.getParentDirectory());
        outErr = new FileOutErr(stdout, stderr);
    }

    /**
     * Writes {@code content} to the file behind {@code input} and records its digest in the
     * fake cache. NOTE(review): the digest is registered for simpleSpawn's FIRST input rather
     * than for {@code input} itself — fine while every caller passes that same input, but
     * confirm before reusing with other inputs.
     */
    private void scratch(ActionInput input, String content) throws IOException {
        Path inputFile = execRoot.getRelative(input.getExecPath());
        FileSystemUtils.writeContentAsLatin1(inputFile, content);
        fakeFileCache.setDigest(
            simpleSpawn.getInputFiles().get(0), ByteString.copyFrom(inputFile.getSHA1Digest()));
    }

    /** Cache hit with no stdout/stderr digests: result comes from the cache, nothing is recorded. */
    @Test
    public void cacheHit() throws Exception {
        GrpcCasInterface casIface = Mockito.mock(GrpcCasInterface.class);
        GrpcExecutionCacheInterface cacheIface = Mockito.mock(GrpcExecutionCacheInterface.class);
        GrpcExecutionInterface executionIface = Mockito.mock(GrpcExecutionInterface.class);
        RemoteOptions options = Options.getDefaults(RemoteOptions.class);
        GrpcRemoteExecutor executor =
            new GrpcRemoteExecutor(options, casIface, cacheIface, executionIface);
        RemoteSpawnRunner client = new RemoteSpawnRunner(execRoot, options, executor);
        scratch(simpleSpawn.getInputFiles().get(0), "xyz");
        // Stub a successful cached ActionResult with exit code 0.
        ExecutionCacheReply reply = ExecutionCacheReply.newBuilder()
            .setStatus(ExecutionCacheStatus.newBuilder().setSucceeded(true))
            .setResult(ActionResult.newBuilder().setReturnCode(0))
            .build();
        when(cacheIface.getCachedResult(any(ExecutionCacheRequest.class))).thenReturn(reply);
        SpawnResult result = client.exec(simpleSpawn, simplePolicy);
        verify(cacheIface).getCachedResult(any(ExecutionCacheRequest.class));
        assertThat(result.setupSuccess()).isTrue();
        assertThat(result.exitCode()).isEqualTo(0);
        assertThat(outErr.hasRecordedOutput()).isFalse();
        assertThat(outErr.hasRecordedStderr()).isFalse();
    }

    /** Cache hit whose result references stdout/stderr blobs in the CAS; both must be downloaded. */
    @Test
    public void cacheHitWithOutput() throws Exception {
        InMemoryCas casIface = new InMemoryCas();
        GrpcExecutionCacheInterface cacheIface = Mockito.mock(GrpcExecutionCacheInterface.class);
        GrpcExecutionInterface executionIface = Mockito.mock(GrpcExecutionInterface.class);
        RemoteOptions options = Options.getDefaults(RemoteOptions.class);
        GrpcRemoteExecutor executor =
            new GrpcRemoteExecutor(options, casIface, cacheIface, executionIface);
        RemoteSpawnRunner client = new RemoteSpawnRunner(execRoot, options, executor);
        scratch(simpleSpawn.getInputFiles().get(0), "xyz");
        byte[] cacheStdOut = "stdout".getBytes(StandardCharsets.UTF_8);
        byte[] cacheStdErr = "stderr".getBytes(StandardCharsets.UTF_8);
        ContentDigest stdOutDigest = casIface.put(cacheStdOut);
        ContentDigest stdErrDigest = casIface.put(cacheStdErr);
        ExecutionCacheReply reply = ExecutionCacheReply.newBuilder()
            .setStatus(ExecutionCacheStatus.newBuilder().setSucceeded(true))
            .setResult(ActionResult.newBuilder()
                .setReturnCode(0)
                .setStdoutDigest(stdOutDigest)
                .setStderrDigest(stdErrDigest))
            .build();
        when(cacheIface.getCachedResult(any(ExecutionCacheRequest.class))).thenReturn(reply);
        SpawnResult result = client.exec(simpleSpawn, simplePolicy);
        verify(cacheIface).getCachedResult(any(ExecutionCacheRequest.class));
        assertThat(result.setupSuccess()).isTrue();
        assertThat(result.exitCode()).isEqualTo(0);
        assertThat(outErr.outAsLatin1()).isEqualTo("stdout");
        assertThat(outErr.errAsLatin1()).isEqualTo("stderr");
    }

    /** Cache miss: the runner must fall through to remote execution and use its result. */
    @Test
    public void remotelyExecute() throws Exception {
        InMemoryCas casIface = new InMemoryCas();
        GrpcExecutionCacheInterface cacheIface = Mockito.mock(GrpcExecutionCacheInterface.class);
        GrpcExecutionInterface executionIface = Mockito.mock(GrpcExecutionInterface.class);
        RemoteOptions options = Options.getDefaults(RemoteOptions.class);
        GrpcRemoteExecutor executor =
            new GrpcRemoteExecutor(options, casIface, cacheIface, executionIface);
        RemoteSpawnRunner client = new RemoteSpawnRunner(execRoot, options, executor);
        scratch(simpleSpawn.getInputFiles().get(0), "xyz");
        byte[] cacheStdOut = "stdout".getBytes(StandardCharsets.UTF_8);
        byte[] cacheStdErr = "stderr".getBytes(StandardCharsets.UTF_8);
        ContentDigest stdOutDigest = casIface.put(cacheStdOut);
        ContentDigest stdErrDigest = casIface.put(cacheStdErr);
        // Cache reply succeeds but carries no result -> forces the execute path.
        ExecutionCacheReply reply = ExecutionCacheReply.newBuilder()
            .setStatus(ExecutionCacheStatus.newBuilder().setSucceeded(true))
            .build();
        when(cacheIface.getCachedResult(any(ExecutionCacheRequest.class))).thenReturn(reply);
        when(executionIface.execute(any(ExecuteRequest.class))).thenReturn(ImmutableList.of(
            ExecuteReply.newBuilder()
                .setStatus(ExecutionStatus.newBuilder().setSucceeded(true))
                .setResult(ActionResult.newBuilder()
                    .setReturnCode(0)
                    .setStdoutDigest(stdOutDigest)
                    .setStderrDigest(stdErrDigest))
                .build()).iterator());
        SpawnResult result = client.exec(simpleSpawn, simplePolicy);
        verify(cacheIface).getCachedResult(any(ExecutionCacheRequest.class));
        assertThat(result.setupSuccess()).isTrue();
        assertThat(result.exitCode()).isEqualTo(0);
        assertThat(outErr.outAsLatin1()).isEqualTo("stdout");
        assertThat(outErr.errAsLatin1()).isEqualTo("stderr");
    }
}
| |
package com.alxgrk.ressign.hateoas;
import static com.alxgrk.ressign.TestConfig.USER_ONE_ID;
import static com.alxgrk.ressign.TestConfig.USER_TWO_ID;
import static org.junit.Assert.assertNotNull;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.delete;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;
import javax.transaction.Transactional;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.http.converter.HttpMessageConverter;
import org.springframework.http.converter.json.MappingJackson2HttpMessageConverter;
import org.springframework.test.context.ActiveProfiles;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.test.context.web.WebAppConfiguration;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.ResultActions;
import org.springframework.test.web.servlet.setup.MockMvcBuilders;
import org.springframework.web.context.WebApplicationContext;
import com.alxgrk.ressign.RessignApplication;
import com.alxgrk.ressign.TestConfig;
import com.alxgrk.ressign.authentication.Auth0JwtExtractor;
import com.alxgrk.ressign.hateoas.mediatype.MediaTypes;
import com.alxgrk.ressign.hateoas.rels.Rels;
import com.alxgrk.ressign.models.CachedUser;
import com.alxgrk.ressign.models.Organization;
import com.alxgrk.ressign.models.Resource;
import com.alxgrk.ressign.repos.UserCache;
import com.alxgrk.ressign.repos.UserRepository;
import com.alxgrk.ressign.repos.OrganizationRepository;
import com.alxgrk.ressign.repos.ResourceRepository;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
/**
 * End-to-end HATEOAS "journey" test: starting from the API entry point, the
 * test follows the hypermedia link relations advertised in each response and
 * verifies that every step of the attach/detach-resource-to-user flow exposes
 * the expected rel and returns the expected status and media type.
 */
@RunWith(SpringRunner.class)
@SpringBootTest(classes = { RessignApplication.class, TestConfig.class })
@WebAppConfiguration
@ActiveProfiles("test")
public class JourneyTests {
    // Link relations expected, in journey order: list resources, view a
    // resource's administrators, attach an administrator, detach one.
    private static final List<String> attachAndDetachResourceToUserJourney = Lists.newArrayList(
            Rels.RESOURCES,
            Rels.ADMINISTRATORS,
            Rels.ATTACH,
            Rels.DETACH);
    private MockMvc mockMvc;
    // Raw type because the converter array is filtered by instanceof below.
    @SuppressWarnings("rawtypes")
    private HttpMessageConverter mappingJackson2HttpMessageConverter;
    @Autowired
    private WebApplicationContext context;
    @Autowired
    private UserRepository userRepository;
    @Autowired
    private UserCache userCache;
    @Autowired
    private OrganizationRepository organizationRepository;
    @Autowired
    private ResourceRepository resourceRepository;
    // Fixture data created in setup() and torn down after each test.
    private String password = "password";
    private String userNameOne = "testuser1";
    private String userNameTwo = "testuser2";
    private CachedUser accountOne;
    private CachedUser accountTwo;
    private Organization org;
    private String orgName = "org";
    private Resource res;
    private String resName = "res";
    private String orgId;
    private String resId;
    /**
     * Captures the JSON message converter from the Spring context so that a
     * missing Jackson converter fails fast rather than producing opaque
     * serialization errors later.
     */
    @Autowired
    void setConverters(HttpMessageConverter<?>[] converters) {
        this.mappingJackson2HttpMessageConverter = Arrays.asList(converters).stream().filter(
                hmc -> hmc instanceof MappingJackson2HttpMessageConverter).findAny().orElse(null);
        assertNotNull("the JSON message converter must not be null",
                this.mappingJackson2HttpMessageConverter);
    }
    /**
     * Builds the MockMvc instance and persists the fixture graph: one
     * organization, one resource, an admin user (accountOne) connected to
     * both, and a plain second user (accountTwo) to attach/detach.
     */
    @Before
    public void setup() throws Exception {
        this.mockMvc = MockMvcBuilders.webAppContextSetup(context).build();
        this.userCache.deleteAllInBatch();
        org = new Organization().setName(orgName);
        res = new Resource().setName(resName);
        org = organizationRepository.save(org);
        res = resourceRepository.save(res);
        orgId = org.getId();
        resId = res.getId();
        accountOne = new CachedUser(USER_ONE_ID)
                .setIsAdmin(true)
                .setUsername(userNameOne)
                .setPassword(password)
                .setSurname(userNameOne)
                .setName(userNameOne)
                .setOrganization(org)
                .setConnectedResources(Sets.newHashSet(res));
        userRepository.store(accountOne);
        accountOne = userCache.save(accountOne);
        accountTwo = new CachedUser(USER_TWO_ID)
                .setUsername(userNameTwo)
                .setPassword(password)
                .setSurname(userNameTwo)
                .setName(userNameTwo);
        userRepository.store(accountTwo);
        accountTwo = userCache.save(accountTwo);
    }
    /**
     * Walks the journey: entry point -> resource list -> administrators of
     * the resource -> POST to attach user two -> verify attached -> DELETE to
     * detach. The steps are strictly ordered; each request depends on state
     * established by the previous one.
     */
    @Test
    @Transactional
    public void testAttachAndDetachResourceToUserJourney() throws Exception {
        String entryPoint = "/";
        String firstHref = "/resources";
        // Same href GET then POST: listing administrators vs. attaching one.
        String secondHref = "/resources/" + resId + "/administrators";
        String thirdHref = "/resources/" + resId + "/administrators";
        String fourthHref = "/resources/" + resId + "/administrators/" + USER_TWO_ID;
        // Step 1: entry point advertises the "resources" rel.
        ResultActions perform = mockMvc.perform(get(entryPoint)
                .header(Auth0JwtExtractor.AUTHORIZATION, TestConfig.USER_ONE_TOKEN));
        perform
                .andExpect(status().isOk())
                .andExpect(content().contentType(MediaTypes.ROOT_TYPE + ";charset=UTF-8"))
                .andExpect(jsonPath("$._links[4].rel")
                        .value(attachAndDetachResourceToUserJourney.get(0)));
        // Step 2: resource list advertises the "administrators" rel.
        mockMvc.perform(get(firstHref)
                .header(Auth0JwtExtractor.AUTHORIZATION, TestConfig.USER_ONE_TOKEN))
                .andExpect(status().isOk())
                .andExpect(content().contentType(MediaTypes.RESOURCE_TYPE + ";charset=UTF-8"))
                .andExpect(jsonPath("$.members[0].name").value(resName))
                .andExpect(jsonPath("$.members[0]._links[1].rel")
                        .value(attachAndDetachResourceToUserJourney.get(1)));
        // Step 3: administrators collection advertises the "attach" rel.
        mockMvc.perform(get(secondHref)
                .header(Auth0JwtExtractor.AUTHORIZATION, TestConfig.USER_ONE_TOKEN))
                .andExpect(status().isOk())
                .andExpect(content().contentType(MediaTypes.ACCOUNT_TYPE + ";charset=UTF-8"))
                .andExpect(jsonPath("$._links[1].rel")
                        .value(attachAndDetachResourceToUserJourney.get(2)));
        // Step 4: attach user two as an administrator of the resource.
        mockMvc.perform(post(thirdHref)
                .header(Auth0JwtExtractor.AUTHORIZATION, TestConfig.USER_ONE_TOKEN)
                .param("username", userNameTwo))
                .andExpect(status().isCreated());
        // Step 5: the new administrator is listed and offers the "detach" rel.
        mockMvc.perform(get(secondHref)
                .header(Auth0JwtExtractor.AUTHORIZATION, TestConfig.USER_ONE_TOKEN))
                .andExpect(status().isOk())
                .andExpect(content().contentType(MediaTypes.ACCOUNT_TYPE + ";charset=UTF-8"))
                .andExpect(jsonPath("$.members[0].username").value(userNameTwo))
                .andExpect(jsonPath("$.members[0]._links[1].rel")
                        .value(attachAndDetachResourceToUserJourney.get(3)));
        // Step 6: detach the administrator again.
        mockMvc.perform(delete(fourthHref)
                .header(Auth0JwtExtractor.AUTHORIZATION, TestConfig.USER_ONE_TOKEN))
                .andExpect(status().isNoContent());
    }
    /**
     * Removes all users created by the test one by one (delete-by-id), since
     * the cache was already cleared in setup().
     */
    @After
    public void tearDown() throws Exception {
        List<String> ids = userRepository.findAll()
                .stream()
                .map(CachedUser::getId)
                .collect(Collectors.toList());
        ids.forEach(userRepository::delete);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.giraph.conf;
import org.apache.giraph.aggregators.AggregatorWriter;
import org.apache.giraph.combiner.MessageCombiner;
import org.apache.giraph.edge.OutEdges;
import org.apache.giraph.edge.ReuseObjectsOutEdges;
import org.apache.giraph.factories.ComputationFactory;
import org.apache.giraph.graph.Vertex;
import org.apache.giraph.graph.VertexValueCombiner;
import org.apache.giraph.graph.VertexResolver;
import org.apache.giraph.factories.VertexValueFactory;
import org.apache.giraph.graph.Computation;
import org.apache.giraph.io.EdgeInputFormat;
import org.apache.giraph.io.EdgeOutputFormat;
import org.apache.giraph.io.VertexInputFormat;
import org.apache.giraph.io.VertexOutputFormat;
import org.apache.giraph.io.filters.EdgeInputFilter;
import org.apache.giraph.io.filters.VertexInputFilter;
import org.apache.giraph.job.GiraphJobObserver;
import org.apache.giraph.job.GiraphJobRetryChecker;
import org.apache.giraph.master.MasterCompute;
import org.apache.giraph.master.MasterObserver;
import org.apache.giraph.partition.GraphPartitionerFactory;
import org.apache.giraph.partition.Partition;
import org.apache.giraph.partition.ReusesObjectsPartition;
import org.apache.giraph.utils.ReflectionUtils;
import org.apache.giraph.worker.WorkerContext;
import org.apache.giraph.worker.WorkerObserver;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.net.DNS;
import io.netty.buffer.ByteBufAllocator;
import io.netty.buffer.PooledByteBufAllocator;
import io.netty.buffer.UnpooledByteBufAllocator;
import java.net.UnknownHostException;
/**
* Adds user methods specific to Giraph. This will be put into an
* ImmutableClassesGiraphConfiguration that provides the configuration plus
* the immutable classes.
*
* Keeps track of parameters which were set so it easily set them in another
* copy of configuration.
*/
public class GiraphConfiguration extends Configuration
implements GiraphConstants {
/** ByteBufAllocator to be used by netty */
private ByteBufAllocator nettyBufferAllocator = null;
  /**
   * Default constructor; delegates to {@code configureHadoopSecurity()}
   * (defined elsewhere in this class) on a fresh configuration.
   */
  public GiraphConfiguration() {
    configureHadoopSecurity();
  }
  /**
   * Copy constructor: initializes from an existing Hadoop configuration and
   * then applies the same {@code configureHadoopSecurity()} step.
   *
   * @param conf Configuration
   */
  public GiraphConfiguration(Configuration conf) {
    super(conf);
    configureHadoopSecurity();
  }
/**
* Get name of computation being run. We leave this up to the
* {@link ComputationFactory} to decide what to return.
*
* @return Name of computation being run
*/
public String getComputationName() {
ComputationFactory compFactory = ReflectionUtils.newInstance(
getComputationFactoryClass());
return compFactory.computationName(this);
}
  /**
   * Get the user's subclassed {@link ComputationFactory}.
   *
   * @return User's computation factory class
   */
  public Class<? extends ComputationFactory> getComputationFactoryClass() {
    return COMPUTATION_FACTORY_CLASS.get(this);
  }
  /**
   * Get the user's subclassed {@link Computation}.
   *
   * @return User's computation class
   */
  public Class<? extends Computation> getComputationClass() {
    return COMPUTATION_CLASS.get(this);
  }
  /**
   * Set the computation class (required).
   *
   * @param computationClass Runs vertex computation
   */
  public void setComputationClass(
      Class<? extends Computation> computationClass) {
    COMPUTATION_CLASS.set(this, computationClass);
  }
  /**
   * Set the vertex value factory class.
   *
   * @param vertexValueFactoryClass Creates default vertex values
   */
  public final void setVertexValueFactoryClass(
      Class<? extends VertexValueFactory> vertexValueFactoryClass) {
    VERTEX_VALUE_FACTORY_CLASS.set(this, vertexValueFactoryClass);
  }
  /**
   * Set the edge input filter class.
   *
   * @param edgeFilterClass class to use
   */
  public void setEdgeInputFilterClass(
      Class<? extends EdgeInputFilter> edgeFilterClass) {
    EDGE_INPUT_FILTER_CLASS.set(this, edgeFilterClass);
  }
  /**
   * Set the vertex input filter class.
   *
   * @param vertexFilterClass class to use
   */
  public void setVertexInputFilterClass(
      Class<? extends VertexInputFilter> vertexFilterClass) {
    VERTEX_INPUT_FILTER_CLASS.set(this, vertexFilterClass);
  }
  /**
   * Get the vertex edges class.
   *
   * @return vertex edges class
   */
  public Class<? extends OutEdges> getOutEdgesClass() {
    return VERTEX_EDGES_CLASS.get(this);
  }
  /**
   * Set the vertex edges class.
   *
   * @param outEdgesClass Determines the way edges are stored
   */
  public final void setOutEdgesClass(
      Class<? extends OutEdges> outEdgesClass) {
    VERTEX_EDGES_CLASS.set(this, outEdgesClass);
  }
  /**
   * Set the vertex implementation class.
   *
   * @param vertexClass class of the vertex implementation
   */
  public final void setVertexClass(Class<? extends Vertex> vertexClass) {
    VERTEX_CLASS.set(this, vertexClass);
  }
  /**
   * Set the vertex edges class used during edge-based input (if different
   * from the one used during computation).
   *
   * @param inputOutEdgesClass Determines the way edges are stored
   */
  public final void setInputOutEdgesClass(
      Class<? extends OutEdges> inputOutEdgesClass) {
    INPUT_VERTEX_EDGES_CLASS.set(this, inputOutEdgesClass);
  }
  /**
   * True if the {@link org.apache.giraph.edge.OutEdges} implementation
   * copies the passed edges to its own data structure,
   * i.e. it doesn't keep references to Edge objects, target vertex ids or edge
   * values passed to add() or initialize().
   * This makes it possible to reuse edge objects passed to the data
   * structure, to minimize object instantiation (see for example
   * EdgeStore#addPartitionEdges()).
   *
   * @return True iff we can reuse the edge objects
   */
  public boolean reuseEdgeObjects() {
    // Marker-interface check: implementing ReuseObjectsOutEdges advertises
    // copy-on-add semantics.
    return ReuseObjectsOutEdges.class.isAssignableFrom(
        getOutEdgesClass());
  }
  /**
   * True if the {@link Partition} implementation copies the passed vertices
   * to its own data structure, i.e. it doesn't keep references to Vertex
   * objects passed to it.
   * This makes it possible to reuse vertex objects passed to the data
   * structure, to minimize object instantiation.
   *
   * @return True iff we can reuse the vertex objects
   */
  public boolean reuseVertexObjects() {
    // Marker-interface check, analogous to reuseEdgeObjects().
    return ReusesObjectsPartition.class.isAssignableFrom(getPartitionClass());
  }
  /**
   * Get Partition class used.
   *
   * @return Partition class
   */
  public Class<? extends Partition> getPartitionClass() {
    return PARTITION_CLASS.get(this);
  }
  /**
   * Does the job have a {@link VertexInputFormat}?
   *
   * @return True iff a {@link VertexInputFormat} has been specified.
   */
  public boolean hasVertexInputFormat() {
    return VERTEX_INPUT_FORMAT_CLASS.get(this) != null;
  }
  /**
   * Set the vertex input format class (required).
   *
   * @param vertexInputFormatClass Determines how graph is input
   */
  public void setVertexInputFormatClass(
      Class<? extends VertexInputFormat> vertexInputFormatClass) {
    VERTEX_INPUT_FORMAT_CLASS.set(this, vertexInputFormatClass);
  }
  /**
   * Does the job have a {@link EdgeInputFormat}?
   *
   * @return True iff a {@link EdgeInputFormat} has been specified.
   */
  public boolean hasEdgeInputFormat() {
    return EDGE_INPUT_FORMAT_CLASS.get(this) != null;
  }
  /**
   * Set the edge input format class (required).
   *
   * @param edgeInputFormatClass Determines how graph is input
   */
  public void setEdgeInputFormatClass(
      Class<? extends EdgeInputFormat> edgeInputFormatClass) {
    EDGE_INPUT_FORMAT_CLASS.set(this, edgeInputFormatClass);
  }
  /**
   * Set the master class (optional).
   *
   * @param masterComputeClass Runs master computation
   */
  public final void setMasterComputeClass(
      Class<? extends MasterCompute> masterComputeClass) {
    MASTER_COMPUTE_CLASS.set(this, masterComputeClass);
  }
  /**
   * Add a MasterObserver class (optional). Observers accumulate; this does
   * not replace previously added classes.
   *
   * @param masterObserverClass MasterObserver class to add.
   */
  public final void addMasterObserverClass(
      Class<? extends MasterObserver> masterObserverClass) {
    MASTER_OBSERVER_CLASSES.add(this, masterObserverClass);
  }
  /**
   * Add a WorkerObserver class (optional). Observers accumulate; this does
   * not replace previously added classes.
   *
   * @param workerObserverClass WorkerObserver class to add.
   */
  public final void addWorkerObserverClass(
      Class<? extends WorkerObserver> workerObserverClass) {
    WORKER_OBSERVER_CLASSES.add(this, workerObserverClass);
  }
  /**
   * Get job observer class.
   *
   * @return GiraphJobObserver class set.
   */
  public Class<? extends GiraphJobObserver> getJobObserverClass() {
    return JOB_OBSERVER_CLASS.get(this);
  }
  /**
   * Set job observer class.
   *
   * @param klass GiraphJobObserver class to set.
   */
  public void setJobObserverClass(Class<? extends GiraphJobObserver> klass) {
    JOB_OBSERVER_CLASS.set(this, klass);
  }
  /**
   * Get job retry checker class.
   *
   * @return GiraphJobRetryChecker class set.
   */
  public Class<? extends GiraphJobRetryChecker> getJobRetryCheckerClass() {
    return JOB_RETRY_CHECKER_CLASS.get(this);
  }
  /**
   * Set job retry checker class.
   *
   * @param klass GiraphJobRetryChecker class to set.
   */
  public void setJobRetryCheckerClass(
      Class<? extends GiraphJobRetryChecker> klass) {
    JOB_RETRY_CHECKER_CLASS.set(this, klass);
  }
  /**
   * Check whether to enable jmap dumping thread.
   *
   * @return true if jmap dumper is enabled.
   */
  public boolean isJMapHistogramDumpEnabled() {
    return JMAP_ENABLE.get(this);
  }
  /**
   * Check whether to enable heap memory supervisor thread.
   *
   * @return true if jmap dumper is reactively enabled
   */
  public boolean isReactiveJmapHistogramDumpEnabled() {
    return REACTIVE_JMAP_ENABLE.get(this);
  }
/**
* Set mapping from a key name to a list of classes.
*
* @param name String key name to use.
* @param xface interface of the classes being set.
* @param klasses Classes to set.
*/
public final void setClasses(String name, Class<?> xface,
Class<?> ... klasses) {
String[] klassNames = new String[klasses.length];
for (int i = 0; i < klasses.length; ++i) {
Class<?> klass = klasses[i];
if (!xface.isAssignableFrom(klass)) {
throw new RuntimeException(klass + " does not implement " +
xface.getName());
}
klassNames[i] = klasses[i].getName();
}
setStrings(name, klassNames);
}
  /**
   * Does the job have a {@link VertexOutputFormat}?
   *
   * @return True iff a {@link VertexOutputFormat} has been specified.
   */
  public boolean hasVertexOutputFormat() {
    return VERTEX_OUTPUT_FORMAT_CLASS.get(this) != null;
  }
  /**
   * Set the vertex output format class (optional).
   *
   * @param vertexOutputFormatClass Determines how graph is output
   */
  public final void setVertexOutputFormatClass(
      Class<? extends VertexOutputFormat> vertexOutputFormatClass) {
    VERTEX_OUTPUT_FORMAT_CLASS.set(this, vertexOutputFormatClass);
  }
  /**
   * Does the job have a {@link VertexOutputFormat} subdir?
   *
   * @return True iff a {@link VertexOutputFormat} subdir has been specified.
   */
  public boolean hasVertexOutputFormatSubdir() {
    return !VERTEX_OUTPUT_FORMAT_SUBDIR.get(this).isEmpty();
  }
  /**
   * Set the vertex output format path.
   *
   * @param path path where the vertices will be written
   */
  public final void setVertexOutputFormatSubdir(String path) {
    VERTEX_OUTPUT_FORMAT_SUBDIR.set(this, path);
  }
  /**
   * Check if output should be done during computation.
   *
   * @return True iff output should be done during computation
   */
  public final boolean doOutputDuringComputation() {
    return DO_OUTPUT_DURING_COMPUTATION.get(this);
  }
  /**
   * Set whether or not we should do output during computation.
   *
   * @param doOutputDuringComputation True iff we want output to happen
   *                                  during computation
   */
  public final void setDoOutputDuringComputation(
      boolean doOutputDuringComputation) {
    DO_OUTPUT_DURING_COMPUTATION.set(this, doOutputDuringComputation);
  }
  /**
   * Check if VertexOutputFormat is thread-safe.
   *
   * @return True iff VertexOutputFormat is thread-safe
   */
  public final boolean vertexOutputFormatThreadSafe() {
    return VERTEX_OUTPUT_FORMAT_THREAD_SAFE.get(this);
  }
  /**
   * Set whether or not selected VertexOutputFormat is thread-safe.
   *
   * @param vertexOutputFormatThreadSafe True iff selected VertexOutputFormat
   *                                     is thread-safe
   */
  public final void setVertexOutputFormatThreadSafe(
      boolean vertexOutputFormatThreadSafe) {
    VERTEX_OUTPUT_FORMAT_THREAD_SAFE.set(this, vertexOutputFormatThreadSafe);
  }
  /**
   * Does the job have a {@link EdgeOutputFormat}?
   *
   * @return True iff a {@link EdgeOutputFormat} has been specified.
   */
  public boolean hasEdgeOutputFormat() {
    return EDGE_OUTPUT_FORMAT_CLASS.get(this) != null;
  }
  /**
   * Set the edge output format class (optional).
   *
   * @param edgeOutputFormatClass Determines how graph is output
   */
  public final void setEdgeOutputFormatClass(
      Class<? extends EdgeOutputFormat> edgeOutputFormatClass) {
    EDGE_OUTPUT_FORMAT_CLASS.set(this, edgeOutputFormatClass);
  }
  /**
   * Does the job have a {@link EdgeOutputFormat} subdir?
   *
   * @return True iff a {@link EdgeOutputFormat} subdir has been specified.
   */
  public boolean hasEdgeOutputFormatSubdir() {
    return !EDGE_OUTPUT_FORMAT_SUBDIR.get(this).isEmpty();
  }
  /**
   * Set the edge output format path.
   *
   * @param path path where the edges will be written
   */
  public final void setEdgeOutputFormatSubdir(String path) {
    EDGE_OUTPUT_FORMAT_SUBDIR.set(this, path);
  }
/**
* Get the number of threads to use for writing output in the end of the
* application. If output format is not thread safe, returns 1.
*
* @return Number of output threads
*/
public final int getNumOutputThreads() {
if (!vertexOutputFormatThreadSafe()) {
return 1;
} else {
return NUM_OUTPUT_THREADS.get(this);
}
}
  /**
   * Set the number of threads to use for writing output in the end of the
   * application. Will be used only if {#vertexOutputFormatThreadSafe} is true.
   *
   * @param numOutputThreads Number of output threads
   */
  public void setNumOutputThreads(int numOutputThreads) {
    NUM_OUTPUT_THREADS.set(this, numOutputThreads);
  }
  /**
   * Get the message combiner class (optional).
   *
   * @return messageCombinerClass Determines how vertex messages are combined
   */
  public Class<? extends MessageCombiner> getMessageCombinerClass() {
    return MESSAGE_COMBINER_CLASS.get(this);
  }
  /**
   * Set the message combiner class (optional).
   *
   * @param messageCombinerClass Determines how vertex messages are combined
   */
  public void setMessageCombinerClass(
      Class<? extends MessageCombiner> messageCombinerClass) {
    MESSAGE_COMBINER_CLASS.set(this, messageCombinerClass);
  }
  /**
   * Set the graph partitioner class (optional).
   *
   * @param graphPartitionerFactoryClass Determines how the graph is partitioned
   */
  public final void setGraphPartitionerFactoryClass(
      Class<? extends GraphPartitionerFactory> graphPartitionerFactoryClass) {
    GRAPH_PARTITIONER_FACTORY_CLASS.set(this, graphPartitionerFactoryClass);
  }
  /**
   * Set the vertex resolver class (optional).
   *
   * @param vertexResolverClass Determines how vertex mutations are resolved
   */
  public final void setVertexResolverClass(
      Class<? extends VertexResolver> vertexResolverClass) {
    VERTEX_RESOLVER_CLASS.set(this, vertexResolverClass);
  }
  /**
   * Whether to create a vertex that doesn't exist when it receives messages.
   * This only affects DefaultVertexResolver.
   *
   * @return true if we should create non existent vertices that get messages.
   */
  public final boolean getResolverCreateVertexOnMessages() {
    return RESOLVER_CREATE_VERTEX_ON_MSGS.get(this);
  }
  /**
   * Set whether to create non existent vertices when they receive messages.
   *
   * @param v true if we should create vertices when they get messages.
   */
  public final void setResolverCreateVertexOnMessages(boolean v) {
    RESOLVER_CREATE_VERTEX_ON_MSGS.set(this, v);
  }
  /**
   * Set the vertex value combiner class (optional).
   *
   * @param vertexValueCombinerClass Determines how vertices are combined
   */
  public final void setVertexValueCombinerClass(
      Class<? extends VertexValueCombiner> vertexValueCombinerClass) {
    VERTEX_VALUE_COMBINER_CLASS.set(this, vertexValueCombinerClass);
  }
  /**
   * Set the worker context class (optional).
   *
   * @param workerContextClass Determines what code is executed on each
   *        worker before and after each superstep and computation
   */
  public final void setWorkerContextClass(
      Class<? extends WorkerContext> workerContextClass) {
    WORKER_CONTEXT_CLASS.set(this, workerContextClass);
  }
  /**
   * Set the aggregator writer class (optional).
   *
   * @param aggregatorWriterClass Determines how the aggregators are
   *        written to file at the end of the job
   */
  public final void setAggregatorWriterClass(
      Class<? extends AggregatorWriter> aggregatorWriterClass) {
    AGGREGATOR_WRITER_CLASS.set(this, aggregatorWriterClass);
  }
  /**
   * Set the partition class (optional).
   *
   * @param partitionClass Determines the way partitions are stored
   */
  public final void setPartitionClass(
      Class<? extends Partition> partitionClass) {
    PARTITION_CLASS.set(this, partitionClass);
  }
  /**
   * Set worker configuration for determining what is required for
   * a superstep.
   *
   * @param minWorkers Minimum workers to do a superstep
   * @param maxWorkers Maximum workers to do a superstep
   *        (max map tasks in job)
   * @param minPercentResponded 0 - 100 % of the workers required to
   *        have responded before continuing the superstep
   */
  public final void setWorkerConfiguration(int minWorkers,
                                           int maxWorkers,
                                           float minPercentResponded) {
    setInt(MIN_WORKERS, minWorkers);
    setInt(MAX_WORKERS, maxWorkers);
    MIN_PERCENT_RESPONDED.set(this, minPercentResponded);
  }
  /** @return Minimum number of workers for a superstep, or -1 if not set */
  public final int getMinWorkers() {
    return getInt(MIN_WORKERS, -1);
  }
  /** @return Maximum number of workers for a superstep, or -1 if not set */
  public final int getMaxWorkers() {
    return getInt(MAX_WORKERS, -1);
  }
  /** @return Percentage (0-100) of workers required to respond */
  public final float getMinPercentResponded() {
    return MIN_PERCENT_RESPONDED.get(this);
  }
  /**
   * Utilize an existing ZooKeeper service. If this is not set, ZooKeeper
   * will be dynamically started by Giraph for this job.
   *
   * @param serverList Comma separated list of servers and ports
   *        (i.e. zk1:2221,zk2:2221)
   */
  public final void setZooKeeperConfiguration(String serverList) {
    ZOOKEEPER_LIST.set(this, serverList);
  }
  /**
   * Getter for SPLIT_MASTER_WORKER flag.
   *
   * @return boolean flag value.
   */
  public final boolean getSplitMasterWorker() {
    return SPLIT_MASTER_WORKER.get(this);
  }
  /**
   * Get array of MasterObserver classes set in the configuration.
   *
   * @return array of MasterObserver classes.
   */
  public Class<? extends MasterObserver>[] getMasterObserverClasses() {
    return MASTER_OBSERVER_CLASSES.getArray(this);
  }
  /**
   * Get array of WorkerObserver classes set in configuration.
   *
   * @return array of WorkerObserver classes.
   */
  public Class<? extends WorkerObserver>[] getWorkerObserverClasses() {
    return WORKER_OBSERVER_CLASSES.getArray(this);
  }
  /**
   * Whether to track, print, and aggregate metrics.
   *
   * @return true if metrics are enabled, false otherwise (default)
   */
  public boolean metricsEnabled() {
    return METRICS_ENABLE.isTrue(this);
  }
  /**
   * Get the task partition.
   *
   * @return The task partition or -1 if not set
   */
  public int getTaskPartition() {
    return getInt("mapred.task.partition", -1);
  }
  /**
   * Is this a "pure YARN" Giraph job, or is a MapReduce layer (v1 or v2)
   * actually managing our cluster nodes, i.e. each task is a Mapper.
   *
   * @return TRUE if this is a pure YARN job.
   */
  public boolean isPureYarnJob() {
    return IS_PURE_YARN_JOB.get(this);
  }
  /**
   * Jars required in "Pure YARN" jobs (names only, no paths) should
   * be listed here in full, including Giraph framework jar(s).
   *
   * @return the comma-separated list of jar names for export to cluster.
   */
  public String getYarnLibJars() {
    return GIRAPH_YARN_LIBJARS.get(this);
  }
  /**
   * Populate jar list for Pure YARN jobs.
   *
   * @param jarList a comma-separated list of jar names
   */
  public void setYarnLibJars(String jarList) {
    GIRAPH_YARN_LIBJARS.set(this, jarList);
  }
  /**
   * Get heap size (in MB) for each task in our Giraph job run,
   * assuming this job will run on the "pure YARN" profile.
   *
   * @return the heap size for all tasks, in MB
   */
  public int getYarnTaskHeapMb() {
    return GIRAPH_YARN_TASK_HEAP_MB.get(this);
  }
  /**
   * Set heap size for Giraph tasks in our job run, assuming
   * the job will run on the "pure YARN" profile.
   *
   * @param heapMb the heap size for all tasks
   */
  public void setYarnTaskHeapMb(int heapMb) {
    GIRAPH_YARN_TASK_HEAP_MB.set(this, heapMb);
  }
  /**
   * Get the ZooKeeper list.
   *
   * @return ZooKeeper list of strings, comma separated or null if none set.
   */
  public String getZookeeperList() {
    return ZOOKEEPER_LIST.get(this);
  }
  /**
   * Set the ZooKeeper list to the provided list. This method is used when the
   * ZooKeeper is started internally and will set the zkIsExternal option to
   * false as well.
   *
   * @param zkList list of strings, comma separated of zookeeper servers
   */
  public void setZookeeperList(String zkList) {
    ZOOKEEPER_LIST.set(this, zkList);
    // Setting the list directly implies Giraph started ZooKeeper itself.
    ZOOKEEPER_IS_EXTERNAL.set(this, false);
  }
  /**
   * Was ZooKeeper provided externally?
   *
   * @return true iff zookeeper is external
   */
  public boolean isZookeeperExternal() {
    return ZOOKEEPER_IS_EXTERNAL.get(this);
  }
  /**
   * Get the configured log level.
   *
   * NOTE(review): the name "getLocalLevel" appears to be a typo for
   * "getLogLevel" (it returns LOG_LEVEL); kept as-is for API compatibility.
   *
   * @return configured log level string
   */
  public String getLocalLevel() {
    return LOG_LEVEL.get(this);
  }
  /**
   * Use the log thread layout option?
   *
   * @return True if use the log thread layout option, false otherwise
   */
  public boolean useLogThreadLayout() {
    return LOG_THREAD_LAYOUT.get(this);
  }
  /**
   * Is this job run a local test?
   *
   * @return the test status as recorded in the Configuration
   */
  public boolean getLocalTestMode() {
    return LOCAL_TEST_MODE.get(this);
  }
  /**
   * Flag this job as a local test run.
   *
   * @param flag the test status for this job
   */
  public void setLocalTestMode(boolean flag) {
    LOCAL_TEST_MODE.set(this, flag);
  }
  /**
   * The number of server tasks in our ZK quorum for
   * this job run.
   *
   * @return the number of ZK servers in the quorum
   */
  public int getZooKeeperServerCount() {
    return ZOOKEEPER_SERVER_COUNT.get(this);
  }
  /** @return the configured ZooKeeper session timeout */
  public int getZooKeeperSessionTimeout() {
    return ZOOKEEPER_SESSION_TIMEOUT.get(this);
  }
  /** @return max attempts for ZooKeeper operations */
  public int getZookeeperOpsMaxAttempts() {
    return ZOOKEEPER_OPS_MAX_ATTEMPTS.get(this);
  }
  /** @return msecs to wait between retries of ZooKeeper operations */
  public int getZookeeperOpsRetryWaitMsecs() {
    return ZOOKEEPER_OPS_RETRY_WAIT_MSECS.get(this);
  }
  /** @return whether the netty server uses an execution handler */
  public boolean getNettyServerUseExecutionHandler() {
    return NETTY_SERVER_USE_EXECUTION_HANDLER.get(this);
  }
  /** @return number of netty server threads */
  public int getNettyServerThreads() {
    return NETTY_SERVER_THREADS.get(this);
  }
  /** @return number of netty server execution threads */
  public int getNettyServerExecutionThreads() {
    return NETTY_SERVER_EXECUTION_THREADS.get(this);
  }
/**
* Get the netty server execution concurrency. This depends on whether the
* netty server execution handler exists.
*
* @return Server concurrency
*/
public int getNettyServerExecutionConcurrency() {
if (getNettyServerUseExecutionHandler()) {
return getNettyServerExecutionThreads();
} else {
return getNettyServerThreads();
}
}
  /**
   * Used by netty client and server to create ByteBufAllocator.
   *
   * NOTE(review): the lazy initialization below is not synchronized; two
   * threads calling this concurrently could each construct an allocator.
   * Presumably it is only invoked from a single thread during netty setup —
   * confirm before relying on concurrent access.
   *
   * @return ByteBufAllocator (pooled or un-pooled per NETTY_USE_POOLED_ALLOCATOR)
   */
  public ByteBufAllocator getNettyAllocator() {
    if (nettyBufferAllocator == null) {
      if (NETTY_USE_POOLED_ALLOCATOR.get(this)) { // Use pooled allocator
        nettyBufferAllocator = new PooledByteBufAllocator(
          NETTY_USE_DIRECT_MEMORY.get(this));
      } else { // Use un-pooled allocator
        // Note: Current default settings create un-pooled heap allocator
        nettyBufferAllocator = new UnpooledByteBufAllocator(
          NETTY_USE_DIRECT_MEMORY.get(this));
      }
    }
    return nettyBufferAllocator;
  }
  /** @return value of the ZOOKEEPER_CONNECTION_ATTEMPTS option */
  public int getZookeeperConnectionAttempts() {
    return ZOOKEEPER_CONNECTION_ATTEMPTS.get(this);
  }
  /** @return minimum ZooKeeper session timeout */
  public int getZooKeeperMinSessionTimeout() {
    return ZOOKEEPER_MIN_SESSION_TIMEOUT.get(this);
  }
  /** @return maximum ZooKeeper session timeout */
  public int getZooKeeperMaxSessionTimeout() {
    return ZOOKEEPER_MAX_SESSION_TIMEOUT.get(this);
  }
  /** @return value of the ZOOKEEPER_FORCE_SYNC option */
  public boolean getZooKeeperForceSync() {
    return ZOOKEEPER_FORCE_SYNC.get(this);
  }
  /** @return value of the ZOOKEEPER_SKIP_ACL option */
  public boolean getZooKeeperSkipAcl() {
    return ZOOKEEPER_SKIP_ACL.get(this);
  }
/**
* Get the number of map tasks in this job
*
* @return Number of map tasks in this job
*/
public int getMapTasks() {
int mapTasks = getInt("mapred.map.tasks", -1);
if (mapTasks == -1) {
throw new IllegalStateException("getMapTasks: Failed to get the map " +
"tasks!");
}
return mapTasks;
}
  /**
   * Use authentication? (if supported)
   *
   * @return True if should authenticate, false otherwise
   */
  public boolean authenticate() {
    return AUTHENTICATE.get(this);
  }
  /**
   * Set the number of compute threads
   *
   * @param numComputeThreads Number of compute threads to use
   */
  public void setNumComputeThreads(int numComputeThreads) {
    NUM_COMPUTE_THREADS.set(this, numComputeThreads);
  }
  /**
   * Get the number of compute threads.
   *
   * @return Number of compute threads
   */
  public int getNumComputeThreads() {
    return NUM_COMPUTE_THREADS.get(this);
  }
  /**
   * Set the number of input split threads
   *
   * @param numInputSplitsThreads Number of input split threads to use
   */
  public void setNumInputSplitsThreads(int numInputSplitsThreads) {
    NUM_INPUT_THREADS.set(this, numInputSplitsThreads);
  }
  /**
   * Get the number of input split threads.
   *
   * @return Number of input split threads
   */
  public int getNumInputSplitsThreads() {
    return NUM_INPUT_THREADS.get(this);
  }
  /**
   * Get the maximum number of vertices per input split.
   *
   * @return Maximum vertices per input split
   */
  public long getInputSplitMaxVertices() {
    return INPUT_SPLIT_MAX_VERTICES.get(this);
  }
  /**
   * Get the maximum number of edges per input split.
   *
   * @return Maximum edges per input split
   */
  public long getInputSplitMaxEdges() {
    return INPUT_SPLIT_MAX_EDGES.get(this);
  }
  /**
   * Set whether to use unsafe serialization
   *
   * @param useUnsafeSerialization If true, use unsafe serialization
   */
  public void useUnsafeSerialization(boolean useUnsafeSerialization) {
    USE_UNSAFE_SERIALIZATION.set(this, useUnsafeSerialization);
  }
  /**
   * Use message size encoding? This feature may help with complex message
   * objects.
   *
   * @return Whether to use message size encoding
   */
  public boolean useMessageSizeEncoding() {
    return USE_MESSAGE_SIZE_ENCODING.get(this);
  }
  /**
   * Set the checkpoint frequeuncy of how many supersteps to wait before
   * checkpointing
   *
   * @param checkpointFrequency How often to checkpoint (0 means never)
   */
  public void setCheckpointFrequency(int checkpointFrequency) {
    CHECKPOINT_FREQUENCY.set(this, checkpointFrequency);
  }
  /**
   * Get the checkpoint frequeuncy of how many supersteps to wait
   * before checkpointing
   *
   * @return Checkpoint frequency (0 means never)
   */
  public int getCheckpointFrequency() {
    return CHECKPOINT_FREQUENCY.get(this);
  }
  /**
   * Check if checkpointing is used
   *
   * @return True iff checkpointing is used
   */
  public boolean useCheckpointing() {
    // A frequency of 0 is the documented "never checkpoint" sentinel.
    return getCheckpointFrequency() != 0;
  }
  /**
   * Set the max task attempts
   *
   * @param maxTaskAttempts Max task attempts to use
   */
  public void setMaxTaskAttempts(int maxTaskAttempts) {
    MAX_TASK_ATTEMPTS.set(this, maxTaskAttempts);
  }
  /**
   * Get the max task attempts
   *
   * @return Max task attempts or -1, if not set
   */
  public int getMaxTaskAttempts() {
    return MAX_TASK_ATTEMPTS.get(this);
  }
  /**
   * Get the number of milliseconds to wait for an event before continuing on
   *
   * @return Number of milliseconds to wait for an event before continuing on
   */
  public int getEventWaitMsecs() {
    return EVENT_WAIT_MSECS.get(this);
  }
  /**
   * Set the number of milliseconds to wait for an event before continuing on
   *
   * @param eventWaitMsecs Number of milliseconds to wait for an event before
   *        continuing on
   */
  public void setEventWaitMsecs(int eventWaitMsecs) {
    EVENT_WAIT_MSECS.set(this, eventWaitMsecs);
  }
  /**
   * Get the maximum milliseconds to wait before giving up trying to get the
   * minimum number of workers before a superstep.
   *
   * @return Maximum milliseconds to wait before giving up trying to get the
   *         minimum number of workers before a superstep
   */
  public int getMaxMasterSuperstepWaitMsecs() {
    return MAX_MASTER_SUPERSTEP_WAIT_MSECS.get(this);
  }
  /**
   * Set the maximum milliseconds to wait before giving up trying to get the
   * minimum number of workers before a superstep.
   *
   * @param maxMasterSuperstepWaitMsecs Maximum milliseconds to wait before
   *                                    giving up trying to get the minimum
   *                                    number of workers before a superstep
   */
  public void setMaxMasterSuperstepWaitMsecs(int maxMasterSuperstepWaitMsecs) {
    MAX_MASTER_SUPERSTEP_WAIT_MSECS.set(this, maxMasterSuperstepWaitMsecs);
  }
/**
* Check environment for Hadoop security token location in case we are
* executing the Giraph job on a MRv1 Hadoop cluster.
*/
public void configureHadoopSecurity() {
String hadoopTokenFilePath = System.getenv("HADOOP_TOKEN_FILE_LOCATION");
if (hadoopTokenFilePath != null) {
set("mapreduce.job.credentials.binary", hadoopTokenFilePath);
}
}
  /**
   * Check if we want to prioritize input splits which reside on the host.
   *
   * @return True iff we want to use input split locality
   */
  public boolean useInputSplitLocality() {
    return USE_INPUT_SPLIT_LOCALITY.get(this);
  }
  /**
   * Get the local hostname on the given interface.
   *
   * @return The local hostname, lower-cased
   * @throws UnknownHostException if the local host name cannot be resolved
   */
  public String getLocalHostname() throws UnknownHostException {
    return DNS.getDefaultHost(
        GiraphConstants.DNS_INTERFACE.get(this),
        GiraphConstants.DNS_NAMESERVER.get(this)).toLowerCase();
  }
  /**
   * Set the maximum number of supersteps of this application.  After this
   * many supersteps are executed, the application will shutdown.
   *
   * @param maxNumberOfSupersteps Maximum number of supersteps
   */
  public void setMaxNumberOfSupersteps(int maxNumberOfSupersteps) {
    MAX_NUMBER_OF_SUPERSTEPS.set(this, maxNumberOfSupersteps);
  }
  /**
   * Get the maximum number of supersteps of this application.  After this
   * many supersteps are executed, the application will shutdown.
   *
   * @return Maximum number of supersteps
   */
  public int getMaxNumberOfSupersteps() {
    return MAX_NUMBER_OF_SUPERSTEPS.get(this);
  }
  /**
   * Whether the application with change or not the graph topology.
   *
   * @return true if the graph is static, false otherwise.
   */
  public boolean isStaticGraph() {
    return STATIC_GRAPH.isTrue(this);
  }
  /**
   * Get the output directory to write YourKit snapshots to
   *
   * <p>The expanded value is computed once per configuration and cached
   * under an internal key, so subsequent calls are cheap.
   *
   * @param context Map context
   * @return output directory
   */
  public String getYourKitOutputDir(Mapper.Context context) {
    final String cacheKey = "giraph.yourkit.outputDirCached";
    String outputDir = get(cacheKey);
    if (outputDir == null) {
      outputDir = getStringVars(YOURKIT_OUTPUT_DIR, YOURKIT_OUTPUT_DIR_DEFAULT,
          context);
      set(cacheKey, outputDir);
    }
    return outputDir;
  }
  /**
   * Get string, replacing variables in the output.
   *
   * %JOB_ID% => job id
   * %TASK_ID% => task id
   * %USER% => owning user name
   *
   * Convenience overload with no default value: returns null when the key
   * has no mapping.
   *
   * @param key name of key to lookup
   * @param context mapper context
   * @return value for key, with variables expanded, or null if unmapped
   */
  public String getStringVars(String key, Mapper.Context context) {
    return getStringVars(key, null, context);
  }
/**
* Get string, replacing variables in the output.
*
* %JOB_ID% => job id
* %TASK_ID% => task id
* %USER% => owning user name
*
* @param key name of key to lookup
* @param defaultValue value to return if no mapping exists. This can also
* have variables, which will be substituted.
* @param context mapper context
* @return value for key, with variables expanded
*/
public String getStringVars(String key, String defaultValue,
Mapper.Context context) {
String value = get(key);
if (value == null) {
if (defaultValue == null) {
return null;
}
value = defaultValue;
}
value = value.replace("%JOB_ID%", context.getJobID().toString());
value = value.replace("%TASK_ID%", context.getTaskAttemptID().toString());
value = value.replace("%USER%", get("user.name", "unknown_user"));
return value;
}
  /**
   * Return if oneMessageToManyIds encoding can be enabled
   *
   * @return True if this option is true.
   */
  public boolean useOneMessageToManyIdsEncoding() {
    return MESSAGE_ENCODE_AND_STORE_TYPE.get(this)
        .useOneMessageToManyIdsEncoding();
  }
  /**
   * Get option whether to create a source vertex present only in edge input
   *
   * @return CREATE_EDGE_SOURCE_VERTICES option
   */
  public boolean getCreateSourceVertex() {
    return CREATE_EDGE_SOURCE_VERTICES.get(this);
  }
  /**
   * set option whether to create a source vertex present only in edge input
   * @param createVertex create source vertex option
   */
  public void setCreateSourceVertex(boolean createVertex) {
    CREATE_EDGE_SOURCE_VERTICES.set(this, createVertex);
  }
  /**
   * Get the maximum timeout (in milliseconds) for waiting for all tasks
   * to complete after the job is done.
   *
   * @return Wait task done timeout in milliseconds.
   */
  public int getWaitTaskDoneTimeoutMs() {
    return WAIT_TASK_DONE_TIMEOUT_MS.get(this);
  }
  /**
   * Set the maximum timeout (in milliseconds) for waiting for all tasks
   * to complete after the job is done.
   *
   * @param ms Milliseconds to set
   */
  public void setWaitTaskDoneTimeoutMs(int ms) {
    WAIT_TASK_DONE_TIMEOUT_MS.set(this, ms);
  }
  /**
   * Check whether to track job progress on client or not
   *
   * @return True if job progress should be tracked on client
   */
  public boolean trackJobProgressOnClient() {
    return TRACK_JOB_PROGRESS_ON_CLIENT.get(this);
  }
  /**
   * @return Number of retries when creating an HDFS file before failing.
   */
  public int getHdfsFileCreationRetries() {
    return HDFS_FILE_CREATION_RETRIES.get(this);
  }
  /**
   * @return Milliseconds to wait before retrying an HDFS file creation
   *         operation.
   */
  public int getHdfsFileCreationRetryWaitMs() {
    return HDFS_FILE_CREATION_RETRY_WAIT_MS.get(this);
  }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.pulsar.client.impl.schema.generic;
import com.google.common.collect.Lists;
import lombok.extern.slf4j.Slf4j;
import org.apache.pulsar.client.api.Schema;
import org.apache.pulsar.client.api.schema.GenericRecord;
import org.apache.pulsar.client.api.schema.GenericSchema;
import org.apache.pulsar.client.impl.schema.AutoConsumeSchema;
import org.apache.pulsar.client.impl.schema.KeyValueSchema;
import org.apache.pulsar.client.impl.schema.KeyValueSchemaInfo;
import org.apache.pulsar.client.impl.schema.SchemaTestUtils.Bar;
import org.apache.pulsar.client.impl.schema.SchemaTestUtils.Foo;
import org.apache.pulsar.common.schema.KeyValue;
import org.apache.pulsar.common.schema.KeyValueEncodingType;
import org.apache.pulsar.common.schema.LongSchemaVersion;
import org.testng.annotations.Test;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import static java.nio.charset.StandardCharsets.UTF_8;
import static org.mockito.Mockito.*;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertTrue;
/**
 * Unit tests for generic schemas.
 *
 * <p>This test intentionally duplicates GenericSchemaImplTest so that it does
 * not depend on GenericSchemaImpl directly.
 */
@Slf4j
public class GenericSchemaTest {
    /** Avro round trip: encode with a typed schema, decode generically. */
    @Test
    public void testGenericAvroSchema() {
        Schema<Foo> encodeSchema = Schema.AVRO(Foo.class);
        GenericSchema decodeSchema = GenericAvroSchema.of(encodeSchema.getSchemaInfo());
        testEncodeAndDecodeGenericRecord(encodeSchema, decodeSchema);
    }
    /** JSON round trip: encode with a typed schema, decode generically. */
    @Test
    public void testGenericJsonSchema() {
        Schema<Foo> encodeSchema = Schema.JSON(Foo.class);
        GenericSchema decodeSchema = GenericJsonSchema.of(encodeSchema.getSchemaInfo());
        testEncodeAndDecodeGenericRecord(encodeSchema, decodeSchema);
    }
    /** AUTO_CONSUME decoding of Avro-encoded data via a mocked schema provider. */
    @Test
    public void testAutoAvroSchema() {
        // configure encode schema
        Schema<Foo> encodeSchema = Schema.AVRO(Foo.class);
        // configure the schema info provider to always resolve the encode schema
        MultiVersionSchemaInfoProvider multiVersionGenericSchemaProvider = mock(MultiVersionSchemaInfoProvider.class);
        when(multiVersionGenericSchemaProvider.getSchemaByVersion(any(byte[].class)))
            .thenReturn(CompletableFuture.completedFuture(encodeSchema.getSchemaInfo()));
        // configure decode schema
        AutoConsumeSchema decodeSchema = new AutoConsumeSchema();
        decodeSchema.configureSchemaInfo(
            "test-topic", "topic", encodeSchema.getSchemaInfo()
        );
        decodeSchema.setSchemaInfoProvider(multiVersionGenericSchemaProvider);
        testEncodeAndDecodeGenericRecord(encodeSchema, decodeSchema);
    }
    /** AUTO_CONSUME decoding of JSON-encoded data via a mocked schema provider. */
    @Test
    public void testAutoJsonSchema() {
        // configure the schema info provider to resolve a generic schema built
        // from the JSON schema info
        MultiVersionSchemaInfoProvider multiVersionSchemaInfoProvider = mock(MultiVersionSchemaInfoProvider.class);
        GenericSchema genericAvroSchema = GenericAvroSchema.of(Schema.JSON(Foo.class).getSchemaInfo());
        when(multiVersionSchemaInfoProvider.getSchemaByVersion(any(byte[].class)))
            .thenReturn(CompletableFuture.completedFuture(genericAvroSchema.getSchemaInfo()));
        // configure encode schema
        Schema<Foo> encodeSchema = Schema.JSON(Foo.class);
        // configure decode schema
        AutoConsumeSchema decodeSchema = new AutoConsumeSchema();
        decodeSchema.configureSchemaInfo("test-topic", "topic", encodeSchema.getSchemaInfo());
        decodeSchema.setSchemaInfoProvider(multiVersionSchemaInfoProvider);
        testEncodeAndDecodeGenericRecord(encodeSchema, decodeSchema);
    }
    /**
     * Encodes a series of Foo instances and verifies the generically decoded
     * records match. AutoConsumeSchema needs an explicit schema version so the
     * mocked provider is consulted.
     *
     * @param encodeSchema typed schema used to produce bytes
     * @param decodeSchema generic schema under test
     */
    private void testEncodeAndDecodeGenericRecord(Schema<Foo> encodeSchema,
                                                  Schema<GenericRecord> decodeSchema) {
        int numRecords = 10;
        for (int i = 0; i < numRecords; i++) {
            Foo foo = newFoo(i);
            byte[] data = encodeSchema.encode(foo);
            log.info("Decoding : {}", new String(data, UTF_8));
            GenericRecord record;
            if (decodeSchema instanceof AutoConsumeSchema) {
                record = decodeSchema.decode(data, new LongSchemaVersion(0L).bytes());
            } else {
                record = decodeSchema.decode(data);
            }
            verifyFooRecord(record, i);
        }
    }
    /** KeyValue round trip for every JSON/Avro key-value schema combination. */
    @Test
    public void testKeyValueSchema() {
        // configure the schema info provider
        MultiVersionSchemaInfoProvider multiVersionSchemaInfoProvider = mock(MultiVersionSchemaInfoProvider.class);
        List<Schema<Foo>> encodeSchemas = Lists.newArrayList(
            Schema.JSON(Foo.class),
            Schema.AVRO(Foo.class)
        );
        for (Schema<Foo> keySchema : encodeSchemas) {
            for (Schema<Foo> valueSchema : encodeSchemas) {
                // configure encode schema
                Schema<KeyValue<Foo, Foo>> kvSchema = KeyValueSchema.of(
                    keySchema, valueSchema
                );
                // configure decode schema
                Schema<KeyValue<GenericRecord, GenericRecord>> decodeSchema = KeyValueSchema.of(
                    Schema.AUTO_CONSUME(), Schema.AUTO_CONSUME()
                );
                decodeSchema.configureSchemaInfo(
                    "test-topic", "topic", kvSchema.getSchemaInfo()
                );
                when(multiVersionSchemaInfoProvider.getSchemaByVersion(any(byte[].class)))
                    .thenReturn(CompletableFuture.completedFuture(
                        KeyValueSchemaInfo.encodeKeyValueSchemaInfo(
                            keySchema,
                            valueSchema,
                            KeyValueEncodingType.INLINE
                        )
                    ));
                decodeSchema.setSchemaInfoProvider(multiVersionSchemaInfoProvider);
                testEncodeAndDecodeKeyValues(kvSchema, decodeSchema);
            }
        }
    }
    /**
     * Encodes key/value pairs and verifies that both halves decode to the
     * expected generic records.
     *
     * @param encodeSchema typed key-value schema used to produce bytes
     * @param decodeSchema generic key-value schema under test
     */
    private void testEncodeAndDecodeKeyValues(Schema<KeyValue<Foo, Foo>> encodeSchema,
                                              Schema<KeyValue<GenericRecord, GenericRecord>> decodeSchema) {
        int numRecords = 10;
        for (int i = 0; i < numRecords; i++) {
            Foo foo = newFoo(i);
            byte[] data = encodeSchema.encode(new KeyValue<>(foo, foo));
            KeyValue<GenericRecord, GenericRecord> kv = decodeSchema.decode(data, new LongSchemaVersion(1L).bytes());
            verifyFooRecord(kv.getKey(), i);
            verifyFooRecord(kv.getValue(), i);
        }
    }
    /**
     * Builds a deterministic Foo fixture for index {@code i}.
     *
     * @param i index used to derive every field value
     * @return populated Foo instance
     */
    private static Foo newFoo(int i) {
        Foo foo = new Foo();
        foo.setField1("field-1-" + i);
        foo.setField2("field-2-" + i);
        foo.setField3(i);
        Bar bar = new Bar();
        bar.setField1(i % 2 == 0);
        foo.setField4(bar);
        foo.setFieldUnableNull("fieldUnableNull-1-" + i);
        return foo;
    }
    /**
     * Asserts that a decoded record carries the values produced by
     * {@link #newFoo(int)} for the same index.
     *
     * <p>Fix: TestNG's {@code assertEquals(actual, expected, message)} takes
     * the actual value first; the arguments were previously swapped, which
     * produced misleading failure messages.
     *
     * @param record decoded generic record
     * @param i index the record was built from
     */
    private static void verifyFooRecord(GenericRecord record, int i) {
        Object field1 = record.getField("field1");
        assertEquals(field1, "field-1-" + i, "Field 1 is " + field1.getClass());
        Object field2 = record.getField("field2");
        assertEquals(field2, "field-2-" + i, "Field 2 is " + field2.getClass());
        Object field3 = record.getField("field3");
        assertEquals(field3, i, "Field 3 is " + field3.getClass());
        Object field4 = record.getField("field4");
        assertTrue(field4 instanceof GenericRecord);
        GenericRecord field4Record = (GenericRecord) field4;
        assertEquals(field4Record.getField("field1"), i % 2 == 0);
        Object fieldUnableNull = record.getField("fieldUnableNull");
        assertEquals(fieldUnableNull, "fieldUnableNull-1-" + i,
            "fieldUnableNull 1 is " + fieldUnableNull.getClass());
    }
}
| |
/*
Copyright 2022 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package io.kubernetes.client.openapi.models;
import java.util.Iterator;
import java.util.List;
/**
 * Generated fluent implementation for {@code V1ReplicaSetList}.
 *
 * <p>Do not edit by hand: this builder code is produced by the Kubernetes
 * client generator. Item builders are mirrored into the inherited
 * {@code _visitables} map (keyed by field name) so visitors can traverse
 * nested buildables; every mutation below keeps the {@code items} list and
 * the {@code _visitables.get("items")} list in sync.
 */
public class V1ReplicaSetListFluentImpl<
        A extends io.kubernetes.client.openapi.models.V1ReplicaSetListFluent<A>>
    extends io.kubernetes.client.fluent.BaseFluent<A>
    implements io.kubernetes.client.openapi.models.V1ReplicaSetListFluent<A> {
  public V1ReplicaSetListFluentImpl() {}

  /** Copy-constructor style initializer from an existing list object. */
  public V1ReplicaSetListFluentImpl(io.kubernetes.client.openapi.models.V1ReplicaSetList instance) {
    this.withApiVersion(instance.getApiVersion());

    this.withItems(instance.getItems());

    this.withKind(instance.getKind());

    this.withMetadata(instance.getMetadata());
  }

  // Backing state; buildable fields are stored as builders, not built objects.
  private java.lang.String apiVersion;
  private java.util.ArrayList<io.kubernetes.client.openapi.models.V1ReplicaSetBuilder> items;
  private java.lang.String kind;
  private io.kubernetes.client.openapi.models.V1ListMetaBuilder metadata;

  public java.lang.String getApiVersion() {
    return this.apiVersion;
  }

  public A withApiVersion(java.lang.String apiVersion) {
    this.apiVersion = apiVersion;
    return (A) this;
  }

  public java.lang.Boolean hasApiVersion() {
    return this.apiVersion != null;
  }

  /** Method is deprecated. use withApiVersion instead. */
  @java.lang.Deprecated
  public A withNewApiVersion(java.lang.String original) {
    return (A) withApiVersion(new String(original));
  }

  /**
   * Insert one item at {@code index}; a negative index appends.
   * The backing list is created lazily on first use.
   */
  public A addToItems(
      java.lang.Integer index, io.kubernetes.client.openapi.models.V1ReplicaSet item) {
    if (this.items == null) {
      this.items =
          new java.util.ArrayList<io.kubernetes.client.openapi.models.V1ReplicaSetBuilder>();
    }
    io.kubernetes.client.openapi.models.V1ReplicaSetBuilder builder =
        new io.kubernetes.client.openapi.models.V1ReplicaSetBuilder(item);
    _visitables.get("items").add(index >= 0 ? index : _visitables.get("items").size(), builder);
    this.items.add(index >= 0 ? index : items.size(), builder);
    return (A) this;
  }

  /** Replace the item at {@code index}; out-of-range indices append instead. */
  public A setToItems(
      java.lang.Integer index, io.kubernetes.client.openapi.models.V1ReplicaSet item) {
    if (this.items == null) {
      this.items =
          new java.util.ArrayList<io.kubernetes.client.openapi.models.V1ReplicaSetBuilder>();
    }
    io.kubernetes.client.openapi.models.V1ReplicaSetBuilder builder =
        new io.kubernetes.client.openapi.models.V1ReplicaSetBuilder(item);
    if (index < 0 || index >= _visitables.get("items").size()) {
      _visitables.get("items").add(builder);
    } else {
      _visitables.get("items").set(index, builder);
    }
    if (index < 0 || index >= items.size()) {
      items.add(builder);
    } else {
      items.set(index, builder);
    }
    return (A) this;
  }

  /** Append each given item, wrapping it in a builder. */
  public A addToItems(io.kubernetes.client.openapi.models.V1ReplicaSet... items) {
    if (this.items == null) {
      this.items =
          new java.util.ArrayList<io.kubernetes.client.openapi.models.V1ReplicaSetBuilder>();
    }
    for (io.kubernetes.client.openapi.models.V1ReplicaSet item : items) {
      io.kubernetes.client.openapi.models.V1ReplicaSetBuilder builder =
          new io.kubernetes.client.openapi.models.V1ReplicaSetBuilder(item);
      _visitables.get("items").add(builder);
      this.items.add(builder);
    }
    return (A) this;
  }

  /** Append every item from the collection, wrapping each in a builder. */
  public A addAllToItems(
      java.util.Collection<io.kubernetes.client.openapi.models.V1ReplicaSet> items) {
    if (this.items == null) {
      this.items =
          new java.util.ArrayList<io.kubernetes.client.openapi.models.V1ReplicaSetBuilder>();
    }
    for (io.kubernetes.client.openapi.models.V1ReplicaSet item : items) {
      io.kubernetes.client.openapi.models.V1ReplicaSetBuilder builder =
          new io.kubernetes.client.openapi.models.V1ReplicaSetBuilder(item);
      _visitables.get("items").add(builder);
      this.items.add(builder);
    }
    return (A) this;
  }

  /** Remove items equal to the given ones (compared via builder equality). */
  public A removeFromItems(io.kubernetes.client.openapi.models.V1ReplicaSet... items) {
    for (io.kubernetes.client.openapi.models.V1ReplicaSet item : items) {
      io.kubernetes.client.openapi.models.V1ReplicaSetBuilder builder =
          new io.kubernetes.client.openapi.models.V1ReplicaSetBuilder(item);
      _visitables.get("items").remove(builder);
      if (this.items != null) {
        this.items.remove(builder);
      }
    }
    return (A) this;
  }

  /** Remove every item in the collection (compared via builder equality). */
  public A removeAllFromItems(
      java.util.Collection<io.kubernetes.client.openapi.models.V1ReplicaSet> items) {
    for (io.kubernetes.client.openapi.models.V1ReplicaSet item : items) {
      io.kubernetes.client.openapi.models.V1ReplicaSetBuilder builder =
          new io.kubernetes.client.openapi.models.V1ReplicaSetBuilder(item);
      _visitables.get("items").remove(builder);
      if (this.items != null) {
        this.items.remove(builder);
      }
    }
    return (A) this;
  }

  /** Remove every item whose builder matches {@code predicate}. */
  public A removeMatchingFromItems(
      java.util.function.Predicate<io.kubernetes.client.openapi.models.V1ReplicaSetBuilder>
          predicate) {
    if (items == null) return (A) this;
    final Iterator<io.kubernetes.client.openapi.models.V1ReplicaSetBuilder> each = items.iterator();
    final List visitables = _visitables.get("items");
    while (each.hasNext()) {
      io.kubernetes.client.openapi.models.V1ReplicaSetBuilder builder = each.next();
      if (predicate.test(builder)) {
        visitables.remove(builder);
        each.remove();
      }
    }
    return (A) this;
  }

  /**
   * This method has been deprecated, please use method buildItems instead.
   *
   * @return The buildable object.
   */
  @java.lang.Deprecated
  public java.util.List<io.kubernetes.client.openapi.models.V1ReplicaSet> getItems() {
    return items != null ? build(items) : null;
  }

  /** Build the current item builders into model objects (null if unset). */
  public java.util.List<io.kubernetes.client.openapi.models.V1ReplicaSet> buildItems() {
    return items != null ? build(items) : null;
  }

  public io.kubernetes.client.openapi.models.V1ReplicaSet buildItem(java.lang.Integer index) {
    return this.items.get(index).build();
  }

  public io.kubernetes.client.openapi.models.V1ReplicaSet buildFirstItem() {
    return this.items.get(0).build();
  }

  public io.kubernetes.client.openapi.models.V1ReplicaSet buildLastItem() {
    return this.items.get(items.size() - 1).build();
  }

  /** Build the first item whose builder matches {@code predicate}, else null. */
  public io.kubernetes.client.openapi.models.V1ReplicaSet buildMatchingItem(
      java.util.function.Predicate<io.kubernetes.client.openapi.models.V1ReplicaSetBuilder>
          predicate) {
    for (io.kubernetes.client.openapi.models.V1ReplicaSetBuilder item : items) {
      if (predicate.test(item)) {
        return item.build();
      }
    }
    return null;
  }

  public java.lang.Boolean hasMatchingItem(
      java.util.function.Predicate<io.kubernetes.client.openapi.models.V1ReplicaSetBuilder>
          predicate) {
    for (io.kubernetes.client.openapi.models.V1ReplicaSetBuilder item : items) {
      if (predicate.test(item)) {
        return true;
      }
    }
    return false;
  }

  /** Replace the whole items list; null clears it. */
  public A withItems(java.util.List<io.kubernetes.client.openapi.models.V1ReplicaSet> items) {
    if (this.items != null) {
      _visitables.get("items").removeAll(this.items);
    }
    if (items != null) {
      this.items = new java.util.ArrayList();
      for (io.kubernetes.client.openapi.models.V1ReplicaSet item : items) {
        this.addToItems(item);
      }
    } else {
      this.items = null;
    }
    return (A) this;
  }

  /** Replace the whole items list from varargs; null leaves an empty list. */
  public A withItems(io.kubernetes.client.openapi.models.V1ReplicaSet... items) {
    if (this.items != null) {
      this.items.clear();
    }
    if (items != null) {
      for (io.kubernetes.client.openapi.models.V1ReplicaSet item : items) {
        this.addToItems(item);
      }
    }
    return (A) this;
  }

  public java.lang.Boolean hasItems() {
    return items != null && !items.isEmpty();
  }

  // Nested-builder entry points: each returns an ItemsNested whose and()/endItem()
  // folds the nested builder back into this fluent.
  public io.kubernetes.client.openapi.models.V1ReplicaSetListFluent.ItemsNested<A> addNewItem() {
    return new io.kubernetes.client.openapi.models.V1ReplicaSetListFluentImpl.ItemsNestedImpl();
  }

  public io.kubernetes.client.openapi.models.V1ReplicaSetListFluent.ItemsNested<A> addNewItemLike(
      io.kubernetes.client.openapi.models.V1ReplicaSet item) {
    return new io.kubernetes.client.openapi.models.V1ReplicaSetListFluentImpl.ItemsNestedImpl(
        -1, item);
  }

  public io.kubernetes.client.openapi.models.V1ReplicaSetListFluent.ItemsNested<A> setNewItemLike(
      java.lang.Integer index, io.kubernetes.client.openapi.models.V1ReplicaSet item) {
    return new io.kubernetes.client.openapi.models.V1ReplicaSetListFluentImpl.ItemsNestedImpl(
        index, item);
  }

  public io.kubernetes.client.openapi.models.V1ReplicaSetListFluent.ItemsNested<A> editItem(
      java.lang.Integer index) {
    if (items.size() <= index) throw new RuntimeException("Can't edit items. Index exceeds size.");
    return setNewItemLike(index, buildItem(index));
  }

  public io.kubernetes.client.openapi.models.V1ReplicaSetListFluent.ItemsNested<A> editFirstItem() {
    if (items.size() == 0) throw new RuntimeException("Can't edit first items. The list is empty.");
    return setNewItemLike(0, buildItem(0));
  }

  public io.kubernetes.client.openapi.models.V1ReplicaSetListFluent.ItemsNested<A> editLastItem() {
    int index = items.size() - 1;
    if (index < 0) throw new RuntimeException("Can't edit last items. The list is empty.");
    return setNewItemLike(index, buildItem(index));
  }

  public io.kubernetes.client.openapi.models.V1ReplicaSetListFluent.ItemsNested<A> editMatchingItem(
      java.util.function.Predicate<io.kubernetes.client.openapi.models.V1ReplicaSetBuilder>
          predicate) {
    int index = -1;
    for (int i = 0; i < items.size(); i++) {
      if (predicate.test(items.get(i))) {
        index = i;
        break;
      }
    }
    if (index < 0) throw new RuntimeException("Can't edit matching items. No match found.");
    return setNewItemLike(index, buildItem(index));
  }

  public java.lang.String getKind() {
    return this.kind;
  }

  public A withKind(java.lang.String kind) {
    this.kind = kind;
    return (A) this;
  }

  public java.lang.Boolean hasKind() {
    return this.kind != null;
  }

  /** Method is deprecated. use withKind instead. */
  @java.lang.Deprecated
  public A withNewKind(java.lang.String original) {
    return (A) withKind(new String(original));
  }

  /**
   * This method has been deprecated, please use method buildMetadata instead.
   *
   * @return The buildable object.
   */
  @java.lang.Deprecated
  public io.kubernetes.client.openapi.models.V1ListMeta getMetadata() {
    return this.metadata != null ? this.metadata.build() : null;
  }

  public io.kubernetes.client.openapi.models.V1ListMeta buildMetadata() {
    return this.metadata != null ? this.metadata.build() : null;
  }

  /** Replace metadata, keeping the visitable registry in sync. */
  public A withMetadata(io.kubernetes.client.openapi.models.V1ListMeta metadata) {
    _visitables.get("metadata").remove(this.metadata);
    if (metadata != null) {
      this.metadata = new io.kubernetes.client.openapi.models.V1ListMetaBuilder(metadata);
      _visitables.get("metadata").add(this.metadata);
    }
    return (A) this;
  }

  public java.lang.Boolean hasMetadata() {
    return this.metadata != null;
  }

  public io.kubernetes.client.openapi.models.V1ReplicaSetListFluent.MetadataNested<A>
      withNewMetadata() {
    return new io.kubernetes.client.openapi.models.V1ReplicaSetListFluentImpl.MetadataNestedImpl();
  }

  public io.kubernetes.client.openapi.models.V1ReplicaSetListFluent.MetadataNested<A>
      withNewMetadataLike(io.kubernetes.client.openapi.models.V1ListMeta item) {
    return new io.kubernetes.client.openapi.models.V1ReplicaSetListFluentImpl.MetadataNestedImpl(
        item);
  }

  public io.kubernetes.client.openapi.models.V1ReplicaSetListFluent.MetadataNested<A>
      editMetadata() {
    return withNewMetadataLike(getMetadata());
  }

  public io.kubernetes.client.openapi.models.V1ReplicaSetListFluent.MetadataNested<A>
      editOrNewMetadata() {
    return withNewMetadataLike(
        getMetadata() != null
            ? getMetadata()
            : new io.kubernetes.client.openapi.models.V1ListMetaBuilder().build());
  }

  public io.kubernetes.client.openapi.models.V1ReplicaSetListFluent.MetadataNested<A>
      editOrNewMetadataLike(io.kubernetes.client.openapi.models.V1ListMeta item) {
    return withNewMetadataLike(getMetadata() != null ? getMetadata() : item);
  }

  // NOTE(review): hashCode mixes in super.hashCode() while equals does not
  // consult the superclass — presumably intentional in the generator, but it
  // means equal-per-equals instances may hash differently; confirm before
  // using these as hash-map keys.
  public boolean equals(java.lang.Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    V1ReplicaSetListFluentImpl that = (V1ReplicaSetListFluentImpl) o;
    if (apiVersion != null ? !apiVersion.equals(that.apiVersion) : that.apiVersion != null)
      return false;
    if (items != null ? !items.equals(that.items) : that.items != null) return false;
    if (kind != null ? !kind.equals(that.kind) : that.kind != null) return false;
    if (metadata != null ? !metadata.equals(that.metadata) : that.metadata != null) return false;
    return true;
  }

  public int hashCode() {
    return java.util.Objects.hash(apiVersion, items, kind, metadata, super.hashCode());
  }

  /** Nested builder for a single item; and() writes it back at {@code index}. */
  public class ItemsNestedImpl<N>
      extends io.kubernetes.client.openapi.models.V1ReplicaSetFluentImpl<
          io.kubernetes.client.openapi.models.V1ReplicaSetListFluent.ItemsNested<N>>
      implements io.kubernetes.client.openapi.models.V1ReplicaSetListFluent.ItemsNested<N>,
          io.kubernetes.client.fluent.Nested<N> {
    ItemsNestedImpl(
        java.lang.Integer index, io.kubernetes.client.openapi.models.V1ReplicaSet item) {
      this.index = index;
      this.builder = new io.kubernetes.client.openapi.models.V1ReplicaSetBuilder(this, item);
    }

    ItemsNestedImpl() {
      this.index = -1;
      this.builder = new io.kubernetes.client.openapi.models.V1ReplicaSetBuilder(this);
    }

    io.kubernetes.client.openapi.models.V1ReplicaSetBuilder builder;
    java.lang.Integer index;

    public N and() {
      return (N) V1ReplicaSetListFluentImpl.this.setToItems(index, builder.build());
    }

    public N endItem() {
      return and();
    }
  }

  /** Nested builder for metadata; and() writes it back via withMetadata. */
  public class MetadataNestedImpl<N>
      extends io.kubernetes.client.openapi.models.V1ListMetaFluentImpl<
          io.kubernetes.client.openapi.models.V1ReplicaSetListFluent.MetadataNested<N>>
      implements io.kubernetes.client.openapi.models.V1ReplicaSetListFluent.MetadataNested<N>,
          io.kubernetes.client.fluent.Nested<N> {
    MetadataNestedImpl(io.kubernetes.client.openapi.models.V1ListMeta item) {
      this.builder = new io.kubernetes.client.openapi.models.V1ListMetaBuilder(this, item);
    }

    MetadataNestedImpl() {
      this.builder = new io.kubernetes.client.openapi.models.V1ListMetaBuilder(this);
    }

    io.kubernetes.client.openapi.models.V1ListMetaBuilder builder;

    public N and() {
      return (N) V1ReplicaSetListFluentImpl.this.withMetadata(builder.build());
    }

    public N endMetadata() {
      return and();
    }
  }
}
| |
package ua.zs.signalcorps;
import android.content.Intent;
import android.content.res.Configuration;
import android.os.Bundle;
import android.support.v4.app.ActionBarDrawerToggle;
import android.support.v4.widget.DrawerLayout;
import android.support.v7.app.ActionBarActivity;
import android.view.*;
import android.widget.*;
import ua.zs.elements.Classified;
import ua.zs.elements.Person;
import ua.zs.elements.Rank;
/**
 * Home screen of the app: shows the logged-in person's rank, name,
 * classification level and assigned weapons, plus a navigation drawer
 * to the other sections.
 */
public class HomeActivity extends ActionBarActivity {
    // Logged-in person; set before this activity starts (e.g. by the login
    // flow) and read by sibling activities. NOTE(review): static activity
    // state — confirm it is always initialized before HomeActivity launches,
    // otherwise initiateHomePageElements() will NPE.
    protected static Person user;

    private DrawerLayout mDrawerLayout;
    private ActionBarDrawerToggle mDrawerToggle;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.home);
        initiateActionBarIconButton();
        initiateDrawerButtons();
        initiateHomePageElements();
        initiateWeaponView();
    }

    /** Fills the rank/name/classification views and wires the equipage button. */
    private void initiateHomePageElements() {
        ImageView rankImage = (ImageView) findViewById(R.id.rankView);
        TextView rankText = (TextView) findViewById(R.id.rankTextView);
        TextView name = (TextView) findViewById(R.id.nameView);
        TextView secret = (TextView) findViewById(R.id.secretNameView);
        TextView classified = (TextView) findViewById(R.id.classifiedText);
        Button equipage = (Button) findViewById(R.id.equipageButton);

        rankImage.setImageResource(Rank.toImage(user.getRank()));
        rankText.setText(Rank.toString(this, user.getRank()));
        name.setText(user.getSecondName() + " " +
                user.getFirstName() + " " +
                user.getFathersName());
        secret.setText(user.getSecretName());
        classified.setText(getResources().getString(R.string.classified_level) + " " +
                Classified.levelOfPerson(this, user.getClassified()).toLowerCase());

        // Equipage id 0 means "no equipage assigned": show the placeholder
        // text and keep the button disabled.
        equipage.setText(user.getEquipage() != 0 ?
                getResources().getString(R.string.equipage_id) +
                        String.valueOf(user.getEquipage()) :
                getResources().getString(R.string.no_equipage));
        equipage.setEnabled(user.getEquipage() != 0);
        equipage.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                Intent watch = new Intent(HomeActivity.this, WatchEquipageActivity.class);
                watch.putExtra("equipage", user.getEquipage());
                startActivity(watch);
            }
        });
    }

    /**
     * Populates the "my weapons" list from the database and opens the
     * detail screen when an entry is tapped.
     */
    private void initiateWeaponView() {
        ListView list = (ListView) findViewById(R.id.myWeaponView);
        // NOTE(review): SignalCorpsDB is opened here but never closed —
        // verify whether the helper manages its own lifecycle.
        SignalCorpsDB dataBase = new SignalCorpsDB(this);
        WeaponArrayAdapter adapter = new WeaponArrayAdapter(this,
                dataBase.getWeaponOfPerson(user.getSecretName()), false);
        list.setAdapter(adapter);

        // Show the "no weapons" placeholder only when the list is empty.
        TextView emptyList = (TextView) findViewById(R.id.noWeaponText);
        emptyList.setVisibility(list.getCount() > 0 ? View.GONE : View.VISIBLE);

        list.setOnItemClickListener(new AdapterView.OnItemClickListener() {
            @Override
            public void onItemClick(AdapterView<?> adapterView, View view, int i, long l) {
                // The row's number view is rendered with a one-character
                // prefix; strip it before parsing the weapon id.
                int chosenWeapon = Integer.parseInt( ((TextView) view.findViewById(R.id.numberView))
                        .getText()
                        .toString()
                        .substring(1));
                Intent watch = new Intent(HomeActivity.this, WatchWeaponActivity.class);
                watch.putExtra("weapon", chosenWeapon);
                startActivity(watch);
            }
        });
    }

    /** Sets up the navigation drawer and the action-bar "home" affordance. */
    private void initiateActionBarIconButton() {
        mDrawerLayout = (DrawerLayout) findViewById(R.id.homeDrawer);
        mDrawerToggle = new ActionBarDrawerToggle(
                this,                  /* host Activity */
                mDrawerLayout,         /* DrawerLayout object */
                R.drawable.ic_drawer,  /* nav drawer icon to replace 'Up' caret */
                R.string.drawer_open,  /* "open drawer" description */
                R.string.drawer_close  /* "close drawer" description */
        ) {
            /** Called when a drawer has settled in a completely closed state. */
            public void onDrawerClosed(View view) {
                super.onDrawerClosed(view);
                //getActionBar().setTitle(mTitle); // Set title while seeing parent layout
            }

            /** Called when a drawer has settled in a completely open state. */
            public void onDrawerOpened(View drawerView) {
                super.onDrawerOpened(drawerView);
                //getActionBar().setTitle(mDrawerTitle); // Set title while seeing drawer
            }
        };
        mDrawerLayout.setDrawerListener(mDrawerToggle);
        // Fix: this is an ActionBarActivity (AppCompat); getActionBar()
        // returns null under AppCompat themes and would crash here — use
        // the support action bar instead, guarding against a null theme.
        if (getSupportActionBar() != null) {
            getSupportActionBar().setDisplayHomeAsUpEnabled(true);
            getSupportActionBar().setHomeButtonEnabled(true);
        }
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        // Inflate the menu items for use in the action bar
        MenuInflater inflater = getMenuInflater();
        inflater.inflate(R.menu.home_menu, menu);
        return super.onCreateOptionsMenu(menu);
    }

    @Override
    protected void onPostCreate(Bundle savedInstanceState) {
        super.onPostCreate(savedInstanceState);
        // Sync the toggle state after onRestoreInstanceState has occurred.
        mDrawerToggle.syncState();
    }

    @Override
    public void onConfigurationChanged(Configuration newConfig) {
        super.onConfigurationChanged(newConfig);
        mDrawerToggle.onConfigurationChanged(newConfig);
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // Pass the event to ActionBarDrawerToggle, if it returns
        // true, then it has handled the app icon touch event
        if (mDrawerToggle.onOptionsItemSelected(item)) {
            return true;
        }
        switch(item.getItemId()) {
            case R.id.logout:
                userLogout();
                return true;
            default:
                return super.onOptionsItemSelected(item);
        }
    }

    /** Wires the drawer's navigation buttons to their target activities. */
    private void initiateDrawerButtons() {
        // "Home" is this activity: just close the drawer instead of
        // restarting the screen.
        Button navigateHome = (Button) findViewById(R.id.homeButton);
        navigateHome.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                mDrawerLayout.closeDrawer(Gravity.LEFT);
            }
        });

        Button navigatePeople = (Button) findViewById(R.id.peopleButton);
        navigatePeople.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                finish();
                Intent intent = new Intent(HomeActivity.this, PeopleActivity.class);
                startActivity(intent);
            }
        });

        Button navigateEquipage = (Button) findViewById(R.id.equipagesButton);
        navigateEquipage.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                finish();
                Intent intent = new Intent(HomeActivity.this, EquipageActivity.class);
                startActivity(intent);
            }
        });

        Button navigateContacts = (Button) findViewById(R.id.contactsButton);
        navigateContacts.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                finish();
                Intent intent = new Intent(HomeActivity.this, ContactsActivity.class);
                startActivity(intent);
            }
        });

        Button navigateTransport = (Button) findViewById(R.id.transportButton);
        navigateTransport.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                finish();
                Intent intent = new Intent(HomeActivity.this, TransportActivity.class);
                startActivity(intent);
            }
        });

        Button navigateWeapon = (Button) findViewById(R.id.weaponButton);
        navigateWeapon.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                finish();
                Intent intent = new Intent(HomeActivity.this, WeaponActivity.class);
                startActivity(intent);
            }
        });
    }

    /** Closes this screen and returns to the intro (login) activity. */
    private void userLogout() {
        finish();
        Intent intent = new Intent(HomeActivity.this, IntroActivity.class);
        startActivity(intent);
    }
}
| |
// Copyright (C) 2011 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.server.account;
import com.google.common.base.Function;
import com.google.common.base.Predicate;
import com.google.common.base.Predicates;
import com.google.common.collect.Iterables;
import com.google.gerrit.common.data.GlobalCapability;
import com.google.gerrit.common.data.GroupReference;
import com.google.gerrit.common.data.PermissionRange;
import com.google.gerrit.common.data.PermissionRule;
import com.google.gerrit.common.data.PermissionRule.Action;
import com.google.gerrit.reviewdb.client.AccountGroup;
import com.google.gerrit.server.CurrentUser;
import com.google.gerrit.server.PeerDaemonUser;
import com.google.gerrit.server.git.QueueProvider;
import com.google.gerrit.server.project.ProjectCache;
import com.google.inject.Inject;
import com.google.inject.assistedinject.Assisted;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/** Access control management for server-wide capabilities. */
public class CapabilityControl {
  /** Assisted-injection factory binding a control to a specific user. */
  public static interface Factory {
    public CapabilityControl create(CurrentUser user);
  }

  // Capability rules loaded from the All-Projects access configuration.
  private final CapabilityCollection capabilities;
  private final CurrentUser user;
  // Cache: permission name -> subset of rules that apply to this user.
  private final Map<String, List<PermissionRule>> effective;

  // Lazily computed answers; null means "not evaluated yet".
  private Boolean canAdministrateServer;
  private Boolean canEmailReviewers;

  @Inject
  CapabilityControl(ProjectCache projectCache, @Assisted CurrentUser currentUser) {
    capabilities = projectCache.getAllProjects().getCapabilityCollection();
    user = currentUser;
    effective = new HashMap<String, List<PermissionRule>>();
  }

  /** Identity of the user the control will compute for. */
  public CurrentUser getCurrentUser() {
    return user;
  }

  /** @return true if the user can administer this server. */
  public boolean canAdministrateServer() {
    if (canAdministrateServer == null) {
      // Peer daemons are implicitly trusted as administrators.
      canAdministrateServer =
          user instanceof PeerDaemonUser
              || matchAny(capabilities.administrateServer, ALLOWED_RULE);
    }
    return canAdministrateServer;
  }

  /** @return true if the user can create an account for another user. */
  public boolean canCreateAccount() {
    return canPerform(GlobalCapability.CREATE_ACCOUNT)
        || canAdministrateServer();
  }

  /** @return true if the user can create a group. */
  public boolean canCreateGroup() {
    return canPerform(GlobalCapability.CREATE_GROUP) || canAdministrateServer();
  }

  /** @return true if the user can create a project. */
  public boolean canCreateProject() {
    return canPerform(GlobalCapability.CREATE_PROJECT)
        || canAdministrateServer();
  }

  /** @return true if the user can list commits by author. */
  public boolean canListAuthorCommits() {
    return canPerform(GlobalCapability.AUTH_COMMITS) || canAdministrateServer();
  }

  /** @return true if the user can email reviewers. */
  public boolean canEmailReviewers() {
    if (canEmailReviewers == null) {
      // Allowed either by an explicit ALLOW rule, or by the absence of
      // any non-ALLOW (i.e. blocking) rule on the capability.
      canEmailReviewers =
          matchAny(capabilities.emailReviewers, ALLOWED_RULE)
              || !matchAny(capabilities.emailReviewers,
                  Predicates.not(ALLOWED_RULE));
    }
    return canEmailReviewers;
  }

  /** @return true if the user can kill any running task. */
  public boolean canKillTask() {
    return canPerform(GlobalCapability.KILL_TASK) || canAdministrateServer();
  }

  /** @return true if the user can view the server caches. */
  public boolean canViewCaches() {
    return canPerform(GlobalCapability.VIEW_CACHES) || canAdministrateServer();
  }

  /** @return true if the user can flush the server's caches. */
  public boolean canFlushCaches() {
    return canPerform(GlobalCapability.FLUSH_CACHES) || canAdministrateServer();
  }

  /** @return true if the user can view open connections. */
  public boolean canViewConnections() {
    return canPerform(GlobalCapability.VIEW_CONNECTIONS)
        || canAdministrateServer();
  }

  /** @return true if the user can view the entire queue. */
  public boolean canViewQueue() {
    return canPerform(GlobalCapability.VIEW_QUEUE) || canAdministrateServer();
  }

  /**
   * @return true if the user can force replication to any configured
   *         destination.
   */
  public boolean canStartReplication() {
    return canPerform(GlobalCapability.START_REPLICATION)
        || canAdministrateServer();
  }

  /** @return which priority queue the user's tasks should be submitted to. */
  public QueueProvider.QueueType getQueueType() {
    // If a non-generic group (that is not Anonymous Users or Registered Users)
    // grants us INTERACTIVE permission, use the INTERACTIVE queue even if
    // BATCH was otherwise granted. This allows site administrators to grant
    // INTERACTIVE to Registered Users, and BATCH to 'CI Servers' and have
    // the 'CI Servers' actually use the BATCH queue while everyone else gets
    // to use the INTERACTIVE queue without additional grants.
    //
    GroupMembership groups = user.getEffectiveGroups();
    boolean batch = false;
    for (PermissionRule r : capabilities.priority) {
      if (match(groups, r)) {
        switch (r.getAction()) {
          case INTERACTIVE:
            if (!isGenericGroup(r.getGroup())) {
              // Specific grant wins immediately.
              return QueueProvider.QueueType.INTERACTIVE;
            }
            break;
          case BATCH:
            batch = true;
            break;
        }
      }
    }
    if (batch) {
      // If any of our groups matched to the BATCH queue, use it.
      return QueueProvider.QueueType.BATCH;
    } else {
      return QueueProvider.QueueType.INTERACTIVE;
    }
  }

  /** @return true if the group is one of the built-in catch-all groups. */
  private static boolean isGenericGroup(GroupReference group) {
    return AccountGroup.ANONYMOUS_USERS.equals(group.getUUID())
        || AccountGroup.REGISTERED_USERS.equals(group.getUUID());
  }

  /** True if the user has this permission. Works only for non labels. */
  public boolean canPerform(String permissionName) {
    return !access(permissionName).isEmpty();
  }

  /** The range of permitted values associated with a label permission. */
  public PermissionRange getRange(String permission) {
    if (GlobalCapability.hasRange(permission)) {
      return toRange(permission, access(permission));
    }
    // Non-ranged permissions have no range; callers must handle null.
    return null;
  }

  /** Union of the min/max bounds across all matching rules. */
  private static PermissionRange toRange(String permissionName,
      List<PermissionRule> ruleList) {
    int min = 0;
    int max = 0;
    for (PermissionRule rule : ruleList) {
      min = Math.min(min, rule.getMin());
      max = Math.max(max, rule.getMax());
    }
    return new PermissionRange(permissionName, min, max);
  }

  /** Rules for the given permission, or the empty list. */
  private List<PermissionRule> access(String permissionName) {
    // Serve from the per-instance cache when possible.
    List<PermissionRule> rules = effective.get(permissionName);
    if (rules != null) {
      return rules;
    }

    rules = capabilities.getPermission(permissionName);
    if (rules.isEmpty()) {
      effective.put(permissionName, rules);
      return rules;
    }

    GroupMembership groups = user.getEffectiveGroups();
    if (rules.size() == 1) {
      // Single-rule fast path: keep or drop the whole list.
      if (!match(groups, rules.get(0))) {
        rules = Collections.emptyList();
      }
      effective.put(permissionName, rules);
      return rules;
    }

    // General case: filter down to the rules matching the user's groups.
    List<PermissionRule> mine = new ArrayList<PermissionRule>(rules.size());
    for (PermissionRule rule : rules) {
      if (match(groups, rule)) {
        mine.add(rule);
      }
    }
    if (mine.isEmpty()) {
      mine = Collections.emptyList();
    }
    effective.put(permissionName, mine);
    return mine;
  }

  // Predicate selecting only rules whose action is ALLOW.
  private static final Predicate<PermissionRule> ALLOWED_RULE =
      new Predicate<PermissionRule>() {
        @Override
        public boolean apply(PermissionRule rule) {
          return rule.getAction() == Action.ALLOW;
        }
      };

  /** True if any rule passing {@code predicate} names a group the user is in. */
  private boolean matchAny(Iterable<PermissionRule> rules,
      Predicate<PermissionRule> predicate) {
    Iterable<AccountGroup.UUID> ids =
        Iterables.transform(Iterables.filter(rules, predicate),
            new Function<PermissionRule, AccountGroup.UUID>() {
              @Override
              public AccountGroup.UUID apply(PermissionRule rule) {
                return rule.getGroup().getUUID();
              }
            });
    return user.getEffectiveGroups().containsAnyOf(ids);
  }

  /** True if the rule's group is among the user's effective groups. */
  private static boolean match(GroupMembership groups, PermissionRule rule) {
    return groups.contains(rule.getGroup().getUUID());
  }
}
| |
/*
* Copyright (C) 2012 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.collect.testing.google;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.collect.testing.features.CollectionSize.ZERO;
import static com.google.common.collect.testing.features.MapFeature.ALLOWS_NULL_KEYS;
import static com.google.common.collect.testing.features.MapFeature.ALLOWS_NULL_VALUES;
import static com.google.common.collect.testing.features.MapFeature.SUPPORTS_PUT;
import static org.truth0.Truth.ASSERT;
import com.google.common.annotations.GwtCompatible;
import com.google.common.collect.Iterators;
import com.google.common.collect.Lists;
import com.google.common.collect.Multimap;
import com.google.common.collect.testing.features.CollectionSize;
import com.google.common.collect.testing.features.MapFeature;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
/**
 * Tests for {@link Multimap#putAll(Object, Iterable)}.
 *
 * <p>Covers non-{@code Collection} iterables as well as collections, null
 * keys/values (both supported and unsupported), and empty inputs.
 *
 * @author Louis Wasserman
 */
@GwtCompatible
public class MultimapPutIterableTester<K, V> extends AbstractMultimapTester<K, V, Multimap<K, V>> {
  // putAll with a plain Iterable (not a Collection) onto an existing key
  // must append and report modification.
  @CollectionSize.Require(absent = ZERO)
  @MapFeature.Require(SUPPORTS_PUT)
  public void testPutAllNonEmptyIterableOnPresentKey() {
    assertTrue(multimap().putAll(sampleKeys().e0, new Iterable<V>() {
      @Override
      public Iterator<V> iterator() {
        return Lists.newArrayList(sampleValues().e3, sampleValues().e4).iterator();
      }
    }));
    assertGet(sampleKeys().e0, sampleValues().e0, sampleValues().e3, sampleValues().e4);
  }

  @CollectionSize.Require(absent = ZERO)
  @MapFeature.Require(SUPPORTS_PUT)
  public void testPutAllNonEmptyCollectionOnPresentKey() {
    assertTrue(multimap().putAll(
        sampleKeys().e0, Lists.newArrayList(sampleValues().e3, sampleValues().e4)));
    assertGet(sampleKeys().e0, sampleValues().e0, sampleValues().e3, sampleValues().e4);
  }

  // Same pair of tests, but for a key not yet in the multimap.
  @MapFeature.Require(SUPPORTS_PUT)
  public void testPutAllNonEmptyIterableOnAbsentKey() {
    assertTrue(multimap().putAll(sampleKeys().e3, new Iterable<V>() {
      @Override
      public Iterator<V> iterator() {
        return Lists.newArrayList(sampleValues().e3, sampleValues().e4).iterator();
      }
    }));
    assertGet(sampleKeys().e3, sampleValues().e3, sampleValues().e4);
  }

  @MapFeature.Require(SUPPORTS_PUT)
  public void testPutAllNonEmptyCollectionOnAbsentKey() {
    assertTrue(multimap().putAll(
        sampleKeys().e3, Lists.newArrayList(sampleValues().e3, sampleValues().e4)));
    assertGet(sampleKeys().e3, sampleValues().e3, sampleValues().e4);
  }

  @CollectionSize.Require(absent = ZERO)
  @MapFeature.Require({SUPPORTS_PUT, ALLOWS_NULL_VALUES})
  public void testPutAllNullValueOnPresentKey_supported() {
    assertTrue(multimap().putAll(sampleKeys().e0, Lists.newArrayList(sampleValues().e3, null)));
    assertGet(sampleKeys().e0, sampleValues().e0, sampleValues().e3, null);
  }

  @MapFeature.Require({SUPPORTS_PUT, ALLOWS_NULL_VALUES})
  public void testPutAllNullValueOnAbsentKey_supported() {
    assertTrue(multimap().putAll(sampleKeys().e3, Lists.newArrayList(sampleValues().e3, null)));
    assertGet(sampleKeys().e3, sampleValues().e3, null);
  }

  // When nulls are unsupported the implementation may throw or silently
  // ignore a lone null; either way the multimap must be unchanged.
  @MapFeature.Require(value = SUPPORTS_PUT, absent = ALLOWS_NULL_VALUES)
  public void testPutAllNullValueSingle_unsupported() {
    multimap().putAll(sampleKeys().e1, Lists.newArrayList((V) null));
    expectUnchanged();
  }

  // In principle, it would be nice to apply these two tests to keys with existing values, too.
  @MapFeature.Require(value = SUPPORTS_PUT, absent = ALLOWS_NULL_VALUES)
  public void testPutAllNullValueNullLast_unsupported() {
    int size = getNumElements();

    try {
      multimap().putAll(sampleKeys().e3, Lists.newArrayList(sampleValues().e3, null));
      fail();
    } catch (NullPointerException expected) {
    }

    // The implementation may have added e3 before hitting the null; accept
    // either outcome, but verify internal consistency.
    Collection<V> values = multimap().get(sampleKeys().e3);
    if (values.size() == 0) {
      expectUnchanged();
      // Be extra thorough in case internal state was corrupted by the expected null.
      assertEquals(Lists.newArrayList(), Lists.newArrayList(values));
      assertEquals(size, multimap().size());
    } else {
      assertEquals(Lists.newArrayList(sampleValues().e3), Lists.newArrayList(values));
      assertEquals(size + 1, multimap().size());
    }
  }

  @MapFeature.Require(value = SUPPORTS_PUT, absent = ALLOWS_NULL_VALUES)
  public void testPutAllNullValueNullFirst_unsupported() {
    int size = getNumElements();

    try {
      multimap().putAll(sampleKeys().e3, Lists.newArrayList(null, sampleValues().e3));
      fail();
    } catch (NullPointerException expected) {
    }

    /*
     * In principle, a Multimap implementation could add e3 first before failing on the null. But
     * that seems unlikely enough to be worth complicating the test over, especially if there's any
     * chance that a permissive test could mask a bug.
     */
    expectUnchanged();
    // Be extra thorough in case internal state was corrupted by the expected null.
    assertEquals(Lists.newArrayList(), Lists.newArrayList(multimap().get(sampleKeys().e3)));
    assertEquals(size, multimap().size());
  }

  @MapFeature.Require({SUPPORTS_PUT, ALLOWS_NULL_KEYS})
  public void testPutAllOnPresentNullKey() {
    assertTrue(multimap().putAll(null, Lists.newArrayList(sampleValues().e3, sampleValues().e4)));
    assertGet(null, sampleValues().e3, sampleValues().e4);
  }

  @MapFeature.Require(absent = ALLOWS_NULL_KEYS)
  public void testPutAllNullForbidden() {
    try {
      multimap().putAll(null, Collections.singletonList(sampleValues().e3));
      fail("Expected NullPointerException");
    } catch (NullPointerException expected) {
      // success
    }
  }

  // NOTE(review): EMPTY appears unused in this class — candidate for removal.
  private static final Object[] EMPTY = new Object[0];

  // putAll with no values must return false and leave the multimap untouched.
  @MapFeature.Require(SUPPORTS_PUT)
  public void testPutAllEmptyCollectionOnAbsentKey() {
    assertFalse(multimap().putAll(sampleKeys().e3, Collections.<V>emptyList()));
    expectUnchanged();
  }

  @MapFeature.Require(SUPPORTS_PUT)
  public void testPutAllEmptyIterableOnAbsentKey() {
    Iterable<V> iterable = new Iterable<V>() {
      @Override
      public Iterator<V> iterator() {
        return Iterators.emptyIterator();
      }
    };

    assertFalse(multimap().putAll(sampleKeys().e3, iterable));
    expectUnchanged();
  }

  @CollectionSize.Require(absent = ZERO)
  @MapFeature.Require(SUPPORTS_PUT)
  public void testPutAllEmptyIterableOnPresentKey() {
    multimap().putAll(sampleKeys().e0, Collections.<V>emptyList());
    expectUnchanged();
  }

  // The Iterable contract allows putAll to consume iterator() at most once.
  @MapFeature.Require(SUPPORTS_PUT)
  public void testPutAllOnlyCallsIteratorOnce() {
    Iterable<V> iterable = new Iterable<V>() {
      private boolean calledIteratorAlready = false;

      @Override
      public Iterator<V> iterator() {
        checkState(!calledIteratorAlready);
        calledIteratorAlready = true;
        return Iterators.forArray(sampleValues().e3);
      }
    };

    multimap().putAll(sampleKeys().e3, iterable);
  }

  // A live view returned by get() must reflect subsequent putAll calls.
  @MapFeature.Require(SUPPORTS_PUT)
  public void testPutAllPropagatesToGet() {
    Collection<V> getCollection = multimap().get(sampleKeys().e0);
    int getCollectionSize = getCollection.size();
    assertTrue(multimap().putAll(
        sampleKeys().e0, Lists.newArrayList(sampleValues().e3, sampleValues().e4)));
    assertEquals(getCollectionSize + 2, getCollection.size());
    ASSERT.that(getCollection).has().allOf(sampleValues().e3, sampleValues().e4);
  }
}
| |
package info.pinlab.ttada.view.swing.audio;
import info.pinlab.ttada.view.swing.GbcFactory;
import info.pinlab.ttada.view.swing.ResourceLoader;
import info.pinlab.ttada.view.swing.ShortcutConsumer;
import info.pinlab.ttada.view.swing.ResourceLoader.IconType;
import info.pinlab.pinsound.app.AudioPlayerListener;
import info.pinlab.pinsound.app.AudioPlayerView;
import info.pinlab.pinsound.app.AudioRecorderListener;
import info.pinlab.pinsound.app.AudioRecorderView;
import java.awt.Color;
import java.awt.Font;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.Insets;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.lang.reflect.InvocationTargetException;
import java.util.Set;
import javax.swing.BorderFactory;
import javax.swing.Icon;
import javax.swing.JPanel;
import javax.swing.JToggleButton;
import javax.swing.SwingUtilities;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@SuppressWarnings("serial")
/**
 * Swing toolbar combining a record button, a play button and a progress bar.
 *
 * <p>Implements both {@link AudioRecorderView} and {@link AudioPlayerView};
 * the controller pushes state changes through those interfaces, and button
 * presses are forwarded to the registered recorder/player listeners. All
 * UI mutations are funneled onto the EDT via {@link #runOnEdt(Runnable)}.
 */
public class AudioRecorderBar extends JPanel implements AudioRecorderView, AudioPlayerView
{
    public static Logger LOG = LoggerFactory.getLogger(AudioRecorderBar.class);

    private final Icon recIcon;
    private final Icon playIcon;
    private final Icon stopIcon;
    private final Icon pauseIcon;

    private final JToggleButton recBtn;
    private final JToggleButton playBtn;
    private final AudioProgressBar playRecBar;

    private Font audioBarFont = null;
    int iconSize = 16; //-- 16, 32, 48
    int fontSize = 12;

    // Bar colors: reddish while recording, greenish while playing.
    private Color recColor = new Color (249, 219,210);
    private Color playColor = new Color(215,235,221);
    private Color barTextCol = new Color(120,140,120);

    private AudioRecorderListener recListener = null;
    private AudioPlayerListener playerListener = null;

    /**
     * Builds the bar and wires button listeners.
     * NOTE(review): intended to be called on the EDT (see original ??EDT? remark).
     */
    public AudioRecorderBar(){ //??EDT?
        this.setLayout(new GridBagLayout());
        audioBarFont = ResourceLoader.getFont("ubuntu", fontSize);

        playRecBar = new AudioProgressBar();
        playRecBar.setBarColor(playColor, Color.WHITE);
        playRecBar.setTextColor(barTextCol);
        playRecBar.setString("00:00.0");
        playRecBar.setFont(audioBarFont);
        playRecBar.isStringPainted(true);

        recIcon = ResourceLoader.getIcon(IconType.REC, iconSize);
        stopIcon = ResourceLoader.getIcon(IconType.STOP, iconSize);
        playIcon = ResourceLoader.getIcon(IconType.PLAY, iconSize);
        pauseIcon = ResourceLoader.getIcon(IconType.PAUSE, iconSize);

        // Progress bar stretches; buttons keep their natural size.
        GridBagConstraints gbc = GbcFactory.getFillBoth();
        Insets origInsets = gbc.insets;
        Insets pBarInsets = new Insets(/*top=*/2, /*left*/ 0, 2, 4);
        gbc.insets = pBarInsets ;
        gbc.fill = GridBagConstraints.BOTH;
        playRecBar.setBorder(BorderFactory.createLineBorder(Color.LIGHT_GRAY));
        add(playRecBar, gbc);

        recBtn = new JToggleButton();
        recBtn.setFocusable(false);
        recBtn.setFocusTraversalKeysEnabled(false);
        recBtn.setIcon(recIcon);
        gbc.gridx = 1;
        gbc.weightx = 0.0;
        gbc.gridwidth = 1;
        gbc.ipadx = gbc.ipady = 0;
        gbc.insets = origInsets;
        gbc.fill = GridBagConstraints.NONE;
        add(recBtn, gbc);

        playBtn = new JToggleButton();
        playBtn.setFocusable(false);
        playBtn.setFocusTraversalKeysEnabled(false);
        playBtn.setIcon(playIcon);
        gbc.gridx = 2;
        add(playBtn, gbc);

        //-- setting listeners --//
        recBtn.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent e){
                // Toggle between record-start and record-stop requests;
                // the icon flips immediately for responsiveness.
                boolean reqRecStop = true;
                if(recBtn.isSelected()){
                    recBtn.setIcon(stopIcon);
                    reqRecStop = false;
                }else{
                    reqRecStop = true;
                    recBtn.setIcon(recIcon);
                }
                if(AudioRecorderBar.this.recListener == null){
                    LOG.warn("REC request - but no device!");
                }else{
                    if(reqRecStop){
                        recListener.reqRecStop();
                    }else{
                        recListener.reqRecStart();
                    }
                }
            }
        });
        playBtn.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent e) {
                if(playBtn.isSelected()){
                    playBtn.setIcon(pauseIcon);
                }else{
                    playBtn.setIcon(playIcon);
                }
                if(AudioRecorderBar.this.playerListener == null){
                    LOG.error("Play request - but no device!");
                }else{
                    playerListener.reqPauseToggle();
                }
            }
        });
    }

    /** Programmatically presses the record button. */
    public void doToggleRecBtn(){
        recBtn.doClick();
    }

    /** Programmatically presses the play button. */
    public void doTogglePlayBtn(){
        playBtn.doClick();
    }

    @Override
    public void setRecActionListener(AudioRecorderListener l) {
        recListener = l;
    }

    @Override
    public void setPlayActionListener(AudioPlayerListener l) {
        playerListener = l;
    }

    /** Puts the bar into "recording" state: red bar, stop icon, play disabled. */
    @Override
    public void setRecordingState(){
        playRecBar.stopInfinitProgress();
        runOnEdt(new Runnable() {
            @Override
            public void run() {
                playRecBar.setBarColor(recColor,Color.WHITE);
                playRecBar.setCursor(0);
                recBtn.setSelected(true);
                recBtn.setIcon(stopIcon);
                playBtn.setEnabled(false);
                playBtn.setSelected(false);
                playBtn.setIcon(playIcon);
            }
        });
    }

    /** Resets the record button so a new recording can be started. */
    @Override
    public void setReadyToRecState(){
        playRecBar.stopInfinitProgress();
        runOnEdt(
                new Runnable() {
                    @Override
                    public void run() {
                        playRecBar.setCursor(0);
                        recBtn.setEnabled(true);
                        recBtn.setSelected(false);
                        recBtn.setIcon(recIcon);
                    }
                });
    }

    /**
     * Disables both buttons and shows an indeterminate "collecting audio.."
     * bar. Blocks until the EDT has applied the change.
     */
    @Override
    public void setBusyState() {
        try {
            SwingUtilities.invokeAndWait(new Runnable() {
                @Override
                public void run() {
                    recBtn.setIcon(recIcon);
                    recBtn.setSelected(false);
                    recBtn.setEnabled(false);
                    playBtn.setIcon(playIcon);
                    playBtn.setSelected(false);
                    playBtn.setEnabled(false);
                    playRecBar.isDisplayTime(false);
                    playRecBar.setString("collecting audio..");
                }
            });
        } catch (InterruptedException e) {
            // Fix: restore the interrupt flag instead of swallowing it.
            Thread.currentThread().interrupt();
            LOG.warn("Interrupted while switching to busy state", e);
        } catch (InvocationTargetException e) {
            LOG.error("Failed to switch to busy state", e);
        }
        playRecBar.startInfinitProgress();
    }

    /** Enables or disables the record button. */
    @Override
    public void setRecEnabled(final boolean b){
        runOnEdt(new Runnable() {
            @Override
            public void run(){
                // Fix: honor the requested state; the previous version
                // ignored 'b' and always disabled the button.
                recBtn.setEnabled(b);
            }
        });
    }

    @Override
    public void setRecMaxPosInMs(final long ms){
        runOnEdt(new Runnable() {@Override
            public void run() {
                playRecBar.setMaximum(ms);
            }
        });
    }

    /** Updates the elapsed-time label and cursor during recording. */
    @Override
    public void setRecPosInMs(final long ms) {
        final long mins = ms/ (60*1000);
        final long secs = (ms-60*1000*mins) / 1000;
        final long mss = (ms-mins*60*1000-secs*1000)/100;
        SwingUtilities.invokeLater(new Runnable() {
            @Override
            public void run() {
                playRecBar.setString(String.format("%02d:%02d.%1d", mins, secs, mss));
                playRecBar.setCursor(ms);
                playRecBar.repaint();
            }
        });
    }

    /** Puts the bar into "playing" state (recording is locked out). */
    @Override
    public void setPlayingState(){
        runOnEdt(new Runnable() {
            @Override
            public void run() {
                recBtn.setEnabled(false);
            }
        });
    }

    /** Resets the play button after playback finished or audio was loaded. */
    @Override
    public void setReadyToPlayState(){
        playRecBar.stopInfinitProgress();
        runOnEdt(new Runnable() {
            @Override
            public void run() {
                playRecBar.setBarColor(playColor, Color.WHITE);
                playRecBar.setCursor(playRecBar.getMaximum());
                playBtn.setSelected(false);
                playBtn.setIcon(playIcon);
                playBtn.setEnabled(true);
            }
        });
        playRecBar.repaint();
    }

    @Override
    public void setPlayMaxLenInMs(final long ms){
        playRecBar.setMaximum(ms);
        playRecBar.setString(String.format("%4.1f", ms/1000.0d));
    }

    /** Updates the elapsed-time label and cursor during playback. */
    @Override
    public void setPlayPosInMs(final long ms) {
        final long mins = ms/ (60*1000);
        final long secs = (ms-60*1000*mins) / 1000;
        final long mss = (ms-mins*60*1000-secs*1000)/100;
        SwingUtilities.invokeLater(new Runnable() {
            @Override
            public void run() {
                playRecBar.setString(String.format("%02d:%02d.%1d", mins, secs, mss));
                playRecBar.setCursor(ms);
                playRecBar.repaint();
            }
        });
    }

    /** Runs {@code doRun} immediately if on the EDT, otherwise schedules it. */
    static void runOnEdt(Runnable doRun){
        if(SwingUtilities.isEventDispatchThread()){
            doRun.run();
        }else{
            SwingUtilities.invokeLater(doRun);
        }
    }

    @Override
    public void setPlayEnabled(boolean b) {
        playBtn.setSelected(false);
        playBtn.setEnabled(b);
        playBtn.setIcon(playIcon);
    }
}
| |
/*
Derby - Class org.apache.derby.impl.store.raw.data.RememberBytesInputStream
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to you under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.apache.derby.impl.store.raw.data;
import org.apache.derby.shared.common.sanity.SanityManager;
import java.io.FilterInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
/**
A FilterInputStream that remembers read or skipped bytes.
<P>In record mode this stream remembers all the bytes a
caller reads or skips. After reading some bytes this
returns a 'replay' stream to re-read them.
<P>A caller may call getReplayStream to get a stream
to re-read the remembered bytes. Any number of calls
to getReplayStream are supported.
<P>The clear function causes this stream to forget the remembered
bytes and re-enter record mode.
*/
public class RememberBytesInputStream extends FilterInputStream
{
// Holder that accumulates every byte read or skipped while recording.
ByteHolder bh;
// True while in record mode; cleared by getReplayStream(), restored by clear().
boolean recording = true;
// In case of streams (e.g. ReaderToUTF8Stream, RawToBinaryFormatStream)
// that cannot be re-used, a read on a closed stream will throw an
// EOFException — hence keep track of whether the stream is closed or not.
boolean streamClosed = false;
/**
	Construct a RememberBytesInputStream.

	@param in the underlying stream whose bytes will be remembered.
	@param bh holder for storing the remembered bytes (must be
	in writing mode).
  */
public RememberBytesInputStream(InputStream in, ByteHolder bh) {
	super(in);
	if (SanityManager.DEBUG)
		SanityManager.ASSERT(bh.writingMode());
	this.bh = bh;
}
/**
	Read a single byte, remembering it in the byte holder.

	@see java.io.InputStream#read
	@exception IOException thrown on an io error spooling remembered bytes
	to backing storage.
  */
public int read() throws IOException {
	if (SanityManager.DEBUG)
		SanityManager.ASSERT(recording,
			"Must be in record mode to perform a read.");

	// Once end-of-stream has been observed, never touch the underlying
	// stream again: some wrapped streams throw if read after close.
	if (streamClosed)
		return -1;

	int value = super.read();
	if (value == -1)
		streamClosed = true;
	else
		bh.write(value);
	return value;
}
/**
	Read up to len bytes into b starting at off, remembering whatever
	was read in the byte holder.

	@see java.io.InputStream#read
	@return the number of bytes read, or -1 at end of stream.
	@exception IOException thrown on an io error spooling remembered bytes
	to backing storage.
  */
public int read(byte b[], int off, int len) throws IOException {
	if (SanityManager.DEBUG)
		SanityManager.ASSERT(recording,
			"Must be in record mode to perform a read.");

	if (streamClosed)
		return -1;

	// Clamp the request to the space available in b. Written as a
	// subtraction so that large off+len values cannot overflow int.
	if (len > b.length - off)
		len = b.length - off;

	len = super.read(b, off, len);
	if (len > 0) {
		bh.write(b, off, len);
	} else if (len < 0) {
		// Fix: only a negative result signals end-of-stream. A request
		// with len == 0 legitimately returns 0 and previously marked the
		// stream closed, making all subsequent reads report spurious EOF.
		streamClosed = true;
	}
	return len;
}
/**
	Read len bytes from the input stream and store them in the byte holder.
	Note: fillBuf does not return negative values; when there are no bytes
	to store in the byte holder it returns 0.

	@exception IOException thrown on an io error spooling remembered bytes
	to backing storage.
  */
public long fillBuf(int len) throws IOException {
	if (streamClosed)
		return 0;

	long stored = bh.write(this.in, len);
	// bh.write returning fewer bytes than requested means the underlying
	// stream hit end-of-stream (see logic in MemByteHolder.write).
	if (stored < len)
		streamClosed = true;
	return stored;
}
/**
	Read len bytes from the byte holder and write them to the output stream.

	@exception IOException thrown on an io error spooling remembered bytes
	to backing storage.
  */
public int putBuf(OutputStream out, int len) throws IOException {
	// Switch the holder to reading mode, then drain up to len bytes.
	bh.startReading();
	return bh.read(out, len);
}
/**
  Skip over (and remember) count bytes from the wrapped stream.

  @see java.io.InputStream#skip
  @return the number of bytes actually skipped.
  @exception IOException thrown on an io error spooling remembered bytes
  to backing storage.
*/
public long skip(long count) throws IOException {
    if (SanityManager.DEBUG)
        // Fixed copy-pasted diagnostic: this is a skip, not a read.
        SanityManager.ASSERT(recording,
            "Must be in record mode to perform a skip.");
    return bh.write(in, count);
}
/**
  Get an input stream for re-reading the remembered bytes.
  Side effect: stops recording and flips the holder into reading mode.
*/
public InputStream getReplayStream() throws IOException {
    bh.startReading();
    recording = false;
    return new ByteHolderInputStream(bh);
}
/**
  Return the ByteHolder in which read bytes are being remembered.
*/
public ByteHolder getByteHolder() throws IOException {
    return this.bh;
}
/**
  Clear all the remembered bytes. This stream will
  remember any bytes read after this call.

  @exception IOException thrown on an io error clearing backing
  storage.
*/
public void clear() throws IOException {
    bh.clear();
    // resume recording only once the holder was cleared successfully
    recording = true;
}
/**
  Set the InputStream from which this reads.
  <P>Please note this does not clear remembered
  bytes.

  @param in the new underlying stream; the closed flag is reset so the
  new stream will be read from.
*/
public void setInput(InputStream in) {
    this.in = in;
    streamClosed = false;
}
/**
  Return true iff this RememberBytesInputStream is currently
  in recording mode.
*/
public boolean recording() {
    return this.recording;
}
/**
  Return the number of bytes remaining in the byteHolder
  for reading, without setting the write/read mode.
*/
public int available() throws IOException {
    // The holder reports a negative count when it has not yet been
    // switched into reading mode, so normalise to a magnitude.
    return Math.abs(bh.available());
}
/**
  Return the number of bytes that have been saved to this byte holder.
  Unlike available(), this result is unaffected by the current read
  position on the ByteHolder.
*/
public int numBytesSaved() throws IOException {
    return bh.numBytesSaved();
}
/**
  Move the remaining bytes in the byteHolder to the beginning and
  position the holder to start recording just after those bytes.

  @return how many bytes were transferred to the beginning.
*/
public int shiftToFront() throws IOException {
    return bh.shiftToFront();
}
/**
  @see java.lang.Object#toString
*/
public String toString()
{
    StringBuilder sb = new StringBuilder("RememberBytesInputStream: ");
    sb.append(" recording: ").append(recording).append(' ').append(bh);
    return sb.toString();
}
}
| |
/**
*/
package gluemodel.CIM.IEC61970.Core.impl;
import gluemodel.CIM.IEC61970.Core.CorePackage;
import gluemodel.CIM.IEC61970.Core.Curve;
import gluemodel.CIM.IEC61970.Core.CurveData;
import gluemodel.CIM.IEC61970.Core.CurveStyle;
import gluemodel.CIM.IEC61970.Domain.UnitMultiplier;
import gluemodel.CIM.IEC61970.Domain.UnitSymbol;
import java.util.Collection;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.notify.NotificationChain;
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;
import org.eclipse.emf.ecore.impl.ENotificationImpl;
import org.eclipse.emf.ecore.util.EObjectWithInverseResolvingEList;
import org.eclipse.emf.ecore.util.InternalEList;
/**
* <!-- begin-user-doc -->
* An implementation of the model object '<em><b>Curve</b></em>'.
* <!-- end-user-doc -->
* <p>
* The following features are implemented:
* </p>
* <ul>
* <li>{@link gluemodel.CIM.IEC61970.Core.impl.CurveImpl#getY2Unit <em>Y2 Unit</em>}</li>
* <li>{@link gluemodel.CIM.IEC61970.Core.impl.CurveImpl#getXMultiplier <em>XMultiplier</em>}</li>
* <li>{@link gluemodel.CIM.IEC61970.Core.impl.CurveImpl#getY3Multiplier <em>Y3 Multiplier</em>}</li>
* <li>{@link gluemodel.CIM.IEC61970.Core.impl.CurveImpl#getY1Unit <em>Y1 Unit</em>}</li>
* <li>{@link gluemodel.CIM.IEC61970.Core.impl.CurveImpl#getCurveStyle <em>Curve Style</em>}</li>
* <li>{@link gluemodel.CIM.IEC61970.Core.impl.CurveImpl#getY3Unit <em>Y3 Unit</em>}</li>
* <li>{@link gluemodel.CIM.IEC61970.Core.impl.CurveImpl#getXUnit <em>XUnit</em>}</li>
* <li>{@link gluemodel.CIM.IEC61970.Core.impl.CurveImpl#getCurveDatas <em>Curve Datas</em>}</li>
* <li>{@link gluemodel.CIM.IEC61970.Core.impl.CurveImpl#getY2Multiplier <em>Y2 Multiplier</em>}</li>
* <li>{@link gluemodel.CIM.IEC61970.Core.impl.CurveImpl#getY1Multiplier <em>Y1 Multiplier</em>}</li>
* </ul>
*
* @generated
*/
public class CurveImpl extends IdentifiedObjectImpl implements Curve {
    // NOTE(review): EMF-generated implementation class. Behavioural changes
    // belong in the Ecore model; members marked @generated are overwritten
    // on regeneration, so only comments are added here.
    /**
     * The default value of the '{@link #getY2Unit() <em>Y2 Unit</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getY2Unit()
     * @generated
     * @ordered
     */
    protected static final UnitSymbol Y2_UNIT_EDEFAULT = UnitSymbol.A;
    /**
     * The cached value of the '{@link #getY2Unit() <em>Y2 Unit</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getY2Unit()
     * @generated
     * @ordered
     */
    protected UnitSymbol y2Unit = Y2_UNIT_EDEFAULT;
    /**
     * The default value of the '{@link #getXMultiplier() <em>XMultiplier</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getXMultiplier()
     * @generated
     * @ordered
     */
    protected static final UnitMultiplier XMULTIPLIER_EDEFAULT = UnitMultiplier.SMALL_M;
    /**
     * The cached value of the '{@link #getXMultiplier() <em>XMultiplier</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getXMultiplier()
     * @generated
     * @ordered
     */
    protected UnitMultiplier xMultiplier = XMULTIPLIER_EDEFAULT;
    /**
     * The default value of the '{@link #getY3Multiplier() <em>Y3 Multiplier</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getY3Multiplier()
     * @generated
     * @ordered
     */
    protected static final UnitMultiplier Y3_MULTIPLIER_EDEFAULT = UnitMultiplier.SMALL_M;
    /**
     * The cached value of the '{@link #getY3Multiplier() <em>Y3 Multiplier</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getY3Multiplier()
     * @generated
     * @ordered
     */
    protected UnitMultiplier y3Multiplier = Y3_MULTIPLIER_EDEFAULT;
    /**
     * The default value of the '{@link #getY1Unit() <em>Y1 Unit</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getY1Unit()
     * @generated
     * @ordered
     */
    protected static final UnitSymbol Y1_UNIT_EDEFAULT = UnitSymbol.A;
    /**
     * The cached value of the '{@link #getY1Unit() <em>Y1 Unit</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getY1Unit()
     * @generated
     * @ordered
     */
    protected UnitSymbol y1Unit = Y1_UNIT_EDEFAULT;
    /**
     * The default value of the '{@link #getCurveStyle() <em>Curve Style</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getCurveStyle()
     * @generated
     * @ordered
     */
    protected static final CurveStyle CURVE_STYLE_EDEFAULT = CurveStyle.STRAIGHT_LINE_YVALUES;
    /**
     * The cached value of the '{@link #getCurveStyle() <em>Curve Style</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getCurveStyle()
     * @generated
     * @ordered
     */
    protected CurveStyle curveStyle = CURVE_STYLE_EDEFAULT;
    /**
     * The default value of the '{@link #getY3Unit() <em>Y3 Unit</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getY3Unit()
     * @generated
     * @ordered
     */
    protected static final UnitSymbol Y3_UNIT_EDEFAULT = UnitSymbol.A;
    /**
     * The cached value of the '{@link #getY3Unit() <em>Y3 Unit</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getY3Unit()
     * @generated
     * @ordered
     */
    protected UnitSymbol y3Unit = Y3_UNIT_EDEFAULT;
    /**
     * The default value of the '{@link #getXUnit() <em>XUnit</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getXUnit()
     * @generated
     * @ordered
     */
    protected static final UnitSymbol XUNIT_EDEFAULT = UnitSymbol.A;
    /**
     * The cached value of the '{@link #getXUnit() <em>XUnit</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getXUnit()
     * @generated
     * @ordered
     */
    protected UnitSymbol xUnit = XUNIT_EDEFAULT;
    /**
     * The cached value of the '{@link #getCurveDatas() <em>Curve Datas</em>}' reference list.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getCurveDatas()
     * @generated
     * @ordered
     */
    protected EList<CurveData> curveDatas;
    /**
     * The default value of the '{@link #getY2Multiplier() <em>Y2 Multiplier</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getY2Multiplier()
     * @generated
     * @ordered
     */
    protected static final UnitMultiplier Y2_MULTIPLIER_EDEFAULT = UnitMultiplier.SMALL_M;
    /**
     * The cached value of the '{@link #getY2Multiplier() <em>Y2 Multiplier</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getY2Multiplier()
     * @generated
     * @ordered
     */
    protected UnitMultiplier y2Multiplier = Y2_MULTIPLIER_EDEFAULT;
    /**
     * The default value of the '{@link #getY1Multiplier() <em>Y1 Multiplier</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getY1Multiplier()
     * @generated
     * @ordered
     */
    protected static final UnitMultiplier Y1_MULTIPLIER_EDEFAULT = UnitMultiplier.SMALL_M;
    /**
     * The cached value of the '{@link #getY1Multiplier() <em>Y1 Multiplier</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getY1Multiplier()
     * @generated
     * @ordered
     */
    protected UnitMultiplier y1Multiplier = Y1_MULTIPLIER_EDEFAULT;
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    protected CurveImpl() {
        super();
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    protected EClass eStaticClass() {
        return CorePackage.Literals.CURVE;
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public UnitSymbol getY2Unit() {
        return y2Unit;
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setY2Unit(UnitSymbol newY2Unit) {
        UnitSymbol oldY2Unit = y2Unit;
        // null resets the attribute to its default value
        y2Unit = newY2Unit == null ? Y2_UNIT_EDEFAULT : newY2Unit;
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, CorePackage.CURVE__Y2_UNIT, oldY2Unit, y2Unit));
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public UnitMultiplier getXMultiplier() {
        return xMultiplier;
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setXMultiplier(UnitMultiplier newXMultiplier) {
        UnitMultiplier oldXMultiplier = xMultiplier;
        xMultiplier = newXMultiplier == null ? XMULTIPLIER_EDEFAULT : newXMultiplier;
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, CorePackage.CURVE__XMULTIPLIER, oldXMultiplier, xMultiplier));
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public UnitMultiplier getY3Multiplier() {
        return y3Multiplier;
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setY3Multiplier(UnitMultiplier newY3Multiplier) {
        UnitMultiplier oldY3Multiplier = y3Multiplier;
        y3Multiplier = newY3Multiplier == null ? Y3_MULTIPLIER_EDEFAULT : newY3Multiplier;
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, CorePackage.CURVE__Y3_MULTIPLIER, oldY3Multiplier, y3Multiplier));
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public UnitSymbol getY1Unit() {
        return y1Unit;
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setY1Unit(UnitSymbol newY1Unit) {
        UnitSymbol oldY1Unit = y1Unit;
        y1Unit = newY1Unit == null ? Y1_UNIT_EDEFAULT : newY1Unit;
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, CorePackage.CURVE__Y1_UNIT, oldY1Unit, y1Unit));
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public CurveStyle getCurveStyle() {
        return curveStyle;
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setCurveStyle(CurveStyle newCurveStyle) {
        CurveStyle oldCurveStyle = curveStyle;
        curveStyle = newCurveStyle == null ? CURVE_STYLE_EDEFAULT : newCurveStyle;
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, CorePackage.CURVE__CURVE_STYLE, oldCurveStyle, curveStyle));
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public UnitSymbol getY3Unit() {
        return y3Unit;
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setY3Unit(UnitSymbol newY3Unit) {
        UnitSymbol oldY3Unit = y3Unit;
        y3Unit = newY3Unit == null ? Y3_UNIT_EDEFAULT : newY3Unit;
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, CorePackage.CURVE__Y3_UNIT, oldY3Unit, y3Unit));
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public UnitSymbol getXUnit() {
        return xUnit;
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setXUnit(UnitSymbol newXUnit) {
        UnitSymbol oldXUnit = xUnit;
        xUnit = newXUnit == null ? XUNIT_EDEFAULT : newXUnit;
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, CorePackage.CURVE__XUNIT, oldXUnit, xUnit));
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EList<CurveData> getCurveDatas() {
        // lazily created; maintains the inverse CurveData -> Curve reference
        if (curveDatas == null) {
            curveDatas = new EObjectWithInverseResolvingEList<CurveData>(CurveData.class, this, CorePackage.CURVE__CURVE_DATAS, CorePackage.CURVE_DATA__CURVE);
        }
        return curveDatas;
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public UnitMultiplier getY2Multiplier() {
        return y2Multiplier;
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setY2Multiplier(UnitMultiplier newY2Multiplier) {
        UnitMultiplier oldY2Multiplier = y2Multiplier;
        y2Multiplier = newY2Multiplier == null ? Y2_MULTIPLIER_EDEFAULT : newY2Multiplier;
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, CorePackage.CURVE__Y2_MULTIPLIER, oldY2Multiplier, y2Multiplier));
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public UnitMultiplier getY1Multiplier() {
        return y1Multiplier;
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setY1Multiplier(UnitMultiplier newY1Multiplier) {
        UnitMultiplier oldY1Multiplier = y1Multiplier;
        y1Multiplier = newY1Multiplier == null ? Y1_MULTIPLIER_EDEFAULT : newY1Multiplier;
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, CorePackage.CURVE__Y1_MULTIPLIER, oldY1Multiplier, y1Multiplier));
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @SuppressWarnings("unchecked")
    @Override
    public NotificationChain eInverseAdd(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
        switch (featureID) {
            case CorePackage.CURVE__CURVE_DATAS:
                return ((InternalEList<InternalEObject>)(InternalEList<?>)getCurveDatas()).basicAdd(otherEnd, msgs);
        }
        return super.eInverseAdd(otherEnd, featureID, msgs);
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
        switch (featureID) {
            case CorePackage.CURVE__CURVE_DATAS:
                return ((InternalEList<?>)getCurveDatas()).basicRemove(otherEnd, msgs);
        }
        return super.eInverseRemove(otherEnd, featureID, msgs);
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public Object eGet(int featureID, boolean resolve, boolean coreType) {
        switch (featureID) {
            case CorePackage.CURVE__Y2_UNIT:
                return getY2Unit();
            case CorePackage.CURVE__XMULTIPLIER:
                return getXMultiplier();
            case CorePackage.CURVE__Y3_MULTIPLIER:
                return getY3Multiplier();
            case CorePackage.CURVE__Y1_UNIT:
                return getY1Unit();
            case CorePackage.CURVE__CURVE_STYLE:
                return getCurveStyle();
            case CorePackage.CURVE__Y3_UNIT:
                return getY3Unit();
            case CorePackage.CURVE__XUNIT:
                return getXUnit();
            case CorePackage.CURVE__CURVE_DATAS:
                return getCurveDatas();
            case CorePackage.CURVE__Y2_MULTIPLIER:
                return getY2Multiplier();
            case CorePackage.CURVE__Y1_MULTIPLIER:
                return getY1Multiplier();
        }
        return super.eGet(featureID, resolve, coreType);
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @SuppressWarnings("unchecked")
    @Override
    public void eSet(int featureID, Object newValue) {
        switch (featureID) {
            case CorePackage.CURVE__Y2_UNIT:
                setY2Unit((UnitSymbol)newValue);
                return;
            case CorePackage.CURVE__XMULTIPLIER:
                setXMultiplier((UnitMultiplier)newValue);
                return;
            case CorePackage.CURVE__Y3_MULTIPLIER:
                setY3Multiplier((UnitMultiplier)newValue);
                return;
            case CorePackage.CURVE__Y1_UNIT:
                setY1Unit((UnitSymbol)newValue);
                return;
            case CorePackage.CURVE__CURVE_STYLE:
                setCurveStyle((CurveStyle)newValue);
                return;
            case CorePackage.CURVE__Y3_UNIT:
                setY3Unit((UnitSymbol)newValue);
                return;
            case CorePackage.CURVE__XUNIT:
                setXUnit((UnitSymbol)newValue);
                return;
            case CorePackage.CURVE__CURVE_DATAS:
                getCurveDatas().clear();
                getCurveDatas().addAll((Collection<? extends CurveData>)newValue);
                return;
            case CorePackage.CURVE__Y2_MULTIPLIER:
                setY2Multiplier((UnitMultiplier)newValue);
                return;
            case CorePackage.CURVE__Y1_MULTIPLIER:
                setY1Multiplier((UnitMultiplier)newValue);
                return;
        }
        super.eSet(featureID, newValue);
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void eUnset(int featureID) {
        switch (featureID) {
            case CorePackage.CURVE__Y2_UNIT:
                setY2Unit(Y2_UNIT_EDEFAULT);
                return;
            case CorePackage.CURVE__XMULTIPLIER:
                setXMultiplier(XMULTIPLIER_EDEFAULT);
                return;
            case CorePackage.CURVE__Y3_MULTIPLIER:
                setY3Multiplier(Y3_MULTIPLIER_EDEFAULT);
                return;
            case CorePackage.CURVE__Y1_UNIT:
                setY1Unit(Y1_UNIT_EDEFAULT);
                return;
            case CorePackage.CURVE__CURVE_STYLE:
                setCurveStyle(CURVE_STYLE_EDEFAULT);
                return;
            case CorePackage.CURVE__Y3_UNIT:
                setY3Unit(Y3_UNIT_EDEFAULT);
                return;
            case CorePackage.CURVE__XUNIT:
                setXUnit(XUNIT_EDEFAULT);
                return;
            case CorePackage.CURVE__CURVE_DATAS:
                getCurveDatas().clear();
                return;
            case CorePackage.CURVE__Y2_MULTIPLIER:
                setY2Multiplier(Y2_MULTIPLIER_EDEFAULT);
                return;
            case CorePackage.CURVE__Y1_MULTIPLIER:
                setY1Multiplier(Y1_MULTIPLIER_EDEFAULT);
                return;
        }
        super.eUnset(featureID);
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public boolean eIsSet(int featureID) {
        // reference (!=) comparison is safe here: enum literals are singletons
        switch (featureID) {
            case CorePackage.CURVE__Y2_UNIT:
                return y2Unit != Y2_UNIT_EDEFAULT;
            case CorePackage.CURVE__XMULTIPLIER:
                return xMultiplier != XMULTIPLIER_EDEFAULT;
            case CorePackage.CURVE__Y3_MULTIPLIER:
                return y3Multiplier != Y3_MULTIPLIER_EDEFAULT;
            case CorePackage.CURVE__Y1_UNIT:
                return y1Unit != Y1_UNIT_EDEFAULT;
            case CorePackage.CURVE__CURVE_STYLE:
                return curveStyle != CURVE_STYLE_EDEFAULT;
            case CorePackage.CURVE__Y3_UNIT:
                return y3Unit != Y3_UNIT_EDEFAULT;
            case CorePackage.CURVE__XUNIT:
                return xUnit != XUNIT_EDEFAULT;
            case CorePackage.CURVE__CURVE_DATAS:
                return curveDatas != null && !curveDatas.isEmpty();
            case CorePackage.CURVE__Y2_MULTIPLIER:
                return y2Multiplier != Y2_MULTIPLIER_EDEFAULT;
            case CorePackage.CURVE__Y1_MULTIPLIER:
                return y1Multiplier != Y1_MULTIPLIER_EDEFAULT;
        }
        return super.eIsSet(featureID);
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public String toString() {
        if (eIsProxy()) return super.toString();
        StringBuffer result = new StringBuffer(super.toString());
        result.append(" (y2Unit: ");
        result.append(y2Unit);
        result.append(", xMultiplier: ");
        result.append(xMultiplier);
        result.append(", y3Multiplier: ");
        result.append(y3Multiplier);
        result.append(", y1Unit: ");
        result.append(y1Unit);
        result.append(", curveStyle: ");
        result.append(curveStyle);
        result.append(", y3Unit: ");
        result.append(y3Unit);
        result.append(", xUnit: ");
        result.append(xUnit);
        result.append(", y2Multiplier: ");
        result.append(y2Multiplier);
        result.append(", y1Multiplier: ");
        result.append(y1Multiplier);
        result.append(')');
        return result.toString();
    }
} //CurveImpl
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jasper;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.JarURLConnection;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.net.URLClassLoader;
import java.net.URLConnection;
import java.util.Set;
import java.util.jar.JarEntry;
import javax.servlet.ServletContext;
import javax.servlet.jsp.tagext.TagInfo;
import org.apache.jasper.compiler.Compiler;
import org.apache.jasper.compiler.JspRuntimeContext;
import org.apache.jasper.compiler.JspUtil;
import org.apache.jasper.compiler.Localizer;
import org.apache.jasper.compiler.ServletWriter;
import org.apache.jasper.compiler.TldCache;
import org.apache.jasper.servlet.JasperLoader;
import org.apache.jasper.servlet.JspServletWrapper;
import org.apache.juli.logging.Log;
import org.apache.juli.logging.LogFactory;
import org.apache.tomcat.Jar;
import org.apache.tomcat.util.descriptor.tld.TldResourcePath;
/**
* A place holder for various things that are used through out the JSP
* engine. This is a per-request/per-context data structure. Some of
* the instance variables are set at different points.
*
* Most of the path-related stuff is here - mangling names, versions, dirs,
* loading resources and dealing with uris.
*
* @author Anil K. Vijendran
* @author Harish Prabandham
* @author Pierre Delisle
* @author Costin Manolache
* @author Kin-man Chung
*/
public class JspCompilationContext {
private final Log log = LogFactory.getLog(JspCompilationContext.class); // must not be static
// Simple (unqualified) name of the generated servlet / tag handler class.
private String className;
// Context-relative URI of the JSP page (or tag file) being compiled.
private final String jspUri;
// User-settable base package plus the package derived from the JSP path.
private String basePackageName;
private String derivedPackageName;
// Lazily computed names/paths of the generated .java and .class artifacts.
private String servletJavaFileName;
private String javaPath;
private String classFileName;
private ServletWriter writer;
private final Options options;
private final JspServletWrapper jsw;
private Compiler jspCompiler;
private String classPath;
// Directory portion of jspUri, always with leading and trailing '/';
// used by resolveRelativeUri().
private final String baseURI;
private String outputDir;
private final ServletContext context;
// Class loader used while compiling; jspLoader loads the compiled result.
private ClassLoader loader;
private final JspRuntimeContext rctxt;
private volatile boolean removed = false;
private URLClassLoader jspLoader;
private URL baseUrl;
private Class<?> servletClass;
// Tag-file specific state (set via the tag-file constructor).
private final boolean isTagFile;
private boolean protoTypeMode;
private TagInfo tagInfo;
private Jar tagJar;
// jspURI _must_ be relative to the context
/**
 * Create a compilation context for a JSP page.
 * Delegates to the private constructor with tag-file mode disabled.
 */
public JspCompilationContext(String jspUri, Options options,
        ServletContext context, JspServletWrapper jsw,
        JspRuntimeContext rctxt) {
    this(jspUri, null, options, context, jsw, rctxt, null, false);
}
/**
 * Create a compilation context for a tag file, optionally packaged in
 * the given JAR. Delegates to the private constructor with tag-file
 * mode enabled.
 */
public JspCompilationContext(String tagfile, TagInfo tagInfo,
        Options options, ServletContext context, JspServletWrapper jsw,
        JspRuntimeContext rctxt, Jar tagJar) {
    this(tagfile, tagInfo, options, context, jsw, rctxt, tagJar, true);
}
/**
 * Common constructor: canonicalises the JSP URI, wires the collaborating
 * objects, and normalises the base URI used for relative resolution.
 */
private JspCompilationContext(String jspUri, TagInfo tagInfo,
        Options options, ServletContext context, JspServletWrapper jsw,
        JspRuntimeContext rctxt, Jar tagJar, boolean isTagFile) {
    this.jspUri = canonicalURI(jspUri);
    this.options = options;
    this.jsw = jsw;
    this.context = context;
    // Pax-Web enhanced-on
    // Lazily attach a TldCache when the deployment did not configure one.
    if (null == this.options.getTldCache()) {
        if (this.options instanceof EmbeddedServletOptions) {
            ((EmbeddedServletOptions) this.options).setTldCache(TldCache.getInstance(context));
        }
    }
    // Pax-Web enhanced-off
    // Directory portion of the JSP URI, normalised so it always starts
    // and ends with '/'.
    String baseURI = jspUri.substring(0, jspUri.lastIndexOf('/') + 1);
    // hack fix for resolveRelativeURI
    if (baseURI.isEmpty()) {
        baseURI = "/";
    } else if (baseURI.charAt(0) != '/') {
        // strip the base slash since it will be combined with the
        // uriBase to generate a file
        baseURI = "/" + baseURI;
    }
    if (baseURI.charAt(baseURI.length() - 1) != '/') {
        baseURI += '/';
    }
    this.baseURI = baseURI;
    this.rctxt = rctxt;
    this.basePackageName = Constants.JSP_PACKAGE_NAME;
    this.tagInfo = tagInfo;
    this.tagJar = tagJar;
    this.isTagFile = isTagFile;
}
/* ==================== Methods to override ==================== */
/** ---------- Class path and loader ---------- */
/**
 * @return the classpath that is passed off to the Java compiler;
 *         falls back to the runtime context's classpath when none
 *         has been set explicitly.
 */
public String getClassPath() {
    if( classPath != null ) {
        return classPath;
    }
    return rctxt.getClassPath();
}
/**
 * The classpath that is passed off to the Java compiler.
 * @param classPath The class path to use
 */
public void setClassPath(String classPath) {
    this.classPath = classPath;
}
/**
 * What class loader to use for loading classes while compiling
 * this JSP? Falls back to the runtime context's parent loader when
 * none has been set explicitly.
 * @return the class loader used to load all compiled classes
 */
public ClassLoader getClassLoader() {
    if( loader != null ) {
        return loader;
    }
    return rctxt.getParentClassLoader();
}
/** Explicitly set the compile-time class loader. */
public void setClassLoader(ClassLoader loader) {
    this.loader = loader;
}
/**
 * Class loader used to load the generated servlet class, created
 * lazily over {@code baseUrl}.
 * NOTE(review): lazy init is not synchronized — presumably callers are
 * confined to one thread per wrapper; confirm before relying on it.
 */
public ClassLoader getJspLoader() {
    if( jspLoader == null ) {
        jspLoader = new JasperLoader
                (new URL[] {baseUrl},
                        getClassLoader(),
                        rctxt.getPermissionCollection());
    }
    return jspLoader;
}
/** Drop the cached loader so the next access creates a fresh one. */
public void clearJspLoader() {
    jspLoader = null;
}
/** ---------- Input/Output ---------- */
/**
 * The output directory to generate code into. The output directory
 * is made up of the scratch directory, which is provided in Options,
 * plus the directory derived from the package name. Created lazily.
 * @return the output directory in which the generated sources are placed
 */
public String getOutputDir() {
    if (outputDir == null) {
        createOutputDir();
    }
    return outputDir;
}
/**
 * Create a "Compiler" object based on some init param data. This
 * is not done yet. Right now we're just hardcoding the actual
 * compilers that are created.
 * @return the Java compiler wrapper
 * @throws IllegalStateException if no compiler implementation could
 *         be instantiated
 */
public Compiler createCompiler() {
    // Reuse a previously created compiler if there is one.
    if (jspCompiler != null ) {
        return jspCompiler;
    }
    jspCompiler = null;
    if (options.getCompilerClassName() != null) {
        // An explicitly configured compiler implementation wins.
        jspCompiler = createCompiler(options.getCompilerClassName());
    } else {
        if (options.getCompiler() == null) {
            // No javac "compiler" option set: prefer the in-memory JDT
            // compiler, fall back to Ant if JDT is unavailable.
            jspCompiler = createCompiler("org.apache.jasper.compiler.JDTCompiler");
            if (jspCompiler == null) {
                jspCompiler = createCompiler("org.apache.jasper.compiler.AntCompiler");
            }
        } else {
            // A javac compiler was named: prefer Ant, fall back to JDT.
            jspCompiler = createCompiler("org.apache.jasper.compiler.AntCompiler");
            if (jspCompiler == null) {
                jspCompiler = createCompiler("org.apache.jasper.compiler.JDTCompiler");
            }
        }
    }
    if (jspCompiler == null) {
        throw new IllegalStateException(Localizer.getMessage("jsp.error.compiler.config",
                options.getCompilerClassName(), options.getCompiler()));
    }
    jspCompiler.init(this, jsw);
    return jspCompiler;
}
/**
 * Instantiate a Compiler implementation by class name, returning
 * <code>null</code> (after logging) if it cannot be created. Missing
 * classes are only logged at debug level because this method is used
 * to probe for optional compiler implementations.
 *
 * @param className fully qualified name of the Compiler implementation
 * @return the compiler instance, or null if it could not be created
 */
protected Compiler createCompiler(String className) {
    Compiler compiler = null;
    try {
        // Class.newInstance() is deprecated since Java 9 and rethrows
        // checked constructor exceptions unchecked; use the no-arg
        // constructor explicitly instead.
        compiler = (Compiler) Class.forName(className).getConstructor().newInstance();
    } catch (NoClassDefFoundError | ClassNotFoundException e) {
        // Implementation (or one of its dependencies) not on the
        // classpath: expected while probing, so keep it at debug level.
        if (log.isDebugEnabled()) {
            log.debug(Localizer.getMessage("jsp.error.compiler"), e);
        }
    } catch (ReflectiveOperationException e) {
        // Covers InstantiationException, IllegalAccessException,
        // NoSuchMethodException and InvocationTargetException.
        log.warn(Localizer.getMessage("jsp.error.compiler"), e);
    }
    return compiler;
}
/**
 * @return the compiler created by {@link #createCompiler()}, or
 *         <code>null</code> if none has been created yet.
 */
public Compiler getCompiler() {
    return this.jspCompiler;
}
/** ---------- Access resources in the webapp ---------- */
/**
 * Get the full value of a URI relative to this compilation context;
 * uses the current file as the base.
 * @param uri The relative URI
 * @return absolute URI
 */
public String resolveRelativeUri(String uri) {
    // URIs that already start at a root are returned untouched; the
    // File.separator check covers paths massaged through File(String).
    boolean rooted = uri.startsWith("/") || uri.startsWith(File.separator);
    return rooted ? uri : baseURI + uri;
}
/**
 * Gets a resource as a stream, relative to the meanings of this
 * context's implementation. The path is canonicalised first.
 * @param res the resource to look for
 * @return a null if the resource cannot be found or represented
 * as an InputStream.
 */
public java.io.InputStream getResourceAsStream(String res) {
    return context.getResourceAsStream(canonicalURI(res));
}
/** Resolve a canonicalised resource path to a URL via the ServletContext. */
public URL getResource(String res) throws MalformedURLException {
    return context.getResource(canonicalURI(res));
}
/** List the resource paths below a canonicalised directory path. */
public Set<String> getResourcePaths(String path) {
    return context.getResourcePaths(canonicalURI(path));
}
/**
 * Gets the actual path of a URI relative to the context of
 * the compilation.
 * @param path The webapp path
 * @return the corresponding path in the filesystem
 */
public String getRealPath(String path) {
    // With no ServletContext available, fall back to the webapp path.
    return (context == null) ? path : context.getRealPath(path);
}
/**
 * Returns the JAR file in which the tag file for which this
 * JspCompilationContext was created is packaged, or null if this
 * JspCompilationContext does not correspond to a tag file, or if the
 * corresponding tag file is not packaged in a JAR.
 * @return a JAR file
 */
public Jar getTagFileJar() {
    return this.tagJar;
}
/** Replace the JAR associated with this context's tag file. */
public void setTagFileJar(Jar tagJar) {
    this.tagJar = tagJar;
}
/* ==================== Common implementation ==================== */
/**
 * Just the class name (does not include package name) of the
 * generated class. Computed lazily and cached.
 * @return the class name
 */
public String getServletClassName() {
    if (className != null) {
        return className;
    }
    if (isTagFile) {
        // Tag handlers already know their fully qualified class name;
        // keep only the simple name.
        className = tagInfo.getTagClassName();
        int lastIndex = className.lastIndexOf('.');
        if (lastIndex != -1) {
            className = className.substring(lastIndex + 1);
        }
    } else {
        // Mangle the JSP file name into a legal Java identifier.
        int iSep = jspUri.lastIndexOf('/') + 1;
        className = JspUtil.makeJavaIdentifier(jspUri.substring(iSep));
    }
    return className;
}
/**
 * Override the generated servlet class name.
 * @param className the simple class name to use
 */
public void setServletClassName(String className) {
    this.className = className;
}
/**
 * Path of the JSP URI. Note that this is not a file name. This is
 * the context rooted URI of the JSP file.
 * @return the path to the JSP
 */
public String getJspFile() {
    return jspUri;
}
/**
 * Last-modified time of the given resource, resolved against this
 * context's tag JAR (if any).
 */
public Long getLastModified(String resource) {
    return getLastModified(resource, tagJar);
}
/**
 * Determine when the given resource was last modified.
 *
 * @param resource context-relative path (or JAR entry name) to check
 * @param tagJar   JAR to look in, or null to resolve against the webapp
 * @return last-modified time in ms since the epoch, or -1 if the
 *         resource is missing or an I/O error occurred
 */
public Long getLastModified(String resource, Jar tagJar) {
    long result = -1;
    URLConnection uc = null;
    try {
        if (tagJar != null) {
            // JAR entry names never start with '/'
            if (resource.startsWith("/")) {
                resource = resource.substring(1);
            }
            result = tagJar.getLastModified(resource);
        } else {
            URL jspUrl = getResource(resource);
            if (jspUrl == null) {
                // Resource no longer exists: mark this context removed.
                incrementRemoved();
                return Long.valueOf(result);
            }
            uc = jspUrl.openConnection();
            if (uc instanceof JarURLConnection) {
                // Ask the JAR entry directly rather than the connection
                // so the timestamp of the entry (not the JAR) is used.
                JarEntry jarEntry = ((JarURLConnection) uc).getJarEntry();
                if (jarEntry != null) {
                    result = jarEntry.getTime();
                } else {
                    result = uc.getLastModified();
                }
            } else {
                result = uc.getLastModified();
            }
        }
    } catch (IOException e) {
        if (log.isDebugEnabled()) {
            log.debug(Localizer.getMessage(
                    "jsp.error.lastModified", getJspFile()), e);
        }
        result = -1;
    } finally {
        // Close the connection's stream, otherwise the underlying
        // file/JAR handle leaks.
        if (uc != null) {
            try {
                uc.getInputStream().close();
            } catch (IOException e) {
                if (log.isDebugEnabled()) {
                    log.debug(Localizer.getMessage(
                            "jsp.error.lastModified", getJspFile()), e);
                }
                result = -1;
            }
        }
    }
    return Long.valueOf(result);
}
/** @return true when this context compiles a tag file, not a JSP page. */
public boolean isTagFile() {
    return isTagFile;
}
/** @return the TagInfo for the tag file being compiled, if any. */
public TagInfo getTagInfo() {
    return tagInfo;
}
/** Replace the TagInfo for the tag file being compiled. */
public void setTagInfo(TagInfo tagi) {
    tagInfo = tagi;
}
/**
 * @return <code>true</code> if we are compiling a tag file
 * in prototype mode.
 * ie we only generate codes with class for the tag handler with empty
 * method bodies.
 */
public boolean isPrototypeMode() {
    return protoTypeMode;
}
/** Switch prototype-mode generation on or off. */
public void setPrototypeMode(boolean pm) {
    protoTypeMode = pm;
}
/**
 * Package name for the generated class is made up of the base package
 * name, which is user settable, and the derived package name. The
 * derived package name directly mirrors the file hierarchy of the JSP page.
 * @return the package name
 */
public String getServletPackageName() {
    if (isTagFile()) {
        // Tag handlers carry their package in the tag class name.
        String className = tagInfo.getTagClassName();
        int lastIndex = className.lastIndexOf('.');
        String pkgName = "";
        if (lastIndex != -1) {
            pkgName = className.substring(0, lastIndex);
        }
        return pkgName;
    } else {
        String dPackageName = getDerivedPackageName();
        if (dPackageName.length() == 0) {
            return basePackageName;
        }
        // Reuse the value computed above instead of deriving it again.
        return basePackageName + '.' + dPackageName;
    }
}
/**
 * Computes (and caches) the package name derived from the JSP's URI:
 * the URI's directory portion converted to a valid Java package name.
 *
 * @return the derived package name, or "" if the JSP sits at the root
 */
protected String getDerivedPackageName() {
    if (derivedPackageName == null) {
        // Pax-Web enhanced-on
        // Sanitize the URI: keep only its path component; fall back to
        // the raw value if it is not a parsable URI.
        String usedJspUri;
        try {
            usedJspUri = new URI(this.jspUri).getPath();
            if (usedJspUri == null) {
                usedJspUri = "";
            }
        } catch (URISyntaxException e) {
            usedJspUri = this.jspUri;
        }
        // Pax-Web enhanced-off
        // Bug fix: derive the package from the sanitized usedJspUri.
        // Previously the raw jspUri was used here, which made the
        // Pax-Web sanitation above dead code.
        int iSep = usedJspUri.lastIndexOf('/');
        derivedPackageName = (iSep > 0) ?
            JspUtil.makeJavaPackage(usedJspUri.substring(1, iSep)) : "";
    }
    return derivedPackageName;
}
/**
 * The package name into which the servlet class is generated. This sets
 * the user-settable base package; the final package also includes the
 * derived portion (see {@link #getServletPackageName()}).
 * @param servletPackageName The package name to use
 */
public void setServletPackageName(String servletPackageName) {
    this.basePackageName = servletPackageName;
}
/**
 * @return Full path name of the Java file into which the servlet is being
 * generated. Computed lazily on first access and cached thereafter.
 */
public String getServletJavaFileName() {
    if (servletJavaFileName != null) {
        return servletJavaFileName;
    }
    servletJavaFileName = getOutputDir() + getServletClassName() + ".java";
    return servletJavaFileName;
}
/**
 * @return the Options object for this context.
 */
public Options getOptions() {
    return options;
}

/**
 * @return the ServletContext of the web application this JSP belongs to.
 */
public ServletContext getServletContext() {
    return context;
}

/**
 * @return the shared JSP runtime context; may be null (callers such as
 * incrementRemoved() guard against that).
 */
public JspRuntimeContext getRuntimeContext() {
    return rctxt;
}
/**
 * @return the path of the Java file relative to the work directory.
 * For tag files this mirrors the tag handler's class name; for JSP pages
 * it is the servlet package path plus the servlet class name. Cached
 * after the first computation.
 */
public String getJavaPath() {
    if (javaPath == null) {
        if (isTagFile()) {
            javaPath = tagInfo.getTagClassName().replace('.', '/') + ".java";
        } else {
            javaPath = getServletPackageName().replace('.', '/') + '/'
                    + getServletClassName() + ".java";
        }
    }
    return javaPath;
}
/**
 * @return the full path of the generated .class file, computed lazily
 * from the output directory and servlet class name.
 */
public String getClassFileName() {
    if (classFileName != null) {
        return classFileName;
    }
    classFileName = getOutputDir() + getServletClassName() + ".class";
    return classFileName;
}
/**
 * @return the writer that is used to write the generated Servlet source.
 */
public ServletWriter getWriter() {
    return writer;
}

/**
 * Sets the writer used to emit the generated servlet source.
 * @param writer the writer to use
 */
public void setWriter(ServletWriter writer) {
    this.writer = writer;
}
/**
 * Gets the 'location' of the TLD associated with the given taglib 'uri'.
 *
 * NOTE(review): the previous javadoc described an array of two Strings,
 * but the method actually returns a TldResourcePath — the doc below
 * reflects the real signature.
 *
 * @param uri The taglib URI
 * @return the TldResourcePath for the TLD (which identifies the TLD file,
 * and the entry within a JAR if applicable), or null if the given uri is
 * not associated with any tag library 'exposed' in the web application.
 */
public TldResourcePath getTldResourcePath(String uri) {
    return getOptions().getTldCache().getTldResourcePath(uri);
}
/**
 * @return <code>true</code> if generated code is kept.
 * Delegates to the "keep generated" setting on {@link #getOptions()}.
 */
public boolean keepGenerated() {
    return getOptions().getKeepGenerated();
}
// ==================== Removal ====================
/**
 * Marks this JSP as removed. On the first call (while not yet removed)
 * the wrapper is also unregistered from the runtime context, if one is
 * present.
 */
public void incrementRemoved() {
    // Idiom fix: use !removed instead of comparing against false.
    if (!removed && rctxt != null) {
        rctxt.removeWrapper(jspUri);
    }
    removed = true;
}

/**
 * @return whether this JSP has been marked as removed.
 */
public boolean isRemoved() {
    return removed;
}
// ==================== Compile and reload ====================
/**
 * Compiles the JSP if its generated artifacts are out of date. Any
 * compilation failure is cached on the wrapper so later requests can
 * report it without recompiling.
 *
 * @throws JasperException if compilation fails
 * @throws FileNotFoundException if the JSP has been removed, or the
 *         compiler reports a missing file
 */
public void compile() throws JasperException, FileNotFoundException {
    createCompiler();
    if (jspCompiler.isOutDated()) {
        if (isRemoved()) {
            throw new FileNotFoundException(jspUri);
        }
        try {
            // Drop stale artifacts and the old class loader before
            // regenerating, then flag the wrapper for reload.
            jspCompiler.removeGeneratedFiles();
            jspLoader = null;
            jspCompiler.compile();
            jsw.setReload(true);
            jsw.setCompilationException(null);
        } catch (JasperException ex) {
            // Cache compilation exception
            jsw.setCompilationException(ex);
            if (options.getDevelopment() && options.getRecompileOnFail()) {
                // Force a recompilation attempt on next access
                jsw.setLastModificationTest(-1);
            }
            throw ex;
        } catch (FileNotFoundException fnfe) {
            // Re-throw to let caller handle this - will result in a 404
            throw fnfe;
        } catch (Exception ex) {
            JasperException je = new JasperException(
                Localizer.getMessage("jsp.error.unable.compile"),
                ex);
            // Cache compilation exception
            jsw.setCompilationException(je);
            throw je;
        }
    }
}
// ==================== Manipulating the class ====================
/**
 * Loads the generated servlet class through the JSP class loader and
 * clears the removed flag on success.
 *
 * @return the loaded servlet class
 * @throws JasperException if the class cannot be loaded or another error
 *         occurs during loading
 */
public Class<?> load() throws JasperException {
    try {
        getJspLoader();
        servletClass = jspLoader.loadClass(getFQCN());
    } catch (ClassNotFoundException cex) {
        throw new JasperException(
                Localizer.getMessage("jsp.error.unable.load"), cex);
    } catch (Exception ex) {
        throw new JasperException(
                Localizer.getMessage("jsp.error.unable.compile"), ex);
    }
    removed = false;
    return servletClass;
}
/**
 * @return the fully qualified class name of the generated class: the tag
 * handler's class name for tag files, otherwise the servlet package plus
 * servlet class name.
 */
public String getFQCN() {
    return isTagFile()
            ? tagInfo.getTagClassName()
            : getServletPackageName() + "." + getServletClassName();
}
// ==================== protected methods ====================
// Guards output-directory creation against concurrent compilations.
private static final Object outputDirLock = new Object();

/**
 * Ensures the output directory exists, computing it first if it has not
 * been determined yet.
 */
public void checkOutputDir() {
    if (outputDir != null) {
        if (!(new File(outputDir)).exists()) {
            makeOutputDir();
        }
    } else {
        createOutputDir();
    }
}

/**
 * Creates the output directory (including missing parents).
 * @return true if the directory exists when this method returns
 */
protected boolean makeOutputDir() {
    synchronized(outputDirLock) {
        File outDirFile = new File(outputDir);
        // mkdirs() returns false if another thread created the directory
        // first, so also accept an already-existing directory.
        return (outDirFile.mkdirs() || outDirFile.isDirectory());
    }
}
/**
 * Computes the output directory under the scratch dir for the generated
 * servlet or tag handler, records the scratch dir's base URL, and creates
 * the directory on disk.
 *
 * @throws IllegalStateException if the directory cannot be created or the
 *         scratch dir cannot be converted to a URL
 */
protected void createOutputDir() {
    String path = null;
    if (isTagFile()) {
        // Mirror the tag handler's package hierarchy (class name dropped).
        String tagName = tagInfo.getTagClassName();
        path = tagName.replace('.', File.separatorChar);
        path = path.substring(0, path.lastIndexOf(File.separatorChar));
    } else {
        path = getServletPackageName().replace('.',File.separatorChar);
    }
    // Append servlet or tag handler path to scratch dir
    try {
        File base = options.getScratchDir();
        baseUrl = base.toURI().toURL();
        outputDir = base.getAbsolutePath() + File.separator + path +
            File.separator;
        if (!makeOutputDir()) {
            throw new IllegalStateException(Localizer.getMessage("jsp.error.outputfolder"));
        }
    } catch (MalformedURLException e) {
        throw new IllegalStateException(Localizer.getMessage("jsp.error.outputfolder"), e);
    }
}
/**
 * @param c the character to test
 * @return true if the character is a forward or backward slash
 */
protected static final boolean isPathSeparator(char c) {
    switch (c) {
        case '/':
        case '\\':
            return true;
        default:
            return false;
    }
}
/**
 * Returns a canonical version of the given path-like string: collapses
 * repeated path separators, drops '.' segments, and resolves '..'
 * segments against the preceding path element. Both '/' and '\\' are
 * treated as separators.
 *
 * @param s the path to canonicalize; may be null
 * @return the canonical form, or null if {@code s} is null
 */
protected static final String canonicalURI(String s) {
    if (s == null) {
        return null;
    }
    StringBuilder result = new StringBuilder();
    final int len = s.length();
    int pos = 0;
    while (pos < len) {
        char c = s.charAt(pos);
        if ( isPathSeparator(c) ) {
            /*
             * multiple path separators.
             * 'foo///bar' -> 'foo/bar'
             */
            while (pos+1 < len && isPathSeparator(s.charAt(pos+1))) {
                ++pos;
            }
            if (pos+1 < len && s.charAt(pos+1) == '.') {
                /*
                 * a single dot at the end of the path - we are done.
                 */
                if (pos+2 >= len) {
                    break;
                }
                switch (s.charAt(pos+2)) {
                /*
                 * self directory in path
                 * foo/./bar -> foo/bar
                 */
                case '/':
                case '\\':
                    pos += 2;
                    continue;
                /*
                 * two dots in a path: go back one hierarchy.
                 * foo/bar/../baz -> foo/baz
                 */
                case '.':
                    // only if we have exactly _two_ dots.
                    if (pos+3 < len && isPathSeparator(s.charAt(pos+3))) {
                        pos += 3;
                        // Trim the result back past the previous separator,
                        // removing the last path element.
                        int separatorPos = result.length()-1;
                        while (separatorPos >= 0 &&
                               ! isPathSeparator(result
                                   .charAt(separatorPos))) {
                            --separatorPos;
                        }
                        if (separatorPos >= 0) {
                            result.setLength(separatorPos);
                        }
                        continue;
                    }
                }
            }
        }
        result.append(c);
        ++pos;
    }
    return result.toString();
}
}
| |
/*
* Copyright (c) 2014, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.emm.agent;
import java.util.Map;
import org.wso2.emm.agent.api.DeviceInfo;
import org.wso2.emm.agent.beans.ServerConfig;
import org.wso2.emm.agent.proxy.interfaces.APIResultCallBack;
import org.wso2.emm.agent.proxy.utils.Constants.HTTP_METHODS;
import org.wso2.emm.agent.services.DeviceInfoPayload;
import org.wso2.emm.agent.utils.CommonDialogUtils;
import org.wso2.emm.agent.utils.Constants;
import org.wso2.emm.agent.utils.Preference;
import org.wso2.emm.agent.utils.CommonUtils;
import android.app.Activity;
import android.app.AlertDialog;
import android.app.ProgressDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.res.Resources;
import android.os.AsyncTask;
import android.os.Bundle;
import android.util.Log;
import android.view.KeyEvent;
import android.view.Menu;
import android.view.MenuItem;
/**
 * Activity which handles user enrollment: builds the device-info payload,
 * calls the registration API, and on success either registers for GCM
 * notifications or fetches the effective policy.
 */
public class RegistrationActivity extends Activity implements APIResultCallBack {
    private Context context;
    private ProgressDialog progressDialog;
    private AlertDialog.Builder alertDialog;
    private DeviceInfoPayload deviceInfoBuilder;
    private Resources resources;
    private String deviceIdentifier;
    private String TAG = RegistrationActivity.class.getSimpleName();

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        context = this;
        deviceInfoBuilder = new DeviceInfoPayload(context);
        resources = context.getResources();
        DeviceInfo deviceInfo = new DeviceInfo(context);
        deviceIdentifier = deviceInfo.getDeviceId();
        // Persist the device identifier so other components can read it.
        Preference.putString(context, resources.getString(R.string.shared_pref_regId), deviceIdentifier);
        registerDevice();
    }

    /**
     * Builds the device-info payload and invokes the registration endpoint.
     * Shows a progress dialog for the duration of the call; if the network
     * is unavailable, stops the dialog and informs the user instead.
     */
    private void registerDevice() {
        progressDialog = CommonDialogUtils.showPrgressDialog(RegistrationActivity.this,
                getResources().getString(R.string.dialog_enrolling),
                getResources().getString(R.string.dialog_please_wait),
                null);
        progressDialog.show();
        String type = Preference.getString(context,
                context.getResources().getString(R.string.shared_pref_reg_type));
        String username = Preference.getString(context,
                context.getResources().getString(R.string.username));
        try {
            deviceInfoBuilder.build(type, username);
        } catch (AndroidAgentException e) {
            Log.e(TAG, "Error occurred while building the device info payload.", e);
        }
        // Check network connection availability before calling the API.
        if (CommonUtils.isNetworkAvailable(context)) {
            // Call device registration API.
            String ipSaved = Preference.getString(context.getApplicationContext(), Constants.IP);
            ServerConfig utils = new ServerConfig();
            utils.setServerIP(ipSaved);
            CommonUtils.callSecuredAPI(RegistrationActivity.this,
                    utils.getAPIServerURL() + Constants.REGISTER_ENDPOINT,
                    HTTP_METHODS.POST,
                    deviceInfoBuilder.getDeviceInfoPayload(),
                    RegistrationActivity.this,
                    Constants.REGISTER_REQUEST_CODE);
        } else {
            CommonDialogUtils.stopProgressDialog(progressDialog);
            CommonDialogUtils.showNetworkUnavailableMessage(RegistrationActivity.this);
        }
    }

    @Override
    public boolean onKeyDown(int keyCode, KeyEvent event) {
        // BACK sends the user to the launcher home and finishes this
        // activity so enrollment cannot be resumed mid-flow.
        if (keyCode == KeyEvent.KEYCODE_BACK) {
            Intent i = new Intent();
            i.setAction(Intent.ACTION_MAIN);
            i.addCategory(Intent.CATEGORY_HOME);
            this.startActivity(i);
            finish();
            return true;
        } else if (keyCode == KeyEvent.KEYCODE_HOME) {
            finish();
            return true;
        }
        return super.onKeyDown(keyCode, event);
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        // No menu is inflated for this activity.
        return true;
    }

    // Consistency fix: @Override was missing here although every other
    // overridden method in this class carries it.
    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        return super.onOptionsItemSelected(item);
    }

    // On acknowledging a registration failure, route the user to the
    // authentication error screen.
    private DialogInterface.OnClickListener registrationFailedOKBtnClickListerner =
            new DialogInterface.OnClickListener() {
                @Override
                public void onClick(DialogInterface arg0,
                                    int arg1) {
                    loadAuthenticationErrorActivity();
                }
            };

    /**
     * Loads Already registered activity.
     */
    private void loadAlreadyRegisteredActivity() {
        Intent intent =
                new Intent(RegistrationActivity.this,
                        AlreadyRegisteredActivity.class);
        intent.putExtra(getResources().getString(R.string.intent_extra_fresh_reg_flag), true);
        intent.setFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
        startActivity(intent);
    }

    /**
     * Display connectivity error.
     */
    private void displayConnectionError() {
        alertDialog = CommonDialogUtils.getAlertDialogWithOneButtonAndTitle(context,
                getResources().getString(R.string.title_head_connection_error),
                getResources().getString(R.string.error_internal_server),
                getResources().getString(R.string.button_ok),
                registrationFailedOKBtnClickListerner);
        alertDialog.show();
    }

    /**
     * Display internal server error.
     */
    private void displayInternalServerError() {
        alertDialog = CommonDialogUtils.getAlertDialogWithOneButtonAndTitle(context,
                getResources().getString(R.string.title_head_registration_error),
                getResources().getString(R.string.error_for_all_unknown_registration_failures),
                getResources().getString(R.string.button_ok),
                registrationFailedOKBtnClickListerner);
        // Bug fix: the dialog was built but never shown, so registration
        // failures were silent (cf. displayConnectionError above).
        alertDialog.show();
    }

    @Override
    public void onReceiveAPIResult(Map<String, String> result, int requestCode) {
        CommonDialogUtils.stopProgressDialog(progressDialog);
        DeviceInfo info = new DeviceInfo(context);
        if (Constants.REGISTER_REQUEST_CODE == requestCode) {
            String responseStatus;
            if (result != null) {
                responseStatus = result.get(Constants.STATUS);
                Preference.putString(context, resources.getString(R.string.shared_pref_regId), info.getDeviceId());
                if (Constants.Status.SUCCESSFUL.equals(responseStatus)) {
                    // NOTE(review): getString(...) may return null here, in
                    // which case trim() throws NPE — verify the notifier
                    // preference is always set before registration.
                    if (Preference.getString(context, context.getResources().
                            getString(R.string.shared_pref_notifier)).trim().equals(Constants.NOTIFIER_GCM)) {
                        registerGCM();
                    } else {
                        getEffectivePolicy();
                    }
                } else {
                    displayInternalServerError();
                }
            } else {
                displayConnectionError();
            }
        } else if (Constants.POLICY_REQUEST_CODE == requestCode) {
            loadAlreadyRegisteredActivity();
        }
    }

    /**
     * This will start the GCM flow by registering the device with Google and sending the
     * registration ID to MDM. This is done in a Async task as a network call may be done, and
     * it should be done out side the UI thread. After retrieving the registration Id, it is send
     * to the MDM server so that it can send notifications to the device.
     */
    private void registerGCM() {
        new AsyncTask<Void, Void, String>() {
            String senderId = Preference.getString(context, context.getResources().getString(R.string.shared_pref_sender_id));
            GCMRegistrationManager registrationManager = new GCMRegistrationManager(RegistrationActivity.this, senderId);

            @Override
            protected String doInBackground(Void... params) {
                return registrationManager.registerWithGoogle();
            }

            @Override
            protected void onPostExecute(String regId) {
                Preference.putString(context, Constants.REG_ID, regId);
                if (regId != null) {
                    try {
                        registrationManager.sendRegistrationId();
                    } catch (AndroidAgentException e) {
                        Log.e(TAG, "Error while sending registration Id");
                    }
                } else {
                    try {
                        CommonUtils.clearAppData(context);
                        displayInternalServerError();
                    } catch (AndroidAgentException e) {
                        Log.e(TAG, "Failed to clear app data", e);
                    }
                }
            }
        }.execute();
        // NOTE(review): this runs immediately, without waiting for the async
        // GCM registration above to finish — confirm this ordering is
        // intentional.
        getEffectivePolicy();
    }

    /**
     * Loads Authentication error activity.
     */
    private void loadAuthenticationErrorActivity() {
        Preference.putString(context, Constants.IP, Constants.EMPTY_STRING);
        Intent intent = new Intent(
                RegistrationActivity.this,
                ServerDetails.class);
        intent.putExtra(getResources().getString(R.string.intent_extra_from_activity),
                RegistrationActivity.class.getSimpleName());
        intent.setFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
        startActivity(intent);
        finish();
    }

    /**
     * This method is used to invoke getEffectivePolicy in the backend
     */
    private void getEffectivePolicy() {
        if (CommonUtils.isNetworkAvailable(context)) {
            String ipSaved =
                    Preference.getString(context.getApplicationContext(), Constants.IP);
            ServerConfig utils = new ServerConfig();
            utils.setServerIP(ipSaved);
            CommonUtils.callSecuredAPI(RegistrationActivity.this,
                    utils.getAPIServerURL() + Constants.POLICY_ENDPOINT + deviceIdentifier,
                    HTTP_METHODS.GET,
                    null,
                    RegistrationActivity.this,
                    Constants.POLICY_REQUEST_CODE);
        } else {
            CommonDialogUtils.stopProgressDialog(progressDialog);
            CommonDialogUtils.showNetworkUnavailableMessage(RegistrationActivity.this);
        }
    }
}
| |
/*
* Copyright 2015 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.googlejavaformat.java;
import static com.google.common.truth.Truth.assertThat;
import static com.google.common.truth.Truth.assertWithMessage;
import static java.nio.charset.StandardCharsets.UTF_8;
import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableList;
import com.google.common.io.ByteStreams;
import java.io.BufferedWriter;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.InputStream;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.lang.ProcessBuilder.Redirect;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.attribute.PosixFilePermission;
import java.util.EnumSet;
import java.util.Locale;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/** Tests for {@link Main}. */
@RunWith(JUnit4.class)
public class MainTest {
@Rule public TemporaryFolder testFolder = new TemporaryFolder();
// PrintWriter instances used below are hard-coded to use system-default line separator.
private final Joiner joiner = Joiner.on(System.lineSeparator());
@Test
public void testUsageOutput() {
StringWriter out = new StringWriter();
StringWriter err = new StringWriter();
Main main = new Main(new PrintWriter(out, true), new PrintWriter(err, true), System.in);
try {
main.format("--help");
throw new AssertionError("Expected UsageException to be thrown");
} catch (UsageException e) {
String usage = e.getMessage();
// Check that doc links are included.
assertThat(usage).contains("https://github.com/google/google-java-format");
assertThat(usage).contains("Usage: google-java-format");
// Sanity check that a flag and description is in included.
assertThat(usage).contains("--length");
assertThat(usage).contains("Character length to format.");
// Check that some of the additional text is included.
assertThat(usage).contains("the result is sent to stdout");
}
}
@Test
public void version() throws UsageException {
StringWriter out = new StringWriter();
StringWriter err = new StringWriter();
Main main = new Main(new PrintWriter(out, true), new PrintWriter(err, true), System.in);
assertThat(main.format("-version")).isEqualTo(0);
assertThat(err.toString()).contains("google-java-format: Version ");
}
@Test
public void preserveOriginalFile() throws Exception {
Path path = testFolder.newFile("Test.java").toPath();
Files.write(path, "class Test {}\n".getBytes(UTF_8));
try {
Files.setPosixFilePermissions(path, EnumSet.of(PosixFilePermission.OWNER_READ));
} catch (UnsupportedOperationException e) {
return;
}
Main main =
new Main(
new PrintWriter(new BufferedWriter(new OutputStreamWriter(System.out, UTF_8)), true),
new PrintWriter(new BufferedWriter(new OutputStreamWriter(System.err, UTF_8)), true),
System.in);
int errorCode = main.format("-replace", path.toAbsolutePath().toString());
assertWithMessage("Error Code").that(errorCode).isEqualTo(0);
}
@Test
public void testMain() throws Exception {
Process process =
new ProcessBuilder(
ImmutableList.of(
Paths.get(System.getProperty("java.home")).resolve("bin/java").toString(),
"-cp",
System.getProperty("java.class.path"),
Main.class.getName()))
.redirectError(Redirect.PIPE)
.redirectOutput(Redirect.PIPE)
.start();
process.waitFor();
String err = new String(ByteStreams.toByteArray(process.getErrorStream()), UTF_8);
assertThat(err).contains("Usage: google-java-format");
assertThat(process.exitValue()).isEqualTo(0);
}
// end to end javadoc formatting test
@Test
public void javadoc() throws Exception {
String[] input = {
"/**",
" * graph",
" *",
" * graph",
" *",
" * @param foo lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do"
+ " eiusmod tempor incididunt ut labore et dolore magna aliqua",
" */",
"class Test {",
" /**",
" * creates entropy",
" */",
" public static void main(String... args) {}",
"}",
};
String[] expected = {
"/**",
" * graph",
" *",
" * <p>graph",
" *",
" * @param foo lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do"
+ " eiusmod tempor",
" * incididunt ut labore et dolore magna aliqua",
" */",
"class Test {",
" /** creates entropy */",
" public static void main(String... args) {}",
"}",
"",
};
InputStream in = new ByteArrayInputStream(joiner.join(input).getBytes(UTF_8));
StringWriter out = new StringWriter();
Main main =
new Main(
new PrintWriter(out, true),
new PrintWriter(new BufferedWriter(new OutputStreamWriter(System.err, UTF_8)), true),
in);
assertThat(main.format("-")).isEqualTo(0);
assertThat(out.toString()).isEqualTo(joiner.join(expected));
}
// end to end import fixing test
@Test
public void imports() throws Exception {
String[] input = {
"import java.util.LinkedList;",
"import java.util.List;",
"import java.util.ArrayList;",
"class Test {",
" /**",
" * May be an {@link ArrayList}.",
" */",
" public static List<String> names;",
"}",
};
String[] expected = {
"import java.util.ArrayList;",
"import java.util.List;",
"",
"class Test {",
" /**",
" * May be an {@link ArrayList}.",
" */",
" public static List<String> names;",
"}",
};
InputStream in = new ByteArrayInputStream(joiner.join(input).getBytes(UTF_8));
StringWriter out = new StringWriter();
Main main =
new Main(
new PrintWriter(out, true),
new PrintWriter(new BufferedWriter(new OutputStreamWriter(System.err, UTF_8)), true),
in);
assertThat(main.format("-", "--fix-imports-only")).isEqualTo(0);
assertThat(out.toString()).isEqualTo(joiner.join(expected));
}
@Test
public void optimizeImportsDoesNotLeaveEmptyLines() throws Exception {
String[] input = {
"package abc;",
"",
"import java.util.LinkedList;",
"import java.util.List;",
"import java.util.ArrayList;",
"",
"import static java.nio.charset.StandardCharsets.UTF_8;",
"",
"import java.util.EnumSet;",
"",
"class Test ",
"extends ArrayList {",
"}"
};
String[] expected = {
"package abc;", //
"",
"import java.util.ArrayList;",
"",
"class Test extends ArrayList {}",
""
};
// pre-check expectation with local formatter instance
String optimized = new Formatter().formatSourceAndFixImports(joiner.join(input));
assertThat(optimized).isEqualTo(joiner.join(expected));
InputStream in = new ByteArrayInputStream(joiner.join(input).getBytes(UTF_8));
StringWriter out = new StringWriter();
Main main =
new Main(
new PrintWriter(out, true),
new PrintWriter(new BufferedWriter(new OutputStreamWriter(System.err, UTF_8)), true),
in);
assertThat(main.format("-")).isEqualTo(0);
assertThat(out.toString()).isEqualTo(joiner.join(expected));
}
// test that -lines handling works with import removal
@Test
public void importRemovalLines() throws Exception {
String[] input = {
"import java.util.ArrayList;",
"import java.util.List;",
"class Test {",
"ArrayList<String> a = new ArrayList<>();",
"ArrayList<String> b = new ArrayList<>();",
"}",
};
String[] expected = {
"import java.util.ArrayList;",
"",
"class Test {",
" ArrayList<String> a = new ArrayList<>();",
"ArrayList<String> b = new ArrayList<>();",
"}",
};
StringWriter out = new StringWriter();
Main main =
new Main(
new PrintWriter(out, true),
new PrintWriter(new BufferedWriter(new OutputStreamWriter(System.err, UTF_8)), true),
new ByteArrayInputStream(joiner.join(input).getBytes(UTF_8)));
assertThat(main.format("-", "-lines", "4")).isEqualTo(0);
assertThat(out.toString()).isEqualTo(joiner.join(expected));
}
// test that errors are reported on the right line when imports are removed
@Test
public void importRemoveErrorParseError() throws Exception {
Locale backupLocale = Locale.getDefault();
try {
Locale.setDefault(Locale.ROOT);
String[] input = {
"import java.util.ArrayList;", //
"import java.util.List;",
"class Test {",
"}}",
};
StringWriter out = new StringWriter();
StringWriter err = new StringWriter();
Main main =
new Main(
new PrintWriter(out, true),
new PrintWriter(err, true),
new ByteArrayInputStream(joiner.join(input).getBytes(UTF_8)));
assertThat(main.format("-")).isEqualTo(1);
assertThat(err.toString()).contains("<stdin>:4:3: error: class, interface");
} finally {
Locale.setDefault(backupLocale);
}
}
@Test
public void packageInfo() throws Exception {
String[] input = {
"@CheckReturnValue",
"@ParametersAreNonnullByDefault",
"package com.google.common.labs.base;",
"",
"import javax.annotation.CheckReturnValue;",
"import javax.annotation.ParametersAreNonnullByDefault;",
"",
};
StringWriter out = new StringWriter();
StringWriter err = new StringWriter();
Main main =
new Main(
new PrintWriter(out, true),
new PrintWriter(err, true),
new ByteArrayInputStream(joiner.join(input).getBytes(UTF_8)));
assertThat(main.format("-")).isEqualTo(0);
assertThat(out.toString()).isEqualTo(joiner.join(input));
}
@Test
public void newline() throws Exception {
StringWriter out = new StringWriter();
StringWriter err = new StringWriter();
Main main =
new Main(
new PrintWriter(out, true),
new PrintWriter(err, true),
new ByteArrayInputStream("class T {}\n\t".getBytes(UTF_8)));
assertThat(main.format("-")).isEqualTo(0);
assertThat(out.toString()).isEqualTo("class T {}\n");
}
@Test
public void dryRunStdinUnchanged() throws Exception {
StringWriter out = new StringWriter();
StringWriter err = new StringWriter();
Main main =
new Main(
new PrintWriter(out, true),
new PrintWriter(err, true),
new ByteArrayInputStream("class Test {}\n".getBytes(UTF_8)));
assertThat(main.format("-n", "-")).isEqualTo(0);
assertThat(out.toString()).isEmpty();
assertThat(err.toString()).isEmpty();
}
@Test
public void dryRunStdinChanged() throws Exception {
StringWriter out = new StringWriter();
StringWriter err = new StringWriter();
String input = "class Test {\n}\n";
Main main =
new Main(
new PrintWriter(out, true),
new PrintWriter(err, true),
new ByteArrayInputStream(input.getBytes(UTF_8)));
assertThat(main.format("-n", "-")).isEqualTo(0);
assertThat(out.toString()).isEqualTo("<stdin>" + System.lineSeparator());
assertThat(err.toString()).isEmpty();
}
@Test
public void dryRunFiles() throws Exception {
Path a = testFolder.newFile("A.java").toPath();
Path b = testFolder.newFile("B.java").toPath();
Path c = testFolder.newFile("C.java").toPath();
Files.write(a, "class A {}\n".getBytes(UTF_8));
Files.write(b, "class B {\n}\n".getBytes(UTF_8));
Files.write(c, "class C {\n}\n".getBytes(UTF_8));
StringWriter out = new StringWriter();
StringWriter err = new StringWriter();
Main main = new Main(new PrintWriter(out, true), new PrintWriter(err, true), System.in);
int exitCode =
main.format(
"-n",
a.toAbsolutePath().toAbsolutePath().toString(),
b.toAbsolutePath().toString(),
c.toAbsolutePath().toString());
assertThat(exitCode).isEqualTo(0);
assertThat(out.toString())
.isEqualTo(
b.toAbsolutePath().toString()
+ System.lineSeparator()
+ c.toAbsolutePath().toString()
+ System.lineSeparator());
assertThat(err.toString()).isEmpty();
}
@Test
public void keepGoingWhenFilesDontExist() throws Exception {
Path a = testFolder.newFile("A.java").toPath();
Path b = testFolder.newFile("B.java").toPath();
File cFile = testFolder.newFile("C.java");
Path c = cFile.toPath();
cFile.delete();
Files.write(a, "class A{}\n".getBytes(UTF_8));
Files.write(b, "class B{}\n".getBytes(UTF_8));
StringWriter out = new StringWriter();
StringWriter err = new StringWriter();
Main main = new Main(new PrintWriter(out, true), new PrintWriter(err, true), System.in);
int exitCode =
main.format(
"",
a.toAbsolutePath().toString(),
c.toAbsolutePath().toString(),
b.toAbsolutePath().toString());
// Formatter returns failure if a file was not present.
assertThat(exitCode).isEqualTo(1);
// Present files were correctly formatted.
assertThat(out.toString()).isEqualTo("class A {}\nclass B {}\n");
// File not found still showed error.
assertThat(err.toString()).isNotEmpty();
}
@Test
public void exitIfChangedStdin() throws Exception {
Path path = testFolder.newFile("Test.java").toPath();
Files.write(path, "class Test {\n}\n".getBytes(UTF_8));
Process process =
new ProcessBuilder(
ImmutableList.of(
Paths.get(System.getProperty("java.home")).resolve("bin/java").toString(),
"-cp",
System.getProperty("java.class.path"),
Main.class.getName(),
"-n",
"--set-exit-if-changed",
"-"))
.redirectInput(path.toFile())
.redirectError(Redirect.PIPE)
.redirectOutput(Redirect.PIPE)
.start();
process.waitFor();
String err = new String(ByteStreams.toByteArray(process.getErrorStream()), UTF_8);
String out = new String(ByteStreams.toByteArray(process.getInputStream()), UTF_8);
assertThat(err).isEmpty();
assertThat(out).isEqualTo("<stdin>" + System.lineSeparator());
assertThat(process.exitValue()).isEqualTo(1);
}
@Test
public void exitIfChangedFiles() throws Exception {
Path path = testFolder.newFile("Test.java").toPath();
Files.write(path, "class Test {\n}\n".getBytes(UTF_8));
Process process =
new ProcessBuilder(
ImmutableList.of(
Paths.get(System.getProperty("java.home")).resolve("bin/java").toString(),
"-cp",
System.getProperty("java.class.path"),
Main.class.getName(),
"-n",
"--set-exit-if-changed",
path.toAbsolutePath().toString()))
.redirectError(Redirect.PIPE)
.redirectOutput(Redirect.PIPE)
.start();
process.waitFor();
String err = new String(ByteStreams.toByteArray(process.getErrorStream()), UTF_8);
String out = new String(ByteStreams.toByteArray(process.getInputStream()), UTF_8);
assertThat(err).isEmpty();
assertThat(out).isEqualTo(path.toAbsolutePath().toString() + System.lineSeparator());
assertThat(process.exitValue()).isEqualTo(1);
}
@Test
public void assumeFilename_error() throws Exception {
String[] input = {
"class Test {}}",
};
StringWriter out = new StringWriter();
StringWriter err = new StringWriter();
Main main =
new Main(
new PrintWriter(out, true),
new PrintWriter(err, true),
new ByteArrayInputStream(joiner.join(input).getBytes(UTF_8)));
assertThat(main.format("--assume-filename=Foo.java", "-")).isEqualTo(1);
assertThat(err.toString()).contains("Foo.java:1:15: error: class, interface");
}
@Test
public void assumeFilename_dryRun() throws Exception {
String[] input = {
"class Test {", //
"}",
};
StringWriter out = new StringWriter();
StringWriter err = new StringWriter();
Main main =
new Main(
new PrintWriter(out, true),
new PrintWriter(err, true),
new ByteArrayInputStream(joiner.join(input).getBytes(UTF_8)));
assertThat(main.format("--dry-run", "--assume-filename=Foo.java", "-")).isEqualTo(0);
assertThat(out.toString()).isEqualTo("Foo.java" + System.lineSeparator());
}
@Test
public void reflowLongStrings() throws Exception {
String[] input = {
"class T {", //
" String s = \"one long incredibly unbroken sentence moving from topic to topic so that no"
+ " one had a chance to interrupt\";",
"}"
};
String[] expected = {
"class T {",
" String s =",
" \"one long incredibly unbroken sentence moving from topic to topic so that no one had"
+ " a\"",
" + \" chance to interrupt\";",
"}",
"",
};
InputStream in = new ByteArrayInputStream(joiner.join(input).getBytes(UTF_8));
StringWriter out = new StringWriter();
Main main =
new Main(
new PrintWriter(out, true),
new PrintWriter(new BufferedWriter(new OutputStreamWriter(System.err, UTF_8)), true),
in);
assertThat(main.format("-")).isEqualTo(0);
assertThat(out.toString()).isEqualTo(joiner.join(expected));
}
@Test
public void noReflowLongStrings() throws Exception {
  // With --skip-reflowing-long-strings the literal is still wrapped onto a
  // continuation line but its contents are NOT split/re-flowed (contrast with
  // reflowLongStrings above). The whitespace in `expected` is significant.
  String[] input = {
    "class T {", //
    " String s = \"one long incredibly unbroken sentence moving from topic to topic so that no"
        + " one had a chance to interrupt\";",
    "}"
  };
  String[] expected = {
    "class T {",
    " String s =",
    " \"one long incredibly unbroken sentence moving from topic to topic so that no one had"
        + " a chance to interrupt\";",
    "}",
    "",
  };
  InputStream in = new ByteArrayInputStream(joiner.join(input).getBytes(UTF_8));
  StringWriter out = new StringWriter();
  Main main =
      new Main(
          new PrintWriter(out, true),
          new PrintWriter(new BufferedWriter(new OutputStreamWriter(System.err, UTF_8)), true),
          in);
  assertThat(main.format("--skip-reflowing-long-strings", "-")).isEqualTo(0);
  assertThat(out.toString()).isEqualTo(joiner.join(expected));
}
@Test
public void noFormatJavadoc() throws Exception {
  // With --skip-javadoc-formatting the input must pass through unchanged:
  // the output is asserted to be byte-identical to the input, so the Javadoc
  // below (odd blank lines, long @param) must not be normalized.
  String[] input = {
    "/**",
    " * graph",
    " *",
    " * graph",
    " *",
    " * @param foo lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do"
        + " eiusmod tempor incididunt ut labore et dolore magna aliqua",
    " */",
    "class Test {",
    " /**",
    " * creates entropy",
    " */",
    " public static void main(String... args) {}",
    "}",
    "",
  };
  InputStream in = new ByteArrayInputStream(joiner.join(input).getBytes(UTF_8));
  StringWriter out = new StringWriter();
  Main main =
      new Main(
          new PrintWriter(out, true),
          new PrintWriter(new BufferedWriter(new OutputStreamWriter(System.err, UTF_8)), true),
          in);
  assertThat(main.format("--skip-javadoc-formatting", "-")).isEqualTo(0);
  // Output equals input: Javadoc was left untouched.
  assertThat(out.toString()).isEqualTo(joiner.join(input));
}
}
| |
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.openapi.progress.util;
import com.intellij.concurrency.SensitiveProgressWrapper;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.application.Application;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ModalityState;
import com.intellij.openapi.application.ReadAction;
import com.intellij.openapi.components.ComponentManager;
import com.intellij.openapi.components.ServiceManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.progress.EmptyProgressIndicator;
import com.intellij.openapi.progress.ProcessCanceledException;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Computable;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.Ref;
import com.intellij.util.Consumer;
import com.intellij.util.Function;
import com.intellij.util.ObjectUtils;
import com.intellij.util.PairConsumer;
import com.intellij.util.concurrency.AppExecutorUtil;
import com.intellij.util.concurrency.annotations.RequiresEdt;
import com.intellij.util.messages.MessageBus;
import com.intellij.util.messages.Topic;
import org.jetbrains.annotations.CalledInAny;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Supplier;
/**
 * Helpers for running background computations that cooperate with {@link ProgressIndicator}s,
 * modality state, and {@link Disposable} lifetimes.
 * <p>
 * The "try wait" family races a pooled-thread computation against a timeout: if the computation
 * finishes within the timeout, its result is delivered synchronously on the calling thread;
 * otherwise the caller proceeds and the result is delivered later via a callback.
 */
public final class BackgroundTaskUtil {
  private static final Logger LOG = Logger.getInstance(BackgroundTaskUtil.class);

  /**
   * Same as {@link #executeAndTryWait(Function, Runnable, long, boolean)} with the default
   * progress-dialog postpone timeout and {@code forceEDT == false}.
   */
  @RequiresEdt
  public static @NotNull ProgressIndicator executeAndTryWait(@NotNull Function<? super ProgressIndicator, /*@NotNull*/ ? extends Runnable> backgroundTask,
                                                             @Nullable Runnable onSlowAction) {
    return executeAndTryWait(backgroundTask, onSlowAction, ProgressWindow.DEFAULT_PROGRESS_DIALOG_POSTPONE_TIME_MILLIS, false);
  }

  /**
   * Executor to perform <i>possibly</i> long operation on pooled thread.
   * If computation was performed within given time frame,
   * the computed callback will be executed synchronously (avoiding unnecessary <tt>invokeLater()</tt>).
   * In this case, {@code onSlowAction} will not be executed at all.
   * <ul>
   * <li> If the computation is fast, execute callback synchronously.
   * <li> If the computation is slow, execute <tt>onSlowAction</tt> synchronously. When the computation is completed, execute callback in EDT.
   * </ul><p>
   * It can be used to reduce blinking when background task might be completed fast.<br>
   * A simple approach:
   * <pre>
   * onSlowAction.run() // show "Loading..."
   * executeOnPooledThread({
   *   Runnable callback = backgroundTask(); // some background computations
   *   invokeLater(callback); // apply changes
   * });
   * </pre>
   * will lead to "Loading..." visible between current moment and execution of invokeLater() event.
   * This period can be very short and looks like 'jumping' if background operation is fast.
   */
  @RequiresEdt
  public static @NotNull ProgressIndicator executeAndTryWait(@NotNull Function<? super ProgressIndicator, /*@NotNull*/ ? extends Runnable> backgroundTask,
                                                             @Nullable Runnable onSlowAction,
                                                             long waitMillis,
                                                             boolean forceEDT) {
    // Capture the caller's (EDT) modality so the deferred callback runs under the same state.
    ModalityState modality = ModalityState.current();
    if (forceEDT) {
      // Run the "background" task right here on the EDT under a cancellable indicator.
      ProgressIndicator indicator = new EmptyProgressIndicator(modality);
      try {
        Runnable callback = ProgressManager.getInstance().runProcess(() -> backgroundTask.fun(indicator), indicator);
        finish(callback, indicator);
      }
      catch (ProcessCanceledException ignore) {
        // Cancellation is an expected outcome; nothing to report.
      }
      catch (Throwable t) {
        LOG.error(t);
      }
      return indicator;
    }
    else {
      Pair<Runnable, ProgressIndicator> pair = computeInBackgroundAndTryWait(
        backgroundTask,
        (callback, indicator) -> ApplicationManager.getApplication().invokeLater(() -> finish(callback, indicator), modality),
        modality,
        waitMillis);
      Runnable callback = pair.first;
      ProgressIndicator indicator = pair.second;
      if (callback != null) {
        // Computation finished within waitMillis - apply the result synchronously.
        finish(callback, indicator);
      }
      else {
        // Computation is still running - run the slow-path action now; the
        // invokeLater callback above will deliver the result when it completes.
        if (onSlowAction != null) onSlowAction.run();
      }
      return indicator;
    }
  }

  /** Runs the computed callback unless the indicator was cancelled in the meantime. */
  @RequiresEdt
  private static void finish(@NotNull Runnable result, @NotNull ProgressIndicator indicator) {
    if (!indicator.isCanceled()) result.run();
  }

  /**
   * Try to compute value in background and abort computation if it takes too long.
   * <ul>
   * <li> If the computation is fast, return computed value.
   * <li> If the computation is slow, abort computation (cancel ProgressIndicator).
   * </ul>
   */
  @CalledInAny
  public static @Nullable <T> T tryComputeFast(@NotNull Function<? super ProgressIndicator, ? extends T> backgroundTask,
                                               long waitMillis) {
    Pair<T, ProgressIndicator> pair = computeInBackgroundAndTryWait(
      backgroundTask,
      (result, indicator) -> {
        // Slow path: the result is discarded, so the async callback is a no-op.
      },
      ModalityState.defaultModalityState(),
      waitMillis);
    T result = pair.first;
    ProgressIndicator indicator = pair.second;
    // Cancel unconditionally: if the task is still running it should stop;
    // if it already finished, cancelling is harmless.
    indicator.cancel();
    return result;
  }

  /**
   * Computes {@code computable} on a pooled thread.
   * Returns the value if it was computed within {@code waitMillis}; otherwise returns
   * {@code null} and {@code asyncCallback} receives the value later, on the background thread.
   */
  @CalledInAny
  public static @Nullable <T> T computeInBackgroundAndTryWait(@NotNull Computable<? extends T> computable,
                                                              @NotNull Consumer<? super T> asyncCallback,
                                                              long waitMillis) {
    Pair<T, ProgressIndicator> pair = computeInBackgroundAndTryWait(
      indicator -> computable.compute(),
      (result, indicator) -> asyncCallback.consume(result),
      ModalityState.defaultModalityState(),
      waitMillis
    );
    return pair.first;
  }

  /**
   * Compute value in background and try wait for its completion.
   * <ul>
   * <li> If the computation is fast, return computed value synchronously. Callback will not be called in this case.
   * <li> If the computation is slow, return <tt>null</tt>. When the computation is completed, pass the value to the callback.
   * </ul>
   * Callback will be executed on the same thread as the background task.
   */
  @CalledInAny
  private static @NotNull <T> Pair<T, ProgressIndicator> computeInBackgroundAndTryWait(@NotNull Function<? super ProgressIndicator, ? extends T> task,
                                                                                       @NotNull PairConsumer<? super T, ? super ProgressIndicator> asyncCallback,
                                                                                       @NotNull ModalityState modality,
                                                                                       long waitMillis) {
    ProgressIndicator indicator = new EmptyProgressIndicator(modality);
    indicator.start();
    // Helper arbitrates who delivers the result: the waiting caller (fast path)
    // or the background thread via asyncCallback (slow path).
    Helper<T> helper = new Helper<>();
    ApplicationManager.getApplication().executeOnPooledThread(() -> ProgressManager.getInstance().runProcess(() -> {
      T result = task.fun(indicator);
      if (!helper.setResult(result)) {
        // The caller already gave up waiting - hand the result to the callback instead.
        asyncCallback.consume(result, indicator);
      }
    }, indicator));
    T result = null;
    if (helper.await(waitMillis)) {
      // Fast path: the background thread published the result in time.
      result = helper.getResult();
    }
    return Pair.create(result, indicator);
  }

  /**
   * An alternative to plain {@link Application#executeOnPooledThread(Runnable)} which wraps the task in a process with a
   * {@link ProgressIndicator} which gets cancelled when the given disposable is disposed. <br/><br/>
   *
   * This allows stopping a lengthy background activity by calling {@link ProgressManager#checkCanceled()},
   * and avoids Already Disposed exceptions (in particular, because checkCanceled() is called in {@link ServiceManager#getService(Class)}).
   */
  @CalledInAny
  public static @NotNull ProgressIndicator executeOnPooledThread(@NotNull Disposable parent, @NotNull Runnable runnable) {
    return execute(AppExecutorUtil.getAppExecutorService(), parent, runnable);
  }

  /**
   * Does the same as {@link BackgroundTaskUtil#executeOnPooledThread(Disposable, Runnable)} method but allows using a
   * custom {@link Executor} instance.
   */
  @CalledInAny
  public static @NotNull ProgressIndicator execute(@NotNull Executor executor, @NotNull Disposable parent, @NotNull Runnable runnable) {
    ProgressIndicator indicator = new EmptyProgressIndicator();
    indicator.start();
    CompletableFuture<?> future = CompletableFuture.runAsync(() -> ProgressManager.getInstance().runProcess(runnable, indicator),
                                                             executor);
    // When the parent is disposed: cancel the task and briefly wait for it to
    // acknowledge, so it does not outlive the disposed parent.
    Disposable disposable = () -> {
      if (indicator.isRunning()) indicator.cancel();
      try {
        future.get(1, TimeUnit.SECONDS);
      }
      catch (ExecutionException e) {
        if (e.getCause() instanceof ProcessCanceledException) {
          // ignore: expected cancellation
        }
        else {
          LOG.error(e);
        }
      }
      catch (InterruptedException | TimeoutException e) {
        LOG.debug("Couldn't await background process on disposal: " + runnable);
      }
    };
    if (!registerIfParentNotDisposed(parent, disposable)) {
      // Parent is already disposed - don't even let the task run to completion.
      indicator.cancel();
      return indicator;
    }
    // Once the task finishes on its own, the dispose hook is no longer needed.
    future.whenComplete((o, e) -> Disposer.dispose(disposable));
    return indicator;
  }

  /**
   * Runs {@code task} under an indicator that is cancelled when {@code parent} is disposed,
   * and returns the supplier's value.
   */
  @CalledInAny
  public static <T> T runUnderDisposeAwareIndicator(@NotNull Disposable parent, @NotNull Supplier<? extends T> task) {
    Ref<T> ref = new Ref<>();
    runUnderDisposeAwareIndicator(parent, () -> {
      ref.set(task.get());
    });
    return ref.get();
  }

  /**
   * Runs {@code task} under an indicator that is cancelled when {@code parent} is disposed,
   * wrapping the current thread's progress indicator if there is one.
   */
  @CalledInAny
  public static void runUnderDisposeAwareIndicator(@NotNull Disposable parent, @NotNull Runnable task) {
    runUnderDisposeAwareIndicator(parent, task, ProgressManager.getInstance().getProgressIndicator());
  }

  /**
   * Runs {@code task} synchronously under an indicator that is cancelled when {@code parent}
   * is disposed. If {@code parentIndicator} is non-null, cancellation of the parent indicator
   * also cancels the task (via {@link SensitiveProgressWrapper}).
   *
   * @throws ProcessCanceledException if {@code parent} is already disposed
   */
  @CalledInAny
  public static void runUnderDisposeAwareIndicator(@NotNull Disposable parent,
                                                   @NotNull Runnable task,
                                                   @Nullable ProgressIndicator parentIndicator) {
    final ProgressIndicator indicator = parentIndicator == null
                                        ? new EmptyProgressIndicator(ModalityState.defaultModalityState())
                                        : new SensitiveProgressWrapper(parentIndicator);
    Disposable disposable = () -> {
      if (indicator.isRunning()) {
        indicator.cancel();
      }
    };
    if (!registerIfParentNotDisposed(parent, disposable)) {
      indicator.cancel();
      throw new ProcessCanceledException();
    }
    try {
      ProgressManager.getInstance().runProcess(task, indicator);
    }
    finally {
      // Unregister the dispose hook once the task is done, whether it succeeded or not.
      Disposer.dispose(disposable);
    }
  }

  /**
   * Registers {@code disposable} under {@code parent}; returns false if the parent
   * is already disposed (checked eagerly for ComponentManagers, and via
   * {@link Disposer#tryRegister} otherwise).
   */
  private static boolean registerIfParentNotDisposed(@NotNull Disposable parent, @NotNull Disposable disposable) {
    if (parent instanceof ComponentManager && ((ComponentManager)parent).isDisposed()) {
      return false;
    }
    return Disposer.tryRegister(parent, disposable);
  }

  /**
   * Wraps {@link MessageBus#syncPublisher(Topic)} in a dispose check,
   * and throws a {@link ProcessCanceledException} if the project is disposed,
   * instead of throwing an assertion which would happen otherwise.
   *
   * @see #syncPublisher(Topic)
   */
  @CalledInAny
  public static @NotNull <L> L syncPublisher(@NotNull Project project, @NotNull Topic<L> topic) throws ProcessCanceledException {
    return ReadAction.compute(() -> {
      if (project.isDisposed()) {
        throw new ProcessCanceledException();
      }
      return project.getMessageBus().syncPublisher(topic);
    });
  }

  /**
   * Wraps {@link MessageBus#syncPublisher(Topic)} in a dispose check,
   * and throws a {@link ProcessCanceledException} if the application is disposed,
   * instead of throwing an assertion which would happen otherwise.
   *
   * @see #syncPublisher(Project, Topic)
   */
  @CalledInAny
  public static @NotNull <L> L syncPublisher(@NotNull Topic<L> topic) throws ProcessCanceledException {
    return ReadAction.compute(() -> {
      if (ApplicationManager.getApplication().isDisposed()) throw new ProcessCanceledException();
      return ApplicationManager.getApplication().getMessageBus().syncPublisher(topic);
    });
  }

  /**
   * One-shot rendezvous between the waiting caller and the background thread.
   * Whoever moves {@code myResultRef} off INITIAL_STATE first "wins":
   * either the background thread publishes the result in time (fast path),
   * or the caller times out and marks the computation slow (slow path).
   */
  private static class Helper<T> {
    private static final Object INITIAL_STATE = ObjectUtils.sentinel("INITIAL_STATE");
    private static final Object SLOW_OPERATION_STATE = ObjectUtils.sentinel("SLOW_OPERATION_STATE");
    private final Semaphore mySemaphore = new Semaphore(0);
    private final AtomicReference<Object> myResultRef = new AtomicReference<>(INITIAL_STATE);

    /**
     * @return true if computation was fast, and callback should be handled by other thread
     */
    public boolean setResult(T result) {
      boolean isFast = myResultRef.compareAndSet(INITIAL_STATE, result);
      mySemaphore.release();
      return isFast;
    }

    /**
     * @return true if computation was fast, and callback should be handled by current thread
     */
    public boolean await(long waitMillis) {
      try {
        mySemaphore.tryAcquire(waitMillis, TimeUnit.MILLISECONDS);
      }
      catch (InterruptedException ignore) {
        // Treat interruption like a timeout: fall through to the CAS below.
      }
      // If the CAS succeeds, nobody published a result yet - the operation is slow.
      return !myResultRef.compareAndSet(INITIAL_STATE, SLOW_OPERATION_STATE);
    }

    public T getResult() {
      Object result = myResultRef.get();
      // Only legal on the fast path, after await() returned true.
      assert result != INITIAL_STATE && result != SLOW_OPERATION_STATE;
      //noinspection unchecked
      return (T)result;
    }
  }
}
| |
/**
* Copyright 2012-2015 Niall Gallagher
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.googlecode.cqengine.index.sqlite;
import com.google.common.collect.*;
import com.googlecode.cqengine.attribute.SimpleAttribute;
import com.googlecode.cqengine.index.sqlite.support.DBQueries;
import com.googlecode.cqengine.index.support.CloseableIterable;
import com.googlecode.cqengine.index.support.KeyStatistics;
import com.googlecode.cqengine.index.support.KeyValue;
import com.googlecode.cqengine.persistence.support.ConcurrentOnHeapObjectStore;
import com.googlecode.cqengine.persistence.support.ObjectSet;
import com.googlecode.cqengine.persistence.support.ObjectStore;
import com.googlecode.cqengine.query.option.QueryOptions;
import com.googlecode.cqengine.query.simple.FilterQuery;
import com.googlecode.cqengine.resultset.ResultSet;
import com.googlecode.cqengine.testutil.Car;
import com.googlecode.cqengine.testutil.CarFactory;
import org.junit.Assert;
import org.junit.Rule;
import org.junit.Test;
import org.mockito.Mockito;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.sqlite.SQLiteConfig;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.*;
import static com.googlecode.cqengine.query.QueryFactory.equal;
import static com.googlecode.cqengine.query.QueryFactory.noQueryOptions;
import static com.googlecode.cqengine.testutil.TestUtil.setOf;
import static org.junit.Assert.*;
import static org.mockito.Mockito.*;
/**
* Unit tests for {@link SQLiteIndex}
*
* @author Silvano Riz
*/
public class SQLiteIndexTest {
// Table/index names the SQLiteIndex under test derives from the FEATURES attribute.
private static final String TABLE_NAME = "cqtbl_features";
private static final String INDEX_NAME = "cqidx_features_value";
// Maps a Car to its primary key for storage in the index.
public static final SimpleAttribute<Car, Integer> OBJECT_TO_ID = Car.CAR_ID;
// Inverse mapping; returns null because these tests never materialize Cars from ids.
public static final SimpleAttribute<Integer, Car> ID_TO_OBJECT = new SimpleAttribute<Integer, Car>("carFromId") {
    public Car getValue(Integer carId, QueryOptions queryOptions) { return null; }
};
// Shared fixture data used by tests that populate the index.
public static List<Car> data = Arrays.asList(
        new Car(1, "Ford", "Focus", Car.Color.BLUE, 5, 9000.50, Arrays.asList("abs", "gps"), Collections.emptyList()),
        new Car(2, "Honda", "Civic", Car.Color.RED, 5, 5000.00, Arrays.asList("airbags"), Collections.emptyList()),
        new Car(3, "Toyota", "Prius", Car.Color.BLACK, 3, 9700.00, Arrays.asList("abs"), Collections.emptyList()),
        new Car(4, "Fiat", "Panda", Car.Color.BLUE, 5, 5600.00, Collections.<String>emptyList(), Collections.emptyList()),
        new Car(5, "Fiat", "Punto", Car.Color.BLUE, 5, 5600.00, Arrays.asList("gps"), Collections.emptyList())
);
// Fresh in-memory SQLite database per test, for tests that hit a real connection.
@Rule
public TemporaryDatabase.TemporaryInMemoryDatabase temporaryInMemoryDatabase = new TemporaryDatabase.TemporaryInMemoryDatabase();
@Test
public void testNew1() throws Exception {
  // The static factory should yield a non-null index for the FEATURES attribute.
  SQLiteIndex<String, Car, Integer> index =
      SQLiteIndex.onAttribute(Car.FEATURES, OBJECT_TO_ID, ID_TO_OBJECT);
  assertNotNull(index);
}
@Test
public void testNew2() throws Exception {
  // An explicit table suffix should be appended to the attribute-derived table name.
  SQLiteIndex<String, Car, Integer> index =
      new SQLiteIndex<String, Car, Integer>(Car.FEATURES, OBJECT_TO_ID, ID_TO_OBJECT, "_tableSuffix");
  assertNotNull(index);
  assertEquals("features_tableSuffix", index.tableName);
}
@Test
public void testGetConnectionManager(){
  // The index should obtain the ConnectionManager from the supplied QueryOptions.
  ConnectionManager expectedManager = mock(ConnectionManager.class);
  QueryOptions options = mock(QueryOptions.class);
  when(options.get(ConnectionManager.class)).thenReturn(expectedManager);
  SQLiteIndex<String, Car, Integer> index =
      new SQLiteIndex<String, Car, Integer>(Car.FEATURES, OBJECT_TO_ID, ID_TO_OBJECT, "");
  assertEquals(expectedManager, index.getConnectionManager(options));
}
@Test
public void testNotifyObjectsRemoved() throws Exception{
  // Verifies that removeAll() first ensures the table/index exist, then issues
  // batched DELETEs keyed by each removed object's id, without closing the
  // (pooled) connection.
  // Mock
  ConnectionManager connectionManager = mock(ConnectionManager.class);
  Connection connection = mock(Connection.class);
  Statement statement = mock(Statement.class);
  PreparedStatement preparedStatement = mock(PreparedStatement.class);
  // Behaviour
  when(connectionManager.getConnection(any(SQLiteIndex.class), anyQueryOptions())).thenReturn(connection).thenReturn(connection);
  when(connectionManager.isApplyUpdateForIndexEnabled(any(SQLiteIndex.class))).thenReturn(true);
  when(connection.createStatement()).thenReturn(statement);
  when(connection.prepareStatement("DELETE FROM " + TABLE_NAME + " WHERE objectKey = ?;")).thenReturn(preparedStatement);
  when(preparedStatement.executeBatch()).thenReturn(new int[] {1});
  // The objects to remove
  Set<Car> removedObjects = new HashSet<Car>(2);
  removedObjects.add(new Car(1, "Ford", "Focus", Car.Color.BLUE, 5, 9000.50, Arrays.asList("abs", "gps"), Collections.emptyList()));
  removedObjects.add(new Car(2, "Honda", "Civic", Car.Color.RED, 5, 5000.00, Arrays.asList("airbags"), Collections.emptyList()));
  @SuppressWarnings({"unchecked", "unused"})
  SQLiteIndex<String, Car, Integer> carFeaturesOffHeapIndex = new SQLiteIndex<String, Car, Integer>(
          Car.FEATURES,
          OBJECT_TO_ID,
          ID_TO_OBJECT,
          ""
  );
  carFeaturesOffHeapIndex.removeAll(objectSet(removedObjects), createQueryOptions(connectionManager));
  // Verify: DDL runs once, one DELETE per object id is batched, batch executes once.
  verify(statement, times(1)).executeUpdate("CREATE TABLE IF NOT EXISTS " + TABLE_NAME + " (objectKey INTEGER, value TEXT, PRIMARY KEY (objectKey, value)) WITHOUT ROWID;");
  verify(statement, times(1)).executeUpdate("CREATE INDEX IF NOT EXISTS " + INDEX_NAME + " ON " + TABLE_NAME + " (value);");
  verify(preparedStatement, times(1)).setObject(1, 1);
  verify(preparedStatement, times(1)).setObject(1, 2);
  verify(preparedStatement, times(2)).addBatch();
  verify(preparedStatement, times(1)).executeBatch();
  verify(connection, times(0)).close();
}
@Test
public void testNotifyObjectsAdded() throws Exception {
  // Verifies that addAll() creates the table/index if needed and batches one
  // INSERT per (objectKey, feature) pair: car 1 has 2 features, car 2 has 1,
  // giving 3 batched rows total.
  // Mock
  ConnectionManager connectionManager = mock(ConnectionManager.class);
  Connection connection = mock(Connection.class);
  Statement statement = mock(Statement.class);
  PreparedStatement preparedStatement = mock(PreparedStatement.class);
  // Behaviour
  when(connectionManager.getConnection(any(SQLiteIndex.class), anyQueryOptions())).thenReturn(connection).thenReturn(connection);
  when(connectionManager.isApplyUpdateForIndexEnabled(any(SQLiteIndex.class))).thenReturn(true);
  when(connection.createStatement()).thenReturn(statement);
  when(connection.prepareStatement("INSERT OR IGNORE INTO " + TABLE_NAME + " values(?, ?);")).thenReturn(preparedStatement);
  when(preparedStatement.executeBatch()).thenReturn(new int[] {2});
  // The objects to add
  Set<Car> addedObjects = new HashSet<Car>(2);
  addedObjects.add(new Car(1, "Ford", "Focus", Car.Color.BLUE, 5, 9000.50, Arrays.asList("abs", "gps"), Collections.emptyList()));
  addedObjects.add(new Car(2, "Honda", "Civic", Car.Color.RED, 5, 5000.00, Arrays.asList("airbags"), Collections.emptyList()));
  // Create the index and call addAll
  SQLiteIndex<String, Car, Integer> carFeaturesOffHeapIndex = new SQLiteIndex<String, Car, Integer>(
          Car.FEATURES,
          OBJECT_TO_ID,
          ID_TO_OBJECT,
          ""
  );
  carFeaturesOffHeapIndex.addAll(objectSet(addedObjects), createQueryOptions(connectionManager));
  // Verify: car 1's key bound twice (abs, gps), car 2's once (airbags).
  verify(statement, times(1)).executeUpdate("CREATE TABLE IF NOT EXISTS " + TABLE_NAME + " (objectKey INTEGER, value TEXT, PRIMARY KEY (objectKey, value)) WITHOUT ROWID;");
  verify(statement, times(1)).executeUpdate("CREATE INDEX IF NOT EXISTS " + INDEX_NAME + " ON " + TABLE_NAME + " (value);");
  verify(preparedStatement, times(2)).setObject(1, 1);
  verify(preparedStatement, times(1)).setObject(1, 2);
  verify(preparedStatement, times(1)).setObject(2, "abs");
  verify(preparedStatement, times(1)).setObject(2, "gps");
  verify(preparedStatement, times(1)).setObject(2, "airbags");
  verify(preparedStatement, times(3)).addBatch();
  verify(preparedStatement, times(1)).executeBatch();
  verify(connection, times(0)).close();
}
@Test
public void testNotifyObjectsCleared() throws Exception{
  // JDBC mocks
  ConnectionManager manager = mock(ConnectionManager.class);
  Connection conn = mock(Connection.class);
  Statement stmt = mock(Statement.class);
  // Stubbed behaviour
  when(manager.getConnection(any(SQLiteIndex.class), anyQueryOptions())).thenReturn(conn).thenReturn(conn);
  when(manager.isApplyUpdateForIndexEnabled(any(SQLiteIndex.class))).thenReturn(true);
  when(conn.createStatement()).thenReturn(stmt).thenReturn(stmt).thenReturn(stmt);
  @SuppressWarnings({"unchecked", "unused"})
  SQLiteIndex<String, Car, Integer> index =
      new SQLiteIndex<String, Car, Integer>(Car.FEATURES, OBJECT_TO_ID, ID_TO_OBJECT, "");
  index.clear(createQueryOptions(manager));
  // clear() must (re)create the table and index, wipe all rows, and leave the
  // pooled connection open.
  verify(stmt, times(1)).executeUpdate("CREATE TABLE IF NOT EXISTS " + TABLE_NAME + " (objectKey INTEGER, value TEXT, PRIMARY KEY (objectKey, value)) WITHOUT ROWID;");
  verify(stmt, times(1)).executeUpdate("CREATE INDEX IF NOT EXISTS " + INDEX_NAME + " ON " + TABLE_NAME + " (value);");
  verify(stmt, times(1)).executeUpdate("DELETE FROM " + TABLE_NAME + ";");
  verify(conn, times(0)).close();
}
/**
 * Verifies that if connectionManager.isApplyUpdateForIndexEnabled() returns false,
 * init() will do nothing beyond reading the current PRAGMA settings.
 */
@Test
public void testInit_ApplyUpdateForIndexIsFalse() throws Exception{
  ConnectionManager connectionManager = mock(ConnectionManager.class);
  Connection connection = mock(Connection.class);
  when(connectionManager.getConnection(any(SQLiteIndex.class), anyQueryOptions())).thenReturn(connection);
  // Simulate isApplyUpdateForIndexEnabled == false...
  when(connectionManager.isApplyUpdateForIndexEnabled(any(SQLiteIndex.class))).thenReturn(false);
  Statement statement = mock(Statement.class);
  when(connection.createStatement()).thenReturn(statement);
  // Stub the two PRAGMA queries init() always performs to capture the current
  // journal mode ("DELETE") and synchronous level (2 == FULL).
  java.sql.ResultSet journalModeRs = mock(java.sql.ResultSet.class);
  java.sql.ResultSet synchronousRs = mock(java.sql.ResultSet.class);
  when(journalModeRs.next()).thenReturn(true).thenReturn(false);
  when(synchronousRs.next()).thenReturn(true).thenReturn(false);
  when(journalModeRs.getString(1)).thenReturn("DELETE");
  when(synchronousRs.getInt(1)).thenReturn(2);
  when(statement.executeQuery("PRAGMA journal_mode;")).thenReturn(journalModeRs);
  when(statement.executeQuery("PRAGMA synchronous;")).thenReturn(synchronousRs);
  SQLiteIndex<String, Car, Integer> carFeaturesOffHeapIndex = new SQLiteIndex<String, Car, Integer>(
          Car.FEATURES,
          OBJECT_TO_ID,
          ID_TO_OBJECT,
          ""
  );
  carFeaturesOffHeapIndex.init(emptyObjectStore(), createQueryOptions(connectionManager));
  // Only the PRAGMA reads should have happened; no DDL or inserts.
  verify(statement, times(1)).executeQuery("PRAGMA journal_mode;");
  verify(statement, times(1)).executeQuery("PRAGMA synchronous;");
  verify(statement, times(2)).close();
  Assert.assertEquals(carFeaturesOffHeapIndex.pragmaSynchronous, SQLiteConfig.SynchronousMode.FULL);
  Assert.assertEquals(carFeaturesOffHeapIndex.pragmaJournalMode, SQLiteConfig.JournalMode.DELETE);
  Assert.assertTrue(carFeaturesOffHeapIndex.canModifySyncAndJournaling);
}
/**
 * Verifies that if connectionManager.isApplyUpdateForIndexEnabled() returns true,
 * and the index table already exists, init() will not recreate or repopulate it.
 */
@Test
public void testInit_IndexTableExists() throws Exception{
  ConnectionManager connectionManager = mock(ConnectionManager.class);
  Connection connection = mock(Connection.class);
  Statement statement = mock(Statement.class);
  PreparedStatement preparedStatement = mock(PreparedStatement.class);
  java.sql.ResultSet tableCheckRs = mock(java.sql.ResultSet.class);
  java.sql.ResultSet journalModeRs = mock(java.sql.ResultSet.class);
  java.sql.ResultSet synchronousRs = mock(java.sql.ResultSet.class);
  when(tableCheckRs.next()).thenReturn(true); // <- simulates a preexisting table
  when(journalModeRs.next()).thenReturn(true).thenReturn(false);
  when(synchronousRs.next()).thenReturn(true).thenReturn(false);
  when(journalModeRs.getString(1)).thenReturn("DELETE");
  when(synchronousRs.getInt(1)).thenReturn(2);
  // Stub the sqlite_master existence check and the PRAGMA reads.
  when(statement.executeQuery("SELECT 1 FROM sqlite_master WHERE type='table' AND name='cqtbl_features';")).thenReturn(tableCheckRs);
  when(statement.executeQuery("PRAGMA journal_mode;")).thenReturn(journalModeRs);
  when(statement.executeQuery("PRAGMA synchronous;")).thenReturn(synchronousRs);
  when(connection.prepareStatement("INSERT OR IGNORE INTO " + TABLE_NAME + " values(?, ?);")).thenReturn(preparedStatement);
  when(connectionManager.getConnection(any(SQLiteIndex.class), anyQueryOptions())).thenReturn(connection);
  when(connectionManager.isApplyUpdateForIndexEnabled(any(SQLiteIndex.class))).thenReturn(true);
  when(connection.createStatement()).thenReturn(statement);
  when(preparedStatement.executeBatch()).thenReturn(new int[] {2});
  // The objects init() is offered (should be ignored since the table exists).
  Set<Car> initWithObjects = new HashSet<Car>(2);
  initWithObjects.add(new Car(1, "Ford", "Focus", Car.Color.BLUE, 5, 9000.50, Arrays.asList("abs", "gps"), Collections.emptyList()));
  initWithObjects.add(new Car(2, "Honda", "Civic", Car.Color.RED, 5, 5000.00, Arrays.asList("airbags"), Collections.emptyList()));
  SQLiteIndex<String, Car, Integer> carFeaturesOffHeapIndex = new SQLiteIndex<String, Car, Integer>(
          Car.FEATURES,
          OBJECT_TO_ID,
          ID_TO_OBJECT,
          ""
  );
  carFeaturesOffHeapIndex.init(wrappingObjectStore(initWithObjects), createQueryOptions(connectionManager));
  // Verify
  verify(statement, times(1)).executeQuery("PRAGMA journal_mode;");
  verify(statement, times(1)).executeQuery("PRAGMA synchronous;");
  // Verify we should not proceed to recreate the table...
  verify(statement, times(0)).executeUpdate("CREATE TABLE IF NOT EXISTS " + TABLE_NAME + " (objectKey INTEGER, value TEXT, PRIMARY KEY (objectKey, value)) WITHOUT ROWID;");
}
/**
 * Verifies that if connectionManager.isApplyUpdateForIndexEnabled() returns true,
 * and the index table does not exist, init() will create and populate the index table.
 */
@Test
public void testInit_IndexTableDoesNotExist() throws Exception{
  ConnectionManager connectionManager = mock(ConnectionManager.class);
  Connection connection = mock(Connection.class);
  Statement statement = mock(Statement.class);
  PreparedStatement preparedStatement = mock(PreparedStatement.class);
  java.sql.ResultSet tableCheckRs = mock(java.sql.ResultSet.class);
  java.sql.ResultSet journalModeRs = mock(java.sql.ResultSet.class);
  java.sql.ResultSet synchronousRs = mock(java.sql.ResultSet.class);
  when(tableCheckRs.next()).thenReturn(false); // <- simulates table does not already exist
  when(journalModeRs.next()).thenReturn(true).thenReturn(false);
  when(synchronousRs.next()).thenReturn(true).thenReturn(false);
  when(journalModeRs.getString(1)).thenReturn("DELETE");
  when(synchronousRs.getInt(1)).thenReturn(2);
  // Stub the sqlite_master existence check, PRAGMA reads, and the bulk INSERT.
  when(statement.executeQuery("SELECT 1 FROM sqlite_master WHERE type='table' AND name='cqtbl_features';")).thenReturn(tableCheckRs);
  when(statement.executeQuery("PRAGMA journal_mode;")).thenReturn(journalModeRs);
  when(statement.executeQuery("PRAGMA synchronous;")).thenReturn(synchronousRs);
  when(connection.prepareStatement("INSERT OR IGNORE INTO " + TABLE_NAME + " values(?, ?);")).thenReturn(preparedStatement);
  when(connectionManager.getConnection(any(SQLiteIndex.class), anyQueryOptions())).thenReturn(connection);
  when(connectionManager.isApplyUpdateForIndexEnabled(any(SQLiteIndex.class))).thenReturn(true);
  when(connection.createStatement()).thenReturn(statement);
  when(preparedStatement.executeBatch()).thenReturn(new int[] {2});
  // The objects init() should load into the freshly created table.
  Set<Car> initWithObjects = new HashSet<Car>(2);
  initWithObjects.add(new Car(1, "Ford", "Focus", Car.Color.BLUE, 5, 9000.50, Arrays.asList("abs", "gps"), Collections.emptyList()));
  initWithObjects.add(new Car(2, "Honda", "Civic", Car.Color.RED, 5, 5000.00, Arrays.asList("airbags"), Collections.emptyList()));
  SQLiteIndex<String, Car, Integer> carFeaturesOffHeapIndex = new SQLiteIndex<String, Car, Integer>(
          Car.FEATURES,
          OBJECT_TO_ID,
          ID_TO_OBJECT,
          ""
  );
  carFeaturesOffHeapIndex.init(wrappingObjectStore(initWithObjects), createQueryOptions(connectionManager));
  // Verify: DDL executed, one insert per (objectKey, feature) pair (3 rows),
  // statements and prepared statement closed, pooled connection left open.
  verify(statement, times(1)).executeQuery("PRAGMA journal_mode;");
  verify(statement, times(1)).executeQuery("PRAGMA synchronous;");
  verify(statement, times(1)).executeUpdate("CREATE TABLE IF NOT EXISTS " + TABLE_NAME + " (objectKey INTEGER, value TEXT, PRIMARY KEY (objectKey, value)) WITHOUT ROWID;");
  verify(statement, times(1)).executeUpdate("CREATE INDEX IF NOT EXISTS " + INDEX_NAME + " ON " + TABLE_NAME + " (value);");
  verify(statement, times(6)).close();
  verify(preparedStatement, times(2)).setObject(1, 1);
  verify(preparedStatement, times(1)).setObject(1, 2);
  verify(preparedStatement, times(1)).setObject(2, "abs");
  verify(preparedStatement, times(1)).setObject(2, "gps");
  verify(preparedStatement, times(1)).setObject(2, "airbags");
  verify(preparedStatement, times(3)).addBatch();
  verify(preparedStatement, times(1)).executeBatch();
  verify(preparedStatement, times(1)).close();
  verify(connection, times(0)).close();
  Assert.assertEquals(carFeaturesOffHeapIndex.pragmaSynchronous, SQLiteConfig.SynchronousMode.FULL);
  Assert.assertEquals(carFeaturesOffHeapIndex.pragmaJournalMode, SQLiteConfig.JournalMode.DELETE);
  Assert.assertTrue(carFeaturesOffHeapIndex.canModifySyncAndJournaling);
}
@Test
public void testNewResultSet_Size() throws Exception{
  // JDBC mocks
  ConnectionManager manager = mock(ConnectionManager.class);
  Connection conn = mock(Connection.class);
  PreparedStatement ps = mock(PreparedStatement.class);
  java.sql.ResultSet rs = mock(java.sql.ResultSet.class);
  // Stub the COUNT query to report 3 distinct matching objects
  when(manager.getConnection(any(SQLiteIndex.class), anyQueryOptions())).thenReturn(conn);
  when(conn.prepareStatement("SELECT COUNT(1) AS countDistinct FROM (SELECT objectKey FROM " + TABLE_NAME + " WHERE value = ? GROUP BY objectKey);")).thenReturn(ps);
  when(ps.executeQuery()).thenReturn(rs);
  when(rs.getStatement()).thenReturn(ps);
  when(rs.next()).thenReturn(true);
  when(rs.getInt(1)).thenReturn(3);
  SQLiteIndex<String, Car, Integer> index =
      new SQLiteIndex<String, Car, Integer>(Car.FEATURES, OBJECT_TO_ID, ID_TO_OBJECT, "");
  ResultSet<Car> carsWithAbs = index.retrieve(equal(Car.FEATURES, "abs"), createQueryOptions(manager));
  assertNotNull(carsWithAbs);
  // size() should delegate to the COUNT query and leave the connection open.
  assertEquals(3, carsWithAbs.size());
  verify(conn, times(0)).close();
}
@Test
public void testNewResultSet_GetRetrievalCost() throws Exception{
    // The retrieval cost of an index-backed result set is a fixed constant;
    // obtaining it requires no database interaction, so only the
    // ConnectionManager needs to be mocked.
    ConnectionManager mockedConnectionManager = mock(ConnectionManager.class);

    SQLiteIndex<String, Car, Integer> index = new SQLiteIndex<String, Car, Integer>(
            Car.FEATURES,
            OBJECT_TO_ID,
            ID_TO_OBJECT,
            ""
    );
    ResultSet<Car> resultSet = index.retrieve(equal(Car.FEATURES, "abs"), createQueryOptions(mockedConnectionManager));

    assertEquals(SQLiteIndex.INDEX_RETRIEVAL_COST, resultSet.getRetrievalCost());
}
@Test
public void testNewResultSet_GetMergeCost() throws Exception{
// Verifies that ResultSet.getMergeCost() issues a COUNT(objectKey) query for
// the queried value and returns the count reported by the database, leaving
// the connection open.
// Mocks
ConnectionManager connectionManager = mock(ConnectionManager.class);
Connection connection = mock(Connection.class);
PreparedStatement preparedStatement = mock(PreparedStatement.class);
java.sql.ResultSet resultSet = mock(java.sql.ResultSet.class);
// Behaviour: stub the COUNT query expected for the merge-cost computation.
when(connectionManager.getConnection(any(SQLiteIndex.class), anyQueryOptions())).thenReturn(connection);
when(connection.prepareStatement("SELECT COUNT(objectKey) FROM " + TABLE_NAME + " WHERE value = ?;")).thenReturn(preparedStatement);
when(preparedStatement.executeQuery()).thenReturn(resultSet);
when(resultSet.getStatement()).thenReturn(preparedStatement);
when(resultSet.next()).thenReturn(true);
// The database reports 3 rows for the queried value.
when(resultSet.getInt(1)).thenReturn(3);
// Iterator
ResultSet<Car> carsWithAbs = new SQLiteIndex<String, Car, Integer>(
Car.FEATURES,
OBJECT_TO_ID,
ID_TO_OBJECT,
"")
.retrieve(equal(Car.FEATURES, "abs"), createQueryOptions(connectionManager));
assertNotNull(carsWithAbs);
int size = carsWithAbs.getMergeCost();
assertEquals(3, size);
// The connection must remain open; it is managed externally.
verify(connection, times(0)).close();
}
@Test
public void testNewResultSet_Contains() throws Exception{
// Verifies that ResultSet.contains(object) issues a LIMIT-1 existence query
// per call. Two separate mocked connections are used so that the positive
// and negative cases can be stubbed and verified independently.
// Mocks
ConnectionManager connectionManager = mock(ConnectionManager.class);
Connection connectionContains = mock(Connection.class);
Connection connectionDoNotContain = mock(Connection.class);
PreparedStatement preparedStatementContains = mock(PreparedStatement.class);
PreparedStatement preparedStatementDoNotContains = mock(PreparedStatement.class);
java.sql.ResultSet resultSetContains = mock(java.sql.ResultSet.class);
java.sql.ResultSet resultSetDoNotContain = mock(java.sql.ResultSet.class);
// Behaviour: the first contains() call gets connectionContains, the second
// gets connectionDoNotContain (Mockito consecutive-call stubbing).
when(connectionManager.getConnection(any(SQLiteIndex.class), anyQueryOptions())).thenReturn(connectionContains).thenReturn(connectionDoNotContain);
when(connectionContains.prepareStatement("SELECT objectKey FROM " + TABLE_NAME + " WHERE value = ? AND objectKey = ? LIMIT 1;")).thenReturn(preparedStatementContains);
when(connectionDoNotContain.prepareStatement("SELECT objectKey FROM " + TABLE_NAME + " WHERE value = ? AND objectKey = ? LIMIT 1;")).thenReturn(preparedStatementDoNotContains);
when(preparedStatementContains.executeQuery()).thenReturn(resultSetContains);
when(preparedStatementDoNotContains.executeQuery()).thenReturn(resultSetDoNotContain);
// A row exists for the first object; none for the second.
when(resultSetContains.next()).thenReturn(true).thenReturn(false);
when(resultSetDoNotContain.next()).thenReturn(false);
// Iterator
ResultSet<Car> carsWithAbs = new SQLiteIndex<String, Car, Integer>(
Car.FEATURES,
OBJECT_TO_ID,
ID_TO_OBJECT,
"")
.retrieve(equal(Car.FEATURES, "abs"), createQueryOptions(connectionManager));
assertNotNull(carsWithAbs);
boolean resultContains = carsWithAbs.contains(data.get(0));
assertTrue(resultContains);
verify(connectionContains, times(0)).close();
boolean resultDoNotContain = carsWithAbs.contains(data.get(1));
assertFalse(resultDoNotContain);
verify(connectionDoNotContain, times(0)).close();
}
@Test(expected = IllegalStateException.class)
public void testNewResultSet_Iterator_Exception_Close() throws Exception{
// Verifies that a SQLException thrown mid-iteration surfaces to the caller
// (as an IllegalStateException, per the @Test(expected=...) annotation) and
// that the statement and result set are still closed, while the
// externally-managed connection remains open.
// Mocks
ConnectionManager connectionManager = mock(ConnectionManager.class);
Connection connection = mock(Connection.class);
PreparedStatement preparedStatement = mock(PreparedStatement.class);
java.sql.ResultSet resultSet = mock(java.sql.ResultSet.class);
@SuppressWarnings("unchecked")
SimpleAttribute<Integer, Car> idToObject = (SimpleAttribute<Integer, Car>)mock(SimpleAttribute.class);
// Behaviour: the first row reads fine; reading the second row throws.
when(connectionManager.getConnection(any(SQLiteIndex.class), anyQueryOptions())).thenReturn(connection);
when(connection.prepareStatement("SELECT DISTINCT objectKey FROM " + TABLE_NAME + " WHERE value = ?;")).thenReturn(preparedStatement);
when(preparedStatement.executeQuery()).thenReturn(resultSet);
when(resultSet.getStatement()).thenReturn(preparedStatement);
when(resultSet.next()).thenReturn(true).thenReturn(true).thenReturn(false);
when(resultSet.getInt(1)).thenReturn(1).thenThrow(new SQLException("SQL exception"));
when(idToObject.getValue(eq(1), anyQueryOptions())).thenReturn(data.get(0));
// Iterator
try {
ResultSet<Car> carsWithAbs = new SQLiteIndex<String, Car, Integer>(
Car.FEATURES,
OBJECT_TO_ID,
idToObject,
"")
.retrieve(equal(Car.FEATURES, "abs"), createQueryOptions(connectionManager));
assertNotNull(carsWithAbs);
Iterator<Car> carsWithAbsIterator = carsWithAbs.iterator();
assertNotNull(carsWithAbsIterator.next());
carsWithAbsIterator.next();// Should throw exception!
}finally {
// Resource-cleanup assertions run even though the exception propagates.
verify(connection, times(0)).close(); // Connection should be left open
verify(preparedStatement, times(1)).close();
verify(resultSet, times(1)).close();
}
}
@Test
public void testNewResultSet_Iterator_Close() throws Exception{
    // Verifies that fully exhausting the result set's iterator closes the
    // JDBC statement and result set, while the externally-managed connection
    // is left open.
    // Mocks
    ConnectionManager connectionManager = mock(ConnectionManager.class);
    Connection connection = mock(Connection.class);
    PreparedStatement preparedStatement = mock(PreparedStatement.class);
    java.sql.ResultSet resultSet = mock(java.sql.ResultSet.class);
    @SuppressWarnings("unchecked")
    SimpleAttribute<Integer, Car> idToObject = (SimpleAttribute<Integer, Car>)mock(SimpleAttribute.class);
    // Behaviour: two rows match the query, with object keys 1 and 3.
    when(connectionManager.getConnection(any(SQLiteIndex.class), anyQueryOptions())).thenReturn(connection);
    when(connection.prepareStatement("SELECT DISTINCT objectKey FROM " + TABLE_NAME + " WHERE value = ?;")).thenReturn(preparedStatement);
    when(preparedStatement.executeQuery()).thenReturn(resultSet);
    when(resultSet.getStatement()).thenReturn(preparedStatement);
    when(resultSet.next()).thenReturn(true).thenReturn(true).thenReturn(false);
    when(resultSet.getInt(1)).thenReturn(1).thenReturn(3);
    when(idToObject.getValue(eq(1), anyQueryOptions())).thenReturn(data.get(0));
    when(idToObject.getValue(eq(3), anyQueryOptions())).thenReturn(data.get(2));
    // Iterator
    ResultSet<Car> carsWithAbs = new SQLiteIndex<String, Car, Integer>(
            Car.FEATURES,
            OBJECT_TO_ID,
            idToObject,
            "")
            .retrieve(equal(Car.FEATURES, "abs"), createQueryOptions(connectionManager));
    assertNotNull(carsWithAbs);
    // Typed iterator (was a raw Iterator, which suppresses generic type checks).
    Iterator<Car> carsWithAbsIterator = carsWithAbs.iterator();
    assertTrue(carsWithAbsIterator.hasNext());
    assertNotNull(carsWithAbsIterator.next());
    assertTrue(carsWithAbsIterator.hasNext());
    assertNotNull(carsWithAbsIterator.next());
    assertFalse(carsWithAbsIterator.hasNext());
    // The end of the iteration should close the resources
    verify(connection, times(0)).close(); // Connection should be left open
    verify(preparedStatement, times(1)).close();
    verify(resultSet, times(1)).close();
}
@Test
public void testNewResultSet_Close() throws Exception{
    // Verifies that explicitly closing a partially-consumed result set closes
    // the JDBC statement and result set, while the externally-managed
    // connection is left open.
    // Mocks
    ConnectionManager connectionManager = mock(ConnectionManager.class);
    Connection connection = mock(Connection.class);
    PreparedStatement preparedStatement = mock(PreparedStatement.class);
    java.sql.ResultSet resultSet = mock(java.sql.ResultSet.class);
    @SuppressWarnings("unchecked")
    SimpleAttribute<Integer, Car> idToObject = (SimpleAttribute<Integer, Car>)mock(SimpleAttribute.class);
    // Behaviour: two rows match the query, with object keys 1 and 3.
    when(connectionManager.getConnection(any(SQLiteIndex.class), anyQueryOptions())).thenReturn(connection);
    when(connection.prepareStatement("SELECT DISTINCT objectKey FROM " + TABLE_NAME + " WHERE value = ?;")).thenReturn(preparedStatement);
    when(preparedStatement.executeQuery()).thenReturn(resultSet);
    when(resultSet.getStatement()).thenReturn(preparedStatement);
    when(resultSet.next()).thenReturn(true).thenReturn(true).thenReturn(false);
    when(resultSet.getInt(1)).thenReturn(1).thenReturn(3);
    when(idToObject.getValue(eq(1), anyQueryOptions())).thenReturn(data.get(0));
    when(idToObject.getValue(eq(3), anyQueryOptions())).thenReturn(data.get(2));
    // Iterator
    ResultSet<Car> carsWithAbs = new SQLiteIndex<String, Car, Integer>(
            Car.FEATURES,
            OBJECT_TO_ID,
            idToObject,
            "")
            .retrieve(equal(Car.FEATURES, "abs"), createQueryOptions(connectionManager));
    assertNotNull(carsWithAbs);
    // Typed iterator (was a raw Iterator, which suppresses generic type checks).
    Iterator<Car> carsWithAbsIterator = carsWithAbs.iterator();
    assertTrue(carsWithAbsIterator.hasNext());
    assertNotNull(carsWithAbsIterator.next());
    // Do not continue with the iteration, but close
    carsWithAbs.close();
    verify(connection, times(0)).close(); // Connection should be left open
    verify(preparedStatement, times(1)).close();
    verify(resultSet, times(1)).close();
}
@Test
public void testRowIterable(){
    // rowIterable flattens every car into one (carId, featureValue) row per
    // feature, preserving the order of the input collection.
    Iterable<DBQueries.Row<Integer, String>> rows = SQLiteIndex.rowIterable(data, Car.CAR_ID, Car.FEATURES, null);
    assertNotNull(rows);
    Iterator<DBQueries.Row<Integer, String>> rowsIterator = rows.iterator();
    assertNotNull(rowsIterator);

    List<DBQueries.Row<Integer, String>> expectedRows = Arrays.asList(
            new DBQueries.Row<Integer, String>(1, "abs"),
            new DBQueries.Row<Integer, String>(1, "gps"),
            new DBQueries.Row<Integer, String>(2, "airbags"),
            new DBQueries.Row<Integer, String>(3, "abs"),
            new DBQueries.Row<Integer, String>(5, "gps")
    );
    for (DBQueries.Row<Integer, String> expectedRow : expectedRows) {
        assertTrue(rowsIterator.hasNext());
        assertEquals(expectedRow, rowsIterator.next());
    }
    assertFalse(rowsIterator.hasNext());
}
@Test
public void testObjectKeyIterable(){
    // objectKeyIterable yields each object's primary key once, in input order.
    Iterable<Integer> objectKeys = SQLiteIndex.objectKeyIterable(data, Car.CAR_ID, null);
    assertNotNull(objectKeys);
    Iterator<Integer> objectKeysIterator = objectKeys.iterator();
    assertNotNull(objectKeysIterator);
    // The test data contains cars with ids 1..5.
    // Integer.valueOf replaces the deprecated new Integer(int) constructor.
    for (int expectedKey = 1; expectedKey <= 5; expectedKey++) {
        assertTrue(objectKeysIterator.hasNext());
        assertEquals(Integer.valueOf(expectedKey), objectKeysIterator.next());
    }
    assertFalse(objectKeysIterator.hasNext());
}
@Test
public void testGetDistinctKeys_AllAscending(){
    ConnectionManager connectionManager = temporaryInMemoryDatabase.getConnectionManager(true);
    SQLiteIndex<String, Car, Integer> index = SQLiteIndex.onAttribute(
            Car.MODEL,
            Car.CAR_ID,
            new SimpleAttribute<Integer, Car>() {
                @Override
                public Car getValue(Integer carId, QueryOptions queryOptions) {
                    return CarFactory.createCar(carId);
                }
            }
    );
    index.addAll(createObjectSetOfCars(10), createQueryOptions(connectionManager));

    // Distinct keys should come back de-duplicated, in ascending order.
    List<String> actualKeys = Lists.newArrayList(index.getDistinctKeys(createQueryOptions(connectionManager)));
    assertEquals(
            Arrays.asList("Accord", "Avensis", "Civic", "Focus", "Fusion", "Hilux", "Insight", "M6", "Prius", "Taurus"),
            actualKeys
    );
}
@Test
public void testGetDistinctKeys_AllDescending(){
    ConnectionManager connectionManager = temporaryInMemoryDatabase.getConnectionManager(true);
    SQLiteIndex<String, Car, Integer> index = SQLiteIndex.onAttribute(
            Car.MODEL,
            Car.CAR_ID,
            new SimpleAttribute<Integer, Car>() {
                @Override
                public Car getValue(Integer carId, QueryOptions queryOptions) {
                    return CarFactory.createCar(carId);
                }
            }
    );
    index.addAll(createObjectSetOfCars(10), createQueryOptions(connectionManager));

    // Distinct keys should come back de-duplicated, in descending order.
    List<String> actualKeys = Lists.newArrayList(index.getDistinctKeysDescending(createQueryOptions(connectionManager)));
    assertEquals(
            Arrays.asList("Taurus", "Prius", "M6", "Insight", "Hilux", "Fusion", "Focus", "Civic", "Avensis", "Accord"),
            actualKeys
    );
}
@Test
public void testGetDistinctKeys_GreaterThanExclusiveAscending(){
    ConnectionManager connectionManager = temporaryInMemoryDatabase.getConnectionManager(true);
    SQLiteIndex<String, Car, Integer> index = SQLiteIndex.onAttribute(
            Car.MODEL,
            Car.CAR_ID,
            new SimpleAttribute<Integer, Car>() {
                @Override
                public Car getValue(Integer carId, QueryOptions queryOptions) {
                    return CarFactory.createCar(carId);
                }
            }
    );
    index.addAll(createObjectSetOfCars(10), createQueryOptions(connectionManager));

    // An exclusive lower bound below every key returns all keys...
    assertEquals(
            Arrays.asList("Accord", "Avensis", "Civic", "Focus", "Fusion", "Hilux", "Insight", "M6", "Prius", "Taurus"),
            Lists.newArrayList(index.getDistinctKeys("", false, null, true, createQueryOptions(connectionManager)))
    );
    assertEquals(
            Arrays.asList("Accord", "Avensis", "Civic", "Focus", "Fusion", "Hilux", "Insight", "M6", "Prius", "Taurus"),
            Lists.newArrayList(index.getDistinctKeys("A", false, null, true, createQueryOptions(connectionManager)))
    );
    // ...whereas a bound equal to the lowest key excludes that key itself.
    assertEquals(
            Arrays.asList("Avensis", "Civic", "Focus", "Fusion", "Hilux", "Insight", "M6", "Prius", "Taurus"),
            Lists.newArrayList(index.getDistinctKeys("Accord", false, null, true, createQueryOptions(connectionManager)))
    );
}
@Test
public void testGetDistinctKeys_GreaterThanInclusiveAscending(){
    ConnectionManager connectionManager = temporaryInMemoryDatabase.getConnectionManager(true);
    SQLiteIndex<String, Car, Integer> index = SQLiteIndex.onAttribute(
            Car.MODEL,
            Car.CAR_ID,
            new SimpleAttribute<Integer, Car>() {
                @Override
                public Car getValue(Integer carId, QueryOptions queryOptions) {
                    return CarFactory.createCar(carId);
                }
            }
    );
    index.addAll(createObjectSetOfCars(10), createQueryOptions(connectionManager));

    // An inclusive lower bound equal to the lowest key keeps that key.
    assertEquals(
            Arrays.asList("Accord", "Avensis", "Civic", "Focus", "Fusion", "Hilux", "Insight", "M6", "Prius", "Taurus"),
            Lists.newArrayList(index.getDistinctKeys("Accord", true, null, true, createQueryOptions(connectionManager)))
    );
}
@Test
public void testGetDistinctKeys_LessThanExclusiveAscending(){
    ConnectionManager connectionManager = temporaryInMemoryDatabase.getConnectionManager(true);
    SQLiteIndex<String, Car, Integer> index = SQLiteIndex.onAttribute(
            Car.MODEL,
            Car.CAR_ID,
            new SimpleAttribute<Integer, Car>() {
                @Override
                public Car getValue(Integer carId, QueryOptions queryOptions) {
                    return CarFactory.createCar(carId);
                }
            }
    );
    index.addAll(createObjectSetOfCars(10), createQueryOptions(connectionManager));

    // An exclusive upper bound below every key returns nothing...
    assertEquals(
            Collections.<String>emptyList(),
            Lists.newArrayList(index.getDistinctKeys(null, true, "", false, createQueryOptions(connectionManager)))
    );
    // ...a bound above every key returns everything...
    assertEquals(
            Arrays.asList("Accord", "Avensis", "Civic", "Focus", "Fusion", "Hilux", "Insight", "M6", "Prius", "Taurus"),
            Lists.newArrayList(index.getDistinctKeys(null, true, "Z", false, createQueryOptions(connectionManager)))
    );
    // ...and a bound equal to an existing key excludes that key.
    assertEquals(
            Arrays.asList("Accord", "Avensis", "Civic", "Focus", "Fusion", "Hilux", "Insight", "M6"),
            Lists.newArrayList(index.getDistinctKeys(null, true, "Prius", false, createQueryOptions(connectionManager)))
    );
}
@Test
public void testGetDistinctKeys_LessThanInclusiveAscending(){
    ConnectionManager connectionManager = temporaryInMemoryDatabase.getConnectionManager(true);
    SQLiteIndex<String, Car, Integer> index = SQLiteIndex.onAttribute(
            Car.MODEL,
            Car.CAR_ID,
            new SimpleAttribute<Integer, Car>() {
                @Override
                public Car getValue(Integer carId, QueryOptions queryOptions) {
                    return CarFactory.createCar(carId);
                }
            }
    );
    index.addAll(createObjectSetOfCars(10), createQueryOptions(connectionManager));

    // An inclusive upper bound equal to an existing key keeps that key.
    assertEquals(
            Arrays.asList("Accord", "Avensis", "Civic", "Focus", "Fusion", "Hilux", "Insight", "M6", "Prius"),
            Lists.newArrayList(index.getDistinctKeys(null, true, "Prius", true, createQueryOptions(connectionManager)))
    );
}
@Test
public void testGetDistinctKeys_BetweenExclusiveAscending(){
    ConnectionManager connectionManager = temporaryInMemoryDatabase.getConnectionManager(true);
    SQLiteIndex<String, Car, Integer> index = SQLiteIndex.onAttribute(
            Car.MODEL,
            Car.CAR_ID,
            new SimpleAttribute<Integer, Car>() {
                @Override
                public Car getValue(Integer carId, QueryOptions queryOptions) {
                    return CarFactory.createCar(carId);
                }
            }
    );
    index.addAll(createObjectSetOfCars(10), createQueryOptions(connectionManager));

    // Exclusive bounds drop both endpoint keys.
    assertEquals(
            Arrays.asList("Focus", "Fusion", "Hilux"),
            Lists.newArrayList(index.getDistinctKeys("Civic", false, "Insight", false, createQueryOptions(connectionManager)))
    );
}
@Test
public void testGetDistinctKeys_BetweenInclusiveAscending(){
    ConnectionManager connectionManager = temporaryInMemoryDatabase.getConnectionManager(true);
    SQLiteIndex<String, Car, Integer> index = SQLiteIndex.onAttribute(
            Car.MODEL,
            Car.CAR_ID,
            new SimpleAttribute<Integer, Car>() {
                @Override
                public Car getValue(Integer carId, QueryOptions queryOptions) {
                    return CarFactory.createCar(carId);
                }
            }
    );
    index.addAll(createObjectSetOfCars(10), createQueryOptions(connectionManager));

    // Inclusive bounds keep both endpoint keys.
    assertEquals(
            Arrays.asList("Civic", "Focus", "Fusion", "Hilux", "Insight"),
            Lists.newArrayList(index.getDistinctKeys("Civic", true, "Insight", true, createQueryOptions(connectionManager)))
    );
}
@Test
public void testGetDistinctKeys_BetweenInclusiveDescending(){
    ConnectionManager connectionManager = temporaryInMemoryDatabase.getConnectionManager(true);
    SQLiteIndex<String, Car, Integer> index = SQLiteIndex.onAttribute(
            Car.MODEL,
            Car.CAR_ID,
            new SimpleAttribute<Integer, Car>() {
                @Override
                public Car getValue(Integer carId, QueryOptions queryOptions) {
                    return CarFactory.createCar(carId);
                }
            }
    );
    index.addAll(createObjectSetOfCars(10), createQueryOptions(connectionManager));

    // Same inclusive range as the ascending test, but in reverse key order.
    assertEquals(
            Arrays.asList("Insight", "Hilux", "Fusion", "Focus", "Civic"),
            Lists.newArrayList(index.getDistinctKeysDescending("Civic", true, "Insight", true, createQueryOptions(connectionManager)))
    );
}
@Test
public void testGetKeysAndValues(){
    ConnectionManager connectionManager = temporaryInMemoryDatabase.getConnectionManager(true);
    SQLiteIndex<String, Car, Integer> index = SQLiteIndex.onAttribute(
            Car.MANUFACTURER,
            Car.CAR_ID,
            new SimpleAttribute<Integer, Car>() {
                @Override
                public Car getValue(Integer carId, QueryOptions queryOptions) {
                    return CarFactory.createCar(carId);
                }
            }
    );
    index.addAll(createObjectSetOfCars(10), createQueryOptions(connectionManager));

    // Build the expected multimap with keys inserted in ascending order;
    // the linkedHashKeys multimap preserves that insertion order so the key
    // order can be asserted as well as the key/value associations.
    Multimap<String, Car> expected = MultimapBuilder.SetMultimapBuilder.linkedHashKeys().hashSetValues().build();
    expected.put("BMW", CarFactory.createCar(9));
    for (int carId = 0; carId <= 2; carId++) {
        expected.put("Ford", CarFactory.createCar(carId));
    }
    for (int carId = 3; carId <= 5; carId++) {
        expected.put("Honda", CarFactory.createCar(carId));
    }
    for (int carId = 6; carId <= 8; carId++) {
        expected.put("Toyota", CarFactory.createCar(carId));
    }

    Multimap<String, Car> actual = MultimapBuilder.SetMultimapBuilder.linkedHashKeys().hashSetValues().build();
    CloseableIterable<KeyValue<String, Car>> keysAndValues = index.getKeysAndValues(createQueryOptions(connectionManager));
    for (KeyValue<String, Car> keyValue : keysAndValues) {
        actual.put(keyValue.getKey(), keyValue.getValue());
    }
    assertEquals("keys and values", expected, actual);
    assertEquals("key order", Lists.newArrayList(expected.keySet()), Lists.newArrayList(actual.keySet()));
}
@Test
public void testGetKeysAndValuesDescending(){
    ConnectionManager connectionManager = temporaryInMemoryDatabase.getConnectionManager(true);
    SQLiteIndex<String, Car, Integer> index = SQLiteIndex.onAttribute(
            Car.MANUFACTURER,
            Car.CAR_ID,
            new SimpleAttribute<Integer, Car>() {
                @Override
                public Car getValue(Integer carId, QueryOptions queryOptions) {
                    return CarFactory.createCar(carId);
                }
            }
    );
    index.addAll(createObjectSetOfCars(10), createQueryOptions(connectionManager));

    // Build the expected multimap with keys inserted in descending order;
    // the linkedHashKeys multimap preserves that insertion order so the key
    // order can be asserted as well as the key/value associations.
    Multimap<String, Car> expected = MultimapBuilder.SetMultimapBuilder.linkedHashKeys().hashSetValues().build();
    for (int carId = 6; carId <= 8; carId++) {
        expected.put("Toyota", CarFactory.createCar(carId));
    }
    for (int carId = 3; carId <= 5; carId++) {
        expected.put("Honda", CarFactory.createCar(carId));
    }
    for (int carId = 0; carId <= 2; carId++) {
        expected.put("Ford", CarFactory.createCar(carId));
    }
    expected.put("BMW", CarFactory.createCar(9));

    Multimap<String, Car> actual = MultimapBuilder.SetMultimapBuilder.linkedHashKeys().hashSetValues().build();
    CloseableIterable<KeyValue<String, Car>> keysAndValues = index.getKeysAndValuesDescending(createQueryOptions(connectionManager));
    for (KeyValue<String, Car> keyValue : keysAndValues) {
        actual.put(keyValue.getKey(), keyValue.getValue());
    }
    assertEquals("keys and values", expected, actual);
    assertEquals("key order", Lists.newArrayList(expected.keySet()), Lists.newArrayList(actual.keySet()));
}
@Test
public void testGetCountOfDistinctKeys(){
    ConnectionManager connectionManager = temporaryInMemoryDatabase.getConnectionManager(true);
    SQLiteIndex<String, Car, Integer> index = SQLiteIndex.onAttribute(
            Car.MANUFACTURER,
            Car.CAR_ID,
            new SimpleAttribute<Integer, Car>() {
                @Override
                public Car getValue(Integer carId, QueryOptions queryOptions) {
                    return CarFactory.createCar(carId);
                }
            }
    );
    index.addAll(createObjectSetOfCars(20), createQueryOptions(connectionManager));

    // 20 generated cars span exactly 4 distinct manufacturers.
    Integer distinctKeyCount = index.getCountOfDistinctKeys(createQueryOptions(connectionManager));
    Assert.assertEquals(Integer.valueOf(4), distinctKeyCount);
}
@Test
public void testGetStatisticsForDistinctKeys(){
    ConnectionManager connectionManager = temporaryInMemoryDatabase.getConnectionManager(true);
    SQLiteIndex<String, Car, Integer> index = SQLiteIndex.onAttribute(
            Car.MANUFACTURER,
            Car.CAR_ID,
            new SimpleAttribute<Integer, Car>() {
                @Override
                public Car getValue(Integer carId, QueryOptions queryOptions) {
                    return CarFactory.createCar(carId);
                }
            }
    );
    index.addAll(createObjectSetOfCars(20), createQueryOptions(connectionManager));

    // Compared as sets: only the key/frequency pairs matter in this test.
    Set<KeyStatistics<String>> expectedStatistics = setOf(
            new KeyStatistics<String>("Ford", 6),
            new KeyStatistics<String>("Honda", 6),
            new KeyStatistics<String>("Toyota", 6),
            new KeyStatistics<String>("BMW", 2)
    );
    Set<KeyStatistics<String>> actualStatistics = setOf(index.getStatisticsForDistinctKeys(createQueryOptions(connectionManager)));
    Assert.assertEquals(expectedStatistics, actualStatistics);
}
@Test
public void testGetStatisticsForDistinctKeysDescending(){
    ConnectionManager connectionManager = temporaryInMemoryDatabase.getConnectionManager(true);
    SQLiteIndex<String, Car, Integer> index = SQLiteIndex.onAttribute(
            Car.MANUFACTURER,
            Car.CAR_ID,
            new SimpleAttribute<Integer, Car>() {
                @Override
                public Car getValue(Integer carId, QueryOptions queryOptions) {
                    return CarFactory.createCar(carId);
                }
            }
    );
    index.addAll(createObjectSetOfCars(20), createQueryOptions(connectionManager));

    // Compared as sets: only the key/frequency pairs matter in this test.
    Set<KeyStatistics<String>> expectedStatistics = setOf(
            new KeyStatistics<String>("Toyota", 6),
            new KeyStatistics<String>("Honda", 6),
            new KeyStatistics<String>("Ford", 6),
            new KeyStatistics<String>("BMW", 2)
    );
    Set<KeyStatistics<String>> actualStatistics = setOf(index.getStatisticsForDistinctKeysDescending(createQueryOptions(connectionManager)));
    Assert.assertEquals(expectedStatistics, actualStatistics);
}
@Test(expected = IllegalStateException.class)
public void testNewResultSet_FilterQuery_Iterator_Exception_Close() throws Exception{
// Filter-query variant of the exception test: a SQLException thrown while
// reading the full-table scan surfaces to the caller (as an
// IllegalStateException, per the @Test(expected=...) annotation), and the
// statement and result set are still closed while the connection stays open.
// Mocks
FilterQuery<Car, String> filterQuery = mockFilterQuery();
ConnectionManager connectionManager = mock(ConnectionManager.class);
Connection connection = mock(Connection.class);
Statement statement = mock(PreparedStatement.class);
java.sql.ResultSet resultSet = mock(java.sql.ResultSet.class);
@SuppressWarnings("unchecked")
SimpleAttribute<Integer, Car> idToObject = (SimpleAttribute<Integer, Car>)mock(SimpleAttribute.class);
// Behaviour: filter queries scan the whole table; the first row reads fine,
// reading the second row throws.
when(connectionManager.getConnection(any(SQLiteIndex.class), anyQueryOptions())).thenReturn(connection);
when(connection.createStatement()).thenReturn(statement);
when(statement.executeQuery("SELECT objectKey, value FROM " + TABLE_NAME + " ORDER BY objectKey;")).thenReturn(resultSet);
when(resultSet.getStatement()).thenReturn(statement);
when(resultSet.next()).thenReturn(true).thenReturn(true).thenReturn(false);
when(resultSet.getInt(1)).thenReturn(1).thenThrow(new SQLException("SQL exception"));
when(idToObject.getValue(eq(1), anyQueryOptions())).thenReturn(data.get(0));
// Iterator
try {
ResultSet<Car> cars = new SQLiteIndex<String, Car, Integer>(
Car.FEATURES,
OBJECT_TO_ID,
idToObject,
"")
.retrieve(filterQuery, createQueryOptions(connectionManager));
assertNotNull(cars);
Iterator<Car> carsWithAbsIterator = cars.iterator();
assertNotNull(carsWithAbsIterator.next());
carsWithAbsIterator.next();// Should throw exception!
}finally {
// Resource-cleanup assertions run even though the exception propagates.
verify(connection, times(0)).close(); // Connection should be left open
verify(statement, times(1)).close();
verify(resultSet, times(1)).close();
}
}
@Test
public void testNewResultSet_FilterQuery_Iterator_Close() throws Exception{
    // Verifies that fully exhausting a filter-query result set (which scans
    // the whole index table and applies the filter in-process) closes the
    // JDBC statement and result set, leaving the connection open.
    // Mocks
    FilterQuery<Car, String> filterQuery = mockFilterQuery();
    ConnectionManager connectionManager = mock(ConnectionManager.class);
    Connection connection = mock(Connection.class);
    Statement statement = mock(PreparedStatement.class);
    java.sql.ResultSet resultSet = mock(java.sql.ResultSet.class);
    @SuppressWarnings("unchecked")
    SimpleAttribute<Integer, Car> idToObject = (SimpleAttribute<Integer, Car>)mock(SimpleAttribute.class);
    // Behaviour: six (objectKey, value) rows are scanned.
    when(connectionManager.getConnection(any(SQLiteIndex.class), anyQueryOptions())).thenReturn(connection);
    when(connection.createStatement()).thenReturn(statement);
    when(statement.executeQuery("SELECT objectKey, value FROM " + TABLE_NAME + " ORDER BY objectKey;")).thenReturn(resultSet);
    when(resultSet.getStatement()).thenReturn(statement);
    when(resultSet.next()).thenReturn(true).thenReturn(true).thenReturn(true).thenReturn(true).thenReturn(true).thenReturn(true).thenReturn(false);
    when(resultSet.getInt(1)).thenReturn(1).thenReturn(1).thenReturn(2).thenReturn(3).thenReturn(4).thenReturn(5);
    when(resultSet.getString(2)).thenReturn("abs").thenReturn("gps").thenReturn("airbags").thenReturn("abs").thenReturn("").thenReturn("gps");
    when(idToObject.getValue(eq(1), anyQueryOptions())).thenReturn(data.get(0));
    when(idToObject.getValue(eq(3), anyQueryOptions())).thenReturn(data.get(2));
    when(idToObject.getValue(eq(5), anyQueryOptions())).thenReturn(data.get(4));
    // Iterator
    ResultSet<Car> carsWithAbs = new SQLiteIndex<String, Car, Integer>(
            Car.FEATURES,
            OBJECT_TO_ID,
            idToObject,
            "")
            .retrieve(filterQuery, createQueryOptions(connectionManager));
    assertNotNull(carsWithAbs);
    // Typed iterator (was a raw Iterator, which suppresses generic type checks).
    Iterator<Car> carsWithAbsIterator = carsWithAbs.iterator();
    assertTrue(carsWithAbsIterator.hasNext());
    assertNotNull(carsWithAbsIterator.next());
    assertTrue(carsWithAbsIterator.hasNext());
    assertNotNull(carsWithAbsIterator.next());
    assertTrue(carsWithAbsIterator.hasNext());
    assertNotNull(carsWithAbsIterator.next());
    assertTrue(carsWithAbsIterator.hasNext());
    assertNotNull(carsWithAbsIterator.next());
    assertFalse(carsWithAbsIterator.hasNext());
    // The end of the iteration should close the resources
    verify(connection, times(0)).close(); // Connection should be left open
    verify(statement, times(1)).close();
    verify(resultSet, times(1)).close();
}
@Test
public void testNewResultSet_FilterQuery_Close() throws Exception{
    // Verifies that explicitly closing a partially-consumed filter-query
    // result set closes the JDBC statement and result set, leaving the
    // connection open.
    // Mocks
    FilterQuery<Car, String> filterQuery = mockFilterQuery();
    ConnectionManager connectionManager = mock(ConnectionManager.class);
    Connection connection = mock(Connection.class);
    Statement statement = mock(PreparedStatement.class);
    java.sql.ResultSet resultSet = mock(java.sql.ResultSet.class);
    @SuppressWarnings("unchecked")
    SimpleAttribute<Integer, Car> idToObject = (SimpleAttribute<Integer, Car>)mock(SimpleAttribute.class);
    // Behaviour: six (objectKey, value) rows would be scanned if iterated fully.
    when(connectionManager.getConnection(any(SQLiteIndex.class), anyQueryOptions())).thenReturn(connection);
    when(connection.createStatement()).thenReturn(statement);
    when(statement.executeQuery("SELECT objectKey, value FROM " + TABLE_NAME + " ORDER BY objectKey;")).thenReturn(resultSet);
    when(resultSet.getStatement()).thenReturn(statement);
    when(resultSet.next()).thenReturn(true).thenReturn(true).thenReturn(true).thenReturn(true).thenReturn(true).thenReturn(true).thenReturn(false);
    when(resultSet.getInt(1)).thenReturn(1).thenReturn(1).thenReturn(2).thenReturn(3).thenReturn(4).thenReturn(5);
    when(resultSet.getString(2)).thenReturn("abs").thenReturn("gps").thenReturn("airbags").thenReturn("abs").thenReturn("").thenReturn("gps");
    when(idToObject.getValue(eq(1), anyQueryOptions())).thenReturn(data.get(0));
    when(idToObject.getValue(eq(3), anyQueryOptions())).thenReturn(data.get(2));
    when(idToObject.getValue(eq(5), anyQueryOptions())).thenReturn(data.get(4));
    // Iterator
    ResultSet<Car> carsWithAbs = new SQLiteIndex<String, Car, Integer>(
            Car.FEATURES,
            OBJECT_TO_ID,
            idToObject,
            "")
            .retrieve(filterQuery, createQueryOptions(connectionManager));
    assertNotNull(carsWithAbs);
    // Typed iterator (was a raw Iterator, which suppresses generic type checks).
    Iterator<Car> carsWithAbsIterator = carsWithAbs.iterator();
    assertTrue(carsWithAbsIterator.hasNext());
    assertNotNull(carsWithAbsIterator.next());
    // Do not continue with the iteration, but close
    carsWithAbs.close();
    verify(connection, times(0)).close(); // Connection should be left open
    verify(statement, times(1)).close();
    verify(resultSet, times(1)).close();
}
@Test
public void testNewResultSet_FilterQuery_GetMergeCost() throws Exception{
// Verifies that getMergeCost() for a filter query counts every row in the
// index table (no WHERE clause — filtering happens in-process), returning
// the count reported by the database and leaving the connection open.
// Mocks
FilterQuery<Car, String> filterQuery = mockFilterQuery();
ConnectionManager connectionManager = mock(ConnectionManager.class);
Connection connection = mock(Connection.class);
PreparedStatement preparedStatement = mock(PreparedStatement.class);
java.sql.ResultSet resultSet = mock(java.sql.ResultSet.class);
// Behaviour: the stubbed SQL (including the space before ';') must match the
// statement the index generates exactly, or the mock returns null.
when(connectionManager.getConnection(any(SQLiteIndex.class), anyQueryOptions())).thenReturn(connection);
when(connection.prepareStatement("SELECT COUNT(objectKey) FROM " + TABLE_NAME + " ;")).thenReturn(preparedStatement);
when(preparedStatement.executeQuery()).thenReturn(resultSet);
when(resultSet.getStatement()).thenReturn(preparedStatement);
when(resultSet.next()).thenReturn(true);
// The database reports 3 rows in total.
when(resultSet.getInt(1)).thenReturn(3);
// Iterator
ResultSet<Car> cars = new SQLiteIndex<String, Car, Integer>(
Car.FEATURES,
OBJECT_TO_ID,
ID_TO_OBJECT,
"")
.retrieve(filterQuery, createQueryOptions(connectionManager));
assertNotNull(cars);
int mergeCost = cars.getMergeCost();
assertEquals(3, mergeCost);
verify(connection, times(0)).close();
}
@Test
public void testNewResultSet_FilterQuery_GetRetrievalCost(){
// Mocks
FilterQuery<Car, String> filterQuery = mockFilterQuery();
ConnectionManager connectionManager = mock(ConnectionManager.class);
// Iterator
ResultSet<Car> carsWithAbs = new SQLiteIndex<String, Car, Integer>(
Car.FEATURES,
OBJECT_TO_ID,
ID_TO_OBJECT,
"")
.retrieve(filterQuery, createQueryOptions(connectionManager));
assertEquals(carsWithAbs.getRetrievalCost(), SQLiteIndex.INDEX_RETRIEVAL_COST_FILTERING);
}
    @Test
    public void testNewResultSet_FilterQuery_Contains() throws Exception{
        // Mocks: one connection/statement/result-set triple per contains() invocation below.
        FilterQuery<Car, String> filterQuery = mockFilterQuery();
        ConnectionManager connectionManager = mock(ConnectionManager.class);
        Connection connectionContains = mock(Connection.class);
        Connection connectionDoNotContain = mock(Connection.class);
        Connection connectionNoRows = mock(Connection.class);
        PreparedStatement preparedStatementContains = mock(PreparedStatement.class);
        PreparedStatement preparedStatementDoNotContains = mock(PreparedStatement.class);
        PreparedStatement preparedStatementNoRows = mock(PreparedStatement.class);
        java.sql.ResultSet resultSetContains = mock(java.sql.ResultSet.class);
        java.sql.ResultSet resultSetDoNotContain = mock(java.sql.ResultSet.class);
        java.sql.ResultSet resultSetNoRows = mock(java.sql.ResultSet.class);

        // Behaviour
        //SELECT objectKey, value FROM cqtbl_%s WHERE objectKey=?
        // Consecutive getConnection() calls hand out the three connections in the
        // same order as the three contains() calls made below.
        when(connectionManager.getConnection(any(SQLiteIndex.class), anyQueryOptions())).thenReturn(connectionContains).thenReturn(connectionDoNotContain).thenReturn(connectionNoRows);
        when(connectionContains.prepareStatement("SELECT objectKey, value FROM " + TABLE_NAME + " WHERE objectKey = ?")).thenReturn(preparedStatementContains);
        when(connectionDoNotContain.prepareStatement("SELECT objectKey, value FROM " + TABLE_NAME + " WHERE objectKey = ?")).thenReturn(preparedStatementDoNotContains);
        when(connectionNoRows.prepareStatement("SELECT objectKey, value FROM " + TABLE_NAME + " WHERE objectKey = ?")).thenReturn(preparedStatementNoRows);
        when(preparedStatementContains.executeQuery()).thenReturn(resultSetContains);
        when(preparedStatementDoNotContains.executeQuery()).thenReturn(resultSetDoNotContain);
        when(preparedStatementNoRows.executeQuery()).thenReturn(resultSetNoRows);
        // Object 1 has "abs" and "gps": both accepted by the mock filter -> contains == true.
        when(resultSetContains.next()).thenReturn(true).thenReturn(true).thenReturn(false);
        when(resultSetContains.getInt(1)).thenReturn(1).thenReturn(1);
        when(resultSetContains.getString(2)).thenReturn("abs").thenReturn("gps");
        // Object 2 only has "airbags": rejected by the filter -> contains == false.
        when(resultSetDoNotContain.next()).thenReturn(true).thenReturn(false);
        when(resultSetDoNotContain.getInt(1)).thenReturn(2);
        when(resultSetDoNotContain.getString(2)).thenReturn("airbags");
        // Object 100 has no rows at all in the index table -> contains == false.
        when(resultSetNoRows.next()).thenReturn(false);

        // Iterator
        ResultSet<Car> carsWithAbs = new SQLiteIndex<String, Car, Integer>(
                Car.FEATURES,
                OBJECT_TO_ID,
                ID_TO_OBJECT,
                "")
                .retrieve(filterQuery, createQueryOptions(connectionManager));

        assertNotNull(carsWithAbs);
        boolean resultContains = carsWithAbs.contains(data.get(0));
        assertTrue(resultContains);
        verify(connectionContains, times(0)).close(); // connection lifecycle belongs to the caller
        boolean resultDoNotContain = carsWithAbs.contains(data.get(1));
        assertFalse(resultDoNotContain);
        verify(connectionDoNotContain, times(0)).close();
        boolean resultNoRows = carsWithAbs.contains(CarFactory.createCar(100));
        assertFalse(resultNoRows);
        verify(connectionNoRows, times(0)).close();
    }
@Test
public void testNewResultSet_FilterQuery_Size() throws Exception{
// Mocks
FilterQuery<Car, String> filterQuery = mockFilterQuery();
ConnectionManager connectionManager = mock(ConnectionManager.class);
Connection connection = mock(Connection.class);
Statement statement = mock(PreparedStatement.class);
java.sql.ResultSet resultSet = mock(java.sql.ResultSet.class);
// Behaviour//
when(connectionManager.getConnection(any(SQLiteIndex.class), anyQueryOptions())).thenReturn(connection);
when(connection.createStatement()).thenReturn(statement);
when(statement.executeQuery("SELECT objectKey, value FROM " + TABLE_NAME + " ORDER BY objectKey;")).thenReturn(resultSet);
when(resultSet.getStatement()).thenReturn(statement);
when(resultSet.next()).thenReturn(true).thenReturn(true).thenReturn(true).thenReturn(true).thenReturn(true).thenReturn(true).thenReturn(false);
when(resultSet.getInt(1)).thenReturn(1).thenReturn(1).thenReturn(2).thenReturn(3).thenReturn(4).thenReturn(5);
when(resultSet.getString(2)).thenReturn("abs").thenReturn("gps").thenReturn("airbags").thenReturn("abs").thenReturn("").thenReturn("gps");
ResultSet<Car> carsWithAbs = new SQLiteIndex<String, Car, Integer>(
Car.FEATURES,
OBJECT_TO_ID,
ID_TO_OBJECT,
"")
.retrieve(filterQuery, createQueryOptions(connectionManager));
assertNotNull(carsWithAbs);
int size = carsWithAbs.size();
assertEquals(3, size);
verify(connection, times(0)).close();
}
static FilterQuery<Car, String> mockFilterQuery(){
@SuppressWarnings("unchecked")
FilterQuery<Car, String> filterQuery = (FilterQuery<Car, String>)mock(FilterQuery.class);
when(filterQuery.matchesValue(Mockito.anyString(), any(QueryOptions.class))).thenAnswer(new Answer<Boolean>() {
@Override
public Boolean answer(InvocationOnMock invocationOnMock) throws Throwable {
Object[] args = invocationOnMock.getArguments();
if (args != null && args.length == 2 && args[0] instanceof String) {
String value = (String) args[0];
return "abs".equals(value) || "gps".equals(value);
}
throw new IllegalStateException("matchesValue invocation not expected. Args " + Arrays.toString(args));
}
});
return filterQuery;
}
static QueryOptions createQueryOptions(ConnectionManager connectionManager) {
QueryOptions queryOptions = new QueryOptions();
queryOptions.put(ConnectionManager.class, connectionManager);
return queryOptions;
}
    /** Returns a fresh, empty on-heap object store for Car instances. */
    static ObjectStore<Car> emptyObjectStore() {
        return new ConcurrentOnHeapObjectStore<Car>();
    }
static ObjectStore<Car> wrappingObjectStore(Collection<Car> objects) {
ConcurrentOnHeapObjectStore<Car> objectStore = new ConcurrentOnHeapObjectStore<Car>();
objectStore.addAll(objects, noQueryOptions());
return objectStore;
}
    /** Typed convenience wrapper around Mockito.any() for QueryOptions arguments. */
    static QueryOptions anyQueryOptions() {
        return Mockito.any();
    }
    /** Builds an ObjectSet containing {@code numCars} generated Car fixtures. */
    static ObjectSet<Car> createObjectSetOfCars(int numCars) {
        return ObjectSet.fromCollection(CarFactory.createCollectionOfCars(numCars));
    }
    /** Wraps an arbitrary collection of cars in an ObjectSet. */
    static ObjectSet<Car> objectSet(Collection<Car> collection) {
        return ObjectSet.fromCollection(collection);
    }
}
| |
/*
* Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.simpleworkflow.model;
import java.io.Serializable;
/**
* <p>
* Provides details of the <code>WorkflowExecutionCancelRequested</code>
* event.
* </p>
*/
public class WorkflowExecutionCancelRequestedEventAttributes implements Serializable, Cloneable {

    // NOTE(review): generated-style AWS SDK model class; accessors below follow the
    // get/set/with (fluent) pattern. Keep any manual edits consistent with that pattern.

    /**
     * The external workflow execution for which the cancellation was
     * requested.
     */
    private WorkflowExecution externalWorkflowExecution;

    /**
     * The ID of the
     * <code>RequestCancelExternalWorkflowExecutionInitiated</code> event
     * corresponding to the
     * <code>RequestCancelExternalWorkflowExecution</code> decision to cancel
     * this workflow execution.The source event with this ID can be found in
     * the history of the source workflow execution. This information can be
     * useful for diagnosing problems by tracing back the chain of events
     * leading up to this event.
     */
    private Long externalInitiatedEventId;

    /**
     * If set, indicates that the request to cancel the workflow execution
     * was automatically generated, and specifies the cause. This happens if
     * the parent workflow execution times out or is terminated, and the
     * child policy is set to cancel child executions.
     * <p>
     * <b>Constraints:</b><br/>
     * <b>Allowed Values: </b>CHILD_POLICY_APPLIED
     */
    private String cause;

    /**
     * The external workflow execution for which the cancellation was
     * requested.
     *
     * @return The external workflow execution for which the cancellation was
     *         requested.
     */
    public WorkflowExecution getExternalWorkflowExecution() {
        return externalWorkflowExecution;
    }

    /**
     * The external workflow execution for which the cancellation was
     * requested.
     *
     * @param externalWorkflowExecution The external workflow execution for which the cancellation was
     *         requested.
     */
    public void setExternalWorkflowExecution(WorkflowExecution externalWorkflowExecution) {
        this.externalWorkflowExecution = externalWorkflowExecution;
    }

    /**
     * The external workflow execution for which the cancellation was
     * requested.
     * <p>
     * Returns a reference to this object so that method calls can be chained together.
     *
     * @param externalWorkflowExecution The external workflow execution for which the cancellation was
     *         requested.
     *
     * @return A reference to this updated object so that method calls can be chained
     *         together.
     */
    public WorkflowExecutionCancelRequestedEventAttributes withExternalWorkflowExecution(WorkflowExecution externalWorkflowExecution) {
        this.externalWorkflowExecution = externalWorkflowExecution;
        return this;
    }

    /**
     * The ID of the
     * <code>RequestCancelExternalWorkflowExecutionInitiated</code> event
     * corresponding to the
     * <code>RequestCancelExternalWorkflowExecution</code> decision to cancel
     * this workflow execution.The source event with this ID can be found in
     * the history of the source workflow execution. This information can be
     * useful for diagnosing problems by tracing back the chain of events
     * leading up to this event.
     *
     * @return The ID of the
     *         <code>RequestCancelExternalWorkflowExecutionInitiated</code> event
     *         corresponding to the
     *         <code>RequestCancelExternalWorkflowExecution</code> decision to cancel
     *         this workflow execution.The source event with this ID can be found in
     *         the history of the source workflow execution. This information can be
     *         useful for diagnosing problems by tracing back the chain of events
     *         leading up to this event.
     */
    public Long getExternalInitiatedEventId() {
        return externalInitiatedEventId;
    }

    /**
     * The ID of the
     * <code>RequestCancelExternalWorkflowExecutionInitiated</code> event
     * corresponding to the
     * <code>RequestCancelExternalWorkflowExecution</code> decision to cancel
     * this workflow execution.The source event with this ID can be found in
     * the history of the source workflow execution. This information can be
     * useful for diagnosing problems by tracing back the chain of events
     * leading up to this event.
     *
     * @param externalInitiatedEventId The ID of the
     *         <code>RequestCancelExternalWorkflowExecutionInitiated</code> event
     *         corresponding to the
     *         <code>RequestCancelExternalWorkflowExecution</code> decision to cancel
     *         this workflow execution.The source event with this ID can be found in
     *         the history of the source workflow execution. This information can be
     *         useful for diagnosing problems by tracing back the chain of events
     *         leading up to this event.
     */
    public void setExternalInitiatedEventId(Long externalInitiatedEventId) {
        this.externalInitiatedEventId = externalInitiatedEventId;
    }

    /**
     * The ID of the
     * <code>RequestCancelExternalWorkflowExecutionInitiated</code> event
     * corresponding to the
     * <code>RequestCancelExternalWorkflowExecution</code> decision to cancel
     * this workflow execution.The source event with this ID can be found in
     * the history of the source workflow execution. This information can be
     * useful for diagnosing problems by tracing back the chain of events
     * leading up to this event.
     * <p>
     * Returns a reference to this object so that method calls can be chained together.
     *
     * @param externalInitiatedEventId The ID of the
     *         <code>RequestCancelExternalWorkflowExecutionInitiated</code> event
     *         corresponding to the
     *         <code>RequestCancelExternalWorkflowExecution</code> decision to cancel
     *         this workflow execution.The source event with this ID can be found in
     *         the history of the source workflow execution. This information can be
     *         useful for diagnosing problems by tracing back the chain of events
     *         leading up to this event.
     *
     * @return A reference to this updated object so that method calls can be chained
     *         together.
     */
    public WorkflowExecutionCancelRequestedEventAttributes withExternalInitiatedEventId(Long externalInitiatedEventId) {
        this.externalInitiatedEventId = externalInitiatedEventId;
        return this;
    }

    /**
     * If set, indicates that the request to cancel the workflow execution
     * was automatically generated, and specifies the cause. This happens if
     * the parent workflow execution times out or is terminated, and the
     * child policy is set to cancel child executions.
     * <p>
     * <b>Constraints:</b><br/>
     * <b>Allowed Values: </b>CHILD_POLICY_APPLIED
     *
     * @return If set, indicates that the request to cancel the workflow execution
     *         was automatically generated, and specifies the cause. This happens if
     *         the parent workflow execution times out or is terminated, and the
     *         child policy is set to cancel child executions.
     *
     * @see WorkflowExecutionCancelRequestedCause
     */
    public String getCause() {
        return cause;
    }

    /**
     * If set, indicates that the request to cancel the workflow execution
     * was automatically generated, and specifies the cause. This happens if
     * the parent workflow execution times out or is terminated, and the
     * child policy is set to cancel child executions.
     * <p>
     * <b>Constraints:</b><br/>
     * <b>Allowed Values: </b>CHILD_POLICY_APPLIED
     *
     * @param cause If set, indicates that the request to cancel the workflow execution
     *         was automatically generated, and specifies the cause. This happens if
     *         the parent workflow execution times out or is terminated, and the
     *         child policy is set to cancel child executions.
     *
     * @see WorkflowExecutionCancelRequestedCause
     */
    public void setCause(String cause) {
        this.cause = cause;
    }

    /**
     * If set, indicates that the request to cancel the workflow execution
     * was automatically generated, and specifies the cause. This happens if
     * the parent workflow execution times out or is terminated, and the
     * child policy is set to cancel child executions.
     * <p>
     * Returns a reference to this object so that method calls can be chained together.
     * <p>
     * <b>Constraints:</b><br/>
     * <b>Allowed Values: </b>CHILD_POLICY_APPLIED
     *
     * @param cause If set, indicates that the request to cancel the workflow execution
     *         was automatically generated, and specifies the cause. This happens if
     *         the parent workflow execution times out or is terminated, and the
     *         child policy is set to cancel child executions.
     *
     * @return A reference to this updated object so that method calls can be chained
     *         together.
     *
     * @see WorkflowExecutionCancelRequestedCause
     */
    public WorkflowExecutionCancelRequestedEventAttributes withCause(String cause) {
        this.cause = cause;
        return this;
    }

    /**
     * If set, indicates that the request to cancel the workflow execution
     * was automatically generated, and specifies the cause. This happens if
     * the parent workflow execution times out or is terminated, and the
     * child policy is set to cancel child executions.
     * <p>
     * <b>Constraints:</b><br/>
     * <b>Allowed Values: </b>CHILD_POLICY_APPLIED
     *
     * @param cause If set, indicates that the request to cancel the workflow execution
     *         was automatically generated, and specifies the cause. This happens if
     *         the parent workflow execution times out or is terminated, and the
     *         child policy is set to cancel child executions.
     *
     * @see WorkflowExecutionCancelRequestedCause
     */
    public void setCause(WorkflowExecutionCancelRequestedCause cause) {
        // NOTE(review): dereferences cause -> throws NullPointerException for a null
        // argument; generated callers always pass a non-null enum constant.
        this.cause = cause.toString();
    }

    /**
     * If set, indicates that the request to cancel the workflow execution
     * was automatically generated, and specifies the cause. This happens if
     * the parent workflow execution times out or is terminated, and the
     * child policy is set to cancel child executions.
     * <p>
     * Returns a reference to this object so that method calls can be chained together.
     * <p>
     * <b>Constraints:</b><br/>
     * <b>Allowed Values: </b>CHILD_POLICY_APPLIED
     *
     * @param cause If set, indicates that the request to cancel the workflow execution
     *         was automatically generated, and specifies the cause. This happens if
     *         the parent workflow execution times out or is terminated, and the
     *         child policy is set to cancel child executions.
     *
     * @return A reference to this updated object so that method calls can be chained
     *         together.
     *
     * @see WorkflowExecutionCancelRequestedCause
     */
    public WorkflowExecutionCancelRequestedEventAttributes withCause(WorkflowExecutionCancelRequestedCause cause) {
        this.cause = cause.toString();
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        // Only non-null fields are included; format matches the other generated model classes.
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getExternalWorkflowExecution() != null) sb.append("ExternalWorkflowExecution: " + getExternalWorkflowExecution() + ",");
        if (getExternalInitiatedEventId() != null) sb.append("ExternalInitiatedEventId: " + getExternalInitiatedEventId() + ",");
        if (getCause() != null) sb.append("Cause: " + getCause() );
        sb.append("}");
        return sb.toString();
    }

    @Override
    public int hashCode() {
        // Standard 31-based hash over the three fields; a null field contributes 0.
        final int prime = 31;
        int hashCode = 1;

        hashCode = prime * hashCode + ((getExternalWorkflowExecution() == null) ? 0 : getExternalWorkflowExecution().hashCode());
        hashCode = prime * hashCode + ((getExternalInitiatedEventId() == null) ? 0 : getExternalInitiatedEventId().hashCode());
        hashCode = prime * hashCode + ((getCause() == null) ? 0 : getCause().hashCode());
        return hashCode;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) return true;
        if (obj == null) return false;

        if (obj instanceof WorkflowExecutionCancelRequestedEventAttributes == false) return false;
        WorkflowExecutionCancelRequestedEventAttributes other = (WorkflowExecutionCancelRequestedEventAttributes)obj;

        // For each field: `a == null ^ b == null` is true when exactly one side is
        // null (not equal); otherwise compare values when both are non-null.
        if (other.getExternalWorkflowExecution() == null ^ this.getExternalWorkflowExecution() == null) return false;
        if (other.getExternalWorkflowExecution() != null && other.getExternalWorkflowExecution().equals(this.getExternalWorkflowExecution()) == false) return false;
        if (other.getExternalInitiatedEventId() == null ^ this.getExternalInitiatedEventId() == null) return false;
        if (other.getExternalInitiatedEventId() != null && other.getExternalInitiatedEventId().equals(this.getExternalInitiatedEventId()) == false) return false;
        if (other.getCause() == null ^ this.getCause() == null) return false;
        if (other.getCause() != null && other.getCause().equals(this.getCause()) == false) return false;
        return true;
    }

    @Override
    public WorkflowExecutionCancelRequestedEventAttributes clone() {
        try {
            // Shallow copy via Object.clone(); all fields are immutable or treated as such.
            return (WorkflowExecutionCancelRequestedEventAttributes) super.clone();

        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException(
                    "Got a CloneNotSupportedException from Object.clone() "
                    + "even though we're Cloneable!",
                    e);
        }

    }

}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.catalina.util;
import java.io.PrintWriter;
import java.io.Writer;
import org.w3c.dom.Attr;
import org.w3c.dom.Document;
import org.w3c.dom.NamedNodeMap;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
/**
* A sample DOM writer. This sample program illustrates how to traverse a DOM
* tree in order to print a document that is parsed.
*/
public class DOMWriter {

    /** Target for all output; wraps the caller-supplied writer. */
    private final PrintWriter out;

    /**
     * When true, emit canonical XML: no XML declaration, entity references
     * expanded into their children, CDATA rendered as escaped text, and
     * CR/LF escaped as numeric character references.
     */
    private final boolean canonical;


    public DOMWriter(Writer writer, boolean canonical) {
        out = new PrintWriter(writer);
        this.canonical = canonical;
    }


    /**
     * Prints the specified node, recursively.
     * @param node The node to output
     */
    public void print(Node node) {

        // is there anything to do?
        if (node == null) {
            return;
        }

        int type = node.getNodeType();
        switch (type) {
            // print document
            case Node.DOCUMENT_NODE:
                if (!canonical) {
                    out.println("<?xml version=\"1.0\" encoding=\"UTF-8\"?>");
                }
                print(((Document) node).getDocumentElement());
                out.flush();
                break;

            // print element with attributes
            case Node.ELEMENT_NODE:
                out.print('<');
                out.print(node.getLocalName());
                Attr attrs[] = sortAttributes(node.getAttributes());
                for (int i = 0; i < attrs.length; i++) {
                    Attr attr = attrs[i];
                    out.print(' ');
                    out.print(attr.getLocalName());
                    out.print("=\"");
                    out.print(escape(attr.getNodeValue()));
                    out.print('"');
                }
                out.print('>');
                printChildren(node);
                break;

            // handle entity reference nodes
            case Node.ENTITY_REFERENCE_NODE:
                if (canonical) {
                    printChildren(node);
                } else {
                    out.print('&');
                    out.print(node.getLocalName());
                    out.print(';');
                }
                break;

            // print cdata sections
            case Node.CDATA_SECTION_NODE:
                if (canonical) {
                    out.print(escape(node.getNodeValue()));
                } else {
                    out.print("<![CDATA[");
                    out.print(node.getNodeValue());
                    out.print("]]>");
                }
                break;

            // print text
            case Node.TEXT_NODE:
                out.print(escape(node.getNodeValue()));
                break;

            // print processing instruction
            case Node.PROCESSING_INSTRUCTION_NODE:
                out.print("<?");
                out.print(node.getLocalName());
                String data = node.getNodeValue();
                if (data != null && data.length() > 0) {
                    out.print(' ');
                    out.print(data);
                }
                out.print("?>");
                break;
        }

        // The closing tag of an element is written only after all of its
        // children have been printed above.
        if (type == Node.ELEMENT_NODE) {
            out.print("</");
            out.print(node.getLocalName());
            out.print('>');
        }

        out.flush();
    } // print(Node)


    /** Prints every child of the given node, in document order. */
    private void printChildren(Node node) {
        NodeList children = node.getChildNodes();
        if (children != null) {
            int len = children.getLength();
            for (int i = 0; i < len; i++) {
                print(children.item(i));
            }
        }
    }


    /**
     * Returns a sorted list of attributes.
     * @param attrs The map to sort
     * @return a sorted attribute array
     */
    private Attr[] sortAttributes(NamedNodeMap attrs) {
        if (attrs == null) {
            return new Attr[0];
        }

        int len = attrs.getLength();
        Attr array[] = new Attr[len];
        for (int i = 0; i < len; i++) {
            array[i] = (Attr) attrs.item(i);
        }
        // Selection sort by local name; attribute counts are small enough
        // that the O(n^2) cost is irrelevant.
        for (int i = 0; i < len - 1; i++) {
            String name = array[i].getLocalName();
            int index = i;
            for (int j = i + 1; j < len; j++) {
                String curName = array[j].getLocalName();
                if (curName.compareTo(name) < 0) {
                    name = curName;
                    index = j;
                }
            }
            if (index != i) {
                Attr temp = array[i];
                array[i] = array[index];
                array[index] = temp;
            }
        }

        return array;
    }


    /**
     * Escapes the given string for inclusion in XML output by replacing the
     * markup characters with their predefined entities.
     * <p>
     * BUG FIX: this method previously appended the raw characters themselves
     * (the entity text had been corrupted to {@code "<"}, {@code ">"},
     * {@code "&"} and an uncompilable quote literal), so no escaping happened
     * at all. It now emits {@code &lt;}, {@code &gt;}, {@code &amp;} and
     * {@code &quot;} as intended.
     *
     * @param s The string to escape; {@code null} yields the empty string
     * @return the escaped string
     */
    private String escape(String s) {
        if (s == null) {
            return "";
        }
        StringBuilder str = new StringBuilder();
        int len = s.length();
        for (int i = 0; i < len; i++) {
            char ch = s.charAt(i);
            switch (ch) {
                case '<':
                    str.append("&lt;");
                    break;
                case '>':
                    str.append("&gt;");
                    break;
                case '&':
                    str.append("&amp;");
                    break;
                case '"':
                    str.append("&quot;");
                    break;
                case '\r':
                case '\n':
                    if (canonical) {
                        // Canonical XML requires line-break characters to be
                        // written as numeric character references.
                        str.append("&#");
                        str.append(Integer.toString(ch));
                        str.append(';');
                        break;
                    }
                    // else, default append char
                    //$FALL-THROUGH$
                default:
                    str.append(ch);
            }
        }
        return str.toString();
    }
}
| |
/*
* Copyright (c) 2016-present, RxJava Contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is
* distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See
* the License for the specific language governing permissions and limitations under the License.
*/
package io.reactivex.rxjava3.internal.operators.flowable;
import static org.junit.Assert.*;
import static org.mockito.Mockito.*;
import java.util.List;
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicInteger;
import org.junit.Test;
import io.reactivex.rxjava3.core.*;
import io.reactivex.rxjava3.functions.*;
import io.reactivex.rxjava3.internal.fuseable.*;
import io.reactivex.rxjava3.operators.QueueFuseable;
import io.reactivex.rxjava3.plugins.RxJavaPlugins;
import io.reactivex.rxjava3.schedulers.Schedulers;
import io.reactivex.rxjava3.subscribers.TestSubscriber;
import io.reactivex.rxjava3.testsupport.*;
public class FlowableFromCompletableTest extends RxJavaTest {

    @Test
    public void fromCompletable() {
        final AtomicInteger invocations = new AtomicInteger();

        // Subscribing runs the wrapped action exactly once and completes empty.
        Flowable.fromCompletable(Completable.fromAction(invocations::incrementAndGet))
        .test()
        .assertResult();

        assertEquals(1, invocations.get());
    }

    @Test
    public void fromCompletableTwice() {
        final AtomicInteger invocations = new AtomicInteger();
        Action run = invocations::incrementAndGet;

        // First subscription runs the action once...
        Flowable.fromCompletable(Completable.fromAction(run))
        .test()
        .assertResult();
        assertEquals(1, invocations.get());

        // ...and a second, independent subscription runs it again.
        Flowable.fromCompletable(Completable.fromAction(run))
        .test()
        .assertResult();
        assertEquals(2, invocations.get());
    }

    @Test
    public void fromCompletableInvokesLazy() {
        final AtomicInteger invocations = new AtomicInteger();

        Flowable<Object> source = Flowable.fromCompletable(
                Completable.fromAction(invocations::incrementAndGet));

        // Nothing may run until a subscriber actually arrives.
        assertEquals(0, invocations.get());

        source
        .test()
        .assertResult();

        assertEquals(1, invocations.get());
    }

    @Test
    public void fromCompletableThrows() {
        // An action that throws surfaces its exception through onError.
        Flowable.fromCompletable(Completable.fromAction(() -> {
            throw new UnsupportedOperationException();
        }))
        .test()
        .assertFailure(UnsupportedOperationException.class);
    }

    @Test
    public void noErrorLoss() throws Exception {
        List<Throwable> errors = TestHelper.trackPluginErrors();
        try {
            final CountDownLatch entered = new CountDownLatch(1);
            final CountDownLatch blocker = new CountDownLatch(1);

            // Park the action on the single scheduler, then cancel while it is blocked;
            // the resulting InterruptedException must reach the plugin error handler.
            TestSubscriber<Object> ts = Flowable.fromCompletable(Completable.fromAction(() -> {
                entered.countDown();
                blocker.await(5, TimeUnit.SECONDS);
            }))
            .subscribeOn(Schedulers.single()).test();

            assertTrue(entered.await(5, TimeUnit.SECONDS));

            ts.cancel();

            int timeout = 10;
            while (timeout-- > 0 && errors.isEmpty()) {
                Thread.sleep(100);
            }

            TestHelper.assertUndeliverable(errors, 0, InterruptedException.class);
        } finally {
            RxJavaPlugins.reset();
        }
    }

    @Test
    public void disposedUpfront() throws Throwable {
        Action run = mock(Action.class);

        // A subscriber that is already cancelled must never trigger the action.
        Flowable.fromCompletable(Completable.fromAction(run))
        .test(1L, true)
        .assertEmpty();

        verify(run, never()).run();
    }

    @Test
    public void cancelWhileRunning() {
        final TestSubscriber<Object> ts = new TestSubscriber<>();

        // The action cancels its own subscriber mid-run; no terminal event is emitted.
        Flowable.fromCompletable(Completable.fromAction(ts::cancel))
        .subscribeWith(ts)
        .assertEmpty();

        assertTrue(ts.isCancelled());
    }

    @Test
    public void asyncFused() throws Throwable {
        TestSubscriberEx<Object> ts = new TestSubscriberEx<>();
        ts.setInitialFusionMode(QueueFuseable.ASYNC);

        Action action = mock(Action.class);

        Flowable.fromCompletable(Completable.fromAction(action))
        .subscribe(ts);

        ts.assertFusionMode(QueueFuseable.ASYNC)
        .assertResult();

        verify(action).run();
    }

    @Test
    public void syncFusedRejected() throws Throwable {
        TestSubscriberEx<Object> ts = new TestSubscriberEx<>();
        ts.setInitialFusionMode(QueueFuseable.SYNC);

        Action action = mock(Action.class);

        Flowable.fromCompletable(Completable.fromAction(action))
        .subscribe(ts);

        // SYNC fusion cannot be honored here; the operator downgrades to NONE.
        ts.assertFusionMode(QueueFuseable.NONE)
        .assertResult();

        verify(action).run();
    }

    @Test
    public void upstream() {
        Flowable<?> f = Flowable.fromCompletable(Completable.never());
        assertTrue(f instanceof HasUpstreamCompletableSource);
        assertSame(Completable.never(), ((HasUpstreamCompletableSource)f).source());
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.bucket.nested;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.SortedNumericDocValuesField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.store.Directory;
import org.elasticsearch.index.mapper.IdFieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.NestedPathFieldMapper;
import org.elasticsearch.index.mapper.NumberFieldMapper;
import org.elasticsearch.index.mapper.SeqNoFieldMapper;
import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.search.aggregations.AggregatorTestCase;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.metrics.InternalMax;
import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collectors;
import static org.elasticsearch.search.aggregations.AggregationBuilders.max;
import static org.elasticsearch.search.aggregations.AggregationBuilders.nested;
import static org.elasticsearch.search.aggregations.AggregationBuilders.reverseNested;
public class ReverseNestedAggregatorTests extends AggregatorTestCase {
private static final String VALUE_FIELD_NAME = "number";
private static final String NESTED_OBJECT = "nested_object";
private static final String NESTED_AGG = "nestedAgg";
private static final String REVERSE_AGG_NAME = "reverseNestedAgg";
private static final String MAX_AGG_NAME = "maxAgg";
/**
* For each provided field type, we also register an alias with name {@code <field>-alias}.
*/
@Override
protected Map<String, MappedFieldType> getFieldAliases(MappedFieldType... fieldTypes) {
return Arrays.stream(fieldTypes).collect(Collectors.toMap(
ft -> ft.name() + "-alias",
Function.identity()));
}
    public void testNoDocs() throws IOException {
        try (Directory directory = newDirectory()) {
            try (RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
                // intentionally not writing any docs
            }
            try (IndexReader indexReader = wrap(DirectoryReader.open(directory))) {
                // Aggregation tree: nested -> reverse_nested -> max over the value field.
                NestedAggregationBuilder nestedBuilder = new NestedAggregationBuilder(NESTED_AGG,
                    NESTED_OBJECT);
                ReverseNestedAggregationBuilder reverseNestedBuilder
                    = new ReverseNestedAggregationBuilder(REVERSE_AGG_NAME);
                nestedBuilder.subAggregation(reverseNestedBuilder);
                MaxAggregationBuilder maxAgg = new MaxAggregationBuilder(MAX_AGG_NAME)
                        .field(VALUE_FIELD_NAME);
                reverseNestedBuilder.subAggregation(maxAgg);
                MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(
                        NumberFieldMapper.NumberType.LONG);
                fieldType.setName(VALUE_FIELD_NAME);

                // With no documents indexed, the reverse_nested bucket reports zero docs
                // and the max metric stays at its identity value (-Infinity).
                Nested nested = search(newSearcher(indexReader, false, true),
                        new MatchAllDocsQuery(), nestedBuilder, fieldType);
                ReverseNested reverseNested = (ReverseNested)
                        ((InternalAggregation)nested).getProperty(REVERSE_AGG_NAME);
                assertEquals(REVERSE_AGG_NAME, reverseNested.getName());
                assertEquals(0, reverseNested.getDocCount());
                InternalMax max = (InternalMax)
                        ((InternalAggregation)reverseNested).getProperty(MAX_AGG_NAME);
                assertEquals(MAX_AGG_NAME, max.getName());
                assertEquals(Double.NEGATIVE_INFINITY, max.getValue(), Double.MIN_VALUE);
            }
        }
    }
public void testMaxFromParentDocs() throws IOException {
int numParentDocs = randomIntBetween(1, 20);
int expectedParentDocs = 0;
int expectedNestedDocs = 0;
double expectedMaxValue = Double.NEGATIVE_INFINITY;
try (Directory directory = newDirectory()) {
try (RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
for (int i = 0; i < numParentDocs; i++) {
List<Document> documents = new ArrayList<>();
int numNestedDocs = randomIntBetween(0, 20);
for (int nested = 0; nested < numNestedDocs; nested++) {
Document document = new Document();
document.add(new Field(IdFieldMapper.NAME, Uid.encodeId(Integer.toString(i)),
IdFieldMapper.Defaults.NESTED_FIELD_TYPE));
document.add(new Field(NestedPathFieldMapper.NAME, NESTED_OBJECT,
NestedPathFieldMapper.Defaults.FIELD_TYPE));
documents.add(document);
expectedNestedDocs++;
}
Document document = new Document();
document.add(new Field(IdFieldMapper.NAME, Uid.encodeId(Integer.toString(i)),
IdFieldMapper.Defaults.FIELD_TYPE));
document.add(new Field(NestedPathFieldMapper.NAME, "test",
NestedPathFieldMapper.Defaults.FIELD_TYPE));
long value = randomNonNegativeLong() % 10000;
document.add(new SortedNumericDocValuesField(VALUE_FIELD_NAME, value));
document.add(SeqNoFieldMapper.SequenceIDFields.emptySeqID().primaryTerm);
if (numNestedDocs > 0) {
expectedMaxValue = Math.max(expectedMaxValue, value);
expectedParentDocs++;
}
documents.add(document);
iw.addDocuments(documents);
}
iw.commit();
}
try (IndexReader indexReader = wrap(DirectoryReader.open(directory))) {
NestedAggregationBuilder nestedBuilder = new NestedAggregationBuilder(NESTED_AGG,
NESTED_OBJECT);
ReverseNestedAggregationBuilder reverseNestedBuilder
= new ReverseNestedAggregationBuilder(REVERSE_AGG_NAME);
nestedBuilder.subAggregation(reverseNestedBuilder);
MaxAggregationBuilder maxAgg = new MaxAggregationBuilder(MAX_AGG_NAME)
.field(VALUE_FIELD_NAME);
reverseNestedBuilder.subAggregation(maxAgg);
MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(
NumberFieldMapper.NumberType.LONG);
fieldType.setName(VALUE_FIELD_NAME);
Nested nested = search(newSearcher(indexReader, false, true),
new MatchAllDocsQuery(), nestedBuilder, fieldType);
assertEquals(expectedNestedDocs, nested.getDocCount());
ReverseNested reverseNested = (ReverseNested)
((InternalAggregation)nested).getProperty(REVERSE_AGG_NAME);
assertEquals(REVERSE_AGG_NAME, reverseNested.getName());
assertEquals(expectedParentDocs, reverseNested.getDocCount());
InternalMax max = (InternalMax)
((InternalAggregation)reverseNested).getProperty(MAX_AGG_NAME);
assertEquals(MAX_AGG_NAME, max.getName());
assertEquals(expectedMaxValue, max.getValue(), Double.MIN_VALUE);
}
}
}
public void testFieldAlias() throws IOException {
int numParentDocs = randomIntBetween(1, 20);
int expectedParentDocs = 0;
MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(
NumberFieldMapper.NumberType.LONG);
fieldType.setName(VALUE_FIELD_NAME);
try (Directory directory = newDirectory()) {
try (RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
for (int i = 0; i < numParentDocs; i++) {
List<Document> documents = new ArrayList<>();
int numNestedDocs = randomIntBetween(0, 20);
if (numNestedDocs > 0) {
expectedParentDocs++;
}
for (int nested = 0; nested < numNestedDocs; nested++) {
Document document = new Document();
document.add(new Field(IdFieldMapper.NAME, Uid.encodeId(Integer.toString(i)),
IdFieldMapper.Defaults.NESTED_FIELD_TYPE));
document.add(new Field(NestedPathFieldMapper.NAME, NESTED_OBJECT,
NestedPathFieldMapper.Defaults.FIELD_TYPE));
documents.add(document);
}
Document document = new Document();
document.add(new Field(IdFieldMapper.NAME, Uid.encodeId(Integer.toString(i)),
IdFieldMapper.Defaults.FIELD_TYPE));
document.add(new Field(NestedPathFieldMapper.NAME, "test",
NestedPathFieldMapper.Defaults.FIELD_TYPE));
long value = randomNonNegativeLong() % 10000;
document.add(new SortedNumericDocValuesField(VALUE_FIELD_NAME, value));
document.add(SeqNoFieldMapper.SequenceIDFields.emptySeqID().primaryTerm);
documents.add(document);
iw.addDocuments(documents);
}
iw.commit();
}
try (IndexReader indexReader = wrap(DirectoryReader.open(directory))) {
MaxAggregationBuilder maxAgg = max(MAX_AGG_NAME).field(VALUE_FIELD_NAME);
MaxAggregationBuilder aliasMaxAgg = max(MAX_AGG_NAME).field(VALUE_FIELD_NAME + "-alias");
NestedAggregationBuilder agg = nested(NESTED_AGG, NESTED_OBJECT).subAggregation(
reverseNested(REVERSE_AGG_NAME).subAggregation(maxAgg));
NestedAggregationBuilder aliasAgg = nested(NESTED_AGG, NESTED_OBJECT).subAggregation(
reverseNested(REVERSE_AGG_NAME).subAggregation(aliasMaxAgg));
Nested nested = search(newSearcher(indexReader, false, true),
new MatchAllDocsQuery(), agg, fieldType);
Nested aliasNested = search(newSearcher(indexReader, false, true),
new MatchAllDocsQuery(), aliasAgg, fieldType);
ReverseNested reverseNested = nested.getAggregations().get(REVERSE_AGG_NAME);
ReverseNested aliasReverseNested = aliasNested.getAggregations().get(REVERSE_AGG_NAME);
assertEquals(reverseNested, aliasReverseNested);
assertEquals(expectedParentDocs, reverseNested.getDocCount());
}
}
}
}
| |
package org.jboss.resteasy.test.security.smime;
import org.bouncycastle.cms.CMSAlgorithm;
import org.bouncycastle.cms.CMSEnvelopedDataStreamGenerator;
import org.bouncycastle.cms.CMSException;
import org.bouncycastle.cms.RecipientId;
import org.bouncycastle.cms.RecipientInformation;
import org.bouncycastle.cms.RecipientInformationStore;
import org.bouncycastle.cms.jcajce.*;
import org.bouncycastle.jce.provider.BouncyCastleProvider;
import org.bouncycastle.mail.smime.SMIMEEnveloped;
import org.bouncycastle.mail.smime.SMIMEEnvelopedGenerator;
import org.bouncycastle.mail.smime.SMIMEException;
import org.bouncycastle.mail.smime.SMIMEUtil;
import org.bouncycastle.operator.OutputEncryptor;
import org.jboss.resteasy.security.PemUtils;
import org.jboss.resteasy.util.Base64;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import javax.mail.MessagingException;
import javax.mail.internet.InternetHeaders;
import javax.mail.internet.MimeBodyPart;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.SequenceInputStream;
import java.security.NoSuchProviderException;
import java.security.PrivateKey;
import java.security.Security;
import java.security.cert.X509Certificate;
/**
* @author <a href="mailto:bill@burkecentral.com">Bill Burke</a>
* @version $Revision: 1 $
*/
/**
 * Tests S/MIME enveloped (encrypted) message handling with Bouncy Castle:
 * encrypting a MIME body part, decrypting it back, and decrypting a payload
 * produced by an external Python S/MIME implementation (interop check).
 *
 * @author <a href="mailto:bill@burkecentral.com">Bill Burke</a>
 * @version $Revision: 1 $
 */
public class EnvelopedTest
{
   // Base64-encoded PKCS#7 enveloped payload generated by an external Python
   // S/MIME script; used below to verify we can decrypt foreign-produced data.
   private static String python_smime = "MIIBagYJKoZIhvcNAQcDoIIBWzCCAVcCAQAxgewwgekCAQAwUjBFMQswCQYDVQQG\n" +
         "EwJBVTETMBEGA1UECBMKU29tZS1TdGF0ZTEhMB8GA1UEChMYSW50ZXJuZXQgV2lk\n" +
         "Z2l0cyBQdHkgTHRkAgkA7oW81OriflAwDQYJKoZIhvcNAQEBBQAEgYA1AWIoRMsb\n" +
         "Gv2DsHLjcvu6URZPqS0atjGW7uqlthmoQ4XB+l0y+iy2rXFuJnz+iLp/EIn92UpR\n" +
         "ZeFHPoQEDklkk5QqRaIBvkZiJgiPs9VuWiXVfHeOei9Oneyfja9Q88eFWHFToWok\n" +
         "LIDie+Wt/mMYY23QSVTY3r+cgTnOyV8gyDBjBgkqhkiG9w0BBwEwFAYIKoZIhvcN\n" +
         "AwcECJYFaD/eHDkHgEDIBLBzczEdLLk7nQzORmVist6gv30Ez9LCzHlnFteU+jVr\n" +
         "zAUGo6VoZZMmyLVeYEZoXqEjY6fN+rpSWoUVtNQM";

   // Recipient certificate and matching private key loaded from classpath PEM resources.
   private static X509Certificate cert;
   private static PrivateKey privateKey;

   /**
    * Registers the Bouncy Castle provider and loads the test certificate/private key
    * from PEM files on the classpath.
    */
   @BeforeClass
   public static void setup() throws Exception
   {
      Security.addProvider(new BouncyCastleProvider());
      /*
      InputStream certIs = Thread.currentThread().getContextClassLoader().getResourceAsStream("mycert.der");
      cert = PemUtils.getCertificateFromDer(certIs);
      */
      InputStream certIs = Thread.currentThread().getContextClassLoader().getResourceAsStream("mycert.pem");
      cert = PemUtils.decodeCertificate(certIs);
      /*
      InputStream privateIs = Thread.currentThread().getContextClassLoader().getResourceAsStream("mycert-private.der");
      privateKey = PemUtils.getPrivateFromDer(privateIs);
      */
      InputStream privateIs = Thread.currentThread().getContextClassLoader().getResourceAsStream("mycert-private.pem");
      privateKey = PemUtils.decodePrivateKey(privateIs);
   }

   /**
    * Round-trip: encrypt an XML body into a CMS enveloped stream, wrap the base64 result
    * in pkcs7-mime headers, then decrypt and compare to the original payload.
    */
   @Test
   public void testBody() throws Exception
   {
      // Triple-DES CBC content encryptor, keyed for our certificate's recipient info.
      OutputEncryptor encryptor = new JceCMSContentEncryptorBuilder(CMSAlgorithm.DES_EDE3_CBC)
            .setProvider("BC")
            .build();
      JceKeyTransRecipientInfoGenerator infoGenerator = new JceKeyTransRecipientInfoGenerator(cert);
      CMSEnvelopedDataStreamGenerator generator = new CMSEnvelopedDataStreamGenerator();
      generator.addRecipientInfoGenerator(infoGenerator);
      InternetHeaders ih = new InternetHeaders();
      ih.addHeader("Content-Type", "application/xml");
      MimeBodyPart _msg = new MimeBodyPart(ih, "<customer name=\"bill\"/>".getBytes());
      ByteArrayOutputStream os = new ByteArrayOutputStream();
      // Write the MIME part through the encrypting stream; close() finalizes the CMS structure.
      OutputStream encrypted = generator.open(os, encryptor);
      _msg.writeTo(encrypted);
      encrypted.close();
      String str = Base64.encodeBytes(os.toByteArray(), Base64.DO_BREAK_LINES);
      // Re-wrap the ciphertext as a standard S/MIME attachment part.
      ih = new InternetHeaders();
      ih.addHeader("Content-Disposition", "attachment; filename=\"smime.p7m\"");
      ih.addHeader("Content-Type", "application/pkcs7-mime; smime-type=enveloped-data; name=\"smime.p7m\"");
      ih.addHeader("Content-Transfer-Encoding", "base64");
      MimeBodyPart mp = new MimeBodyPart(ih, str.getBytes());
      // output this to smime.txt for decrypt_smime.py
      //outputFile(mp);
      mp = decode2Mime(mp);
      Assert.assertEquals("application/xml", mp.getContentType());
      String body = toString(mp.getInputStream());
      Assert.assertEquals("<customer name=\"bill\"/>", body.trim());
   }

   /**
    * Exercises the higher-level SMIMEEnvelopedGenerator path and prints the generated part
    * (no assertions beyond "does not throw").
    */
   @Test
   public void testHeaders()
         throws Exception
   {
      InternetHeaders ih = new InternetHeaders();
      ih.addHeader("Content-Type", "application/xml");
      MimeBodyPart _msg = new MimeBodyPart(ih, "<customer name=\"bill\"/>".getBytes());
      SMIMEEnvelopedGenerator gen = new SMIMEEnvelopedGenerator();
      OutputEncryptor encryptor = new JceCMSContentEncryptorBuilder(CMSAlgorithm.DES_EDE3_CBC)
            .setProvider("BC")
            .build();
      gen.addRecipientInfoGenerator(new JceKeyTransRecipientInfoGenerator(cert).setProvider("BC"));
      //
      // generate a MimeBodyPart object which encapsulates the content
      // we want encrypted.
      //
      MimeBodyPart mp = gen.generate(_msg, encryptor);
      output(mp);
   }

   // Debug helper: dump a MIME part to stdout.
   private void output(MimeBodyPart mp) throws IOException, MessagingException
   {
      ByteArrayOutputStream os = new ByteArrayOutputStream();
      mp.writeTo(os);
      String s = new String(os.toByteArray());
      System.out.println(s);
   }

   // Debug helper: dump a MIME part to smime.txt for the companion Python script.
   private void outputFile(MimeBodyPart mp) throws IOException, MessagingException
   {
      FileOutputStream os = new FileOutputStream("smime.txt");
      mp.writeTo(os);
      os.close();
   }

   /**
    * Decrypts the fixture produced by the Python implementation, supplying the S/MIME
    * headers via an InternetHeaders-based body part.
    */
   @Test
   public void testFromPythonGenerated() throws Exception
   {
      InternetHeaders ih = new InternetHeaders();
      ih.addHeader("Content-Disposition", "attachment; filename=\"smime.p7m\"");
      ih.addHeader("Content-Type", "application/pkcs7-mime; smime-type=enveloped-data; name=\"smime.p7m\"");
      ih.addHeader("Content-Transfer-Encoding", "base64");
      MimeBodyPart mp = new MimeBodyPart(ih, python_smime.getBytes());
      output(mp);
      System.out.println("------------");
      mp = decode2Mime(mp);
      Assert.assertEquals("application/xml", mp.getContentType());
      String body = toString(mp.getInputStream());
      Assert.assertEquals("<customer name=\"bill\"/>", body.trim());
   }

   /**
    * Same fixture as above, but fed through the raw-InputStream decode path which
    * prepends the S/MIME headers itself.
    */
   @Test
   public void testFromPythonGenerated2() throws Exception
   {
      ByteArrayInputStream is = new ByteArrayInputStream(python_smime.getBytes("utf-8"));
      MimeBodyPart mp = decode2Mime(is);
      Assert.assertEquals("application/xml", mp.getContentType());
      String body = toString(mp.getInputStream());
      Assert.assertEquals("<customer name=\"bill\"/>", body.trim());
   }

   // Reads a whole stream into a String. Relies on available() returning the full
   // remaining size, which holds for the in-memory streams used in these tests.
   private static String toString(InputStream is) throws Exception
   {
      DataInputStream dis = new DataInputStream(is);
      byte[] bytes = new byte[dis.available()];
      dis.readFully(bytes);
      dis.close();
      return new String(bytes);
   }

   /**
    * Decrypts a raw base64 ciphertext stream: synthesizes the pkcs7-mime headers,
    * concatenates them with the body, and delegates to the MimeBodyPart overload.
    */
   private MimeBodyPart decode2Mime(InputStream body) throws MessagingException, CMSException, SMIMEException, NoSuchProviderException, IOException
   {
      StringBuilder builder = new StringBuilder();
      builder.append("Content-Disposition: attachment; filename=\"smime.p7m\"\r\n");
      builder.append("Content-Type: application/pkcs7-mime; smime-type=enveloped-data; name=\"smime.p7m\"\r\n");
      builder.append("Content-Transfer-Encoding: base64\r\n\r\n");
      ByteArrayInputStream is = new ByteArrayInputStream(builder.toString().getBytes("utf-8"));
      MimeBodyPart mp = new MimeBodyPart(new SequenceInputStream(is, body));
      return decode2Mime(mp);
   }

   /**
    * Decrypts an S/MIME enveloped body part: looks up the recipient info matching our
    * certificate and recovers the plaintext MIME part with the private key.
    */
   private MimeBodyPart decode2Mime(MimeBodyPart mp) throws MessagingException, CMSException, SMIMEException, NoSuchProviderException, IOException
   {
      SMIMEEnveloped m = new SMIMEEnveloped(mp);
      RecipientId recId = new JceKeyTransRecipientId(cert);
      RecipientInformationStore recipients = m.getRecipientInfos();
      RecipientInformation recipient = recipients.get(recId);
      JceKeyTransRecipient pKeyRecp = new JceKeyTransEnvelopedRecipient(privateKey);
      return SMIMEUtil.toMimeBodyPart(recipient.getContent(pKeyRecp));
   }
}
| |
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.refactoring.copy;
import com.intellij.codeInsight.actions.OptimizeImportsProcessor;
import com.intellij.execution.ExecutionBundle;
import com.intellij.featureStatistics.FeatureUsageTracker;
import com.intellij.ide.util.EditorHelper;
import com.intellij.java.refactoring.JavaRefactoringBundle;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.WriteAction;
import com.intellij.openapi.command.CommandProcessor;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.project.DumbService;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.JavaProjectRootsUtil;
import com.intellij.openapi.roots.ProjectRootManager;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.util.NlsContexts;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.*;
import com.intellij.psi.codeStyle.JavaCodeStyleManager;
import com.intellij.psi.impl.file.PsiDirectoryImpl;
import com.intellij.psi.impl.file.UpdateAddedFileProcessor;
import com.intellij.psi.search.LocalSearchScope;
import com.intellij.psi.search.searches.ReferencesSearch;
import com.intellij.psi.util.PsiUtilCore;
import com.intellij.refactoring.MoveDestination;
import com.intellij.refactoring.RefactoringBundle;
import com.intellij.refactoring.move.moveClassesOrPackages.MoveDirectoryWithClassesProcessor;
import com.intellij.util.ArrayUtil;
import com.intellij.util.ArrayUtilRt;
import com.intellij.util.IncorrectOperationException;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.io.IOException;
import java.util.*;
public class CopyClassesHandler extends CopyHandlerDelegateBase {
private static final Logger LOG = Logger.getInstance(CopyClassesHandler.class);
/**
 * Cloning is forbidden unless the selection resolves to exactly one file whose
 * top-level classes could be determined and that file contains at most one class.
 */
@Override
public boolean forbidToClone(PsiElement[] elements, boolean fromUpdate) {
  final Map<PsiFile, PsiClass[]> topLevel = convertToTopLevelClasses(elements, fromUpdate, null, null);
  if (topLevel == null || topLevel.size() != 1) {
    return true;
  }
  final PsiClass[] classesInFile = topLevel.values().iterator().next();
  return classesInFile != null && classesInFile.length > 1;
}
/**
 * Copying is available whenever the selection can be mapped to top-level classes.
 * Delegates to the static helper shared with the action-update checks.
 */
@Override
public boolean canCopy(PsiElement[] elements, boolean fromUpdate) {
  return canCopyClass(fromUpdate, elements);
}
/**
 * Chooses the singular or plural action caption: singular only for a single
 * selected element that is neither a package nor a directory.
 */
@Nullable
@Override
public String getActionName(PsiElement[] elements) {
  final boolean singleClassLike =
    elements.length == 1 && !(elements[0] instanceof PsiPackage) && !(elements[0] instanceof PsiDirectory);
  return singleClassLike
         ? JavaRefactoringBundle.message("copy.handler.copy.class.with.dialog")
         : JavaRefactoringBundle.message("copy.handler.copy.classes.with.dialog");
}
/**
 * Convenience overload of {@link #canCopyClass(boolean, PsiElement...)} used
 * outside of action-update checks.
 */
public static boolean canCopyClass(PsiElement... elements) {
  return canCopyClass(false, elements);
}
/**
 * Determines whether the selected elements can be copied as classes. During an
 * action update a leading directory is accepted optimistically (the full,
 * potentially expensive conversion is deferred until the action actually runs).
 */
public static boolean canCopyClass(boolean fromUpdate, PsiElement... elements) {
  if (fromUpdate && elements.length > 0 && elements[0] instanceof PsiDirectory) {
    return true;
  }
  return convertToTopLevelClasses(elements, fromUpdate, null, null) != null;
}
/**
 * Maps the selected elements to their containing files and top-level classes.
 * Directories are recursed into (unless called from an action update), and the
 * source-relative path of each visited file is recorded in {@code relativeMap}.
 * A file may be mapped to {@code null} classes, which marks it for plain
 * file-copy later. Returns {@code null} when the selection cannot be copied as
 * classes (nothing collected, or no file actually contributed classes).
 *
 * @param elements     selection to analyze
 * @param fromUpdate   true when called from an action-update check; suppresses directory recursion
 * @param relativePath path accumulated so far during directory recursion (may be null)
 * @param relativeMap  out-parameter collecting file -> relative path (may be null)
 */
@Nullable
private static Map<PsiFile, PsiClass[]> convertToTopLevelClasses(final PsiElement[] elements,
                                                                 final boolean fromUpdate,
                                                                 String relativePath,
                                                                 Map<PsiFile, String> relativeMap) {
  final Map<PsiFile, PsiClass[]> result = new HashMap<>();
  for (PsiElement element : elements) {
    final PsiElement navigationElement = element.getNavigationElement();
    LOG.assertTrue(navigationElement != null, element);
    final PsiFile containingFile = navigationElement.getContainingFile();
    // Skip class owners that live outside a Java source root entirely.
    if (!(containingFile instanceof PsiClassOwner &&
          JavaProjectRootsUtil.isOutsideJavaSourceRoot(containingFile))) {
      if (containingFile != null) {
        // package-info class files and directory-less files cannot be copied as classes.
        if (PsiPackage.PACKAGE_INFO_CLS_FILE.equals(containingFile.getName()) ||
            containingFile.getContainingDirectory() == null) {
          continue;
        }
      }
      PsiClass[] topLevelClasses = getTopLevelClasses(element);
      if (topLevelClasses == null) {
        if (element instanceof PsiDirectory) {
          if (!fromUpdate) {
            // Recurse into the directory, extending the relative path with its name.
            final String name = ((PsiDirectory)element).getName();
            final String path = relativePath != null ? (relativePath.length() > 0 ? (relativePath + "/") : "") + name : null;
            final Map<PsiFile, PsiClass[]> map = convertToTopLevelClasses(element.getChildren(), false, path, relativeMap);
            if (map == null) return null;
            for (Map.Entry<PsiFile, PsiClass[]> entry : map.entrySet()) {
              fillResultsMap(result, entry.getKey(), entry.getValue());
            }
          }
          continue;
        }
        // A non-file, non-directory element without classes makes the whole selection uncopyable.
        if (!(element instanceof PsiFileSystemItem)) return null;
      }
      // topLevelClasses may be null here (plain file-system item): records the null marker.
      fillResultsMap(result, containingFile, topLevelClasses);
      if (relativeMap != null) {
        relativeMap.put(containingFile, relativePath);
      }
    }
  }
  if (result.isEmpty()) {
    return null;
  }
  else {
    // At least one file must have real classes; otherwise this is not a class copy.
    boolean hasClasses = false;
    for (PsiClass[] classes : result.values()) {
      if (classes != null) {
        hasClasses = true;
        break;
      }
    }
    return hasClasses ? result : null;
  }
}
/**
 * Computes the longest common leading path ("vector") shared by all values of
 * {@code relativeMap}, and rewrites each value in place to be relative to that
 * vector (the vector itself becomes {@code ""}). Returns the vector, or
 * {@code null} when the recorded paths diverge and no common prefix exists.
 *
 * @param relativeMap file -> source-relative path; values are mutated in place
 */
@Nullable
private static String normalizeRelativeMap(Map<PsiFile, String> relativeMap) {
  String vector = null;
  for (String relativePath : relativeMap.values()) {
    if (vector == null) {
      vector = relativePath;
    }
    else if (vector.startsWith(relativePath + "/")) {
      // A strictly shorter ancestor path becomes the new candidate vector.
      vector = relativePath;
    }
    else if (!relativePath.startsWith(vector + "/") && !relativePath.equals(vector)) {
      // Paths diverge: there is no common leading directory.
      return null;
    }
  }
  if (vector != null) {
    // Rewrite values in place via the entry set instead of keySet()+get()+put():
    // one lookup per entry and the idiomatic form for in-place value updates.
    for (Map.Entry<PsiFile, String> entry : relativeMap.entrySet()) {
      final String path = entry.getValue();
      entry.setValue(path.equals(vector) ? "" : path.substring(vector.length() + 1));
    }
  }
  return vector;
}
/**
 * Accumulates top-level classes per file. When {@code topLevelClasses} is null the
 * previously collected value is kept; for an unseen file this records a null marker
 * meaning "copy as a plain file". Otherwise new classes are merged with any already
 * collected for that file.
 */
private static void fillResultsMap(Map<PsiFile, PsiClass[]> result, PsiFile containingFile, PsiClass[] topLevelClasses) {
  final PsiClass[] alreadyCollected = result.get(containingFile);
  if (topLevelClasses == null) {
    result.put(containingFile, alreadyCollected);
    return;
  }
  final PsiClass[] merged = alreadyCollected == null
                            ? topLevelClasses
                            : ArrayUtil.mergeArrays(alreadyCollected, topLevelClasses, PsiClass.ARRAY_FACTORY);
  result.put(containingFile, merged);
}
/**
 * Entry point of the "Copy Class" action: resolves the selection to files/classes,
 * asks the user for a target (single-class dialog or multi-file dialog), and runs
 * the copy. Falls back to a plain file copy when the target directory is outside
 * any source root.
 *
 * @param defaultTargetDirectory preselected target; when null the directory of the
 *                               first resolved file is used
 */
@Override
public void doCopy(PsiElement[] elements, PsiDirectory defaultTargetDirectory) {
  FeatureUsageTracker.getInstance().triggerFeatureUsed("refactoring.copyClass");
  final HashMap<PsiFile, String> relativePathsMap = new HashMap<>();
  final Map<PsiFile, PsiClass[]> classes = convertToTopLevelClasses(elements, false, "", relativePathsMap);
  // canCopy() already vetted the selection, so the conversion must succeed here.
  assert classes != null;
  if (defaultTargetDirectory == null) {
    final PsiFile psiFile = classes.keySet().iterator().next();
    defaultTargetDirectory = psiFile.getContainingDirectory();
    LOG.assertTrue(defaultTargetDirectory != null, psiFile);
  }
  Project project = defaultTargetDirectory.getProject();
  // NOTE(review): the dumb check uses elements[0].getProject() while the
  // notification uses the target directory's project — presumably the same
  // project in practice; confirm for cross-project copies.
  if (DumbService.isDumb(elements[0].getProject())) {
    DumbService.getInstance(project).showDumbModeNotification(JavaRefactoringBundle.message(
      "copy.handler.is.not.available.during.indexing"));
    return;
  }
  VirtualFile sourceRootForFile =
    ProjectRootManager.getInstance(project).getFileIndex().getSourceRootForFile(defaultTargetDirectory.getVirtualFile());
  if (sourceRootForFile == null) {
    // Target is outside any source root: degrade to a plain file/directory copy.
    final List<PsiElement> files = new ArrayList<>();
    for (PsiElement element : elements) {
      PsiFile containingFile = element.getContainingFile();
      if (containingFile != null) {
        files.add(containingFile);
      }
      else if (element instanceof PsiDirectory) {
        files.add(element);
      }
    }
    CopyFilesOrDirectoriesHandler.copyAsFiles(files.toArray(PsiElement.EMPTY_ARRAY), defaultTargetDirectory, project);
    return;
  }
  Object targetDirectory = null;
  String className = null;
  boolean openInEditor = true;
  if (copyOneClass(classes)) {
    // Single-class copy: show the class-oriented dialog. When the user selected a
    // directory (not the class itself), append the common relative path to the
    // suggested qualified name.
    final String commonPath =
      ArrayUtilRt.find(elements, classes.values().iterator().next()) == -1 ? normalizeRelativeMap(relativePathsMap) : null;
    CopyClassDialog dialog = new CopyClassDialog(classes.values().iterator().next()[0], defaultTargetDirectory, project, false) {
      @Override
      protected String getQualifiedName() {
        final String qualifiedName = super.getQualifiedName();
        if (commonPath != null && !commonPath.isEmpty() && !qualifiedName.endsWith(commonPath)) {
          return StringUtil.getQualifiedName(qualifiedName, commonPath.replaceAll("/", "."));
        }
        return qualifiedName;
      }
    };
    dialog.setTitle(JavaRefactoringBundle.message("copy.handler.copy.class"));
    if (dialog.showAndGet()) {
      openInEditor = dialog.isOpenInEditor();
      targetDirectory = dialog.getTargetDirectory();
      className = dialog.getClassName();
      if (className == null || className.length() == 0) return;
    }
  }
  else {
    if (ApplicationManager.getApplication().isUnitTestMode()) {
      // Tests bypass the dialog and copy straight into the default target.
      targetDirectory = defaultTargetDirectory;
    }
    else {
      defaultTargetDirectory = CopyFilesOrDirectoriesHandler.resolveDirectory(defaultTargetDirectory);
      if (defaultTargetDirectory == null) return;
      PsiFile[] files = PsiUtilCore.toPsiFileArray(classes.keySet());
      final CopyFilesOrDirectoriesDialog dialog = new CopyFilesOrDirectoriesDialog(files, defaultTargetDirectory, project, false) {
        @Override
        public JComponent getPreferredFocusedComponent() {
          return files.length == 1 ? getTargetDirectoryComponent() : super.getPreferredFocusedComponent();
        }
      };
      if (dialog.showAndGet()) {
        targetDirectory = dialog.getTargetDirectory();
        String newName = dialog.getNewName();
        if (files.length == 1) { //strip file extension when multiple classes exist in one file
          className = StringUtil.trimEnd(newName, "." + getFileExtension(files[0]));
        }
        openInEditor = dialog.isOpenInEditor();
      }
    }
  }
  // targetDirectory stays null when the user cancelled a dialog.
  if (targetDirectory != null) {
    copyClassesImpl(className, project, classes, relativePathsMap, targetDirectory, defaultTargetDirectory, JavaRefactoringBundle.message(
      "copy.handler.copy.class"), false, openInEditor);
  }
}
/** True when the selection resolves to exactly one file containing exactly one class. */
private static boolean copyOneClass(Map<PsiFile, PsiClass[]> classes) {
  if (classes.size() != 1) {
    return false;
  }
  final PsiClass[] onlyFileClasses = classes.values().iterator().next();
  return onlyFileClasses != null && onlyFileClasses.length == 1;
}
/**
 * Entry point of the "Clone" action: clones the element's top-level classes into
 * the same directory under a new name chosen in the dialog. Falls back to a
 * plain file clone when the element carries no classes.
 */
@Override
public void doClone(PsiElement element) {
  FeatureUsageTracker.getInstance().triggerFeatureUsed("refactoring.copyClass");
  PsiClass[] classes = getTopLevelClasses(element);
  if (classes == null) {
    // Not a class-bearing element: clone it as a file instead.
    CopyFilesOrDirectoriesHandler.doCloneFile(element);
    return;
  }
  Project project = element.getProject();
  CopyClassDialog dialog = new CopyClassDialog(classes[0], null, project, true);
  dialog.setTitle(JavaRefactoringBundle.message("copy.handler.clone.class"));
  if (dialog.showAndGet()) {
    String className = dialog.getClassName();
    // A clone always lands next to the original file.
    PsiDirectory targetDirectory = element.getContainingFile().getContainingDirectory();
    copyClassesImpl(className, project, Collections.singletonMap(classes[0].getContainingFile(), classes), null, targetDirectory,
                    targetDirectory, JavaRefactoringBundle.message("copy.handler.clone.class"), true, true);
  }
}
/**
 * Runs the actual copy inside an undoable command: resolves the target directory
 * (it may be a {@link MoveDestination} that has to be materialized under a write
 * action), performs the copy, and optionally opens the created files in the editor.
 *
 * @param targetDirectory either a {@link PsiDirectory} or a {@link MoveDestination}
 */
private static void copyClassesImpl(final String copyClassName,
                                    final Project project,
                                    final Map<PsiFile, PsiClass[]> classes,
                                    final HashMap<PsiFile, String> map,
                                    final Object targetDirectory,
                                    final PsiDirectory defaultTargetDirectory,
                                    final @NlsContexts.Command String commandName,
                                    final boolean selectInActivePanel,
                                    final boolean openInEditor) {
  Runnable command = () -> {
    PsiDirectory target;
    if (targetDirectory instanceof PsiDirectory) {
      target = (PsiDirectory)targetDirectory;
    }
    else {
      // MoveDestination may create directories, so resolve it under a write action.
      target = WriteAction.compute(() -> ((MoveDestination)targetDirectory).getTargetDirectory(defaultTargetDirectory));
    }
    try {
      Collection<PsiFile> files = doCopyClasses(classes, map, copyClassName, target, project);
      if (openInEditor) {
        for (PsiFile file : files) {
          CopyHandler.updateSelectionInActiveProjectView(file, project, selectInActivePanel);
        }
        EditorHelper.openFilesInEditor(files.toArray(PsiFile.EMPTY_ARRAY));
      }
    }
    catch (IncorrectOperationException ex) {
      // Surface copy failures to the user instead of failing the command silently.
      Messages.showMessageDialog(project, ex.getMessage(), RefactoringBundle.message("error.title"), Messages.getErrorIcon());
    }
  };
  CommandProcessor processor = CommandProcessor.getInstance();
  processor.executeCommand(project, command, commandName, null);
}
/**
 * Copies the given classes into {@code targetDirectory} without any relative-path
 * information (everything lands directly in the target directory).
 *
 * @return the files created by the copy
 * @throws IncorrectOperationException when the copy fails
 */
@NotNull
public static Collection<PsiFile> doCopyClasses(final Map<PsiFile, PsiClass[]> fileToClasses,
                                                final String copyClassName,
                                                final PsiDirectory targetDirectory,
                                                final Project project) throws IncorrectOperationException {
  return doCopyClasses(fileToClasses, null, copyClassName, targetDirectory, project);
}
/**
 * Copies files and their classes into {@code targetDirectory}. Files with classes
 * are copied, their class copies are spliced in (renamed to {@code copyClassName}
 * when a single class is copied), and all references among the copied classes are
 * rebound to the new copies afterwards. Files mapped to {@code null} classes are
 * copied as plain files. Added-file processing is deferred until all copies exist
 * so reference decoding sees the complete set.
 *
 * @param map           optional file -> relative path; recreated below the target directory
 * @param copyClassName new class/file name, or null to keep original names
 * @return all files created by the copy
 * @throws IncorrectOperationException when any underlying copy fails
 */
@NotNull
public static Collection<PsiFile> doCopyClasses(final Map<PsiFile, PsiClass[]> fileToClasses,
                                                @Nullable HashMap<PsiFile, String> map, final String copyClassName,
                                                final PsiDirectory targetDirectory,
                                                final Project project) throws IncorrectOperationException {
  // Old class -> its copy; seeded with nulls so we know the full set up front.
  final Map<PsiClass, PsiElement> oldToNewMap = new HashMap<>();
  for (final PsiClass[] psiClasses : fileToClasses.values()) {
    if (psiClasses != null) {
      for (PsiClass aClass : psiClasses) {
        if (isSynthetic(aClass)) {
          continue;
        }
        oldToNewMap.put(aClass, null);
      }
    }
  }
  final List<PsiFile> createdFiles = new ArrayList<>(fileToClasses.size());
  // Shared "overwrite?" answer across files when copying more than one.
  int[] choice = fileToClasses.size() > 1 ? new int[]{-1} : null;
  List<PsiFile> files = new ArrayList<>();
  // Suppress per-file added-file processing while copying; it runs once at the end.
  ((PsiDirectoryImpl)targetDirectory).executeWithUpdatingAddedFilesDisabled(() -> {
    for (final Map.Entry<PsiFile, PsiClass[]> entry : fileToClasses.entrySet()) {
      final PsiFile psiFile = entry.getKey();
      final PsiClass[] sources = entry.getValue();
      if (psiFile instanceof PsiClassOwner && sources != null) {
        final PsiFile createdFile = copy(psiFile, targetDirectory, copyClassName, map == null ? null : map.get(psiFile), choice);
        if (createdFile == null) {
          //do not touch unmodified classes
          for (PsiClass aClass : ((PsiClassOwner)psiFile).getClasses()) {
            oldToNewMap.remove(aClass);
          }
          continue;
        }
        // Pair each source class with its counterpart in the copied file by name;
        // classes that were not selected get deleted from the copy.
        Map<PsiClass, PsiClass> sourceToDestination = new LinkedHashMap<>();
        for (final PsiClass destination : ((PsiClassOwner)createdFile).getClasses()) {
          if (!isSynthetic(destination)) {
            PsiClass source = findByName(sources, destination.getName());
            if (source == null) {
              WriteAction.run(() -> destination.delete());
            }
            else {
              sourceToDestination.put(source, destination);
            }
          }
        }
        // Replace each destination stub with a fresh copy of its source class;
        // renaming only applies when a single class is being copied.
        for (final Map.Entry<PsiClass, PsiClass> classEntry : sourceToDestination.entrySet()) {
          final PsiClass copy = copy(classEntry.getKey(), sourceToDestination.size() > 1 ? null : copyClassName);
          PsiElement newElement = WriteAction.compute(() -> classEntry.getValue().replace(copy));
          oldToNewMap.put(classEntry.getKey(), newElement);
        }
        createdFiles.add(createdFile);
      }
      else {
        // Plain files (null-class marker) are copied after the class files below.
        files.add(psiFile);
      }
    }
  });
  DumbService.getInstance(project).completeJustSubmittedTasks();
  WriteAction.run(() -> UpdateAddedFileProcessor.updateAddedFiles(createdFiles));
  for (PsiFile file : files) {
    try {
      PsiDirectory finalTarget = targetDirectory;
      final String relativePath = map != null ? map.get(file) : null;
      if (relativePath != null && !relativePath.isEmpty()) {
        finalTarget = WriteAction.compute(() -> buildRelativeDir(targetDirectory, relativePath).findOrCreateTargetDirectory());
      }
      final PsiFile fileCopy = CopyFilesOrDirectoriesHandler.copyToDirectory(file, getNewFileName(file, copyClassName), finalTarget, choice, null);
      if (fileCopy != null) {
        createdFiles.add(fileCopy);
      }
    }
    catch (IOException e) {
      throw new IncorrectOperationException(e.getMessage());
    }
  }
  WriteAction.run(() -> {
    // Rebind references among the copied classes so copies point at copies,
    // then clean up imports and shorten the rebound references.
    final Set<PsiElement> rebindExpressions = new HashSet<>();
    for (PsiElement element : oldToNewMap.values()) {
      if (element == null) {
        LOG.error(oldToNewMap.keySet());
        continue;
      }
      decodeRefs(element, oldToNewMap, rebindExpressions);
    }
    final JavaCodeStyleManager codeStyleManager = JavaCodeStyleManager.getInstance(project);
    for (PsiFile psiFile : createdFiles) {
      if (psiFile instanceof PsiJavaFile) {
        codeStyleManager.removeRedundantImports((PsiJavaFile)psiFile);
      }
    }
    for (PsiElement expression : rebindExpressions) {
      //filter out invalid elements which are produced by nested elements:
      //new expressions/type elements, like: List<List<String>>; new Foo(new Foo()), etc
      if (expression.isValid()) {
        codeStyleManager.shortenClassReferences(expression);
      }
    }
  });
  new OptimizeImportsProcessor(project, createdFiles.toArray(PsiFile.EMPTY_ARRAY), null).run();
  return createdFiles;
}
/** A class counts as synthetic when it is generated or not backed by a physical file. */
protected static boolean isSynthetic(PsiClass aClass) {
  if (aClass instanceof SyntheticElement) {
    return true;
  }
  return !aClass.isPhysical();
}
/**
 * Copies {@code file} into {@code directory} (or into the recreated
 * {@code relativePath} below it), named after {@code name} when given.
 * Returns null when the user declined to overwrite an existing file.
 */
private static PsiFile copy(@NotNull PsiFile file, PsiDirectory directory, String name, String relativePath, int[] choice) {
  final String targetFileName = getNewFileName(file, name);
  if (relativePath != null && !relativePath.isEmpty()) {
    // Recreate the source-relative directory structure below the target.
    return WriteAction.compute(
      () -> buildRelativeDir(directory, relativePath).findOrCreateTargetDirectory().copyFileFrom(targetFileName, file));
  }
  final boolean declined = CopyFilesOrDirectoriesHandler.checkFileExist(
    directory, choice, file, targetFileName, ExecutionBundle.message("copy.classes.command.name"));
  if (declined) {
    return null;
  }
  return WriteAction.compute(() -> directory.copyFileFrom(targetFileName, file));
}
/**
 * Builds the target file name: {@code name} plus the source file's extension,
 * or the original file name when no new name was requested.
 */
private static String getNewFileName(PsiFile file, String name) {
  if (name == null) {
    return file.getName();
  }
  final String extension = getFileExtension(file);
  return extension.isEmpty() ? name : StringUtil.getQualifiedName(name, extension);
}
/**
 * Returns the file's extension when it owns at least one non-synthetic class,
 * otherwise the empty string.
 */
private static String getFileExtension(PsiFile file) {
  if (!(file instanceof PsiClassOwner)) {
    return "";
  }
  for (final PsiClass psiClass : ((PsiClassOwner)file).getClasses()) {
    if (!isSynthetic(psiClass)) {
      return file.getViewProvider().getVirtualFile().getExtension();
    }
  }
  return "";
}
/**
 * Wraps {@code relativePath} (slash-separated) as a chain of target-directory
 * wrappers rooted at {@code directory}, creating one wrapper per path segment.
 */
@NotNull
private static MoveDirectoryWithClassesProcessor.TargetDirectoryWrapper buildRelativeDir(final @NotNull PsiDirectory directory,
                                                                                         final @NotNull String relativePath) {
  MoveDirectoryWithClassesProcessor.TargetDirectoryWrapper wrapper = null;
  for (String segment : relativePath.split("/")) {
    wrapper = wrapper == null
              ? new MoveDirectoryWithClassesProcessor.TargetDirectoryWrapper(directory, segment)
              : new MoveDirectoryWithClassesProcessor.TargetDirectoryWrapper(wrapper, segment);
  }
  LOG.assertTrue(wrapper != null);
  return wrapper;
}
/**
 * Copies a class via its navigation element (the source counterpart for
 * compiled/decompiled classes), renaming the copy when {@code name} is given.
 */
private static PsiClass copy(PsiClass aClass, String name) {
  final PsiClass classCopy = (PsiClass)aClass.getNavigationElement().copy();
  if (name != null) {
    classCopy.setName(name);
  }
  return classCopy;
}
/** Finds the class with the given name in {@code classes}, or null (also for a null name). */
@Nullable
private static PsiClass findByName(PsiClass[] classes, String name) {
  if (name == null) {
    return null;
  }
  for (PsiClass candidate : classes) {
    if (name.equals(candidate.getName())) {
      return candidate;
    }
  }
  return null;
}
/**
 * Rebinds, within {@code element} only, every reference to an original class so that it points
 * at that class's copy. Each rebound expression is collected into {@code rebindExpressions}.
 *
 * @param element           the element whose subtree is searched (defines the local search scope)
 * @param oldToNewMap       original class -> copied element
 * @param rebindExpressions receives the results of {@code bindToElement} for later processing
 */
private static void rebindExternalReferences(PsiElement element,
                                             Map<PsiClass, PsiElement> oldToNewMap,
                                             Set<? super PsiElement> rebindExpressions) {
  final LocalSearchScope searchScope = new LocalSearchScope(element);
  // Iterate entries directly instead of keySet() followed by a get() per key.
  for (Map.Entry<PsiClass, PsiElement> entry : oldToNewMap.entrySet()) {
    final PsiElement newClass = entry.getValue();
    for (PsiReference reference : ReferencesSearch.search(entry.getKey(), searchScope)) {
      rebindExpressions.add(reference.bindToElement(newClass));
    }
  }
}
/**
 * Rewrites references inside {@code element}: every Java code reference that resolves to an
 * original class in {@code oldToNewMap} is rebound to the corresponding copy.
 *
 * @param element           root of the subtree to process
 * @param oldToNewMap       original class -> copied element
 * @param rebindExpressions receives all rebound expressions
 */
private static void decodeRefs(@NotNull PsiElement element,
                               final Map<PsiClass, PsiElement> oldToNewMap,
                               final Set<? super PsiElement> rebindExpressions) {
  // Collect first, rebind after the traversal — bindToElement mutates the PSI tree and would
  // presumably invalidate the recursive visitor if done in place (TODO confirm).
  final Map<PsiJavaCodeReferenceElement, PsiElement> rebindMap = new LinkedHashMap<>();
  element.accept(new JavaRecursiveElementVisitor() {
    @Override
    public void visitReferenceElement(PsiJavaCodeReferenceElement reference) {
      super.visitReferenceElement(reference);
      decodeRef(reference, oldToNewMap, rebindMap);
    }
  });
  for (Map.Entry<PsiJavaCodeReferenceElement, PsiElement> entry : rebindMap.entrySet()) {
    rebindExpressions.add(entry.getKey().bindToElement(entry.getValue()));
  }
  // Also rebind references found by search within the element (covers non-code-reference usages).
  rebindExternalReferences(element, oldToNewMap, rebindExpressions);
}
/**
 * Records {@code expression} for later rebinding when it resolves to one of the copied classes.
 */
private static void decodeRef(final PsiJavaCodeReferenceElement expression,
                              final Map<PsiClass, PsiElement> oldToNewMap,
                              Map<PsiJavaCodeReferenceElement, PsiElement> rebindExpressions) {
  final PsiElement target = expression.resolve();
  if (target instanceof PsiClass && oldToNewMap.containsKey(target)) {
    rebindExpressions.put(expression, oldToNewMap.get(target));
  }
}
/**
 * Returns the top-level classes associated with {@code element}.
 * <p>
 * Walks up the PSI tree until it reaches either a file or a top-level, non-anonymous class.
 * If the walk lands in a {@link PsiClassOwner} file, all of the file's classes are returned —
 * unless any of them is synthetic, in which case {@code null} is returned (the whole file is
 * rejected). Otherwise the single class found is returned, or {@code null} when nothing suitable
 * was reached.
 */
private static PsiClass @Nullable [] getTopLevelClasses(PsiElement element) {
  while (true) {
    // Stop at a file boundary or once we've reached a named top-level class.
    if (element == null || element instanceof PsiFile) break;
    if (element instanceof PsiClass &&
        element.getParent() != null &&
        ((PsiClass)element).getContainingClass() == null &&
        !(element instanceof PsiAnonymousClass)) {
      break;
    }
    element = element.getParent();
  }
  if (element instanceof PsiClassOwner) {
    PsiClass[] classes = ((PsiClassOwner)element).getClasses();
    ArrayList<PsiClass> buffer = new ArrayList<>();
    for (final PsiClass aClass : classes) {
      // A single synthetic class disqualifies the entire file.
      if (isSynthetic(aClass)) {
        return null;
      }
      buffer.add(aClass);
    }
    return buffer.toArray(PsiClass.EMPTY_ARRAY);
  }
  return element instanceof PsiClass ? new PsiClass[]{(PsiClass)element} : null;
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.tajo.cli;
import java.util.ArrayList;
import java.util.List;
import static org.apache.tajo.cli.ParsedResult.StatementType.META;
import static org.apache.tajo.cli.ParsedResult.StatementType.STATEMENT;
/**
* This is a parser used in tsql to parse multiple SQL lines into SQL statements.
* It helps tsql recognizes the termination of each SQL statement and quotation mark (') while
* parses multiple separate lines.
*/
public class SimpleParser {

  /** States of the line-oriented scanner; see the state-machine diagram on parseScript. */
  public static enum ParsingState {
    TOK_START,      // Start State
    META,           // Meta Command
    STATEMENT,      // Statement
    WITHIN_QUOTE,   // Within Quote
    INVALID,        // Invalid Statement
    STATEMENT_EOS,  // End State (End of Statement)
    META_EOS        // End State (End of Statement)
  }

  ParsingState state = START_STATE;
  int lineNum;

  /**
   * It will be used to store a query statement into Jline history.
   * the query statement for history does not include unnecessary white spaces and new line.
   */
  private StringBuilder historyAppender = new StringBuilder();
  /**
   * It will be used to submit a query statement to the TajoMaster. It just contains a raw query statement string.
   */
  private StringBuilder rawAppender = new StringBuilder();

  public static final ParsingState START_STATE = ParsingState.TOK_START;

  /**
   * <h2>State Machine</h2>
   * All whitespace are ignored in all cases except for
   *
   * <pre>
   * (start) TOK_START --> META ---------------------> META_EOS
   *             |
   *             |
   *             |
   *             |-----------> STATEMENT ----------> STMT_EOS
   *                               \       ^
   *                                \     /
   *                                 \-> WITHIN_QUOTE
   *                                       \    ^
   *                                        \---/
   * </pre>
   *
   * Parses a complete script in one shot: feeds all lines through a fresh parser and then
   * triggers end-of-file handling so trailing statements are flushed.
   *
   * @param str the whole script text
   * @return parsed statements and meta commands, in order of appearance
   * @throws InvalidStatementException e.g. on an unterminated quoted string
   */
  public static List<ParsedResult> parseScript(String str) throws InvalidStatementException {
    SimpleParser parser = new SimpleParser();
    List<ParsedResult> parsedResults = new ArrayList<ParsedResult>();
    parsedResults.addAll(parser.parseLines(str));
    parsedResults.addAll(parser.EOF());
    return parsedResults;
  }

  /**
   * Consumes one chunk of input (possibly several lines) and returns every statement or meta
   * command completed within it. Incomplete statements stay buffered until a later call or
   * {@link #EOF()}.
   */
  public List<ParsedResult> parseLines(String str) throws InvalidStatementException {
    List<ParsedResult> statements = new ArrayList<ParsedResult>();
    int lineStartIdx;
    int idx = 0;
    char [] chars = str.toCharArray();

    // if parsing continues, it means that the previous line is broken by '\n'.
    // So, we should add new line to rawAppender.
    if (isStatementContinue()) {
      rawAppender.append("\n");
    }

    while(idx < str.length()) {

      // initialization for new statement
      if (state == ParsingState.TOK_START) {
        lineNum = 0;

        // ignore all whitespace before start
        if (Character.isWhitespace(chars[idx])) {
          idx++;
          continue;
        }
      }

      ////////////////////////////
      // TOK_START --> META
      ////////////////////////////
      lineStartIdx = idx;
      if (state == ParsingState.TOK_START && chars[idx] == '\\') {
        state = ParsingState.META;

        ////////////////////////////
        // META --> TOK_EOS
        ////////////////////////////
        while (state != ParsingState.META_EOS && idx < chars.length) {
          char character = chars[idx++];
          if (isEndOfMeta(character)) {
            state = ParsingState.META_EOS;
          } else if (Character.isWhitespace(character)) {
            // skip
          }
        }

        if (state == ParsingState.META_EOS) {
          // idx - 1 drops the terminator (';' or '\n') from the recorded command
          historyAppender.append(str.subSequence(lineStartIdx, idx - 1).toString());
          appendToRawStatement(str.subSequence(lineStartIdx, idx - 1).toString(), true);
        } else {
          historyAppender.append(str.subSequence(lineStartIdx, idx).toString());
          appendToRawStatement(str.subSequence(lineStartIdx, idx).toString(), true);
        }

      } else if (isInlineCommentStart(chars, idx)) {
        idx = consumeInlineComment(chars, idx);
        appendToRawStatement(str.subSequence(lineStartIdx, idx).toString(), true);

      /////////////////////////////////
      //    TOK_START -> STATEMENT
      // or TOK_STATEMENT -> STATEMENT
      ////////////////////////////////
      } else if (isStatementContinue() || isStatementStart(chars[idx])) {
        if (!isStatementContinue()) { // TOK_START -> STATEMENT
          state = ParsingState.STATEMENT;
          rawAppender.append("\n");
        }

        while (!isTerminateState(state) && idx < chars.length) {
          char character = chars[idx++];

          ///////////////////////////////////////////////////////
          // in-statement loop BEGIN
          ///////////////////////////////////////////////////////
          if (isEndOfStatement(character)) {
            state = ParsingState.STATEMENT_EOS;

          } else if (state == ParsingState.STATEMENT && character == '\n') {
            appendToBothStatements(chars, lineStartIdx, idx, 1); // omit new line character '\n' from history statement
            lineStartIdx = idx;

          } else if (state == ParsingState.STATEMENT && character == '\'') { // TOK_STATEMENT -> WITHIN_QUOTE
            state = ParsingState.WITHIN_QUOTE;

            if (idx < chars.length) {
              character = chars[idx++];
            } else {
              continue;
            }

          // idx points the characters followed by the current character. So, we should use 'idx - 1'
          // in order to point the current character.
          } else if (state == ParsingState.STATEMENT && idx < chars.length && isInlineCommentStart(chars, idx - 1)) {
            idx++;
            appendToBothStatements(chars, lineStartIdx, idx, 2); // omit two dash characters '--' from history statement
            int commentStartIdx = idx;
            idx = consumeInlineComment(chars, idx);
            appendToRawStatement(str.subSequence(commentStartIdx, idx).toString(), true);
            lineStartIdx = idx;
          }
          ///////////////////////////////////////////////////////
          // in-statement loop END
          ///////////////////////////////////////////////////////

          if (state == ParsingState.WITHIN_QUOTE) {
            while(idx < chars.length) {
              ///////////////////////////////
              // WITHIN_QUOTE --> STATEMENT
              ///////////////////////////////
              if (character == '\'') {
                state = ParsingState.STATEMENT;
                break;
              }
              character = chars[idx++];
            }
            if (state == ParsingState.WITHIN_QUOTE && character == '\'') {
              state = ParsingState.STATEMENT;
            }
          }
        }

        // After all characters are consumed
        if (state == ParsingState.STATEMENT_EOS) { // If one query statement is terminated
          appendToBothStatements(chars, lineStartIdx, idx - 1); // skip semicolon (;)
        } else {
          appendToBothStatements(chars, lineStartIdx, idx);

          // if it is not within quote and there is no space between lines, adds a space.
          if (state == ParsingState.STATEMENT && (historyAppender.charAt(historyAppender.length() - 1) != ' ')) {
            historyAppender.append(" ");
            rawAppender.append("\n");
          }
        }
      } else { // skip unknown character
        idx++;
      }

      lineNum++;
      // Meta commands are line-terminated, so a pending META is flushed as if at end of file.
      statements.addAll(doProcessEndOfStatement(state == ParsingState.META));
    }

    return statements;
  }

  /**
   * Append the range of characters into a given StringBuilder instance.
   *
   * @param chars Characters
   * @param fromIdx start character index
   * @param toIdx end character index
   */
  private void appendToStatement(StringBuilder builder, char[] chars, int fromIdx, int toIdx) {
    builder.append(chars, fromIdx, toIdx - fromIdx);
  }

  /**
   * Append the range of characters into both history and raw appenders. It omits the number of characters specified by
   * <code>omitCharNums</code> from the history statement only.
   *
   * @param chars Characters
   * @param fromIdx start character index
   * @param toIdx end character index
   * @param omitCharNums how many characters will be omitted from history statement
   */
  private void appendToBothStatements(char[] chars, int fromIdx, int toIdx, int omitCharNums) {
    appendToStatement(historyAppender, chars, fromIdx, toIdx - omitCharNums);
    if (historyAppender.charAt(historyAppender.length() - 1) != ' ') {
      historyAppender.append(" ");
    }

    appendToStatement(rawAppender, chars, fromIdx, toIdx);
  }

  /**
   * Append the range of characters into both history and raw appenders.
   *
   * @param chars Characters
   * @param fromIdx start character index
   * @param toIdx end character index
   */
  private void appendToBothStatements(char[] chars, int fromIdx, int toIdx) {
    historyAppender.append(chars, fromIdx, toIdx - fromIdx);
    rawAppender.append(chars, fromIdx, toIdx - fromIdx);
  }

  /** Skips to the end of an inline comment ('--' to end of line) and returns the new index. */
  private int consumeInlineComment(char [] chars, int currentIdx) {
    currentIdx++;
    while (currentIdx < chars.length && !isNewLine(chars[currentIdx])) {
      currentIdx++;
    }
    return currentIdx;
  }

  /**
   * Appends {@code str} to the raw statement buffer, inserting a line feed first when
   * {@code addLF} is requested and the buffer does not already end with one.
   */
  private void appendToRawStatement(String str, boolean addLF) {
    if (!str.isEmpty() && !"\n".equals(str) &&
        rawAppender.length() > 0 && addLF && rawAppender.charAt(rawAppender.length() - 1) != '\n') {
      // FIX: insert the missing line feed before appending. Previously both branches were
      // identical, which made the addLF parameter and this whole condition a no-op.
      rawAppender.append("\n").append(str);
    } else {
      rawAppender.append(str);
    }
  }

  private static boolean isEndOfMeta(char character) {
    return character == ';' || character == '\n';
  }

  private static boolean isEndOfStatement(char character) {
    return character == ';';
  }

  /**
   * It checks if inline comment '--' begins.
   * @param chars characters being scanned
   * @param idx   index of the first candidate dash
   * @return true if an inline comment starts at idx
   */
  private boolean isInlineCommentStart(char[] chars, int idx) {
    if (idx >= chars.length - 1) {
      return false;
    }
    return (state == ParsingState.STATEMENT || state == ParsingState.TOK_START) &&
        (chars[idx] == '-' && chars[idx + 1] == '-');
  }

  private boolean isNewLine(char character) {
    return character == '\n';
  }

  private boolean isStatementStart(char character) {
    return state == ParsingState.TOK_START && (Character.isLetterOrDigit(character));
  }

  private boolean isStatementContinue() {
    return state == ParsingState.WITHIN_QUOTE || state == ParsingState.STATEMENT;
  }

  /**
   * process all parsed statements so far and return a list of parsed results.
   *
   * @param endOfFile TRUE if the end of file.
   * @return the list of parsed results, each of result contains one query statement or meta command.
   * @throws InvalidStatementException if the input ends inside a quoted string
   */
  private List<ParsedResult> doProcessEndOfStatement(boolean endOfFile) throws InvalidStatementException {
    List<ParsedResult> parsedResults = new ArrayList<ParsedResult>();
    String errorMessage = "";
    if (endOfFile) {
      // Promote in-progress states to their terminal equivalents.
      if (state == ParsingState.META) {
        state = ParsingState.META_EOS;
      } else if (state == ParsingState.STATEMENT) {
        state = ParsingState.STATEMENT_EOS;
      } else if (state == ParsingState.WITHIN_QUOTE) {
        state = ParsingState.INVALID;
        errorMessage = "unterminated quoted string at LINE " + lineNum;
      }
    }

    if (isTerminateState(state)) {
      String historyStatement = historyAppender.toString();
      String rawStatement = rawAppender.toString();
      if (state == ParsingState.META_EOS) {
        parsedResults.add(new ParsedResult(META, rawStatement, historyStatement));
        state = ParsingState.TOK_START;
      } else if (state == ParsingState.STATEMENT_EOS) {
        parsedResults.add(new ParsedResult(STATEMENT, rawStatement, historyStatement));
      } else {
        throw new InvalidStatementException("ERROR: " + errorMessage);
      }

      // reset all states
      historyAppender.delete(0, historyAppender.length());
      rawAppender.delete(0, rawAppender.length());
      state = START_STATE;
    }

    return parsedResults;
  }

  /**
   * It manually triggers the end of file.
   *
   * @return the list of parsed results, each of result contains one query statement or meta command.
   * @throws InvalidStatementException if the input ends inside a quoted string
   */
  public List<ParsedResult> EOF() throws InvalidStatementException {
    return doProcessEndOfStatement(true);
  }

  private static boolean isTerminateState(ParsingState state) {
    return (state == ParsingState.META_EOS || state == ParsingState.STATEMENT_EOS || state == ParsingState.INVALID);
  }

  public ParsingState getState() {
    return state;
  }

  public String toString() {
    return "[" + state.name() + "]: " + historyAppender.toString();
  }
}
| |
/*
* Created on 21/02/2004
*
* By: plh@pha.com.au
*
* $Id$
*
*/
import com.perfdynamics.pdq.*;
import com.braju.format.*; // Used for fprintf/sprintf!
/**
* @author plh
*
* To change the template for this generated type comment go to
* Window>Preferences>Java>Code Generation>Code and Comments
*/
/**
 * PDQ (Pretty Damn Quick) queueing model of a client/server baseline scenario,
 * ported from the C version accompanying the 1993 CMG paper. Solves an open
 * queueing network and prints per-transaction latency and per-device utilization.
 */
public class Ch8_Scaleup {

	/** Simple (id, label) pair describing a disk device. */
	static class Device {
		int id;
		String label;
	}

	public static void main(String[] args) {
		Parameters p = new Parameters();

		// ****************************************************
		//  Model Parameters
		// ****************************************************

		// Name of this model ...
		String scenario = "C/S Baseline";

		// Useful multipliers ...
		int K = 1024;
		double MIPS = 1E6;
		double Mbps = 1E6;

		// Model parameters ...
		int USERS = 125;
		int FS_DISKS = 1;
		int MF_DISKS = 4;

		double PC_MIPS = (27 * MIPS);
		double FS_MIPS = (41 * MIPS);
		double GW_MIPS = (10 * MIPS);
		double MF_MIPS = (21 * MIPS);
		double TR_Mbps = (4 * Mbps);
		double TR_fact = (2.5); // fudge factor

		// Retained from the C original for documentation; the demand matrix below is
		// sized explicitly (20 x 30 covers every id actually used).
		int MAXPROC = 20;
		int MAXDEV = 50;

		// Computing Device IDs
		int PC = 0;
		int FS = 1;
		int GW = 2;
		int MF = 3;
		int TR = 4;
		int FDA = 10;
		int MDA = 20;

		// Transaction IDs
		int CD = 0; // Category Display
		int RQ = 1; // Remote Query
		int SU = 2; // Status Update

		// Process IDs from 1993 paper
		int CD_Req = 1;
		int CD_Rpy = 15;
		int RQ_Req = 2;
		int RQ_Rpy = 16;
		int SU_Req = 3;
		int SU_Rpy = 17;
		int Req_CD = 4;
		int Req_RQ = 5;
		int Req_SU = 6;
		int CD_Msg = 12;
		int RQ_Msg = 13;
		int SU_Msg = 14;
		int GT_Snd = 7;
		int GT_Rcv = 11;
		int MF_CD = 8;
		int MF_RQ = 9;
		int MF_SU = 10;
		int LAN_Tx = 18;

		int noNodes;
		int noStreams;

		double[][] demand = new double[20][30];

		// This should go into pdq.Lib.h ...
		// typedef struct {
		// int id
		// char label[MAXCHARS]
		// } devarray_type

		// Variable we plan to import...
		// txCD[MAXCHARS],
		// txRQ[MAXCHARS],
		// txSU[MAXCHARS]
		// demand[MAXPROC][MAXDEV],
		// util[MAXDEV],
		// X
		// ulan,
		// ufs,
		// udsk[MAXDEV],
		// uws,
		// ugw,
		// umf,
		// udasd[MAXDEV]
		// work,
		// dev,
		// i,
		// j
		//
		// // This should go into pdq.Build.c ...
		// devarray_type *FDarray
		// devarray_type *MDarray
		// if ((FDarray = (devarray_type *) calloc(sizeof(devarray_type), 10))
		// == NULL)
		// errmsg("", "FDarray allocation failed!\n")
		// if ((MDarray = (devarray_type *) calloc(sizeof(devarray_type), 10))
		// == NULL)
		// errmsg("", "MDarray allocation failed!\n")

		Device[] FDarray = new Device[FS_DISKS];
		Device[] MDarray = new Device[MF_DISKS];

		for (int i = 0; i < FS_DISKS; i++) {
			FDarray[i] = new Device();
			FDarray[i].id = FDA + i;
			FDarray[i].label = Format.sprintf("FD%d", p.add(i));
		}

		for (int i = 0; i < MF_DISKS; i++) {
			MDarray[i] = new Device();
			MDarray[i].id = MDA + i;
			MDarray[i].label = Format.sprintf("MD%d", p.add(i));
		}

		// CPU service times are calculated from MIPS Instruction counts in
		// tables presented in original 1993 CMG paper.
		demand[CD_Req][PC] = 200 * K / PC_MIPS;
		demand[CD_Rpy][PC] = 100 * K / PC_MIPS;
		demand[RQ_Req][PC] = 150 * K / PC_MIPS;
		demand[RQ_Rpy][PC] = 200 * K / PC_MIPS;
		demand[SU_Req][PC] = 300 * K / PC_MIPS;
		demand[SU_Rpy][PC] = 300 * K / PC_MIPS;

		demand[Req_CD][FS] = 50 * K / FS_MIPS;
		demand[Req_RQ][FS] = 70 * K / FS_MIPS;
		demand[Req_SU][FS] = 10 * K / FS_MIPS;
		demand[CD_Msg][FS] = 35 * K / FS_MIPS;
		demand[RQ_Msg][FS] = 35 * K / FS_MIPS;
		demand[SU_Msg][FS] = 35 * K / FS_MIPS;

		demand[GT_Snd][GW] = 50 * K / GW_MIPS;
		demand[GT_Rcv][GW] = 50 * K / GW_MIPS;

		demand[MF_CD][MF] = 50 * K / MF_MIPS;
		demand[MF_RQ][MF] = 150 * K / MF_MIPS;
		demand[MF_SU][MF] = 20 * K / MF_MIPS;

		// packets generated at each of the following sources ...
		demand[LAN_Tx][PC] = 2 * K * TR_fact / TR_Mbps;
		demand[LAN_Tx][FS] = 2 * K * TR_fact / TR_Mbps;
		demand[LAN_Tx][GW] = 2 * K * TR_fact / TR_Mbps;

		// File server Disk I/Os = //accesses x caching / (max IOs/Sec)
		for (int i = 0; i < FS_DISKS; i++) {
			demand[Req_CD][FDarray[i].id] = (1.0 * 0.5 / 128.9) / FS_DISKS;
			demand[Req_RQ][FDarray[i].id] = (1.5 * 0.5 / 128.9) / FS_DISKS;
			demand[Req_SU][FDarray[i].id] = (0.2 * 0.5 / 128.9) / FS_DISKS;
			demand[CD_Msg][FDarray[i].id] = (1.0 * 0.5 / 128.9) / FS_DISKS;
			demand[RQ_Msg][FDarray[i].id] = (1.5 * 0.5 / 128.9) / FS_DISKS;
			demand[SU_Msg][FDarray[i].id] = (0.5 * 0.5 / 128.9) / FS_DISKS;
		}

		// Mainframe DASD I/Os = (//accesses / (max IOs/Sec)) / //disks
		for (int i = 0; i < MF_DISKS; i++) {
			demand[MF_CD][MDarray[i].id] = (2.0 / 60.24) / MF_DISKS;
			demand[MF_RQ][MDarray[i].id] = (4.0 / 60.24) / MF_DISKS;
			demand[MF_SU][MDarray[i].id] = (1.0 / 60.24) / MF_DISKS;
		}

		// Start building the PDQ model ...
		PDQ pdq = new PDQ();
		pdq.Init(scenario);

		// Define physical resources as queues ...
		noNodes = pdq.CreateNode("PC", Node.CEN, QDiscipline.FCFS);
		noNodes = pdq.CreateNode("FS", Node.CEN, QDiscipline.FCFS);
		for (int i = 0; i < FS_DISKS; i++) {
			noNodes = pdq.CreateNode(FDarray[i].label, Node.CEN,
					QDiscipline.FCFS);
		}
		noNodes = pdq.CreateNode("GW", Node.CEN, QDiscipline.FCFS);
		noNodes = pdq.CreateNode("MF", Node.CEN, QDiscipline.FCFS);
		for (int i = 0; i < MF_DISKS; i++) {
			noNodes = pdq.CreateNode(MDarray[i].label, Node.CEN,
					QDiscipline.FCFS);
		}
		noNodes = pdq.CreateNode("TR", Node.CEN, QDiscipline.FCFS);

		// NOTE: Although the Token Ring LAN is a passive computational device,
		// it is treated as a separate node so as to agree to the results
		// presented in the original CMG'93 paper.

		// Assign transaction names ...
		String txCD = "CatDsply";
		String txRQ = "RemQuote";
		String txSU = "StatusUp";

		// Define an OPEN circuit aggregate workload ...
		noStreams = pdq.CreateOpen(txCD, USERS * 4.0 / 60.0);
		noStreams = pdq.CreateOpen(txRQ, USERS * 8.0 / 60.0);
		noStreams = pdq.CreateOpen(txSU, USERS * 1.0 / 60.0);

		// Define the service demands on each physical resource ...
		// CD request + reply chain from workflow diagram
		pdq
				.SetDemand("PC", txCD, demand[CD_Req][PC]
						+ (5 * demand[CD_Rpy][PC]));
		pdq
				.SetDemand("FS", txCD, demand[Req_CD][FS]
						+ (5 * demand[CD_Msg][FS]));
		for (int i = 0; i < FS_DISKS; i++) {
			pdq.SetDemand(FDarray[i].label, txCD, demand[Req_CD][FDarray[i].id]
					+ (5 * demand[CD_Msg][FDarray[i].id]));
		}
		pdq
				.SetDemand("GW", txCD, demand[GT_Snd][GW]
						+ (5 * demand[GT_Rcv][GW]));
		pdq.SetDemand("MF", txCD, demand[MF_CD][MF]);
		for (int i = 0; i < MF_DISKS; i++) {
			pdq.SetDemand(MDarray[i].label, txCD, demand[MF_CD][MDarray[i].id]);
		}

		// NOTE: Synchronous process execution causes data for the CD
		// transaction to cross the LAN 12 times as depicted in the following
		// parameterization of pdq.SetDemand.
		pdq.SetDemand("TR", txCD, (1 * demand[LAN_Tx][PC])
				+ (1 * demand[LAN_Tx][FS]) + (5 * demand[LAN_Tx][GW])
				+ (5 * demand[LAN_Tx][FS]));

		// RQ request + reply chain ...
		pdq
				.SetDemand("PC", txRQ, demand[RQ_Req][PC]
						+ (3 * demand[RQ_Rpy][PC]));
		pdq
				.SetDemand("FS", txRQ, demand[Req_RQ][FS]
						+ (3 * demand[RQ_Msg][FS]));
		for (int i = 0; i < FS_DISKS; i++) {
			pdq.SetDemand(FDarray[i].label, txRQ, demand[Req_RQ][FDarray[i].id]
					+ (3 * demand[RQ_Msg][FDarray[i].id]));
		}
		pdq
				.SetDemand("GW", txRQ, demand[GT_Snd][GW]
						+ (3 * demand[GT_Rcv][GW]));
		pdq.SetDemand("MF", txRQ, demand[MF_RQ][MF]);
		for (int i = 0; i < MF_DISKS; i++) {
			pdq.SetDemand(MDarray[i].label, txRQ, demand[MF_RQ][MDarray[i].id]);
		}
		pdq.SetDemand("TR", txRQ, (1 * demand[LAN_Tx][PC])
				+ (1 * demand[LAN_Tx][FS]) + (3 * demand[LAN_Tx][GW])
				+ (3 * demand[LAN_Tx][FS]));

		// SU request + reply chain ...
		pdq.SetDemand("PC", txSU, demand[SU_Req][PC] + demand[SU_Rpy][PC]);
		// NOTE(review): "TR" demand for txSU is set three times below; if PDQ's SetDemand
		// overwrites (rather than accumulates), only the final aggregate call takes effect —
		// confirm against the PDQ library semantics.
		pdq.SetDemand("TR", txSU, demand[LAN_Tx][PC]);
		pdq.SetDemand("FS", txSU, demand[Req_SU][FS] + demand[SU_Msg][FS]);
		for (int i = 0; i < FS_DISKS; i++) {
			pdq.SetDemand(FDarray[i].label, txSU, demand[Req_SU][FDarray[i].id]
					+ demand[SU_Msg][FDarray[i].id]);
		}
		pdq.SetDemand("TR", txSU, demand[LAN_Tx][FS]);
		pdq.SetDemand("GW", txSU, demand[GT_Snd][GW] + demand[GT_Rcv][GW]);
		pdq.SetDemand("MF", txSU, demand[MF_SU][MF]);
		for (int i = 0; i < MF_DISKS; i++) {
			pdq.SetDemand(MDarray[i].label, txSU, demand[MF_SU][MDarray[i].id]);
		}
		pdq.SetDemand("TR", txSU, (1 * demand[LAN_Tx][PC])
				+ (1 * demand[LAN_Tx][FS]) + (1 * demand[LAN_Tx][GW])
				+ (1 * demand[LAN_Tx][FS]));

		pdq.Solve(Methods.CANON);

		// pdq.Report();

		// Break out Tx response times and device utilizations ...
		Format.printf("*** PDQ Breakout \"%s\" (%d clients) ***\n\n", p.add(
				scenario).add(USERS));

		double[] util = new double[noNodes];
		for (int dev = 0; dev < noNodes; dev++) {
			util[dev] = 0.0; // reset array
			for (int work = 0; work < noStreams; work++) {
				util[dev] += 100 * pdq.GetUtilization(pdq.node[dev].devname,
						pdq.job[work].trans.name, Job.TRANS);
			}
		}

		// Order of print out follows that in 1993 CMG paper.
		Format.printf("Transaction \tLatency(Secs)\n");
		Format.printf("----------- \t-----------\n");
		for (int work = 0; work < noStreams; work++) {
			Format.printf("%s\t%7.4f\n", p.add(pdq.job[work].trans.name).add(
					pdq.job[work].trans.sys.response));
		}
		Format.printf("\n\n");

		double uws = 0.0;
		double ugw = 0.0;
		double ufs = 0.0;
		double umf = 0.0;
		double ulan = 0.0;
		double[] udsk = new double[FS_DISKS];
		double[] udasd = new double[MF_DISKS];

		// Aggregate the per-node utilizations into per-device-class totals.
		for (int dev = 0; dev < noNodes; dev++) {
			if (pdq.node[dev].devname.equals("PC")) {
				uws += util[dev];
			}
			if (pdq.node[dev].devname.equals("GW")) {
				ugw += util[dev];
			}
			if (pdq.node[dev].devname.equals("FS")) {
				ufs += util[dev];
			}
			for (int i = 0; i < FS_DISKS; i++) {
				if (pdq.node[dev].devname.equals(FDarray[i].label)) {
					udsk[i] += util[dev];
				}
			}
			if (pdq.node[dev].devname.equals("MF")) {
				umf += util[dev];
			}
			for (int i = 0; i < MF_DISKS; i++) {
				if (pdq.node[dev].devname.equals(MDarray[i].label)) {
					udasd[i] += util[dev];
				}
			}
			if (pdq.node[dev].devname.equals("TR")) {
				ulan += util[dev];
			}
		}

		Format.printf("Node      \t%% Utilization\n");
		Format.printf("----      \t--------------\n");
		Format.printf("%s\t%7.4f\n", p.add("Token Ring ").add(ulan));
		Format.printf("%s\t%7.4f\n", p.add("Desktop PC ").add(uws / USERS));
		Format.printf("%s\t%7.4f\n", p.add("FileServer ").add(ufs));
		for (int i = 0; i < FS_DISKS; i++) {
			Format.printf("%s%d\t%7.4f\n", p.add("FS Disk").add(FDarray[i].id)
					.add(udsk[i]));
		}
		Format.printf("%s\t%7.4f\n", p.add("Gateway SNA").add(ugw));
		Format.printf("%s\t%7.4f\n", p.add("Mainframe  ").add(umf));
		for (int i = 0; i < MF_DISKS; i++) {
			Format.printf("%s%d\t%7.4f\n", p.add("MFrame DASD").add(
					MDarray[i].id).add(udasd[i]));
		}
	} // main
} // Ch8_Scaleup
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prestosql.client;
import com.google.common.base.Splitter;
import com.google.common.collect.ImmutableMap;
import com.sun.security.auth.module.Krb5LoginModule;
import io.airlift.units.Duration;
import okhttp3.Authenticator;
import okhttp3.Interceptor;
import okhttp3.Request;
import okhttp3.Response;
import okhttp3.Route;
import org.ietf.jgss.GSSContext;
import org.ietf.jgss.GSSCredential;
import org.ietf.jgss.GSSException;
import org.ietf.jgss.GSSManager;
import org.ietf.jgss.Oid;
import javax.annotation.concurrent.GuardedBy;
import javax.security.auth.Subject;
import javax.security.auth.login.AppConfigurationEntry;
import javax.security.auth.login.Configuration;
import javax.security.auth.login.LoginContext;
import javax.security.auth.login.LoginException;
import java.io.File;
import java.io.IOException;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.security.Principal;
import java.security.PrivilegedActionException;
import java.security.PrivilegedExceptionAction;
import java.util.Base64;
import java.util.Locale;
import java.util.Optional;
import static com.google.common.base.CharMatcher.whitespace;
import static com.google.common.base.Throwables.throwIfInstanceOf;
import static com.google.common.base.Throwables.throwIfUnchecked;
import static com.google.common.net.HttpHeaders.AUTHORIZATION;
import static com.google.common.net.HttpHeaders.WWW_AUTHENTICATE;
import static java.lang.Boolean.getBoolean;
import static java.lang.String.format;
import static java.util.Objects.requireNonNull;
import static java.util.concurrent.TimeUnit.SECONDS;
import static javax.security.auth.login.AppConfigurationEntry.LoginModuleControlFlag.REQUIRED;
import static org.ietf.jgss.GSSContext.INDEFINITE_LIFETIME;
import static org.ietf.jgss.GSSCredential.DEFAULT_LIFETIME;
import static org.ietf.jgss.GSSCredential.INITIATE_ONLY;
import static org.ietf.jgss.GSSName.NT_HOSTBASED_SERVICE;
import static org.ietf.jgss.GSSName.NT_USER_NAME;
// TODO: This class is similar to SpnegoAuthentication in Airlift. Consider extracting a library.
public class SpnegoHandler
implements Interceptor, Authenticator
{
// HTTP authentication scheme name used in Authorization / WWW-Authenticate headers.
private static final String NEGOTIATE = "Negotiate";
private static final Duration MIN_CREDENTIAL_LIFETIME = new Duration(60, SECONDS);

private static final GSSManager GSS_MANAGER = GSSManager.getInstance();

// Standard OIDs for the SPNEGO pseudo-mechanism and the Kerberos V5 mechanism.
private static final Oid SPNEGO_OID = createOid("1.3.6.1.5.5.2");
private static final Oid KERBEROS_OID = createOid("1.2.840.113554.1.2.2");

private final String remoteServiceName;
private final boolean useCanonicalHostname;
private final Optional<String> principal;
private final Optional<File> keytab;
private final Optional<File> credentialCache;

// Cached login session; replaced lazily when its credential nears expiry.
@GuardedBy("this")
private Session clientSession;
/**
 * @param remoteServiceName    Kerberos service name of the remote server (e.g. {@code HTTP})
 * @param useCanonicalHostname whether to canonicalize the host via DNS before building the service principal
 * @param principal            client principal to authenticate as, if any
 * @param kerberosConfig       krb5.conf to install via the {@code java.security.krb5.conf} system property
 * @param keytab               keytab file for the client principal, if any
 * @param credentialCache      Kerberos ticket cache to use, if any
 */
public SpnegoHandler(
        String remoteServiceName,
        boolean useCanonicalHostname,
        Optional<String> principal,
        Optional<File> kerberosConfig,
        Optional<File> keytab,
        Optional<File> credentialCache)
{
    this.remoteServiceName = requireNonNull(remoteServiceName, "remoteServiceName is null");
    this.useCanonicalHostname = useCanonicalHostname;
    this.principal = requireNonNull(principal, "principal is null");
    this.keytab = requireNonNull(keytab, "keytab is null");
    this.credentialCache = requireNonNull(credentialCache, "credentialCache is null");
    // NOTE: mutates JVM-global state; the last handler constructed with a config wins.
    kerberosConfig.ifPresent(file -> System.setProperty("java.security.krb5.conf", file.getAbsolutePath()));
}
/**
 * Eagerly attaches a SPNEGO token to the outgoing request when credentials are available;
 * if token generation fails, the request proceeds unauthenticated and the server may
 * challenge via {@link #authenticate(Route, Response)}.
 */
@Override
public Response intercept(Chain chain)
        throws IOException
{
    Request request = chain.request();
    try {
        return chain.proceed(authenticate(request));
    }
    catch (ClientException ignored) {
        // fall back to an unauthenticated attempt
        return chain.proceed(request);
    }
}
/**
 * OkHttp challenge callback. Returns a request carrying a fresh SPNEGO token, or
 * {@code null} (meaning "give up") when we already sent a Negotiate credential or
 * the server did not ask for Negotiate.
 */
@Override
public Request authenticate(Route route, Response response)
{
    // skip if we already tried or were not asked for Kerberos
    if (response.request().headers(AUTHORIZATION).stream().anyMatch(SpnegoHandler::isNegotiate) ||
            response.headers(WWW_AUTHENTICATE).stream().noneMatch(SpnegoHandler::isNegotiate)) {
        return null;
    }
    return authenticate(response.request());
}
/**
 * Returns true when the header value's first whitespace-delimited token is the
 * Negotiate scheme (case-insensitive).
 */
private static boolean isNegotiate(String value)
{
    String scheme = Splitter.on(whitespace()).split(value).iterator().next();
    return scheme.equalsIgnoreCase(NEGOTIATE);
}
/**
 * Returns a copy of {@code request} with an {@code Authorization: Negotiate <token>} header
 * built from a freshly generated SPNEGO token for the target host.
 */
private Request authenticate(Request request)
{
    String host = request.url().host();
    String servicePrincipal = makeServicePrincipal(remoteServiceName, host, useCanonicalHostname);
    String encodedToken = Base64.getEncoder().encodeToString(generateToken(servicePrincipal));
    return request.newBuilder()
            .header(AUTHORIZATION, format("%s %s", NEGOTIATE, encodedToken))
            .build();
}
/**
 * Generates an initial SPNEGO token for {@code servicePrincipal} using the cached
 * login session's credential.
 *
 * @throws ClientException wrapping any {@link GSSException} or {@link LoginException}
 */
private byte[] generateToken(String servicePrincipal)
{
    GSSContext context = null;
    try {
        Session session = getSession();
        // GSS calls must run as the logged-in subject so the Kerberos credential is visible.
        context = doAs(session.getLoginContext().getSubject(), () -> {
            GSSContext result = GSS_MANAGER.createContext(
                    GSS_MANAGER.createName(servicePrincipal, NT_HOSTBASED_SERVICE),
                    SPNEGO_OID,
                    session.getClientCredential(),
                    INDEFINITE_LIFETIME);
            result.requestMutualAuth(true);
            result.requestConf(true);
            result.requestInteg(true);
            result.requestCredDeleg(false);
            return result;
        });

        // Empty input token: this is the first leg of the SPNEGO exchange.
        byte[] token = context.initSecContext(new byte[0], 0, 0);
        if (token == null) {
            throw new LoginException("No token generated from GSS context");
        }
        return token;
    }
    catch (GSSException | LoginException e) {
        throw new ClientException(format("Kerberos error for [%s]: %s", servicePrincipal, e.getMessage()), e);
    }
    finally {
        try {
            if (context != null) {
                context.dispose();
            }
        }
        catch (GSSException ignored) {
            // best-effort cleanup; the token (or the failure) has already been produced
        }
    }
}
/**
 * Returns the cached login session, creating a fresh one when none exists or the
 * current one needs refreshing. Synchronized to guard {@code clientSession}.
 */
private synchronized Session getSession()
        throws LoginException, GSSException
{
    Session session = clientSession;
    if (session == null || session.needsRefresh()) {
        session = createSession();
        clientSession = session;
    }
    return session;
}
/**
 * Performs a JAAS Kerberos login (via keytab and/or ticket cache, per the constructor
 * options) and wraps the resulting subject's GSS credential in a {@link Session}.
 */
private Session createSession()
        throws LoginException, GSSException
{
    // TODO: do we need to call logout() on the LoginContext?

    // Inline Configuration avoids requiring an external JAAS config file.
    LoginContext loginContext = new LoginContext("", null, null, new Configuration()
    {
        @Override
        public AppConfigurationEntry[] getAppConfigurationEntry(String name)
        {
            ImmutableMap.Builder<String, String> options = ImmutableMap.builder();
            options.put("refreshKrb5Config", "true");
            options.put("doNotPrompt", "true");
            options.put("useKeyTab", "true");

            if (getBoolean("presto.client.debugKerberos")) {
                options.put("debug", "true");
            }

            keytab.ifPresent(file -> options.put("keyTab", file.getAbsolutePath()));

            credentialCache.ifPresent(file -> {
                options.put("ticketCache", file.getAbsolutePath());
                options.put("useTicketCache", "true");
                options.put("renewTGT", "true");
            });

            principal.ifPresent(value -> options.put("principal", value));

            return new AppConfigurationEntry[] {
                    new AppConfigurationEntry(Krb5LoginModule.class.getName(), REQUIRED, options.build())
            };
        }
    });

    loginContext.login();
    Subject subject = loginContext.getSubject();
    // Assumes the login populated exactly the principal we authenticated as.
    Principal clientPrincipal = subject.getPrincipals().iterator().next();
    GSSCredential clientCredential = doAs(subject, () -> GSS_MANAGER.createCredential(
            GSS_MANAGER.createName(clientPrincipal.getName(), NT_USER_NAME),
            DEFAULT_LIFETIME,
            KERBEROS_OID,
            INITIATE_ONLY));

    return new Session(loginContext, clientCredential);
}
private static String makeServicePrincipal(String serviceName, String hostName, boolean useCanonicalHostname)
{
String serviceHostName = hostName;
if (useCanonicalHostname) {
serviceHostName = canonicalizeServiceHostName(hostName);
}
return format("%s@%s", serviceName, serviceHostName.toLowerCase(Locale.US));
}
    /**
     * Resolves {@code hostName} to a fully qualified canonical host name via DNS.
     *
     * @throws ClientException if resolution fails or the canonical name is still "localhost"
     */
    private static String canonicalizeServiceHostName(String hostName)
    {
        try {
            InetAddress address = InetAddress.getByName(hostName);
            String fullHostName;
            if ("localhost".equalsIgnoreCase(address.getHostName())) {
                // Loopback: ask for this machine's own canonical name instead.
                fullHostName = InetAddress.getLocalHost().getCanonicalHostName();
            }
            else {
                fullHostName = address.getCanonicalHostName();
            }
            // A Kerberos service principal needs a real FQDN, never "localhost".
            if (fullHostName.equalsIgnoreCase("localhost")) {
                throw new ClientException("Fully qualified name of localhost should not resolve to 'localhost'. System configuration error?");
            }
            return fullHostName;
        }
        catch (UnknownHostException e) {
            throw new ClientException("Failed to resolve host: " + hostName, e);
        }
    }
    /**
     * {@code Supplier}-like functional interface whose {@code get()} may throw
     * {@link GSSException}, so GSS-API calls can be passed to {@link #doAs}
     * without wrapping checked exceptions at the call site.
     */
    private interface GssSupplier<T>
    {
        T get()
                throws GSSException;
    }
    /**
     * Runs {@code action} under the given {@code subject}, unwrapping the
     * {@link PrivilegedActionException} so callers see the original
     * {@link GSSException} or unchecked exception rather than the wrapper.
     */
    private static <T> T doAs(Subject subject, GssSupplier<T> action)
            throws GSSException
    {
        try {
            return Subject.doAs(subject, (PrivilegedExceptionAction<T>) action::get);
        }
        catch (PrivilegedActionException e) {
            // Rethrow the real cause; order matters: GSSException first, then
            // any unchecked exception, else wrap the remaining checked one.
            Throwable t = e.getCause();
            throwIfInstanceOf(t, GSSException.class);
            throwIfUnchecked(t);
            throw new RuntimeException(t);
        }
    }
    /**
     * Builds an {@link Oid} from a dotted-decimal string constant; a malformed
     * constant is a programming error, hence {@link AssertionError}.
     */
    private static Oid createOid(String value)
    {
        try {
            return new Oid(value);
        }
        catch (GSSException e) {
            throw new AssertionError(e);
        }
    }
private static class Session
{
private final LoginContext loginContext;
private final GSSCredential clientCredential;
public Session(LoginContext loginContext, GSSCredential clientCredential)
{
requireNonNull(loginContext, "loginContext is null");
requireNonNull(clientCredential, "gssCredential is null");
this.loginContext = loginContext;
this.clientCredential = clientCredential;
}
public LoginContext getLoginContext()
{
return loginContext;
}
public GSSCredential getClientCredential()
{
return clientCredential;
}
public boolean needsRefresh()
throws GSSException
{
return clientCredential.getRemainingLifetime() < MIN_CREDENTIAL_LIFETIME.getValue(SECONDS);
}
}
}
| |
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, vJAXB 2.1.10 in JDK 6
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2011.09.09 at 01:22:27 PM CEST
//
package test;
import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.JAXBElement;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElementRef;
import javax.xml.bind.annotation.XmlElementRefs;
import javax.xml.bind.annotation.XmlID;
import javax.xml.bind.annotation.XmlMixed;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlSchemaType;
import javax.xml.bind.annotation.XmlType;
import javax.xml.bind.annotation.adapters.CollapsedStringAdapter;
import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;
/**
* <p>Java class for anonymous complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType>
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <choice maxOccurs="unbounded" minOccurs="0">
* <element ref="{}inline-supplementary-material"/>
* <element ref="{}related-article"/>
* <element ref="{}related-object"/>
* <element ref="{}break"/>
* <element ref="{}bold"/>
* <element ref="{}italic"/>
* <element ref="{}monospace"/>
* <element ref="{}overline"/>
* <element ref="{}roman"/>
* <element ref="{}sans-serif"/>
* <element ref="{}sc"/>
* <element ref="{}strike"/>
* <element ref="{}underline"/>
* <element ref="{}alternatives"/>
* <element ref="{}inline-graphic"/>
* <element ref="{}private-char"/>
* <element ref="{}chem-struct"/>
* <element ref="{}inline-formula"/>
* <element ref="{}abbrev"/>
* <element ref="{}milestone-end"/>
* <element ref="{}milestone-start"/>
* <element ref="{}named-content"/>
* <element ref="{}styled-content"/>
* <element ref="{}price"/>
* <element ref="{}annotation"/>
* <element ref="{}article-title"/>
* <element ref="{}chapter-title"/>
* <element ref="{}collab"/>
* <element ref="{}comment"/>
* <element ref="{}conf-date"/>
* <element ref="{}conf-loc"/>
* <element ref="{}conf-name"/>
* <element ref="{}conf-sponsor"/>
* <element ref="{}date"/>
* <element ref="{}date-in-citation"/>
* <element ref="{}day"/>
* <element ref="{}edition"/>
* <element ref="{}email"/>
* <element ref="{}elocation-id"/>
* <element ref="{}etal"/>
* <element ref="{}ext-link"/>
* <element ref="{}fpage"/>
* <element ref="{}gov"/>
* <element ref="{}institution"/>
* <element ref="{}isbn"/>
* <element ref="{}issn"/>
* <element ref="{}issue"/>
* <element ref="{}issue-id"/>
* <element ref="{}issue-part"/>
* <element ref="{}issue-title"/>
* <element ref="{}lpage"/>
* <element ref="{}month"/>
* <element ref="{}name"/>
* <element ref="{}object-id"/>
* <element ref="{}page-range"/>
* <element ref="{}part-title"/>
* <element ref="{}patent"/>
* <element ref="{}person-group"/>
* <element ref="{}pub-id"/>
* <element ref="{}publisher-loc"/>
* <element ref="{}publisher-name"/>
* <element ref="{}role"/>
* <element ref="{}season"/>
* <element ref="{}series"/>
* <element ref="{}size"/>
* <element ref="{}source"/>
* <element ref="{}std"/>
* <element ref="{}string-name"/>
* <element ref="{}supplement"/>
* <element ref="{}trans-source"/>
* <element ref="{}trans-title"/>
* <element ref="{}uri"/>
* <element ref="{}volume"/>
* <element ref="{}volume-id"/>
* <element ref="{}volume-series"/>
* <element ref="{}year"/>
* <element ref="{}fn"/>
* <element ref="{}target"/>
* <element ref="{}xref"/>
* <element ref="{}sub"/>
* <element ref="{}sup"/>
* </choice>
* <attribute name="id" type="{http://www.w3.org/2001/XMLSchema}ID" />
* <attribute name="product-type" type="{http://www.w3.org/2001/XMLSchema}anySimpleType" />
* <attribute ref="{http://www.w3.org/1999/xlink}type"/>
* <attribute ref="{http://www.w3.org/1999/xlink}href"/>
* <attribute ref="{http://www.w3.org/1999/xlink}role"/>
* <attribute ref="{http://www.w3.org/1999/xlink}title"/>
* <attribute ref="{http://www.w3.org/1999/xlink}show"/>
* <attribute ref="{http://www.w3.org/1999/xlink}actuate"/>
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
// NOTE: JAXB-generated binding for the "product" element (see file header);
// avoid hand edits — they will be lost when the schema is recompiled.
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "", propOrder = {
    "content"
})
@XmlRootElement(name = "product")
public class Product {
    // Mixed content: character data interleaved with any of the referenced child elements.
    @XmlElementRefs({
        @XmlElementRef(name = "break", type = Break.class),
        @XmlElementRef(name = "issue-title", type = IssueTitle.class),
        @XmlElementRef(name = "comment", type = Comment.class),
        @XmlElementRef(name = "volume-series", type = VolumeSeries.class),
        @XmlElementRef(name = "size", type = Size.class),
        @XmlElementRef(name = "date", type = Date.class),
        @XmlElementRef(name = "chem-struct", type = ChemStruct.class),
        @XmlElementRef(name = "month", type = Month.class),
        @XmlElementRef(name = "price", type = Price.class),
        @XmlElementRef(name = "named-content", type = NamedContent.class),
        @XmlElementRef(name = "gov", type = Gov.class),
        @XmlElementRef(name = "object-id", type = ObjectId.class),
        @XmlElementRef(name = "uri", type = Uri.class),
        @XmlElementRef(name = "private-char", type = PrivateChar.class),
        @XmlElementRef(name = "part-title", type = PartTitle.class),
        @XmlElementRef(name = "string-name", type = StringName.class),
        @XmlElementRef(name = "issn", type = Issn.class),
        @XmlElementRef(name = "sub", type = Sub.class),
        @XmlElementRef(name = "milestone-end", type = MilestoneEnd.class),
        @XmlElementRef(name = "volume", type = Volume.class),
        @XmlElementRef(name = "issue-part", type = JAXBElement.class),
        @XmlElementRef(name = "publisher-loc", type = PublisherLoc.class),
        @XmlElementRef(name = "inline-graphic", type = InlineGraphic.class),
        @XmlElementRef(name = "issue-id", type = IssueId.class),
        @XmlElementRef(name = "pub-id", type = PubId.class),
        @XmlElementRef(name = "etal", type = JAXBElement.class),
        @XmlElementRef(name = "isbn", type = Isbn.class),
        @XmlElementRef(name = "person-group", type = PersonGroup.class),
        @XmlElementRef(name = "role", type = Role.class),
        @XmlElementRef(name = "collab", type = Collab.class),
        @XmlElementRef(name = "abbrev", type = Abbrev.class),
        @XmlElementRef(name = "series", type = Series.class),
        @XmlElementRef(name = "ext-link", type = ExtLink.class),
        @XmlElementRef(name = "issue", type = Issue.class),
        @XmlElementRef(name = "related-object", type = RelatedObject.class),
        @XmlElementRef(name = "annotation", type = Annotation.class),
        @XmlElementRef(name = "strike", type = Strike.class),
        @XmlElementRef(name = "trans-source", type = TransSource.class),
        @XmlElementRef(name = "conf-date", type = ConfDate.class),
        @XmlElementRef(name = "name", type = Name.class),
        @XmlElementRef(name = "related-article", type = RelatedArticle.class),
        @XmlElementRef(name = "fn", type = Fn.class),
        @XmlElementRef(name = "overline", type = Overline.class),
        @XmlElementRef(name = "elocation-id", type = ElocationId.class),
        @XmlElementRef(name = "source", type = Source.class),
        @XmlElementRef(name = "underline", type = Underline.class),
        @XmlElementRef(name = "bold", type = Bold.class),
        @XmlElementRef(name = "monospace", type = Monospace.class),
        @XmlElementRef(name = "sup", type = Sup.class),
        @XmlElementRef(name = "day", type = Day.class),
        @XmlElementRef(name = "date-in-citation", type = DateInCitation.class),
        @XmlElementRef(name = "institution", type = Institution.class),
        @XmlElementRef(name = "styled-content", type = StyledContent.class),
        @XmlElementRef(name = "target", type = Target.class),
        @XmlElementRef(name = "patent", type = Patent.class),
        @XmlElementRef(name = "roman", type = Roman.class),
        @XmlElementRef(name = "sc", type = Sc.class),
        @XmlElementRef(name = "sans-serif", type = SansSerif.class),
        @XmlElementRef(name = "publisher-name", type = JAXBElement.class),
        @XmlElementRef(name = "chapter-title", type = ChapterTitle.class),
        @XmlElementRef(name = "edition", type = Edition.class),
        @XmlElementRef(name = "conf-sponsor", type = ConfSponsor.class),
        @XmlElementRef(name = "year", type = Year.class),
        @XmlElementRef(name = "volume-id", type = VolumeId.class),
        @XmlElementRef(name = "email", type = Email.class),
        @XmlElementRef(name = "supplement", type = Supplement.class),
        @XmlElementRef(name = "page-range", type = PageRange.class),
        @XmlElementRef(name = "milestone-start", type = MilestoneStart.class),
        @XmlElementRef(name = "xref", type = Xref.class),
        @XmlElementRef(name = "conf-loc", type = ConfLoc.class),
        @XmlElementRef(name = "fpage", type = Fpage.class),
        @XmlElementRef(name = "article-title", type = ArticleTitle.class),
        @XmlElementRef(name = "lpage", type = Lpage.class),
        @XmlElementRef(name = "alternatives", type = Alternatives.class),
        @XmlElementRef(name = "std", type = Std.class),
        @XmlElementRef(name = "season", type = Season.class),
        @XmlElementRef(name = "italic", type = Italic.class),
        @XmlElementRef(name = "conf-name", type = ConfName.class),
        @XmlElementRef(name = "inline-supplementary-material", type = InlineSupplementaryMaterial.class),
        @XmlElementRef(name = "trans-title", type = TransTitle.class),
        @XmlElementRef(name = "inline-formula", type = InlineFormula.class)
    })
    @XmlMixed
    protected List<Object> content;
    // xsd:ID attribute; whitespace-collapsed and unique within the document.
    @XmlAttribute
    @XmlJavaTypeAdapter(CollapsedStringAdapter.class)
    @XmlID
    @XmlSchemaType(name = "ID")
    protected String id;
    @XmlAttribute(name = "product-type")
    @XmlSchemaType(name = "anySimpleType")
    protected String productType;
    // The remaining attributes come from the XLink namespace
    // (type/href/role/title/show/actuate).
    @XmlAttribute(namespace = "http://www.w3.org/1999/xlink")
    @XmlJavaTypeAdapter(CollapsedStringAdapter.class)
    protected String type;
    @XmlAttribute(namespace = "http://www.w3.org/1999/xlink")
    @XmlSchemaType(name = "anySimpleType")
    protected String href;
    @XmlAttribute(namespace = "http://www.w3.org/1999/xlink")
    @XmlSchemaType(name = "anySimpleType")
    protected String role;
    @XmlAttribute(namespace = "http://www.w3.org/1999/xlink")
    @XmlSchemaType(name = "anySimpleType")
    protected String title;
    @XmlAttribute(namespace = "http://www.w3.org/1999/xlink")
    @XmlJavaTypeAdapter(CollapsedStringAdapter.class)
    protected String show;
    @XmlAttribute(namespace = "http://www.w3.org/1999/xlink")
    @XmlJavaTypeAdapter(CollapsedStringAdapter.class)
    protected String actuate;
    /**
     * Gets the value of the content property.
     *
     * <p>
     * This accessor method returns a reference to the live list,
     * not a snapshot. Therefore any modification you make to the
     * returned list will be present inside the JAXB object.
     * This is why there is not a <CODE>set</CODE> method for the content property.
     *
     * <p>
     * For example, to add a new item, do as follows:
     * <pre>
     *    getContent().add(newItem);
     * </pre>
     *
     *
     * <p>
     * Objects of the following type(s) are allowed in the list
     * {@link Break }
     * {@link IssueTitle }
     * {@link Comment }
     * {@link VolumeSeries }
     * {@link Size }
     * {@link ChemStruct }
     * {@link Date }
     * {@link Month }
     * {@link Price }
     * {@link NamedContent }
     * {@link ObjectId }
     * {@link Gov }
     * {@link String }
     * {@link Uri }
     * {@link PartTitle }
     * {@link PrivateChar }
     * {@link StringName }
     * {@link Issn }
     * {@link Sub }
     * {@link MilestoneEnd }
     * {@link Volume }
     * {@link JAXBElement }{@code <}{@link String }{@code >}
     * {@link PublisherLoc }
     * {@link InlineGraphic }
     * {@link PubId }
     * {@link IssueId }
     * {@link Isbn }
     * {@link JAXBElement }{@code <}{@link String }{@code >}
     * {@link Collab }
     * {@link Role }
     * {@link PersonGroup }
     * {@link Abbrev }
     * {@link ExtLink }
     * {@link Series }
     * {@link Issue }
     * {@link Annotation }
     * {@link RelatedObject }
     * {@link TransSource }
     * {@link Strike }
     * {@link ConfDate }
     * {@link Name }
     * {@link RelatedArticle }
     * {@link Fn }
     * {@link Overline }
     * {@link ElocationId }
     * {@link Underline }
     * {@link Source }
     * {@link Monospace }
     * {@link Bold }
     * {@link DateInCitation }
     * {@link Day }
     * {@link Sup }
     * {@link StyledContent }
     * {@link Institution }
     * {@link Target }
     * {@link Roman }
     * {@link Patent }
     * {@link Sc }
     * {@link JAXBElement }{@code <}{@link String }{@code >}
     * {@link SansSerif }
     * {@link ChapterTitle }
     * {@link Edition }
     * {@link ConfSponsor }
     * {@link Year }
     * {@link VolumeId }
     * {@link Email }
     * {@link PageRange }
     * {@link Supplement }
     * {@link Xref }
     * {@link MilestoneStart }
     * {@link ConfLoc }
     * {@link Fpage }
     * {@link Lpage }
     * {@link ArticleTitle }
     * {@link Alternatives }
     * {@link Std }
     * {@link Season }
     * {@link ConfName }
     * {@link Italic }
     * {@link InlineSupplementaryMaterial }
     * {@link TransTitle }
     * {@link InlineFormula }
     *
     *
     */
    public List<Object> getContent() {
        if (content == null) {
            content = new ArrayList<Object>();
        }
        return this.content;
    }
    /**
     * Gets the value of the id property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getId() {
        return id;
    }
    /**
     * Sets the value of the id property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setId(String value) {
        this.id = value;
    }
    /**
     * Gets the value of the productType property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getProductType() {
        return productType;
    }
    /**
     * Sets the value of the productType property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setProductType(String value) {
        this.productType = value;
    }
    /**
     * Gets the value of the type property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getType() {
        return type;
    }
    /**
     * Sets the value of the type property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setType(String value) {
        this.type = value;
    }
    /**
     * Gets the value of the href property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getHref() {
        return href;
    }
    /**
     * Sets the value of the href property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setHref(String value) {
        this.href = value;
    }
    /**
     * Gets the value of the role property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getRole() {
        return role;
    }
    /**
     * Sets the value of the role property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setRole(String value) {
        this.role = value;
    }
    /**
     * Gets the value of the title property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getTitle() {
        return title;
    }
    /**
     * Sets the value of the title property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setTitle(String value) {
        this.title = value;
    }
    /**
     * Gets the value of the show property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getShow() {
        return show;
    }
    /**
     * Sets the value of the show property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setShow(String value) {
        this.show = value;
    }
    /**
     * Gets the value of the actuate property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getActuate() {
        return actuate;
    }
    /**
     * Sets the value of the actuate property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setActuate(String value) {
        this.actuate = value;
    }
}
| |
/*
* Licensed to Think Big Analytics, Inc. under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Think Big Analytics, Inc. licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Copyright 2011 Think Big Analytics. All Rights Reserved.
*/
package tap;
import tap.compression.Compressions;
import tap.core.InfeasiblePlanException;
import tap.core.TapContext;
import tap.core.io.BinaryKey;
import tap.core.mapreduce.input.TapfileRecordReader;
import tap.core.mapreduce.io.BinaryWritable;
import tap.formats.*;
import tap.util.ObjectFactory;

import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.Iterator;
import java.util.NoSuchElementException;

import org.apache.avro.mapred.AvroValue;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.JobConf;
@SuppressWarnings("deprecation")
public class Pipe<T> implements Iterable<T>, Iterator<T> {
    private TapContext<T> context; // for OutPipe
    private Iterator<AvroValue<T>> values; // for InPipe
    Formats format = Formats.UNKNOWN_FORMAT;
    private Phase producer;
    protected String path;
    protected T prototype;
    String uncompressedPath;
    protected Compressions compression = null;
    protected boolean isCompressed = false;
    boolean isTempfile = false;
    private DFSStat stat; // lazily created; see stat()
    // to support subscribe API (reading a tapfile directly, outside map/reduce)
    boolean isDirectFileAccess = false;
    TapfileRecordReader recordReader = null;
    BinaryKey binaryKey = null;
    BinaryWritable writable = null;
    // Pipe's reference to the job configuration
    Configuration conf = null;

    /**
     * Static factory: build a Pipe whose prototype is a fresh instance of the given class.
     */
    public static <T> Pipe<T> of(Class<? extends T> ofClass) {
        try {
            return new Pipe<T>(ObjectFactory.newInstance(ofClass));
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Static factory: build a Pipe around an existing prototype instance.
     */
    public static <T> Pipe<T> of(T prototype) {
        return new Pipe<T>(prototype);
    }

    @Deprecated
    public Pipe(T prototype) {
        this.setPrototype(prototype);
    }

    Pipe(String path) {
        setPath(path);
        init();
    }

    /**
     * Temporary inter-phase pipe.
     * @param isTemporary true if the pipe's backing file is a temporary artifact
     */
    Pipe(boolean isTemporary) {
        isTempfile = isTemporary;
    }

    /**
     * Setup job configuration (and cache it) on the Pipe.
     * @param conf the job configuration; must not be null
     */
    void setConf(Configuration conf) {
        if (null == conf) {
            throw new IllegalArgumentException("Please don't give us a null Configuration object");
        }
        this.conf = conf;
    }

    /**
     * @return The job configuration
     */
    Configuration getConf() {
        return conf;
    }

    /**
     * Generate (once) and return DFS file stat info for this pipe's path.
     * @return The file status
     */
    DFSStat stat() {
        if (null == stat) {
            this.stat = new DFSStat(path, getConf());
        }
        return stat;
    }

    /*
     * Probe HDFS to determine if this.path exists.
     */
    boolean exists() {
        return stat().exists;
    }

    /**
     * Determine if file(s) in path are obsolete. Used in generating a work
     * plan.
     * @return True if obsolete
     */
    boolean isObsolete() {
        // this needs to be smart - we should encode in the file metadata
        // the dependents and their dates used
        // so we can verify that any existing antecedent is not newer and
        // declare victory...
        if (stat().exists) {
            for (FileStatus status : stat().getStatuses()) {
                if (!status.isDir()) {
                    // TODO add other types?
                    if (getFormat() != Formats.AVRO_FORMAT
                            || status.getPath().toString().endsWith(".avro")) {
                        return false; // may check for extension for other types
                    }
                } else {
                    // job bookkeeping directories don't count as real output
                    if (!status.getPath().toString().endsWith("/_logs")
                            && !status.getPath().toString().endsWith("/_temporary")) {
                        return false;
                    }
                }
            }
        }
        return true; // needs more work!
    }

    /**
     * Attach a record reader for the subscribe (direct file access) API.
     */
    public void setRecordReader(TapfileRecordReader reader) {
        this.recordReader = reader;
        isDirectFileAccess = true;
        binaryKey = reader.createKey();
        writable = reader.createValue();
    }

    /**
     * Clear the output location, refusing to clobber a directory that contains
     * real child directories (anything other than /_logs and /_temporary).
     */
    void clearAndPrepareOutput() {
        try {
            if (stat().exists) {
                for (FileStatus status : stat().getStatuses()) {
                    if (status.isDir()) {
                        if (!status.getPath().toString().endsWith("/_logs")
                                && !status.getPath().toString().endsWith("/_temporary")) {
                            throw new IllegalArgumentException(
                                    "Trying to overwrite directory with child directories: "
                                            + path);
                        }
                    }
                }
            } else {
                // NOTE(review): mkdirs followed by the unconditional delete below
                // appears to only guarantee parent directories exist — confirm intent.
                stat().fs.mkdirs(stat().dfsPath);
            }
            stat().fs.delete(stat().dfsPath, true);
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Make determination if this (input) pipe is valid. This test is only
     * useful during early binding.
     */
    boolean isValidInput() {
        return isTempfile
                || hasWildcard()
                || isFile()
                || isSingleDir();
    }

    boolean isFile() {
        return stat().exists && stat().isFile;
    }

    boolean isSingleDir() {
        return stat().exists && !stat().isFile && !hasSubdirs();
    }

    boolean hasSubdirs() {
        return false; // TODO: Implement logic here if performance is acceptable
    }

    /** True if the path contains a glob character understood by HDFS. */
    boolean hasWildcard() {
        return path.contains("*")
                || path.contains("?")
                || path.contains("[");
    }

    public void delete() {
        clearAndPrepareOutput();
    }

    @SuppressWarnings("unchecked")
    public Pipe<T> stringFormat() {
        setPrototype((T) "");
        return this;
    }

    public Pipe<T> jsonFormat() {
        this.setFormat(Formats.JSON_FORMAT);
        return this;
    }

    public Pipe<T> avroFormat() {
        this.setFormat(Formats.AVRO_FORMAT);
        return this;
    }

    public Pipe<T> protoFormat() {
        this.setFormat(Formats.TAPPROTO_FORMAT);
        return this;
    }

    // Compression Methods
    public Pipe<T> gzipCompression() {
        this.setCompression(Compressions.GZIP_COMPRESSION);
        return this;
    }

    public Compressions getCompression() {
        return compression;
    }

    /** Configure the job's output format (and compression, if enabled) for this pipe. */
    void setupOutput(JobConf conf) {
        getFormat().getFileFormat().setupOutput(conf,
                getPrototype() == null ? null : getPrototype().getClass());
        if (this.isCompressed) {
            getCompression().getCompression().setupOutput(conf);
        }
    }

    /** Configure the job's input format (and compression, if enabled) for this pipe. */
    public void setupInput(JobConf conf) {
        getFormat().getFileFormat().setupInput(conf,
                getPrototype() == null ? null : getPrototype().getClass());
        if (this.isCompressed) {
            getCompression().getCompression().setupInput(conf);
        }
    }

    /**
     * Return timestamp of this pipe's path.
     * @return the timestamp
     */
    public long getTimestamp() {
        return stat().timestamp;
    }

    /**
     * Files at the same location are deemed equal, however Pipe needs to warn
     * if there are inconsistencies.
     */
    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        Pipe<?> other = (Pipe<?>) obj; // was a raw type
        if (path == null) {
            if (other.path != null)
                return false;
        } else if (!path.equals(other.path))
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result
                + ((path == null) ? 0x123c67ce : path.hashCode());
        return result;
    }

    /**
     * Turn on/off the Pipe's compression
     *
     * @param isCompressed
     *            true if compression is to be used
     * @return this
     */
    public Pipe<T> compressed(boolean isCompressed) {
        this.isCompressed = isCompressed;
        return this;
    }

    /**
     * InPipe type constructor: reducer-side input pipe.
     *
     * @param values iterator over the reducer's Avro-wrapped values
     */
    public Pipe(Iterator<AvroValue<T>> values) {
        this.values = values;
    }

    public boolean hasNext() {
        if (isDirectFileAccess) {
            return recordReader.hasNext();
        }
        return this.values.hasNext();
    }

    public Iterator<T> iterator() {
        return this;
    }

    @Override
    public void remove() {
        throw new UnsupportedOperationException();
    }

    /**
     * Get the next object of type T from this pipe.
     *
     * @return the next value
     * @throws NoSuchElementException if a direct-file-access read finds no more records
     * @throws RuntimeException wrapping any IOException from the underlying reader
     */
    @SuppressWarnings("unchecked")
    public T next() {
        if (isDirectFileAccess) {
            try {
                // Fixed: the boolean result was previously ignored, so reading past
                // the end could hand back a stale value; an IOException was swallowed
                // (printStackTrace) and surfaced as null.
                if (!recordReader.next(binaryKey, writable)) {
                    throw new NoSuchElementException("no more records in " + path);
                }
                return (T) writable.get(); // is there any way to recover the concrete type?
            } catch (IOException e) {
                throw new RuntimeException("error reading record from " + path, e);
            }
        }
        return this.values.next().datum();
    }

    /**
     * Alias for next()
     *
     * @return The next value in the Iterator
     */
    public T get() {
        return this.next();
    }

    /**
     * @return the context
     */
    public TapContext<T> getContext() {
        return context;
    }

    /**
     * @param context
     *            The context to set
     */
    public void setContext(TapContext<T> context) {
        this.context = context;
    }

    /**
     * Put value {@code out} into output.
     *
     * @param out
     *            The value to put
     */
    public void put(T out) {
        this.context.write(out);
    }

    /** Put value {@code out} into the named multi-output. */
    public void put(T out, String multiName) {
        this.context.write(out, multiName);
    }

    /**
     * @return The phase that produces this file.
     */
    public Phase getProducer() {
        return producer;
    }

    public void setProducer(Phase producer) {
        this.producer = producer;
    }

    public String getPath() {
        return path;
    }

    /** Fluent setter for the pipe's path. */
    public Pipe<T> at(String path) {
        setPath(path);
        return this;
    }

    @Override
    public String toString() {
        return path + ":" + super.toString();
    }

    public T getPrototype() {
        return prototype;
    }

    /**
     * Sniff and return the on-disk format of the file backing this pipe.
     *
     * @throws IllegalArgumentException if the pipe is not backed by a single file
     */
    public Formats getPipeFormat() throws FileNotFoundException, IOException, IllegalArgumentException {
        if (!isFile())
            throw new IllegalArgumentException("Pipe is not associated with a file, so has no format");
        Path p = new Path(path);
        return sniffFileFormat(p);
    }

    /** Read the message class recorded in the tapfile at this pipe's path. */
    public Class readPipeClassFromFile(Configuration job) throws IOException {
        if (path == null)
            throw new IllegalArgumentException("specify file or directory for mapper before setting prototype");
        Path p = new Path(path);
        return TapfileRecordReader.readMessageClass(job, p);
    }

    /**
     * Validate and install the mapper-input prototype, sniffing the actual file
     * format and overriding any extension-derived format if they disagree.
     *
     * @throws InfeasiblePlanException if the path is invalid or the prototype is
     *             incompatible with the sniffed file format
     */
    public void setPrototypeForMapperInput(T prototype) throws InfeasiblePlanException {
        Formats sniffedFileFormat;
        if (path == null)
            throw new IllegalArgumentException("specify file or directory for mapper before setting prototype");
        Path p = new Path(path);
        if (isSingleDir()) {
            FileStatus[] status;
            try {
                FileSystem f = FileSystem.get(getConf());
                status = f.listStatus(p);
            } catch (IOException ioexception) {
                throw new InfeasiblePlanException("invalid path");
            }
            if (status.length == 0) { // directory is empty
                this.prototype = prototype;
                return;
            }
            // otherwise sniff the first real file in the directory
            p = status[0].getPath();
            if (p.getName().equals("_SUCCESS")) {
                if (status.length == 1) { // empty except for the _SUCCESS marker
                    this.prototype = prototype;
                    return;
                }
                p = status[1].getPath();
            }
        }
        try {
            sniffedFileFormat = sniffFileFormat(p);
        } catch (Exception e) {
            throw new InfeasiblePlanException(e.getMessage());
        }
        if (format != sniffedFileFormat) {
            // alert user? override the extension-derived format with what's on disk
            format = sniffedFileFormat;
        }
        if (format.getFileFormat().instanceOfCheck(prototype)) {
            // could do additional checking, i.e. read the file to make sure it
            // matches its extension and contains the correct objects
            this.prototype = prototype;
        } else {
            throw new InfeasiblePlanException("Pipe prototype and file type are not compatible");
        }
    }

    // NB this duplicates code found in TapfileRecordReader. Need to re-factor.
    private Formats sniffFileFormat(Path path) throws IOException, FileNotFoundException {
        FileSystem fs = path.getFileSystem(this.getConf());
        byte[] header = new byte[1000];
        FSDataInputStream in = null;
        try {
            in = fs.open(path);
            // A short read leaves trailing zeros in the buffer — assumed fine for
            // signature matching. (Fixed: the stream was previously closed twice,
            // once here and once in the finally block.)
            in.read(header);
        } finally {
            if (in != null)
                in.close();
        }
        return determineFileFormat(header);
    }

    /** Match the header bytes against each known format's signature. */
    private Formats determineFileFormat(byte[] header) {
        for (Formats format : Formats.values()) {
            if (format.getFileFormat().signature(header)) {
                return format;
            }
        }
        return Formats.UNKNOWN_FORMAT;
    }

    public void setPrototype(T prototype) {
        if (prototype == null) {
            return;
        }
        this.prototype = prototype;
        init();
    }

    public Formats getFormat() {
        return format;
    }

    public void setFormat(Formats format) {
        this.format = format;
    }

    public String getUncompressedPath() {
        return uncompressedPath;
    }

    protected void setUncompressedPath(String uncompressedPath) {
        this.uncompressedPath = uncompressedPath;
    }

    protected void setPath(String path) {
        this.path = path;
    }

    /** Pick the first format whose FileFormat matcher accepts this pipe. */
    protected Formats determineFormat() {
        for (Formats f : Formats.values()) {
            FileFormat fileFormat = f.getFileFormat();
            if (fileFormat.matches(this)) {
                return f;
            }
        }
        return Formats.UNKNOWN_FORMAT;
    }

    /**
     * Based on path name, determine if file is compressed.
     */
    protected void determineCompression() {
        if (null != path) {
            if (this.path.endsWith(Compressions.GZIP_COMPRESSION.fileExtension())) {
                this.isCompressed = true;
                setCompression(Compressions.GZIP_COMPRESSION);
                // NOTE(review): the regex "." matches any character; this is safe only
                // because the endsWith() guard above matched — consider "\\.gz$".
                this.setUncompressedPath(this.path.replaceAll(".gz$", ""));
            } else {
                this.setUncompressedPath(path);
            }
        }
    }

    public Pipe<T> setCompression(Compressions compression) {
        this.isCompressed = true;
        this.compression = compression;
        return this;
    }

    public boolean isCompressed() {
        return isCompressed;
    }

    /** Derive compression and format state from the current path. */
    protected void init() {
        determineCompression();
        Formats format = determineFormat();
        setFormat(format);
    }
}
| |
/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.refactoring.anonymousToInner;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.help.HelpManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.DialogWrapper;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.*;
import com.intellij.psi.codeStyle.JavaCodeStyleManager;
import com.intellij.psi.codeStyle.VariableKind;
import com.intellij.psi.util.TypeConversionUtil;
import com.intellij.refactoring.HelpID;
import com.intellij.refactoring.RefactoringBundle;
import com.intellij.refactoring.ui.NameSuggestionsField;
import com.intellij.refactoring.util.CommonRefactoringUtil;
import com.intellij.refactoring.util.ParameterTablePanel;
import com.intellij.refactoring.util.RefactoringMessageUtil;
import com.intellij.refactoring.util.VariableData;
import com.intellij.ui.IdeBorderFactory;
import com.intellij.ui.NonFocusableCheckBox;
import com.intellij.util.Function;
import com.intellij.util.containers.HashMap;
import com.intellij.util.ui.FormBuilder;
import org.jetbrains.annotations.NotNull;
import javax.swing.*;
import java.awt.*;
import java.util.Map;
/**
 * Dialog for the "Convert Anonymous to Inner Class" refactoring. Collects the
 * name for the new inner class, whether it should be declared static, and which
 * captured local variables are passed to the new class as constructor
 * parameters.
 */
class AnonymousToInnerDialog extends DialogWrapper {
  private static final Logger LOG = Logger.getInstance("#com.intellij.refactoring.anonymousToInner.AnonymousToInnerDialog");

  private final Project myProject;
  private final PsiAnonymousClass myAnonClass;
  private final boolean myShowCanBeStatic;

  private NameSuggestionsField myNameField;
  private final VariableData[] myVariableData;
  private final Map<PsiVariable, VariableInfo> myVariableToInfoMap = new HashMap<>();
  private JCheckBox myCbMakeStatic;

  public AnonymousToInnerDialog(Project project, PsiAnonymousClass anonClass, final VariableInfo[] variableInfos,
                                boolean showCanBeStatic) {
    super(project, true);
    myProject = project;
    myAnonClass = anonClass;
    myShowCanBeStatic = showCanBeStatic;

    setTitle(AnonymousToInnerHandler.REFACTORING_NAME);

    for (VariableInfo info : variableInfos) {
      myVariableToInfoMap.put(info.variable, info);
    }
    myVariableData = new VariableData[variableInfos.length];

    final JavaCodeStyleManager codeStyleManager = JavaCodeStyleManager.getInstance(myProject);
    for (int idx = 0; idx < variableInfos.length; idx++) {
      VariableInfo info = variableInfos[idx];
      // Rename each captured variable according to the project's parameter
      // naming convention (strip its original prefix/suffix, re-apply the
      // parameter ones).
      String name = info.variable.getName();
      VariableKind kind = codeStyleManager.getVariableKind(info.variable);
      name = codeStyleManager.variableNameToPropertyName(name, kind);
      name = codeStyleManager.propertyNameToVariableName(name, VariableKind.PARAMETER);
      VariableData data = new VariableData(info.variable);
      data.name = name;
      data.passAsParameter = true;
      myVariableData[idx] = data;
    }

    init();

    // Suggest class names derived from the base class reference, optionally
    // prefixed with the short names of the (erased) type arguments.
    final String[] names;
    String name = myAnonClass.getBaseClassReference().getReferenceName();
    PsiType[] typeParameters = myAnonClass.getBaseClassReference().getTypeParameters();
    final String typeParamsList = StringUtil.join(typeParameters, psiType -> {
      PsiType type = psiType;
      if (psiType instanceof PsiClassType) {
        type = TypeConversionUtil.erasure(psiType);
      }
      if (type == null || type.equalsToText(CommonClassNames.JAVA_LANG_OBJECT)) return "";
      if (type instanceof PsiArrayType) {
        type = type.getDeepComponentType();
      }
      return StringUtil.getShortName(type.getPresentableText());
    }, "") + name;
    if (!typeParamsList.equals(name)) {
      names = new String[]{typeParamsList, "My" + name};
    }
    else {
      names = new String[]{"My" + name};
    }
    myNameField.setSuggestions(names);
    myNameField.selectNameWithoutExtension();
  }

  @NotNull
  protected Action[] createActions() {
    return new Action[]{getOKAction(), getCancelAction(), getHelpAction()};
  }

  public JComponent getPreferredFocusedComponent() {
    return myNameField.getFocusableComponent();
  }

  public boolean isMakeStatic() {
    return myCbMakeStatic.isSelected();
  }

  public String getClassName() {
    return myNameField.getEnteredName();
  }

  /**
   * Returns the variable infos updated with the user's choices: whether each
   * variable is passed as a constructor parameter, the edited parameter name,
   * and the backing field name derived from it.
   */
  public VariableInfo[] getVariableInfos() {
    JavaCodeStyleManager codeStyleManager = JavaCodeStyleManager.getInstance(myProject);
    VariableInfo[] infos = new VariableInfo[myVariableData.length];
    for (int idx = 0; idx < myVariableData.length; idx++) {
      VariableData data = myVariableData[idx];
      VariableInfo info = myVariableToInfoMap.get(data.variable);

      info.passAsParameter = data.passAsParameter;
      // FIX: the original assigned info.parameterName twice in a row;
      // the redundant duplicate assignment has been removed.
      info.parameterName = data.name;
      // Derive the field name from the parameter name via the configured
      // field naming convention.
      String propertyName = codeStyleManager.variableNameToPropertyName(data.name, VariableKind.PARAMETER);
      info.fieldName = codeStyleManager.propertyNameToVariableName(propertyName, VariableKind.FIELD);
      infos[idx] = info;
    }
    return infos;
  }

  protected void doOKAction() {
    // Validate the entered class name before letting the dialog close:
    // non-empty, a legal Java identifier, and not clashing with an existing
    // inner class of the target container.
    String errorString = null;
    final String innerClassName = getClassName();
    final PsiManager manager = PsiManager.getInstance(myProject);
    if ("".equals(innerClassName)) {
      errorString = RefactoringBundle.message("anonymousToInner.no.inner.class.name");
    }
    else {
      if (!PsiNameHelper.getInstance(manager.getProject()).isIdentifier(innerClassName)) {
        errorString = RefactoringMessageUtil.getIncorrectIdentifierMessage(innerClassName);
      }
      else {
        PsiElement targetContainer = AnonymousToInnerHandler.findTargetContainer(myAnonClass);
        if (targetContainer instanceof PsiClass) {
          PsiClass targetClass = (PsiClass)targetContainer;
          PsiClass[] innerClasses = targetClass.getInnerClasses();
          for (PsiClass innerClass : innerClasses) {
            if (innerClassName.equals(innerClass.getName())) {
              errorString = RefactoringBundle.message("inner.class.exists", innerClassName, targetClass.getName());
              break;
            }
          }
        }
        else {
          // findTargetContainer is expected to return a class here.
          LOG.assertTrue(false);
        }
      }
    }

    if (errorString != null) {
      CommonRefactoringUtil.showErrorMessage(
        AnonymousToInnerHandler.REFACTORING_NAME,
        errorString,
        HelpID.ANONYMOUS_TO_INNER,
        myProject);
      myNameField.requestFocusInWindow();
      return;
    }
    super.doOKAction();
    myNameField.requestFocusInWindow();
  }

  protected JComponent createNorthPanel() {
    myNameField = new NameSuggestionsField(myProject);

    FormBuilder formBuilder = FormBuilder.createFormBuilder()
      .addLabeledComponent(RefactoringBundle.message("anonymousToInner.class.name.label.text"), myNameField);
    // The "make static" checkbox is shown only when the handler has not
    // already decided the class can be static.
    if (!myShowCanBeStatic) {
      myCbMakeStatic = new NonFocusableCheckBox(RefactoringBundle.message("anonymousToInner.make.class.static.checkbox.text"));
      myCbMakeStatic.setSelected(true);
      formBuilder.addComponent(myCbMakeStatic);
    }
    return formBuilder.getPanel();
  }

  private JComponent createParametersPanel() {
    JPanel panel = new ParameterTablePanel(myProject, myVariableData, myAnonClass) {
      protected void updateSignature() {
      }

      protected void doEnterAction() {
        clickDefaultButton();
      }

      protected void doCancelAction() {
        AnonymousToInnerDialog.this.doCancelAction();
      }
    };
    panel.setBorder(IdeBorderFactory.createTitledBorder(
      RefactoringBundle.message("anonymousToInner.parameters.panel.border.title"), false));
    return panel;
  }

  protected JComponent createCenterPanel() {
    JPanel panel = new JPanel(new BorderLayout());
    panel.add(createParametersPanel(), BorderLayout.CENTER);
    return panel;
  }

  protected void doHelpAction() {
    HelpManager.getInstance().invokeHelp(HelpID.ANONYMOUS_TO_INNER);
  }
}
| |
/*
* Copyright 2014 Google Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.*;
import java.nio.ByteBuffer;
import MyGame.Example.*;
import NamespaceA.*;
import NamespaceA.NamespaceB.*;
import com.google.flatbuffers.FlatBufferBuilder;
class JavaTest {
public static void main(String[] args) {
// First, let's test reading a FlatBuffer generated by C++ code:
// This file was generated from monsterdata_test.json
byte[] data = null;
File file = new File("monsterdata_test.mon");
RandomAccessFile f = null;
try {
f = new RandomAccessFile(file, "r");
data = new byte[(int)f.length()];
f.readFully(data);
f.close();
} catch(java.io.IOException e) {
System.out.println("FlatBuffers test: couldn't read file");
return;
}
// Now test it:
ByteBuffer bb = ByteBuffer.wrap(data);
TestBuffer(bb);
// Second, let's create a FlatBuffer from scratch in Java, and test it also.
// We use an initial size of 1 to exercise the reallocation algorithm,
// normally a size larger than the typical FlatBuffer you generate would be
// better for performance.
FlatBufferBuilder fbb = new FlatBufferBuilder(1);
int[] names = {fbb.createString("Frodo"), fbb.createString("Barney"), fbb.createString("Wilma")};
int[] off = new int[3];
Monster.startMonster(fbb);
Monster.addName(fbb, names[0]);
off[0] = Monster.endMonster(fbb);
Monster.startMonster(fbb);
Monster.addName(fbb, names[1]);
off[1] = Monster.endMonster(fbb);
Monster.startMonster(fbb);
Monster.addName(fbb, names[2]);
off[2] = Monster.endMonster(fbb);
int sortMons = fbb.createSortedVectorOfTables(new Monster(), off);
// We set up the same values as monsterdata.json:
int str = fbb.createString("MyMonster");
int inv = Monster.createInventoryVector(fbb, new byte[] { 0, 1, 2, 3, 4 });
int fred = fbb.createString("Fred");
Monster.startMonster(fbb);
Monster.addName(fbb, fred);
int mon2 = Monster.endMonster(fbb);
Monster.startTest4Vector(fbb, 2);
Test.createTest(fbb, (short)10, (byte)20);
Test.createTest(fbb, (short)30, (byte)40);
int test4 = fbb.endVector();
int testArrayOfString = Monster.createTestarrayofstringVector(fbb, new int[] {
fbb.createString("test1"),
fbb.createString("test2")
});
Monster.startMonster(fbb);
Monster.addPos(fbb, Vec3.createVec3(fbb, 1.0f, 2.0f, 3.0f, 3.0,
Color.Green, (short)5, (byte)6));
Monster.addHp(fbb, (short)80);
Monster.addName(fbb, str);
Monster.addInventory(fbb, inv);
Monster.addTestType(fbb, (byte)Any.Monster);
Monster.addTest(fbb, mon2);
Monster.addTest4(fbb, test4);
Monster.addTestarrayofstring(fbb, testArrayOfString);
Monster.addTestbool(fbb, false);
Monster.addTesthashu32Fnv1(fbb, Integer.MAX_VALUE + 1L);
Monster.addTestarrayoftables(fbb, sortMons);
int mon = Monster.endMonster(fbb);
Monster.finishMonsterBuffer(fbb, mon);
// Write the result to a file for debugging purposes:
// Note that the binaries are not necessarily identical, since the JSON
// parser may serialize in a slightly different order than the above
// Java code. They are functionally equivalent though.
try {
DataOutputStream os = new DataOutputStream(new FileOutputStream(
"monsterdata_java_wire.mon"));
os.write(fbb.dataBuffer().array(), fbb.dataBuffer().position(), fbb.offset());
os.close();
} catch(java.io.IOException e) {
System.out.println("FlatBuffers test: couldn't write file");
return;
}
// Test it:
TestExtendedBuffer(fbb.dataBuffer());
// Make sure it also works with read only ByteBuffers. This is slower,
// since creating strings incurs an additional copy
// (see Table.__string).
TestExtendedBuffer(fbb.dataBuffer().asReadOnlyBuffer());
TestEnums();
//Attempt to mutate Monster fields and check whether the buffer has been mutated properly
// revert to original values after testing
Monster monster = Monster.getRootAsMonster(fbb.dataBuffer());
// mana is optional and does not exist in the buffer so the mutation should fail
// the mana field should retain its default value
TestEq(monster.mutateMana((short)10), false);
TestEq(monster.mana(), (short)150);
// Accessing a vector of sorted by the key tables
TestEq(monster.testarrayoftables(0).name(), "Barney");
TestEq(monster.testarrayoftables(1).name(), "Frodo");
TestEq(monster.testarrayoftables(2).name(), "Wilma");
// Example of searching for a table by the key
TestEq(Monster.lookupByKey(sortMons, "Frodo", fbb.dataBuffer()).name(), "Frodo");
TestEq(Monster.lookupByKey(sortMons, "Barney", fbb.dataBuffer()).name(), "Barney");
TestEq(Monster.lookupByKey(sortMons, "Wilma", fbb.dataBuffer()).name(), "Wilma");
// testType is an existing field and mutating it should succeed
TestEq(monster.testType(), (byte)Any.Monster);
TestEq(monster.mutateTestType(Any.NONE), true);
TestEq(monster.testType(), (byte)Any.NONE);
TestEq(monster.mutateTestType(Any.Monster), true);
TestEq(monster.testType(), (byte)Any.Monster);
//mutate the inventory vector
TestEq(monster.mutateInventory(0, 1), true);
TestEq(monster.mutateInventory(1, 2), true);
TestEq(monster.mutateInventory(2, 3), true);
TestEq(monster.mutateInventory(3, 4), true);
TestEq(monster.mutateInventory(4, 5), true);
for (int i = 0; i < monster.inventoryLength(); i++) {
TestEq(monster.inventory(i), i + 1);
}
//reverse mutation
TestEq(monster.mutateInventory(0, 0), true);
TestEq(monster.mutateInventory(1, 1), true);
TestEq(monster.mutateInventory(2, 2), true);
TestEq(monster.mutateInventory(3, 3), true);
TestEq(monster.mutateInventory(4, 4), true);
// get a struct field and edit one of its fields
Vec3 pos = monster.pos();
TestEq(pos.x(), 1.0f);
pos.mutateX(55.0f);
TestEq(pos.x(), 55.0f);
pos.mutateX(1.0f);
TestEq(pos.x(), 1.0f);
TestExtendedBuffer(fbb.dataBuffer().asReadOnlyBuffer());
TestNamespaceNesting();
TestNestedFlatBuffer();
TestCreateByteVector();
TestCreateUninitializedVector();
System.out.println("FlatBuffers test: completed successfully");
}
static void TestEnums() {
TestEq(Color.name(Color.Red), "Red");
TestEq(Color.name(Color.Blue), "Blue");
TestEq(Any.name(Any.NONE), "NONE");
TestEq(Any.name(Any.Monster), "Monster");
}
static void TestBuffer(ByteBuffer bb) {
TestEq(Monster.MonsterBufferHasIdentifier(bb), true);
Monster monster = Monster.getRootAsMonster(bb);
TestEq(monster.hp(), (short)80);
TestEq(monster.mana(), (short)150); // default
TestEq(monster.name(), "MyMonster");
// monster.friendly() // can't access, deprecated
Vec3 pos = monster.pos();
TestEq(pos.x(), 1.0f);
TestEq(pos.y(), 2.0f);
TestEq(pos.z(), 3.0f);
TestEq(pos.test1(), 3.0);
TestEq(pos.test2(), Color.Green);
Test t = pos.test3();
TestEq(t.a(), (short)5);
TestEq(t.b(), (byte)6);
TestEq(monster.testType(), (byte)Any.Monster);
Monster monster2 = new Monster();
TestEq(monster.test(monster2) != null, true);
TestEq(monster2.name(), "Fred");
TestEq(monster.inventoryLength(), 5);
int invsum = 0;
for (int i = 0; i < monster.inventoryLength(); i++)
invsum += monster.inventory(i);
TestEq(invsum, 10);
// Alternative way of accessing a vector:
ByteBuffer ibb = monster.inventoryAsByteBuffer();
invsum = 0;
while (ibb.position() < ibb.limit())
invsum += ibb.get();
TestEq(invsum, 10);
Test test_0 = monster.test4(0);
Test test_1 = monster.test4(1);
TestEq(monster.test4Length(), 2);
TestEq(test_0.a() + test_0.b() + test_1.a() + test_1.b(), 100);
TestEq(monster.testarrayofstringLength(), 2);
TestEq(monster.testarrayofstring(0),"test1");
TestEq(monster.testarrayofstring(1),"test2");
TestEq(monster.testbool(), false);
}
// this method checks additional fields not present in the binary buffer read from file
// these new tests are performed on top of the regular tests
static void TestExtendedBuffer(ByteBuffer bb) {
TestBuffer(bb);
Monster monster = Monster.getRootAsMonster(bb);
TestEq(monster.testhashu32Fnv1(), Integer.MAX_VALUE + 1L);
}
static void TestNamespaceNesting() {
// reference / manipulate these to verify compilation
FlatBufferBuilder fbb = new FlatBufferBuilder(1);
TableInNestedNS.startTableInNestedNS(fbb);
TableInNestedNS.addFoo(fbb, 1234);
int nestedTableOff = TableInNestedNS.endTableInNestedNS(fbb);
TableInFirstNS.startTableInFirstNS(fbb);
TableInFirstNS.addFooTable(fbb, nestedTableOff);
int off = TableInFirstNS.endTableInFirstNS(fbb);
}
static void TestNestedFlatBuffer() {
final String nestedMonsterName = "NestedMonsterName";
final short nestedMonsterHp = 600;
final short nestedMonsterMana = 1024;
FlatBufferBuilder fbb1 = new FlatBufferBuilder(16);
int str1 = fbb1.createString(nestedMonsterName);
Monster.startMonster(fbb1);
Monster.addName(fbb1, str1);
Monster.addHp(fbb1, nestedMonsterHp);
Monster.addMana(fbb1, nestedMonsterMana);
int monster1 = Monster.endMonster(fbb1);
Monster.finishMonsterBuffer(fbb1, monster1);
byte[] fbb1Bytes = fbb1.sizedByteArray();
fbb1 = null;
FlatBufferBuilder fbb2 = new FlatBufferBuilder(16);
int str2 = fbb2.createString("My Monster");
int nestedBuffer = Monster.createTestnestedflatbufferVector(fbb2, fbb1Bytes);
Monster.startMonster(fbb2);
Monster.addName(fbb2, str2);
Monster.addHp(fbb2, (short)50);
Monster.addMana(fbb2, (short)32);
Monster.addTestnestedflatbuffer(fbb2, nestedBuffer);
int monster = Monster.endMonster(fbb2);
Monster.finishMonsterBuffer(fbb2, monster);
// Now test the data extracted from the nested buffer
Monster mons = Monster.getRootAsMonster(fbb2.dataBuffer());
Monster nestedMonster = mons.testnestedflatbufferAsMonster();
TestEq(nestedMonsterMana, nestedMonster.mana());
TestEq(nestedMonsterHp, nestedMonster.hp());
TestEq(nestedMonsterName, nestedMonster.name());
}
static void TestCreateByteVector() {
FlatBufferBuilder fbb = new FlatBufferBuilder(16);
int str = fbb.createString("MyMonster");
byte[] inventory = new byte[] { 0, 1, 2, 3, 4 };
int vec = fbb.createByteVector(inventory);
Monster.startMonster(fbb);
Monster.addInventory(fbb, vec);
Monster.addName(fbb, str);
int monster1 = Monster.endMonster(fbb);
Monster.finishMonsterBuffer(fbb, monster1);
Monster monsterObject = Monster.getRootAsMonster(fbb.dataBuffer());
TestEq(monsterObject.inventory(1), (int)inventory[1]);
TestEq(monsterObject.inventoryLength(), inventory.length);
TestEq(ByteBuffer.wrap(inventory), monsterObject.inventoryAsByteBuffer());
}
static void TestCreateUninitializedVector() {
FlatBufferBuilder fbb = new FlatBufferBuilder(16);
int str = fbb.createString("MyMonster");
byte[] inventory = new byte[] { 0, 1, 2, 3, 4 };
ByteBuffer bb = fbb.createUnintializedVector(1, inventory.length, 1);
for (byte i:inventory) {
bb.put(i);
}
int vec = fbb.endVector();
Monster.startMonster(fbb);
Monster.addInventory(fbb, vec);
Monster.addName(fbb, str);
int monster1 = Monster.endMonster(fbb);
Monster.finishMonsterBuffer(fbb, monster1);
Monster monsterObject = Monster.getRootAsMonster(fbb.dataBuffer());
TestEq(monsterObject.inventory(1), (int)inventory[1]);
TestEq(monsterObject.inventoryLength(), inventory.length);
TestEq(ByteBuffer.wrap(inventory), monsterObject.inventoryAsByteBuffer());
}
static <T> void TestEq(T a, T b) {
if (!a.equals(b)) {
System.out.println("" + a.getClass().getName() + " " + b.getClass().getName());
System.out.println("FlatBuffers test FAILED: \'" + a + "\' != \'" + b + "\'");
assert false;
System.exit(1);
}
}
}
| |
/*
* %CopyrightBegin%
*
* Copyright Ericsson AB 2000-2021. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* %CopyrightEnd%
*/
package com.ericsson.otp.erlang;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.net.InetAddress;
/**
* Provides methods for registering, unregistering and looking up nodes with the
* Erlang portmapper daemon (Epmd). For each registered node, Epmd maintains
* information about the port on which incoming connections are accepted, as
* well as which versions of the Erlang communication protocol the node
* supports.
*
* <p>
* Nodes wishing to contact other nodes must first request information from Epmd
* before a connection can be set up, however this is done automatically by
* {@link OtpSelf#connect(OtpPeer) OtpSelf.connect()} when necessary.
*
* <p>
* The methods {@link #publishPort(OtpLocalNode) publishPort()} and
* {@link #unPublishPort(OtpLocalNode) unPublishPort()} will fail if an Epmd
* process is not running on the localhost. Additionally
* {@link #lookupPort(AbstractNode) lookupPort()} will fail if there is no Epmd
* process running on the host where the specified node is running. See the
* Erlang documentation for information about starting Epmd.
*
* <p>
* This class contains only static methods, there are no constructors.
*/
public class OtpEpmd {

    /**
     * Lazily-resolved epmd port: the ERL_EPMD_PORT environment variable if
     * readable and set, otherwise the standard port 4369. Can be overridden
     * via {@link OtpEpmd#useEpmdPort(int)}.
     */
    private static class EpmdPort {
        private static int epmdPort = 0;

        public static int get() {
            if (epmdPort == 0) {
                String env;
                try {
                    env = System.getenv("ERL_EPMD_PORT");
                } catch (final java.lang.SecurityException e) {
                    // environment not readable (e.g. applet sandbox); use default
                    env = null;
                }
                epmdPort = env != null ? Integer.parseInt(env) : 4369;
            }
            return epmdPort;
        }

        public static void set(final int port) {
            epmdPort = port;
        }
    }

    // common values -- epmd protocol request/response tags
    private static final byte stopReq = (byte) 115;

    private static final byte port4req = (byte) 122;
    private static final byte port4resp = (byte) 119;
    private static final byte ALIVE2_REQ = (byte) 120;
    private static final byte ALIVE2_RESP = (byte) 121;
    private static final byte ALIVE2_X_RESP = (byte) 118;
    private static final byte names4req = (byte) 110;

    private static int traceLevel = 0;
    private static final int traceThreshold = 4;

    static {
        // debug this connection?
        final String trace = System.getProperties().getProperty(
                "OtpConnection.trace");
        try {
            if (trace != null) {
                traceLevel = Integer.parseInt(trace);
            }
        } catch (final NumberFormatException e) {
            traceLevel = 0;
        }
    }

    // only static methods: no public constructors
    // hmm, idea: singleton constructor could spawn epmd process
    private OtpEpmd() {
    }

    /**
     * Set the port number to be used to contact the epmd process. Only needed
     * when the default port is not desired and system environment variable
     * ERL_EPMD_PORT cannot be read (applet).
     */
    public static void useEpmdPort(final int port) {
        EpmdPort.set(port);
    }

    /**
     * Determine what port a node listens for incoming connections on.
     *
     * @return the listen port for the specified node, or 0 if the node was not
     *         registered with Epmd.
     *
     * @exception java.io.IOException
     *                if there was no response from the name server.
     */
    public static int lookupPort(final AbstractNode node) throws IOException {
        return r4_lookupPort(node);
    }

    /**
     * Register with Epmd, so that other nodes are able to find and connect to
     * it.
     *
     * @param node
     *            the server node that should be registered with Epmd.
     *
     * @return true if the operation was successful. False if the node was
     *         already registered.
     *
     * @exception java.io.IOException
     *                if there was no response from the name server.
     */
    public static boolean publishPort(final OtpLocalNode node)
            throws IOException {
        OtpTransport s = null;

        s = r4_publish(node);

        node.setEpmd(s);

        return s != null;
    }

    // Ask epmd to close his end of the connection.
    // Caller should close his epmd socket as well.
    // This method is pretty forgiving...
    /**
     * Unregister from Epmd. Other nodes wishing to connect will no longer be
     * able to.
     *
     * <p>
     * This method does not report any failures.
     */
    public static void unPublishPort(final OtpLocalNode node) {
        OtpTransport s = null;

        try {
            s = node.createTransport((String) null, EpmdPort.get());
            @SuppressWarnings("resource")
            final OtpOutputStream obuf = new OtpOutputStream();
            obuf.write2BE(node.alive().length() + 1);
            obuf.write1(stopReq);
            obuf.writeN(node.alive().getBytes());
            obuf.writeToAndFlush(s.getOutputStream());
            // don't even wait for a response (is there one?)
            if (traceLevel >= traceThreshold) {
                System.out.println("-> UNPUBLISH " + node + " port="
                        + node.port());
                System.out.println("<- OK (assumed)");
            }
        } catch (final Exception e) {/* ignore all failures */
        } finally {
            try {
                if (s != null) {
                    s.close();
                }
            } catch (final IOException e) { /* ignore close failure */
            }
            s = null;
        }
    }

    private static int r4_lookupPort(final AbstractNode node)
            throws IOException {
        int port = 0;
        OtpTransport s = null;
        try {
            @SuppressWarnings("resource")
            final OtpOutputStream obuf = new OtpOutputStream();
            s = node.createTransport(node.host(), EpmdPort.get());

            // build and send epmd request
            // length[2], tag[1], alivename[n] (length = n+1)
            obuf.write2BE(node.alive().length() + 1);
            obuf.write1(port4req);
            obuf.writeN(node.alive().getBytes());

            // send request
            obuf.writeToAndFlush(s.getOutputStream());

            if (traceLevel >= traceThreshold) {
                System.out.println("-> LOOKUP (r4) " + node);
            }

            // receive and decode reply
            // resptag[1], result[1], port[2], ntype[1], proto[1],
            // disthigh[2], distlow[2], nlen[2], alivename[n],
            // elen[2], edata[m]
            final byte[] tmpbuf = new byte[100];

            final int n = s.getInputStream().read(tmpbuf);

            if (n < 0) {
                s.close();
                throw new IOException("Nameserver not responding on "
                        + node.host() + " when looking up " + node.alive());
            }

            @SuppressWarnings("resource")
            final OtpInputStream ibuf = new OtpInputStream(tmpbuf, 0);

            final int response = ibuf.read1();
            if (response == port4resp) {
                final int result = ibuf.read1();
                if (result == 0) {
                    port = ibuf.read2BE();

                    node.ntype = ibuf.read1();
                    node.proto = ibuf.read1();
                    node.distHigh = ibuf.read2BE();
                    node.distLow = ibuf.read2BE();
                    // ignore rest of fields
                }
            }
        } catch (final IOException e) {
            if (traceLevel >= traceThreshold) {
                System.out.println("<- (no response)");
            }
            throw new IOException("Nameserver not responding on " + node.host()
                    + " when looking up " + node.alive(), e);
        } catch (final OtpErlangDecodeException e) {
            if (traceLevel >= traceThreshold) {
                System.out.println("<- (invalid response)");
            }
            // FIX: chain the cause instead of discarding it
            throw new IOException("Nameserver not responding on " + node.host()
                    + " when looking up " + node.alive(), e);
        } finally {
            try {
                if (s != null) {
                    s.close();
                }
            } catch (final IOException e) { /* ignore close errors */
            }
            s = null;
        }

        if (traceLevel >= traceThreshold) {
            if (port == 0) {
                System.out.println("<- NOT FOUND");
            } else {
                System.out.println("<- PORT " + port);
            }
        }
        return port;
    }

    /*
     * this function will get an exception if it tries to talk to a very old
     * epmd, or if something else happens that it cannot forsee. In both cases
     * we return an exception. We no longer support r3, so the exception is
     * fatal. If we manage to successfully communicate with an r4 epmd, we
     * return either the socket, or null, depending on the result.
     */
    private static OtpTransport r4_publish(final OtpLocalNode node)
            throws IOException {
        OtpTransport s = null;

        try {
            @SuppressWarnings("resource")
            final OtpOutputStream obuf = new OtpOutputStream();
            s = node.createTransport((String) null, EpmdPort.get());

            // ALIVE2 request:
            // length[2], tag[1], port[2], type[1], proto[1],
            // disthigh[2], distlow[2], nlen[2], alivename[n], elen[2]
            obuf.write2BE(node.alive().length() + 13);

            obuf.write1(ALIVE2_REQ);
            obuf.write2BE(node.port());

            obuf.write1(node.type());

            obuf.write1(node.proto());
            obuf.write2BE(node.distHigh());
            obuf.write2BE(node.distLow());

            obuf.write2BE(node.alive().length());
            obuf.writeN(node.alive().getBytes());
            obuf.write2BE(0); // No extra

            // send request
            obuf.writeToAndFlush(s.getOutputStream());

            if (traceLevel >= traceThreshold) {
                System.out.println("-> PUBLISH (r4) " + node + " port="
                        + node.port());
            }

            // get reply
            final byte[] tmpbuf = new byte[100];
            final int n = s.getInputStream().read(tmpbuf);

            if (n < 0) {
                s.close();
                throw new IOException("Nameserver not responding on "
                        + node.host() + " when publishing " + node.alive());
            }

            @SuppressWarnings("resource")
            final OtpInputStream ibuf = new OtpInputStream(tmpbuf, 0);

            final int response = ibuf.read1();

            if (response == ALIVE2_RESP || response == ALIVE2_X_RESP) {
                final int result = ibuf.read1();
                if (result == 0) {
                    // ALIVE2_RESP carries a 16-bit creation,
                    // ALIVE2_X_RESP a 32-bit one
                    node.creation = (response == ALIVE2_RESP
                                     ? ibuf.read2BE() : ibuf.read4BE());
                    if (traceLevel >= traceThreshold) {
                        System.out.println("<- OK");
                    }
                    return s; // success
                }
            }
        } catch (final IOException e) {
            // epmd closed the connection = fail
            if (s != null) {
                s.close();
            }
            if (traceLevel >= traceThreshold) {
                System.out.println("<- (no response)");
            }
            // FIX: chain the cause instead of discarding it
            throw new IOException("Nameserver not responding on " + node.host()
                    + " when publishing " + node.alive(), e);
        } catch (final OtpErlangDecodeException e) {
            s.close();
            if (traceLevel >= traceThreshold) {
                System.out.println("<- (invalid response)");
            }
            // FIX: chain the cause instead of discarding it
            throw new IOException("Nameserver not responding on " + node.host()
                    + " when publishing " + node.alive(), e);
        }

        // registration refused (e.g. name already taken)
        s.close();
        return null;
    }

    public static String[] lookupNames() throws IOException {
        return lookupNames(InetAddress.getByName(null),
                new OtpSocketTransportFactory());
    }

    public static String[] lookupNames(
            final OtpTransportFactory transportFactory) throws IOException {
        return lookupNames(InetAddress.getByName(null), transportFactory);
    }

    public static String[] lookupNames(final InetAddress address)
            throws IOException {
        return lookupNames(address, new OtpSocketTransportFactory());
    }

    /**
     * Ask epmd at the given address for the names of all registered nodes.
     *
     * @return one line per registered node, as reported by epmd.
     *
     * @exception java.io.IOException
     *                if there was no response from the name server.
     */
    public static String[] lookupNames(final InetAddress address,
            final OtpTransportFactory transportFactory) throws IOException {
        OtpTransport s = null;

        try {
            @SuppressWarnings("resource")
            final OtpOutputStream obuf = new OtpOutputStream();
            try {
                s = transportFactory.createTransport(address, EpmdPort.get());

                obuf.write2BE(1);
                obuf.write1(names4req);
                // send request
                obuf.writeToAndFlush(s.getOutputStream());

                if (traceLevel >= traceThreshold) {
                    System.out.println("-> NAMES (r4) ");
                }

                // get reply -- read until EOF, epmd closes the connection
                final byte[] buffer = new byte[256];
                final ByteArrayOutputStream out = new ByteArrayOutputStream(256);
                while (true) {
                    final int bytesRead = s.getInputStream().read(buffer);
                    if (bytesRead == -1) {
                        break;
                    }
                    out.write(buffer, 0, bytesRead);
                }
                final byte[] tmpbuf = out.toByteArray();
                @SuppressWarnings("resource")
                final OtpInputStream ibuf = new OtpInputStream(tmpbuf, 0);
                ibuf.read4BE(); // read port int
                // final int port = ibuf.read4BE();
                // check if port = epmdPort

                final int n = tmpbuf.length;
                final byte[] buf = new byte[n - 4];
                System.arraycopy(tmpbuf, 4, buf, 0, n - 4);
                final String all = OtpErlangString.newString(buf);
                return all.split("\n");
            } finally {
                if (s != null) {
                    s.close();
                }
            }
        } catch (final IOException e) {
            if (traceLevel >= traceThreshold) {
                System.out.println("<- (no response)");
            }
            // FIX: chain the cause instead of discarding it
            throw new IOException(
                    "Nameserver not responding when requesting names", e);
        } catch (final OtpErlangDecodeException e) {
            if (traceLevel >= traceThreshold) {
                System.out.println("<- (invalid response)");
            }
            // FIX: chain the cause instead of discarding it
            throw new IOException(
                    "Nameserver not responding when requesting names", e);
        }
    }
}
| |
/*
* Copyright 2000-2017 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
*/
package com.intellij.debugger.engine;
import com.intellij.debugger.DebuggerBundle;
import com.intellij.debugger.actions.DebuggerActions;
import com.intellij.debugger.engine.evaluation.EvaluationContext;
import com.intellij.debugger.engine.events.DebuggerCommandImpl;
import com.intellij.debugger.engine.events.SuspendContextCommandImpl;
import com.intellij.debugger.impl.*;
import com.intellij.debugger.jdi.StackFrameProxyImpl;
import com.intellij.debugger.jdi.ThreadReferenceProxyImpl;
import com.intellij.debugger.memory.component.InstancesTracker;
import com.intellij.debugger.memory.component.MemoryViewDebugProcessData;
import com.intellij.debugger.memory.component.MemoryViewManager;
import com.intellij.debugger.memory.ui.ClassesFilteredView;
import com.intellij.debugger.settings.DebuggerSettings;
import com.intellij.debugger.ui.AlternativeSourceNotificationProvider;
import com.intellij.debugger.ui.DebuggerContentInfo;
import com.intellij.debugger.ui.breakpoints.Breakpoint;
import com.intellij.debugger.ui.impl.ThreadsPanel;
import com.intellij.debugger.ui.impl.watch.DebuggerTreeNodeImpl;
import com.intellij.debugger.ui.impl.watch.MessageDescriptor;
import com.intellij.debugger.ui.impl.watch.NodeManagerImpl;
import com.intellij.debugger.ui.overhead.OverheadView;
import com.intellij.debugger.ui.tree.NodeDescriptor;
import com.intellij.execution.process.ProcessHandler;
import com.intellij.execution.ui.ExecutionConsole;
import com.intellij.execution.ui.ExecutionConsoleEx;
import com.intellij.execution.ui.RunnerLayoutUi;
import com.intellij.execution.ui.layout.PlaceInGrid;
import com.intellij.icons.AllIcons;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.extensions.Extensions;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.registry.Registry;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.ui.EditorNotifications;
import com.intellij.ui.content.Content;
import com.intellij.ui.content.ContentManagerAdapter;
import com.intellij.ui.content.ContentManagerEvent;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.xdebugger.*;
import com.intellij.xdebugger.breakpoints.XBreakpoint;
import com.intellij.xdebugger.breakpoints.XBreakpointHandler;
import com.intellij.xdebugger.evaluation.XDebuggerEditorsProvider;
import com.intellij.xdebugger.frame.XStackFrame;
import com.intellij.xdebugger.frame.XSuspendContext;
import com.intellij.xdebugger.frame.XValueMarkerProvider;
import com.intellij.xdebugger.impl.XDebugSessionImpl;
import com.intellij.xdebugger.impl.XDebuggerUtilImpl;
import com.intellij.xdebugger.ui.XDebugTabLayouter;
import com.sun.jdi.event.Event;
import com.sun.jdi.event.LocatableEvent;
import one.util.streamex.StreamEx;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.java.debugger.JavaDebuggerEditorsProvider;
/**
 * Adapter that plugs the legacy Java debugger engine ({@code DebuggerSession} /
 * {@code DebugProcessImpl}) into the platform XDebugger framework: it forwards
 * stepping/pause/resume requests to the legacy session, mirrors legacy context
 * changes back into the {@code XDebugSession}, installs breakpoint handlers, and
 * contributes the Java-specific debugger UI (threads panel, memory view,
 * overhead monitor, toolbar actions).
 *
 * @author egor
 */
public class JavaDebugProcess extends XDebugProcess {
  /** The legacy debugger session every operation is delegated to. */
  private final DebuggerSession myJavaSession;
  private final JavaDebuggerEditorsProvider myEditorsProvider;
  private final XBreakpointHandler<?>[] myBreakpointHandlers;
  /** Node manager used to build tree nodes detached from a real debugger tree. */
  private final NodeManagerImpl myNodeManager;

  // Built-in breakpoint handler kinds; plugins contribute more via
  // JavaBreakpointHandlerFactory.EP_NAME (appended in the constructor).
  private static final JavaBreakpointHandlerFactory[] ourDefaultBreakpointHandlerFactories = {
    JavaBreakpointHandler.JavaLineBreakpointHandler::new,
    JavaBreakpointHandler.JavaExceptionBreakpointHandler::new,
    JavaBreakpointHandler.JavaFieldBreakpointHandler::new,
    JavaBreakpointHandler.JavaMethodBreakpointHandler::new,
    JavaBreakpointHandler.JavaWildcardBreakpointHandler::new
  };

  /**
   * Factory method: creates the process and links it back into the legacy
   * engine so the engine can reach its wrapping XDebugProcess.
   */
  public static JavaDebugProcess create(@NotNull final XDebugSession session, final DebuggerSession javaSession) {
    JavaDebugProcess res = new JavaDebugProcess(session, javaSession);
    javaSession.getProcess().setXDebugProcess(res);
    return res;
  }

  protected JavaDebugProcess(@NotNull final XDebugSession session, final DebuggerSession javaSession) {
    super(session);
    myJavaSession = javaSession;
    myEditorsProvider = new JavaDebuggerEditorsProvider();
    final DebugProcessImpl process = javaSession.getProcess();
    // Default handlers first, then extension-provided ones.
    myBreakpointHandlers = StreamEx.of(ourDefaultBreakpointHandlerFactories)
      .append(Extensions.getExtensions(JavaBreakpointHandlerFactory.EP_NAME))
      .map(factory -> factory.createHandler(process))
      .toArray(XBreakpointHandler[]::new);
    // Mirror legacy-session context changes into the XDebugger session.
    myJavaSession.getContextManager().addListener(new DebuggerContextListener() {
      @Override
      public void changeEvent(@NotNull final DebuggerContextImpl newContext, DebuggerSession.Event event) {
        // NOTE(review): '&&' binds tighter than '||' here, so isPaused() gates only the
        // REFRESH_WITH_STACK case; PAUSE/CONTEXT/REFRESH pass through unconditionally.
        // Looks intentional but worth confirming.
        if (event == DebuggerSession.Event.PAUSE
            || event == DebuggerSession.Event.CONTEXT
            || event == DebuggerSession.Event.REFRESH
            || event == DebuggerSession.Event.REFRESH_WITH_STACK
               && myJavaSession.isPaused()) {
          final SuspendContextImpl newSuspendContext = newContext.getSuspendContext();
          if (newSuspendContext != null &&
              (shouldApplyContext(newContext) || event == DebuggerSession.Event.REFRESH_WITH_STACK)) {
            // All suspend-context work must run on the debugger manager thread.
            process.getManagerThread().schedule(new SuspendContextCommandImpl(newSuspendContext) {
              @Override
              public void contextAction(@NotNull SuspendContextImpl suspendContext) throws Exception {
                ThreadReferenceProxyImpl threadProxy = newContext.getThreadProxy();
                newSuspendContext.initExecutionStacks(threadProxy);
                // If the suspend was caused by a breakpoint hit on the current thread,
                // report it as a breakpoint hit; otherwise as a plain "position reached".
                Pair<Breakpoint, Event> item = ContainerUtil.getFirstItem(DebuggerUtilsEx.getEventDescriptors(newSuspendContext));
                if (item != null) {
                  XBreakpoint xBreakpoint = item.getFirst().getXBreakpoint();
                  Event second = item.getSecond();
                  if (xBreakpoint != null && second instanceof LocatableEvent &&
                      threadProxy != null && ((LocatableEvent)second).thread() == threadProxy.getThreadReference()) {
                    ((XDebugSessionImpl)getSession()).breakpointReachedNoProcessing(xBreakpoint, newSuspendContext);
                    unsetPausedIfNeeded(newContext);
                    SourceCodeChecker.checkSource(newContext);
                    return;
                  }
                }
                getSession().positionReached(newSuspendContext);
                unsetPausedIfNeeded(newContext);
                SourceCodeChecker.checkSource(newContext);
              }
            });
          }
        }
        else if (event == DebuggerSession.Event.ATTACHED) {
          getSession().rebuildViews(); // to refresh variables views message
        }
      }
    });
    // Node manager producing parent-less nodes (used for variables views, etc.).
    myNodeManager = new NodeManagerImpl(session.getProject(), null) {
      @NotNull
      @Override
      public DebuggerTreeNodeImpl createNode(final NodeDescriptor descriptor, EvaluationContext evaluationContext) {
        return new DebuggerTreeNodeImpl(null, descriptor);
      }
      @Override
      public DebuggerTreeNodeImpl createMessageNode(MessageDescriptor descriptor) {
        return new DebuggerTreeNodeImpl(null, descriptor);
      }
      @NotNull
      @Override
      public DebuggerTreeNodeImpl createMessageNode(String message) {
        return new DebuggerTreeNodeImpl(null, new MessageDescriptor(message));
      }
    };
    // React to XDebugger-side pause / frame-change events.
    session.addSessionListener(new XDebugSessionListener() {
      @Override
      public void sessionPaused() {
        saveNodeHistory();
        showAlternativeNotification(session.getCurrentStackFrame());
      }
      @Override
      public void stackFrameChanged() {
        XStackFrame frame = session.getCurrentStackFrame();
        if (frame instanceof JavaStackFrame) {
          showAlternativeNotification(frame);
          StackFrameProxyImpl frameProxy = ((JavaStackFrame)frame).getStackFrameProxy();
          DebuggerContextUtil.setStackFrame(javaSession.getContextManager(), frameProxy);
          saveNodeHistory(frameProxy);
        }
      }
      // Triggers the "alternative source" editor banner for the frame's file
      // the first time that file is encountered.
      private void showAlternativeNotification(@Nullable XStackFrame frame) {
        if (frame != null) {
          XSourcePosition position = frame.getSourcePosition();
          if (position != null) {
            VirtualFile file = position.getFile();
            if (!AlternativeSourceNotificationProvider.isFileProcessed(file)) {
              EditorNotifications.getInstance(session.getProject()).updateNotifications(file);
            }
          }
        }
      }
    });
  }

  // Clears the XDebugger "paused" flag when the new context's thread is not
  // actually suspended (e.g. thread-filtered suspend policy).
  private void unsetPausedIfNeeded(DebuggerContextImpl context) {
    SuspendContextImpl suspendContext = context.getSuspendContext();
    if (suspendContext != null && !suspendContext.suspends(context.getThreadProxy())) {
      ((XDebugSessionImpl)getSession()).unsetPaused();
    }
  }

  // A context is applied when it differs from the current one, either by
  // suspend context or by active thread.
  private boolean shouldApplyContext(DebuggerContextImpl context) {
    SuspendContextImpl suspendContext = context.getSuspendContext();
    SuspendContextImpl currentContext = (SuspendContextImpl)getSession().getSuspendContext();
    if (suspendContext != null && !suspendContext.equals(currentContext)) return true;
    JavaExecutionStack currentExecutionStack = currentContext != null ? currentContext.getActiveExecutionStack() : null;
    return currentExecutionStack == null || !Comparing.equal(context.getThreadProxy(), currentExecutionStack.getThreadProxy());
  }

  /** Saves node history for the frame of the current debugger context. */
  public void saveNodeHistory() {
    saveNodeHistory(getDebuggerStateManager().getContext().getFrameProxy());
  }

  // Must run on the debugger manager thread, hence the command wrapper.
  private void saveNodeHistory(final StackFrameProxyImpl frameProxy) {
    myJavaSession.getProcess().getManagerThread().invoke(new DebuggerCommandImpl() {
      @Override
      protected void action() throws Exception {
        myNodeManager.setHistoryByContext(frameProxy);
      }
      @Override
      public Priority getPriority() {
        return Priority.NORMAL;
      }
    });
  }

  private DebuggerStateManager getDebuggerStateManager() {
    return myJavaSession.getContextManager();
  }

  /** @return the underlying legacy debugger session */
  public DebuggerSession getDebuggerSession() {
    return myJavaSession;
  }

  @NotNull
  @Override
  public XDebuggerEditorsProvider getEditorsProvider() {
    return myEditorsProvider;
  }

  // Stepping operations simply delegate to the legacy session.
  @Override
  public void startStepOver(@Nullable XSuspendContext context) {
    myJavaSession.stepOver(false);
  }

  @Override
  public void startStepInto(@Nullable XSuspendContext context) {
    myJavaSession.stepInto(false, null);
  }

  @Override
  public void startForceStepInto(@Nullable XSuspendContext context) {
    myJavaSession.stepInto(true, null);
  }

  @Override
  public void startStepOut(@Nullable XSuspendContext context) {
    myJavaSession.stepOut();
  }

  @Override
  public void stop() {
    myJavaSession.dispose();
    myNodeManager.dispose();
  }

  @Override
  public void startPausing() {
    myJavaSession.pause();
  }

  @Override
  public void resume(@Nullable XSuspendContext context) {
    myJavaSession.resume();
  }

  @Override
  public void runToPosition(@NotNull XSourcePosition position, @Nullable XSuspendContext context) {
    myJavaSession.runToCursor(position, false);
  }

  @NotNull
  @Override
  public XBreakpointHandler<?>[] getBreakpointHandlers() {
    return myBreakpointHandlers;
  }

  // Breakpoint initialization is driven by the legacy engine, not by XDebugger.
  @Override
  public boolean checkCanInitBreakpoints() {
    return false;
  }

  @Nullable
  @Override
  protected ProcessHandler doGetProcessHandler() {
    return myJavaSession.getProcess().getProcessHandler();
  }

  /** Prefers the console created by the legacy execution result, if any. */
  @NotNull
  @Override
  public ExecutionConsole createConsole() {
    ExecutionConsole console = myJavaSession.getProcess().getExecutionResult().getExecutionConsole();
    if (console != null) return console;
    return super.createConsole();
  }

  /**
   * Builds the debugger tab layout: threads panel, memory view and overhead
   * monitor, plus custom console-content registration for {@code ExecutionConsoleEx}.
   */
  @NotNull
  @Override
  public XDebugTabLayouter createTabLayouter() {
    return new XDebugTabLayouter() {
      @Override
      public void registerAdditionalContent(@NotNull RunnerLayoutUi ui) {
        registerThreadsPanel(ui);
        registerMemoryViewPanel(ui);
        registerOverheadMonitor(ui);
      }
      @NotNull
      @Override
      public Content registerConsoleContent(@NotNull RunnerLayoutUi ui, @NotNull ExecutionConsole console) {
        Content content = null;
        // ExecutionConsoleEx builds its own UI; fall back to default otherwise.
        if (console instanceof ExecutionConsoleEx) {
          ((ExecutionConsoleEx)console).buildUi(ui);
          content = ui.findContent(DebuggerContentInfo.CONSOLE_CONTENT);
        }
        if (content == null) {
          content = super.registerConsoleContent(ui, console);
        }
        return content;
      }
      private void registerThreadsPanel(@NotNull RunnerLayoutUi ui) {
        final ThreadsPanel panel = new ThreadsPanel(myJavaSession.getProject(), getDebuggerStateManager());
        final Content threadsContent = ui.createContent(
          DebuggerContentInfo.THREADS_CONTENT, panel, XDebuggerBundle.message("debugger.session.tab.threads.title"),
          AllIcons.Debugger.Threads, null);
        threadsContent.setCloseable(false);
        ui.addContent(threadsContent, 0, PlaceInGrid.left, true);
        // Only refresh the threads panel while its tab is actually selected.
        ui.addListener(new ContentManagerAdapter() {
          @Override
          public void selectionChanged(ContentManagerEvent event) {
            if (event.getContent() == threadsContent) {
              if (threadsContent.isSelected()) {
                panel.setUpdateEnabled(true);
                if (panel.isRefreshNeeded()) {
                  panel.rebuildIfVisible(DebuggerSession.Event.CONTEXT);
                }
              }
              else {
                panel.setUpdateEnabled(false);
              }
            }
          }
        }, threadsContent);
      }
      private void registerMemoryViewPanel(@NotNull RunnerLayoutUi ui) {
        // Feature-flagged via the registry.
        if (!Registry.is("debugger.enable.memory.view")) return;
        final XDebugSession session = getSession();
        final DebugProcessImpl process = myJavaSession.getProcess();
        final InstancesTracker tracker = InstancesTracker.getInstance(myJavaSession.getProject());
        final ClassesFilteredView classesFilteredView = new ClassesFilteredView(session, process, tracker);
        final Content memoryViewContent =
          ui.createContent(MemoryViewManager.MEMORY_VIEW_CONTENT, classesFilteredView, "Memory",
                           AllIcons.Debugger.MemoryView.Active, null);
        memoryViewContent.setCloseable(false);
        memoryViewContent.setShouldDisposeContent(true);
        final MemoryViewDebugProcessData data = new MemoryViewDebugProcessData();
        process.putUserData(MemoryViewDebugProcessData.KEY, data);
        session.addSessionListener(new XDebugSessionListener() {
          @Override
          public void sessionStopped() {
            session.removeSessionListener(this);
            data.getTrackedStacks().clear();
          }
        });
        ui.addContent(memoryViewContent, 0, PlaceInGrid.right, true);
        final DebuggerManagerThreadImpl managerThread = process.getManagerThread();
        // Track tab selection so the view only collects data while visible.
        ui.addListener(new ContentManagerAdapter() {
          @Override
          public void selectionChanged(ContentManagerEvent event) {
            if (event != null && event.getContent() == memoryViewContent) {
              classesFilteredView.setActive(memoryViewContent.isSelected(), managerThread);
            }
          }
        }, memoryViewContent);
      }
      private void registerOverheadMonitor(@NotNull RunnerLayoutUi ui) {
        // Feature-flagged via the registry.
        if (!Registry.is("debugger.enable.overhead.monitor")) return;
        DebugProcessImpl process = myJavaSession.getProcess();
        OverheadView monitor = new OverheadView(process);
        Content overheadContent = ui.createContent("OverheadMonitor", monitor, "Overhead", AllIcons.Debugger.Overhead, null);
        monitor.setBouncer(() -> ui.setBouncing(overheadContent, true));
        overheadContent.setCloseable(false);
        overheadContent.setShouldDisposeContent(true);
        ui.addContent(overheadContent, 0, PlaceInGrid.right, true);
      }
    };
  }

  /** Adds Java-specific toolbar actions (thread dump, auto-vars, return values). */
  @Override
  public void registerAdditionalActions(@NotNull DefaultActionGroup leftToolbar,
                                        @NotNull DefaultActionGroup topToolbar,
                                        @NotNull DefaultActionGroup settings) {
    Constraints beforeRunner = new Constraints(Anchor.BEFORE, "Runner.Layout");
    leftToolbar.add(Separator.getInstance(), beforeRunner);
    leftToolbar.add(ActionManager.getInstance().getAction(DebuggerActions.DUMP_THREADS), beforeRunner);
    leftToolbar.add(Separator.getInstance(), beforeRunner);
    Constraints beforeSort = new Constraints(Anchor.BEFORE, "XDebugger.ToggleSortValues");
    settings.addAction(new WatchLastMethodReturnValueAction(), beforeSort);
    settings.addAction(new AutoVarsSwitchAction(), beforeSort);
  }

  /** Toggle for the "auto variables" mode; persists to DebuggerSettings. */
  private static class AutoVarsSwitchAction extends ToggleAction {
    private volatile boolean myAutoModeEnabled;
    public AutoVarsSwitchAction() {
      super(DebuggerBundle.message("action.auto.variables.mode"), DebuggerBundle.message("action.auto.variables.mode.description"), null);
      myAutoModeEnabled = DebuggerSettings.getInstance().AUTO_VARIABLES_MODE;
    }
    @Override
    public boolean isSelected(AnActionEvent e) {
      return myAutoModeEnabled;
    }
    @Override
    public void setSelected(AnActionEvent e, boolean enabled) {
      myAutoModeEnabled = enabled;
      DebuggerSettings.getInstance().AUTO_VARIABLES_MODE = enabled;
      // Rebuild every session's views so the mode change takes effect immediately.
      XDebuggerUtilImpl.rebuildAllSessionsViews(e.getProject());
    }
  }

  /**
   * Toggle for watching last method return values; disabled (with an
   * explanatory label) when the VM cannot provide return values.
   */
  private static class WatchLastMethodReturnValueAction extends ToggleAction {
    private final String myText;
    private final String myTextUnavailable;
    public WatchLastMethodReturnValueAction() {
      super("", DebuggerBundle.message("action.watch.method.return.value.description"), null);
      myText = DebuggerBundle.message("action.watches.method.return.value.enable");
      myTextUnavailable = DebuggerBundle.message("action.watches.method.return.value.unavailable.reason");
    }
    @Override
    public void update(@NotNull final AnActionEvent e) {
      super.update(e);
      final Presentation presentation = e.getPresentation();
      DebugProcessImpl process = getCurrentDebugProcess(e.getProject());
      if (process == null || process.canGetMethodReturnValue()) {
        presentation.setEnabled(true);
        presentation.setText(myText);
      }
      else {
        presentation.setEnabled(false);
        presentation.setText(myTextUnavailable);
      }
    }
    @Override
    public boolean isSelected(AnActionEvent e) {
      return DebuggerSettings.getInstance().WATCH_RETURN_VALUES;
    }
    @Override
    public void setSelected(AnActionEvent e, boolean watch) {
      DebuggerSettings.getInstance().WATCH_RETURN_VALUES = watch;
      DebugProcessImpl process = getCurrentDebugProcess(e.getProject());
      if (process != null) {
        process.setWatchMethodReturnValuesEnabled(watch);
      }
    }
  }

  // Resolves the DebugProcessImpl behind the project's current XDebugger
  // session, or null if there is none / it is not a Java session.
  @Nullable
  private static DebugProcessImpl getCurrentDebugProcess(@Nullable Project project) {
    if (project != null) {
      XDebugSession session = XDebuggerManager.getInstance(project).getCurrentSession();
      if (session != null) {
        XDebugProcess process = session.getDebugProcess();
        if (process instanceof JavaDebugProcess) {
          return ((JavaDebugProcess)process).getDebuggerSession().getProcess();
        }
      }
    }
    return null;
  }

  public NodeManagerImpl getNodeManager() {
    return myNodeManager;
  }

  /** Prefers the legacy session's state description when it has one. */
  @Override
  public String getCurrentStateMessage() {
    String description = myJavaSession.getStateDescription();
    return description != null ? description : super.getCurrentStateMessage();
  }

  @Nullable
  @Override
  public XValueMarkerProvider<?, ?> createValueMarkerProvider() {
    return new JavaValueMarker();
  }

  @Override
  public boolean isLibraryFrameFilterSupported() {
    return true;
  }
}
| |
/*******************************************************************************
* Copyright (c) 2015-2018 Skymind, Inc.
*
* This program and the accompanying materials are made available under the
* terms of the Apache License, Version 2.0 which is available at
* https://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*
* SPDX-License-Identifier: Apache-2.0
******************************************************************************/
package org.deeplearning4j.remote;
import lombok.*;
import lombok.extern.slf4j.Slf4j;
import org.deeplearning4j.nn.api.Model;
import org.deeplearning4j.nn.graph.ComputationGraph;
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
import org.deeplearning4j.parallelism.ParallelInference;
import org.nd4j.adapters.InferenceAdapter;
import org.nd4j.common.base.Preconditions;
import org.nd4j.linalg.dataset.MultiDataSet;
import org.nd4j.remote.clients.serde.BinaryDeserializer;
import org.nd4j.remote.clients.serde.BinarySerializer;
import org.nd4j.remote.clients.serde.JsonDeserializer;
import org.nd4j.remote.clients.serde.JsonSerializer;
import org.nd4j.remote.serving.SameDiffServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
/**
 * Servlet that serves a DL4J model over HTTP, either through a
 * {@link ParallelInference} instance (parallel mode) or by calling a raw
 * {@link Model} directly under a lock (sequential mode). Supports JSON and
 * binary request/response payloads, chosen by the request Content-Type.
 *
 * @param <I> input type the {@link InferenceAdapter} consumes
 * @param <O> output type the {@link InferenceAdapter} produces
 *
 * @author astoyakin
 */
@Slf4j
@NoArgsConstructor
public class DL4jServlet<I,O> extends SameDiffServlet<I,O> {
    protected ParallelInference parallelInference;
    protected Model model;
    // true -> requests go through ParallelInference; false -> direct, synchronized model calls
    protected boolean parallelEnabled = true;

    /** Parallel-mode servlet with JSON serde only. */
    public DL4jServlet(@NonNull ParallelInference parallelInference, @NonNull InferenceAdapter<I, O> inferenceAdapter,
                       JsonSerializer<O> serializer, JsonDeserializer<I> deserializer) {
        super(inferenceAdapter, serializer, deserializer);
        this.parallelInference = parallelInference;
        this.model = null;
        this.parallelEnabled = true;
    }

    /** Sequential-mode servlet with JSON serde only. */
    public DL4jServlet(@NonNull Model model, @NonNull InferenceAdapter<I, O> inferenceAdapter,
                       JsonSerializer<O> serializer, JsonDeserializer<I> deserializer) {
        super(inferenceAdapter, serializer, deserializer);
        this.model = model;
        this.parallelInference = null;
        this.parallelEnabled = false;
    }

    /** Parallel-mode servlet with binary serde only. */
    public DL4jServlet(@NonNull ParallelInference parallelInference, @NonNull InferenceAdapter<I, O> inferenceAdapter,
                       BinarySerializer<O> serializer, BinaryDeserializer<I> deserializer) {
        super(inferenceAdapter, serializer, deserializer);
        this.parallelInference = parallelInference;
        this.model = null;
        this.parallelEnabled = true;
    }

    /** Sequential-mode servlet supporting both JSON and binary serde. */
    public DL4jServlet(@NonNull Model model, @NonNull InferenceAdapter<I, O> inferenceAdapter,
                       JsonSerializer<O> jsonSerializer, JsonDeserializer<I> jsonDeserializer,
                       BinarySerializer<O> binarySerializer, BinaryDeserializer<I> binaryDeserializer) {
        super(inferenceAdapter, jsonSerializer, jsonDeserializer, binarySerializer, binaryDeserializer);
        this.model = model;
        this.parallelInference = null;
        this.parallelEnabled = false;
    }

    /** Parallel-mode servlet supporting both JSON and binary serde. */
    public DL4jServlet(@NonNull ParallelInference parallelInference, @NonNull InferenceAdapter<I, O> inferenceAdapter,
                       JsonSerializer<O> jsonSerializer, JsonDeserializer<I> jsonDeserializer,
                       BinarySerializer<O> binarySerializer, BinaryDeserializer<I> binaryDeserializer) {
        super(inferenceAdapter, jsonSerializer, jsonDeserializer, binarySerializer, binaryDeserializer);
        this.parallelInference = parallelInference;
        this.model = null;
        this.parallelEnabled = true;
    }

    /**
     * Runs inference on the deserialized dataset and adapts the raw output.
     * In sequential mode the model call is synchronized because Model
     * implementations are not assumed to be thread-safe here.
     */
    private O process(MultiDataSet mds) {
        O result = null;
        if (parallelEnabled) {
            // process result
            result = inferenceAdapter.apply(parallelInference.output(mds.getFeatures(), mds.getFeaturesMaskArrays()));
        } else {
            synchronized (this) {
                if (model instanceof ComputationGraph)
                    result = inferenceAdapter.apply(((ComputationGraph) model).output(false, mds.getFeatures(), mds.getFeaturesMaskArrays()));
                else if (model instanceof MultiLayerNetwork) {
                    Preconditions.checkArgument(mds.getFeatures().length > 0 || (mds.getFeaturesMaskArrays() != null && mds.getFeaturesMaskArrays().length > 0),
                            "Input data for MultilayerNetwork is invalid!");
                    result = inferenceAdapter.apply(((MultiLayerNetwork) model).output(mds.getFeatures()[0], false,
                            mds.getFeaturesMaskArrays() != null ? mds.getFeaturesMaskArrays()[0] : null, null));
                }
            }
        }
        return result;
    }

    /**
     * Handles an inference request on the serving endpoint. Accepts JSON or
     * binary payloads (selected by Content-Type) and answers in the same
     * format; any other path is answered with an error.
     */
    @Override
    protected void doPost(HttpServletRequest request, HttpServletResponse response) throws IOException {
        String processorReturned = "";
        MultiDataSet mds = null;
        String path = request.getPathInfo();
        // Reversed equals: getPathInfo() may legitimately return null.
        if (SERVING_ENDPOINT.equals(path)) {
            val contentType = request.getContentType();
            // Reversed equals: the Content-Type header may be absent (null).
            if (typeJson.equals(contentType)) {
                if (validateRequest(request, response)) {
                    val stream = request.getInputStream();
                    val bufferedReader = new BufferedReader(new InputStreamReader(stream));
                    char[] charBuffer = new char[128];
                    int bytesRead = -1;
                    val buffer = new StringBuilder();
                    while ((bytesRead = bufferedReader.read(charBuffer)) > 0) {
                        buffer.append(charBuffer, 0, bytesRead);
                    }
                    val requestString = buffer.toString();
                    mds = inferenceAdapter.apply(deserializer.deserialize(requestString));
                }
            }
            else if (typeBinary.equals(contentType)) {
                val stream = request.getInputStream();
                int available = request.getContentLength();
                if (available <= 0) {
                    response.sendError(411, "Content length is unavailable");
                }
                else {
                    // InputStream.read(byte[], int, int) may return fewer bytes than
                    // requested; loop until the declared content length is consumed
                    // (the previous single read() call could truncate the body).
                    byte[] data = new byte[available];
                    int offset = 0;
                    while (offset < available) {
                        int n = stream.read(data, offset, available - offset);
                        if (n < 0) {
                            break; // premature end of stream; pass along what was read
                        }
                        offset += n;
                    }
                    mds = inferenceAdapter.apply(binaryDeserializer.deserialize(data));
                }
            }
            if (mds == null)
                log.error("InferenceAdapter failed");
            else {
                val result = process(mds);
                // Prefer binary response when a binary serializer is configured.
                if (binarySerializer != null) {
                    byte[] serialized = binarySerializer.serialize(result);
                    response.setContentType(typeBinary);
                    response.setContentLength(serialized.length);
                    val out = response.getOutputStream();
                    out.write(serialized);
                }
                else {
                    processorReturned = serializer.serialize(result);
                    try {
                        val out = response.getWriter();
                        out.write(processorReturned);
                    } catch (IOException e) {
                        log.error(e.getMessage());
                    }
                }
            }
        } else {
            // we return error otherwise
            sendError(request.getRequestURI(), response);
        }
    }

    /**
     * Creates servlet to serve models
     *
     * @param <I> type of Input class
     * @param <O> type of Output class
     *
     * @author raver119@gmail.com
     * @author astoyakin
     */
    public static class Builder<I,O> {
        private ParallelInference pi;
        private Model model;
        private InferenceAdapter<I, O> inferenceAdapter;
        private JsonSerializer<O> serializer;
        private JsonDeserializer<I> deserializer;
        private BinarySerializer<O> binarySerializer;
        private BinaryDeserializer<I> binaryDeserializer;
        private int port;
        private boolean parallelEnabled = true;

        public Builder(@NonNull ParallelInference pi) {
            this.pi = pi;
        }

        public Builder(@NonNull Model model) {
            this.model = model;
        }

        /** Specifies the adapter translating I/O types to/from datasets (required). */
        public Builder<I,O> inferenceAdapter(@NonNull InferenceAdapter<I,O> inferenceAdapter) {
            this.inferenceAdapter = inferenceAdapter;
            return this;
        }

        /**
         * Specifies the JSON serializer for responses.
         *
         * @param serializer JSON serializer, may be null if binary serde is used
         * @return this builder
         */
        public Builder<I,O> serializer(JsonSerializer<O> serializer) {
            this.serializer = serializer;
            return this;
        }

        /**
         * Specifies the JSON deserializer for requests.
         *
         * @param deserializer JSON deserializer, may be null if binary serde is used
         * @return this builder
         */
        public Builder<I,O> deserializer(JsonDeserializer<I> deserializer) {
            this.deserializer = deserializer;
            return this;
        }

        /**
         * Specifies the binary serializer for responses.
         *
         * @param serializer binary serializer, may be null if JSON serde is used
         * @return this builder
         */
        public Builder<I,O> binarySerializer(BinarySerializer<O> serializer) {
            this.binarySerializer = serializer;
            return this;
        }

        /**
         * Specifies the binary deserializer for requests.
         *
         * @param deserializer binary deserializer, may be null if JSON serde is used
         * @return this builder
         */
        public Builder<I,O> binaryDeserializer(BinaryDeserializer<I> deserializer) {
            this.binaryDeserializer = deserializer;
            return this;
        }

        /**
         * Specifies the port to serve on.
         * NOTE(review): the port is stored but not consumed by build(); presumably
         * used by an outer serving component — confirm before removing.
         *
         * @param port TCP port
         * @return this builder
         */
        public Builder<I,O> port(int port) {
            this.port = port;
            return this;
        }

        /**
         * Enables or disables parallel inference mode.
         *
         * @param parallelEnabled true to serve via ParallelInference
         * @return this builder
         */
        public Builder<I,O> parallelEnabled(boolean parallelEnabled) {
            this.parallelEnabled = parallelEnabled;
            return this;
        }

        /** Builds the servlet in parallel or sequential mode per {@link #parallelEnabled}. */
        public DL4jServlet<I,O> build() {
            return parallelEnabled ? new DL4jServlet<I, O>(pi, inferenceAdapter, serializer, deserializer, binarySerializer, binaryDeserializer) :
                    new DL4jServlet<I, O>(model, inferenceAdapter, serializer, deserializer, binarySerializer, binaryDeserializer);
        }
    }
}
| |
package view;
import Controller.LoggerController;
import cotton.network.DestinationMetaData;
import cotton.network.PathType;
import cotton.systemsupport.StatType;
import cotton.systemsupport.StatisticsRecorder.SampleRange;
import cotton.systemsupport.TimeInterval;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentLinkedQueue;
import javafx.animation.AnimationTimer;
import javafx.collections.ObservableList;
import javafx.scene.chart.Chart;
//import javafx.scene.chart.AreaChart;
import javafx.scene.chart.LineChart;
import javafx.scene.chart.NumberAxis;
import javafx.scene.chart.XYChart;
/**
 * JavaFX view plotting per-destination input/output usage intensity over time
 * as two line charts. Samples arrive asynchronously via {@link #pushData} onto
 * a queue and are drained onto the charts by an {@link AnimationTimer} on the
 * FX thread.
 *
 * @author Magnus
 * @author Mats
 */
public class GraphView implements DataPusherGraph<TimeInterval> {
    private LineChart<Number, Number> graphIn;
    private LineChart<Number, Number> graphOut;
    // Per-destination chart series state.
    private ConcurrentHashMap<DestinationMetaData, PlotInterval> plotData;
    private String chartName;
    private String dataName;
    // Highest x-coordinate plotted across all series; drives the x-axis window.
    private long gTickTime = 0;

    /** Holds the in/out series for one destination plus its sampling state. */
    private class PlotInterval {
        XYChart.Series in;
        XYChart.Series out;
        long startTime = 0;
        int maxPoint;
        String myName;
        SampleRange range = null;

        public PlotInterval(String name, long currentTime) {
            this.myName = name;
            this.in = new XYChart.Series();
            this.out = new XYChart.Series();
            this.in.setName(name + currentTime);
            this.out.setName(name + currentTime);
            this.startTime = currentTime;
            this.maxPoint = gxMax;
        }

        public SampleRange getRange() {
            return range;
        }

        public void setMaxPoint(int p) {
            this.maxPoint = p;
        }

        // NOTE(review): the parameter is ignored; kept for interface stability.
        public int getMaxPoint(int p) {
            return this.maxPoint;
        }

        public XYChart.Series getIn() {
            return in;
        }

        public XYChart.Series getOut() {
            return out;
        }

        /**
         * Appends one data point per interval to both series (x advances by 1
         * per sample), then trims each series to roughly maxPoint points.
         */
        public void addTimeInterval(TimeInterval[] data, SampleRange range) {
            this.range = range;
            for (int i = 0; i < data.length; i++) {
                TimeInterval d = data[i];
                this.in.getData().add(new XYChart.Data(startTime, d.calculateInputIntensity()));
                this.out.getData().add(new XYChart.Data(startTime, d.calculateOutputIntensity()));
                startTime += 1;
            }
            TimeInterval[] sampling = data;
            System.out.println("Sampling:" + this.myName);
            System.out.println("Sample count: " + sampling.length);
            for (int i = 0; i < sampling.length; i++) {
                System.out.println("\t" + sampling[i].toString());
            }
            // NOTE(review): "size - maxPoint - 1" leaves maxPoint + 1 points;
            // preserved as-is to keep the visible window unchanged.
            if (this.in.getData().size() > this.maxPoint) {
                this.in.getData().remove(0, this.in.getData().size() - this.maxPoint - 1);
                System.out.println("In; rsize: " + this.out.getData().size());
            }
            if (this.out.getData().size() > this.maxPoint) {
                this.out.getData().remove(0, this.out.getData().size() - this.maxPoint - 1);
                System.out.println("out; rsize: " + this.out.getData().size());
            }
            // Advance the global tick so the shared axis window follows the newest series.
            gTickTime = (gTickTime < startTime) ? startTime : gTickTime;
            System.out.println("start time: " + startTime + " size: in: " + this.in.getData().size() + " out:" + this.out.getData().size());
        }
    }

    /** FX-thread timer: drains queued samples every frame, refreshes data once a second. */
    public class UpdateTimer extends AnimationTimer {
        private long lastTime = 0;
        private long diff = 0;
        @Override
        public void handle(long now) {
            displayData();
            diff += now - lastTime;
            lastTime = now;
            // 'now' is in nanoseconds: request fresh usage data about once per second.
            if (diff > 1000000000) {
                System.out.println("Tick: " + diff / 1000000);
                updateGraph();
                diff = 0;
            }
        }
    }

    private LoggerController controller = null;
    private UpdateTimer updateTimer;
    private NumberAxis xA;
    private NumberAxis yA;
    // Maximum number of points kept visible on the x-axis.
    private int gxMax = 25;

    public GraphView(String chartName, String dataName, LoggerController controller) {
        this.updateTimer = new UpdateTimer();
        this.chartName = chartName;
        this.plotData = new ConcurrentHashMap<>();
        this.xA = new NumberAxis(0, 20, 20 / 10);
        this.xA.setForceZeroInRange(false);
        this.xA.setAutoRanging(false);
        this.yA = new NumberAxis();
        yA.setAutoRanging(true);
        this.graphIn = new LineChart<Number, Number>(xA, yA);
        this.graphOut = new LineChart<Number, Number>(xA, yA);
        this.destinations = new HashSet<>();
        this.graphIn.setTitle(chartName + " in");
        this.graphOut.setTitle(chartName + " out");
        this.dataName = dataName;
        this.controller = controller;
    }

    public String getDataName() {
        return dataName;
    }

    public void setDataName(String dataName) {
        this.dataName = dataName;
    }

    HashSet<DestinationMetaData> destinations = null;
    StatType myType = StatType.UNKNOWN;

    /** Merges newly discovered destinations into the tracked set. */
    public void updateDestinationList(ArrayList<DestinationMetaData> gdest) {
        if (gdest.isEmpty()) {
            return;
        }
        destinations.addAll(gdest);
    }

    /** Requests fresh usage data for every tracked destination (async reply via pushData). */
    public void updateGraph() {
        if (destinations == null) {
            return;
        }
        for (DestinationMetaData d : destinations) {
            PlotInterval get = this.plotData.get(d);
            SampleRange range = null;
            if (get != null) {
                range = get.getRange();
            }
            controller.requestUsageData(dataName, this, d, range);
        }
    }

    public void startAutoUpdate() {
        this.updateTimer.start();
    }

    public void stopAutoUpdate() {
        this.updateTimer.stop();
    }

    /** Immutable queue entry pairing a destination with its sampled intervals. */
    private class QData {
        DestinationMetaData destination;
        SampleRange range;
        TimeInterval[] data;
        public QData(DestinationMetaData destination, SampleRange range, TimeInterval[] data) {
            this.destination = destination;
            this.range = range;
            this.data = data;
        }
        public DestinationMetaData getDestination() {
            return destination;
        }
        public SampleRange getRange() {
            return range;
        }
        public TimeInterval[] getData() {
            return data;
        }
    }

    /**
     * Drains the pending-sample queue onto the charts (creating a new series
     * for previously unseen destinations) and slides the x-axis window.
     * Expected to run on the FX thread (driven by UpdateTimer).
     */
    public void displayData() {
        QData qd = null;
        // Note: poll() returning null terminates the loop; no inner null check needed.
        while ((qd = dataQueue.poll()) != null) {
            TimeInterval[] data = qd.getData();
            if (data == null) {
                continue;
            }
            PlotInterval plot = this.plotData.get(qd.destination);
            if (plot == null) {
                System.out.println("New plot");
                plot = new PlotInterval(this.dataName, this.gTickTime);
                plot.addTimeInterval(data, qd.getRange());
                graphIn.getData().addAll(plot.getIn());
                graphOut.getData().addAll(plot.getOut());
                this.plotData.putIfAbsent(qd.destination, plot);
                continue;
            }
            plot.addTimeInterval(data, qd.getRange());
        }
        xA.setUpperBound(this.gTickTime - 1);
        xA.setLowerBound(this.gTickTime - this.gxMax);
    }

    // NOTE(review): this lock appears unused within the class; kept because it is
    // package-visible and may be referenced externally — confirm before removing.
    final Object lock = new Object();
    private ConcurrentLinkedQueue<QData> dataQueue = new ConcurrentLinkedQueue<>();

    /** Thread-safe entry point: enqueues samples for the FX thread to plot. */
    @Override
    public void pushData(String name, DestinationMetaData destination, TimeInterval[] data, SampleRange range) {
        if (name == null) {
            return;
        }
        this.dataQueue.add(new QData(destination, range, data));
    }

    /**
     * Get the value of graph
     *
     * @return the input-intensity line chart
     */
    public LineChart<Number, Number> getInGraph() {
        return this.graphIn;
    }

    /**
     * Get the value of graph
     *
     * @return the output-intensity line chart
     */
    public LineChart<Number, Number> getOutGraph() {
        return this.graphOut;
    }
}
| |
/**
* Copyright (C) 2012 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.analytics.financial.commodity.calculator;
import com.opengamma.analytics.financial.commodity.derivative.CommodityFutureOption;
import com.opengamma.analytics.financial.equity.StaticReplicationDataBundle;
import com.opengamma.analytics.financial.model.volatility.BlackFormulaRepository;
import com.opengamma.util.ArgumentChecker;
/**
* Black methods for commodity future option prices and greeks.
*/
public final class CommodityFutureOptionBlackMethod {
/** The single shared instance of this stateless pricer. */
private static final CommodityFutureOptionBlackMethod INSTANCE = new CommodityFutureOptionBlackMethod();
/**
 * @return The static instance of this class
 */
public static CommodityFutureOptionBlackMethod getInstance() {
  return INSTANCE;
}
/**
 * Private constructor — the class is a stateless singleton; use {@link #getInstance()}.
 */
private CommodityFutureOptionBlackMethod() {
}
/**
 * Computes the <b>forward</b> price of the option under the Black model,
 * i.e. PV / ZeroBond(timeToSettlement), scaled by the future's unit amount.
 *
 * @param derivative the OG-Analytics form of the derivative
 * @param marketData the data bundle containing a BlackVolatilitySurface, forward commodity and funding curves
 * @return the forward option price
 */
public double forwardPrice(final CommodityFutureOption<?> derivative, final StaticReplicationDataBundle marketData) {
  ArgumentChecker.notNull(derivative, "derivative");
  ArgumentChecker.notNull(marketData, "marketData");
  final double timeToExpiry = derivative.getExpiry();
  final double strikePrice = derivative.getStrike();
  final double forwardLevel = marketData.getForwardCurve().getForward(timeToExpiry);
  final double impliedVol = marketData.getVolatilitySurface().getVolatility(timeToExpiry, strikePrice);
  final double unitAmount = derivative.getUnderlying().getUnitAmount();
  // Black price per unit, scaled to the contract's notional.
  return unitAmount * BlackFormulaRepository.price(forwardLevel, strikePrice, timeToExpiry, impliedVol, derivative.isCall());
}
/**
 * Computes the discount (zero-coupon bond) factor from today to the option's
 * settlement date off the bundle's discount curve.
 *
 * @param derivative the OG-Analytics form of the derivative
 * @param marketData the data bundle containing a BlackVolatilitySurface, forward commodity and funding curves
 * @return the discount factor for payment at settlement
 */
public double discountToSettlement(final CommodityFutureOption<?> derivative, final StaticReplicationDataBundle marketData) {
  ArgumentChecker.notNull(derivative, "derivative");
  ArgumentChecker.notNull(marketData, "marketData");
  return marketData.getDiscountCurve().getDiscountFactor(derivative.getUnderlying().getSettlement());
}
/**
* @param derivative the OG-Analytics form of the derivative
* @param marketData the data bundle containing a BlackVolatilitySurface, forward commodity and funding curves
* @return the <b>forward</b> value of the index, ie the fair strike of a forward agreement paying the index value at maturity
*/
public double forwardIndexValue(final CommodityFutureOption<?> derivative, final StaticReplicationDataBundle marketData) {
ArgumentChecker.notNull(derivative, "derivative");
ArgumentChecker.notNull(marketData, "marketData");
final double expiry = derivative.getExpiry();
final double forward = marketData.getForwardCurve().getForward(expiry);
return forward;
}
/**
* @param marketData the data bundle containing a BlackVolatilitySurface, forward commodity and funding curves
* @return the <b>spot</b> value of the index, i.e. the current market value
*/
public double spotIndexValue(final StaticReplicationDataBundle marketData) {
ArgumentChecker.notNull(marketData, "marketData");
final double spot = marketData.getForwardCurve().getSpot();
return spot;
}
/**
* @param derivative the OG-Analytics form of the derivative
* @param marketData the data bundle containing a BlackVolatilitySurface, forward commodity and funding curves
* @return the present value of the option
*/
public double presentValue(final CommodityFutureOption<?> derivative, final StaticReplicationDataBundle marketData) {
ArgumentChecker.notNull(derivative, "derivative");
ArgumentChecker.notNull(marketData, "marketData");
final double fwdPrice = forwardPrice(derivative, marketData);
final double df = discountToSettlement(derivative, marketData);
return df * fwdPrice;
}
/**
* @param derivative the OG-Analytics form of the derivative
* @param marketData the data bundle containing a BlackVolatilitySurface, forward commodity and funding curves
* @return the sensitivity of the present value wrt the discounting rate
*/
public double rho(final CommodityFutureOption<?> derivative, final StaticReplicationDataBundle marketData) {
ArgumentChecker.notNull(derivative, "derivative");
ArgumentChecker.notNull(marketData, "marketData");
final double ttm = derivative.getUnderlying().getSettlement();
final double pv = presentValue(derivative, marketData);
return -ttm * pv;
}
/**
* @param derivative the OG-Analytics form of the derivative
* @param marketData the data bundle containing a BlackVolatilitySurface, forward commodity and funding curves
* @return the delta
*/
public double delta(final CommodityFutureOption<?> derivative, final StaticReplicationDataBundle marketData) {
ArgumentChecker.notNull(derivative, "derivative");
ArgumentChecker.notNull(marketData, "marketData");
final double expiry = derivative.getExpiry();
final double strike = derivative.getStrike();
final boolean isCall = derivative.isCall();
final double forward = marketData.getForwardCurve().getForward(expiry);
final double blackVol = marketData.getVolatilitySurface().getVolatility(expiry, strike);
return BlackFormulaRepository.delta(forward, strike, expiry, blackVol, isCall);
}
/**
* @param derivative the OG-Analytics form of the derivative
* @param marketData the data bundle containing a BlackVolatilitySurface, forward commodity and funding curves
* @return the forward delta wrt the forward underlying, ie the sensitivity of the undiscounted price to the forward value of the underlying, d(PV/Z)/d(fwdUnderlying)
*/
public double forwardDelta(final CommodityFutureOption<?> derivative, final StaticReplicationDataBundle marketData) {
ArgumentChecker.notNull(derivative, "derivative");
ArgumentChecker.notNull(marketData, "marketData");
final double undiscountDelta = delta(derivative, marketData);
return undiscountDelta;
}
/**
* @param derivative the OG-Analytics form of the derivative
* @param marketData the data bundle containing a BlackVolatilitySurface, forward commodity and funding curves
* @return the sensitivity of the present value wrt the forward value of the underlying, d(PV)/d(fwdUnderlying)
*/
public double deltaWrtForward(final CommodityFutureOption<?> derivative, final StaticReplicationDataBundle marketData) {
ArgumentChecker.notNull(derivative, "derivative");
ArgumentChecker.notNull(marketData, "marketData");
final double forwardDelta = forwardDelta(derivative, marketData);
final double zeroBond = discountToSettlement(derivative, marketData);
return forwardDelta * zeroBond;
}
/**
* @param derivative the OG-Analytics form of the derivative
* @param marketData the data bundle containing a BlackVolatilitySurface, forward commodity and funding curves
* @return the spot delta wrt the underlying, d(PV)/d(spotUnderlying)
*/
public double deltaWrtSpot(final CommodityFutureOption<?> derivative, final StaticReplicationDataBundle marketData) {
ArgumentChecker.notNull(derivative, "derivative");
ArgumentChecker.notNull(marketData, "marketData");
final double deltaWrtForward = deltaWrtForward(derivative, marketData);
final double forwardUnderlying = forwardIndexValue(derivative, marketData);
final double spotUnderlying = marketData.getForwardCurve().getSpot();
final double dForwardDSpot = forwardUnderlying / spotUnderlying;
return deltaWrtForward * dForwardDSpot;
}
/**
* @param derivative the OG-Analytics form of the derivative
* @param marketData the data bundle containing a BlackVolatilitySurface, forward commodity and funding curves
* @return the forward gamma wrt the forward underlying, ie the 2nd order sensitivity of the undiscounted price to the forward value of the underlying,
* $\frac{\partial^2 (PV/Z)}{\partial F^2}$
*/
public double forwardGamma(final CommodityFutureOption<?> derivative, final StaticReplicationDataBundle marketData) {
ArgumentChecker.notNull(derivative, "derivative");
ArgumentChecker.notNull(marketData, "marketData");
final double expiry = derivative.getExpiry();
final double strike = derivative.getStrike();
final double forward = marketData.getForwardCurve().getForward(expiry);
final double blackVol = marketData.getVolatilitySurface().getVolatility(expiry, strike);
final double forwardGamma = BlackFormulaRepository.gamma(forward, strike, expiry, blackVol);
return forwardGamma;
}
/**
* @param derivative the OG-Analytics form of the derivative
* @param marketData the data bundle containing a BlackVolatilitySurface, forward commodity and funding curves
* @return the sensitivity of the forward delta wrt the forward value of the underlying, $\frac{\partial^2 (PV)}{\partial F^2}$
*/
public double gammaWrtForward(final CommodityFutureOption<?> derivative, final StaticReplicationDataBundle marketData) {
ArgumentChecker.notNull(derivative, "derivative");
ArgumentChecker.notNull(marketData, "marketData");
final double forwardGamma = forwardGamma(derivative, marketData);
final double zeroBond = discountToSettlement(derivative, marketData);
return forwardGamma * zeroBond;
}
/**
* @param derivative the OG-Analytics form of the derivative
* @param marketData the data bundle containing a BlackVolatilitySurface, forward commodity and funding curves
* @return the spot gamma wrt the spot underlying, ie the 2nd order sensitivity of the present value to the spot value of the underlying,
* $\frac{\partial^2 (PV)}{\partial S^2}$
*/
public double gammaWrtSpot(final CommodityFutureOption<?> derivative, final StaticReplicationDataBundle marketData) {
ArgumentChecker.notNull(derivative, "derivative");
ArgumentChecker.notNull(marketData, "marketData");
final double gammaWrtForward = gammaWrtForward(derivative, marketData);
final double forwardUnderlying = forwardIndexValue(derivative, marketData);
final double spotUnderlying = marketData.getForwardCurve().getSpot();
final double dForwardDSpot = forwardUnderlying / spotUnderlying;
return gammaWrtForward * dForwardDSpot * dForwardDSpot;
}
/**
* @param derivative the OG-Analytics form of the derivative
* @param marketData the data bundle containing a BlackVolatilitySurface, forward commodity and funding curves
* @return the simple vega, d(PV)/d(blackVol)
*/
public double vega(final CommodityFutureOption<?> derivative, final StaticReplicationDataBundle marketData) {
ArgumentChecker.notNull(derivative, "derivative");
ArgumentChecker.notNull(marketData, "marketData");
final double expiry = derivative.getExpiry();
final double strike = derivative.getStrike();
final double forward = marketData.getForwardCurve().getForward(expiry);
final double blackVol = marketData.getVolatilitySurface().getVolatility(expiry, strike);
final double fwdVega = BlackFormulaRepository.vega(forward, strike, expiry, blackVol);
final double df = discountToSettlement(derivative, marketData);
return df * fwdVega;
}
/**
* @param derivative the OG-Analytics form of the derivative
* @param marketData the data bundle containing a BlackVolatilitySurface, forward commodity and funding curves
* @return the lognormal Black Volatility
*/
public double impliedVol(final CommodityFutureOption<?> derivative, final StaticReplicationDataBundle marketData) {
ArgumentChecker.notNull(derivative, "derivative");
ArgumentChecker.notNull(marketData, "marketData");
final double expiry = derivative.getExpiry();
final double strike = derivative.getStrike();
final double blackVol = marketData.getVolatilitySurface().getVolatility(expiry, strike);
return blackVol;
}
/**
* @param derivative the OG-Analytics form of the derivative
* @param marketData the data bundle containing a BlackVolatilitySurface, forward commodity and funding curves
* @return the forward Vomma, ie the 2nd order sensitivity of the undiscounted price to the implied vol,
* $\frac{\partial^2 (PV/Z)}{\partial \sigma^2}$
*/
public double forwardVomma(final CommodityFutureOption<?> derivative, final StaticReplicationDataBundle marketData) {
ArgumentChecker.notNull(derivative, "derivative");
ArgumentChecker.notNull(marketData, "marketData");
final double expiry = derivative.getExpiry();
final double strike = derivative.getStrike();
final double forward = marketData.getForwardCurve().getForward(expiry);
final double blackVol = marketData.getVolatilitySurface().getVolatility(expiry, strike);
final double forwardVomma = BlackFormulaRepository.vomma(forward, strike, expiry, blackVol);
return forwardVomma;
}
/**
* @param derivative the OG-Analytics form of the derivative
* @param marketData the data bundle containing a BlackVolatilitySurface, forward commodity and funding curves
* @return the spot Vomma, ie the 2nd order sensitivity of the spot price to the implied vol,
* $\frac{\partial^2 (PV)}{\partial \sigma^2}$
*/
public double vomma(final CommodityFutureOption<?> derivative, final StaticReplicationDataBundle marketData) {
ArgumentChecker.notNull(derivative, "derivative");
ArgumentChecker.notNull(marketData, "marketData");
final double forwardVomma = forwardVomma(derivative, marketData);
final double zeroBond = discountToSettlement(derivative, marketData);
return forwardVomma * zeroBond;
}
/**
* Synonym for Vomma
* @param derivative the OG-Analytics form of the derivative
* @param marketData the data bundle containing a BlackVolatilitySurface, forward commodity and funding curves
* @return the spot Volga, ie the 2nd order sensitivity of the spot price to the implied vol,
* $\frac{\partial^2 (PV)}{\partial \sigma^2}$
*/
public double spotVolga(final CommodityFutureOption<?> derivative, final StaticReplicationDataBundle marketData) {
ArgumentChecker.notNull(derivative, "derivative");
ArgumentChecker.notNull(marketData, "marketData");
return vomma(derivative, marketData);
}
/**
* @param derivative the OG-Analytics form of the derivative
* @param marketData the data bundle containing a BlackVolatilitySurface, forward commodity and funding curves
* @return the forward vanna wrt the forward underlying, ie the 2nd order cross-sensitivity of the undiscounted price to the forward and implied vol,
* $\frac{\partial^2 (PV/Z)}{\partial F \partial \sigma}$
*/
public double forwardVanna(final CommodityFutureOption<?> derivative, final StaticReplicationDataBundle marketData) {
ArgumentChecker.notNull(derivative, "derivative");
ArgumentChecker.notNull(marketData, "marketData");
final double expiry = derivative.getExpiry();
final double strike = derivative.getStrike();
final double forward = marketData.getForwardCurve().getForward(expiry);
final double blackVol = marketData.getVolatilitySurface().getVolatility(expiry, strike);
final double forwardVanna = BlackFormulaRepository.vanna(forward, strike, expiry, blackVol);
return forwardVanna;
}
/**
* @param derivative the OG-Analytics form of the derivative
* @param marketData the data bundle containing a BlackVolatilitySurface, forward commodity and funding curves
* @return the spot vanna wrt the forward underlying, ie the 2nd order cross-sensitivity of the present value to the forward and implied vol,
* $\frac{\partial^2 (PV)}{\partial F \partial \sigma}$
*/
public double vannaWrtForward(final CommodityFutureOption<?> derivative, final StaticReplicationDataBundle marketData) {
ArgumentChecker.notNull(derivative, "derivative");
ArgumentChecker.notNull(marketData, "marketData");
final double forwardVanna = forwardVanna(derivative, marketData);
final double zeroBond = discountToSettlement(derivative, marketData);
return forwardVanna * zeroBond;
}
/**
* @param derivative the OG-Analytics form of the derivative
* @param marketData the data bundle containing a BlackVolatilitySurface, forward commodity and funding curves
* @return the spot vanna wrt the spot underlying, ie the 2nd order cross-sensitivity of the present value to the spot and implied vol,
* $\frac{\partial^2 (PV)}{\partial spot \partial \sigma}$
*/
public double vannaWrtSpot(final CommodityFutureOption<?> derivative, final StaticReplicationDataBundle marketData) {
ArgumentChecker.notNull(derivative, "derivative");
ArgumentChecker.notNull(marketData, "marketData");
final double vannaWrtForward = vannaWrtForward(derivative, marketData);
final double forwardUnderlying = forwardIndexValue(derivative, marketData);
final double spotUnderlying = marketData.getForwardCurve().getSpot();
final double dForwardDSpot = forwardUnderlying / spotUnderlying;
return vannaWrtForward * dForwardDSpot;
}
/**
* @param derivative the OG-Analytics form of the derivative
* @param marketData the data bundle containing a BlackVolatilitySurface, forward commodity and funding curves
* @return Spot theta, ie the sensitivity of the present value to the time to expiration,
* $\frac{\partial (PV)}{\partial t}$
*/
public double theta(final CommodityFutureOption<?> derivative, final StaticReplicationDataBundle marketData) {
ArgumentChecker.notNull(derivative, "derivative");
ArgumentChecker.notNull(marketData, "marketData");
final double expiry = derivative.getExpiry();
final double forward = marketData.getForwardCurve().getForward(expiry);
final double strike = derivative.getStrike();
final double blackVol = marketData.getVolatilitySurface().getVolatility(expiry, strike);
final double interestRate = marketData.getDiscountCurve().getInterestRate(expiry);
final double theta = BlackFormulaRepository.theta(forward, strike, expiry, blackVol, derivative.isCall(), interestRate);
return theta;
}
/**
* @param derivative the OG-Analytics form of the derivative
* @param marketData the data bundle containing a BlackVolatilitySurface, forward commodity and funding curves
* @return the forward (i.e. driftless) theta
*/
public double forwardTheta(final CommodityFutureOption<?> derivative, final StaticReplicationDataBundle marketData) {
ArgumentChecker.notNull(derivative, "derivative");
ArgumentChecker.notNull(marketData, "marketData");
final double expiry = derivative.getExpiry();
final double forward = marketData.getForwardCurve().getForward(expiry);
final double strike = derivative.getStrike();
final double blackVol = marketData.getVolatilitySurface().getVolatility(expiry, strike);
final double forwardTheta = BlackFormulaRepository.driftlessTheta(forward, strike, expiry, blackVol);
return forwardTheta;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.sdk.io.gcp.bigtable;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;
import com.google.bigtable.v2.MutateRowResponse;
import com.google.bigtable.v2.Mutation;
import com.google.bigtable.v2.Row;
import com.google.bigtable.v2.RowFilter;
import com.google.bigtable.v2.SampleRowKeysResponse;
import com.google.cloud.bigtable.config.BigtableOptions;
import com.google.cloud.bigtable.config.CredentialOptions;
import com.google.cloud.bigtable.config.CredentialOptions.CredentialType;
import com.google.cloud.bigtable.config.RetryOptions;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.MoreObjects;
import com.google.common.collect.ImmutableList;
import com.google.common.util.concurrent.FutureCallback;
import com.google.common.util.concurrent.Futures;
import com.google.protobuf.ByteString;
import io.grpc.Status;
import java.io.IOException;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.NoSuchElementException;
import java.util.concurrent.ConcurrentLinkedQueue;
import javax.annotation.Nullable;
import org.apache.beam.sdk.annotations.Experimental;
import org.apache.beam.sdk.coders.Coder;
import org.apache.beam.sdk.coders.protobuf.ProtoCoder;
import org.apache.beam.sdk.io.BoundedSource;
import org.apache.beam.sdk.io.BoundedSource.BoundedReader;
import org.apache.beam.sdk.io.range.ByteKey;
import org.apache.beam.sdk.io.range.ByteKeyRange;
import org.apache.beam.sdk.io.range.ByteKeyRangeTracker;
import org.apache.beam.sdk.options.GcpOptions;
import org.apache.beam.sdk.options.PipelineOptions;
import org.apache.beam.sdk.runners.PipelineRunner;
import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.SerializableFunction;
import org.apache.beam.sdk.transforms.display.DisplayData;
import org.apache.beam.sdk.util.ReleaseInfo;
import org.apache.beam.sdk.values.KV;
import org.apache.beam.sdk.values.PBegin;
import org.apache.beam.sdk.values.PCollection;
import org.apache.beam.sdk.values.PDone;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* A bounded source and sink for Google Cloud Bigtable.
*
* <p>For more information, see the online documentation at
* <a href="https://cloud.google.com/bigtable/">Google Cloud Bigtable</a>.
*
* <h3>Reading from Cloud Bigtable</h3>
*
* <p>The Bigtable source returns a set of rows from a single table, returning a
* {@code PCollection<Row>}.
*
* <p>To configure a Cloud Bigtable source, you must supply a table id and a {@link BigtableOptions}
* or builder configured with the project and other information necessary to identify the
* Bigtable instance. By default, {@link BigtableIO.Read} will read all rows in the table. The row
* range to be read can optionally be restricted using {@link BigtableIO.Read#withKeyRange}, and
* a {@link RowFilter} can be specified using {@link BigtableIO.Read#withRowFilter}. For example:
*
* <pre>{@code
* BigtableOptions.Builder optionsBuilder =
* new BigtableOptions.Builder()
* .setProjectId("project")
* .setInstanceId("instance");
*
* Pipeline p = ...;
*
* // Scan the entire table.
* p.apply("read",
* BigtableIO.read()
* .withBigtableOptions(optionsBuilder)
* .withTableId("table"));
*
* // Scan a prefix of the table.
* ByteKeyRange keyRange = ...;
* p.apply("read",
* BigtableIO.read()
* .withBigtableOptions(optionsBuilder)
* .withTableId("table")
* .withKeyRange(keyRange));
*
* // Scan a subset of rows that match the specified row filter.
* p.apply("filtered read",
* BigtableIO.read()
* .withBigtableOptions(optionsBuilder)
* .withTableId("table")
* .withRowFilter(filter));
* }</pre>
*
* <h3>Writing to Cloud Bigtable</h3>
*
* <p>The Bigtable sink executes a set of row mutations on a single table. It takes as input a
* {@link PCollection PCollection<KV<ByteString, Iterable<Mutation>>>}, where the
* {@link ByteString} is the key of the row being mutated, and each {@link Mutation} represents an
* idempotent transformation to that row.
*
* <p>To configure a Cloud Bigtable sink, you must supply a table id and a {@link BigtableOptions}
* or builder configured with the project and other information necessary to identify the
* Bigtable instance, for example:
*
* <pre>{@code
* BigtableOptions.Builder optionsBuilder =
* new BigtableOptions.Builder()
* .setProjectId("project")
* .setInstanceId("instance");
*
* PCollection<KV<ByteString, Iterable<Mutation>>> data = ...;
*
* data.apply("write",
* BigtableIO.write()
* .withBigtableOptions(optionsBuilder)
* .withTableId("table"));
* }</pre>
*
* <h3>Experimental</h3>
*
* <p>This connector for Cloud Bigtable is considered experimental and may break or receive
* backwards-incompatible changes in future versions of the Cloud Dataflow SDK. Cloud Bigtable is
* in Beta, and thus it may introduce breaking changes in future revisions of its service or APIs.
*
* <h3>Permissions</h3>
*
* <p>Permission requirements depend on the {@link PipelineRunner} that is used to execute the
* Dataflow job. Please refer to the documentation of corresponding
* {@link PipelineRunner PipelineRunners} for more details.
*/
@Experimental
public class BigtableIO {
private static final Logger logger = LoggerFactory.getLogger(BigtableIO.class);
/**
 * Creates an uninitialized {@link BigtableIO.Read}. Before use, the returned {@code Read} must
 * be configured with
 * {@link BigtableIO.Read#withBigtableOptions(BigtableOptions) BigtableOptions} that identify
 * the source Cloud Bigtable instance, and a {@link BigtableIO.Read#withTableId tableId} that
 * names the table to read. A {@link RowFilter} may also optionally be specified using
 * {@link BigtableIO.Read#withRowFilter}.
 */
@Experimental
public static Read read() {
  // Start unconfigured: no options, empty table id, the full key range, no filter, no service.
  Read uninitialized = new Read(null, "", ByteKeyRange.ALL_KEYS, null, null);
  return uninitialized;
}
/**
 * Creates an uninitialized {@link BigtableIO.Write}. Before use, the returned {@code Write}
 * must be configured with
 * {@link BigtableIO.Write#withBigtableOptions(BigtableOptions) BigtableOptions} that identify
 * the destination Cloud Bigtable instance, and a {@link BigtableIO.Write#withTableId tableId}
 * that names the table to write to.
 */
@Experimental
public static Write write() {
  // Start unconfigured: no options, empty table id, no injected service.
  Write uninitialized = new Write(null, "", null);
  return uninitialized;
}
/**
 * A {@link PTransform} that reads from Google Cloud Bigtable. See the class-level Javadoc on
 * {@link BigtableIO} for more information.
 *
 * <p>Instances are immutable: every {@code with*} method returns a new {@code Read}.
 *
 * @see BigtableIO
 */
@Experimental
public static class Read extends PTransform<PBegin, PCollection<Row>> {
  /**
   * Returns a new {@link BigtableIO.Read} that will read from the Cloud Bigtable instance
   * indicated by the given options, and using any other specified customizations.
   *
   * <p>Does not modify this object.
   */
  public Read withBigtableOptions(BigtableOptions options) {
    checkNotNull(options, "options");
    // Delegate to the builder overload, which performs the defensive clone.
    return withBigtableOptions(options.toBuilder());
  }
  /**
   * Returns a new {@link BigtableIO.Read} that will read from the Cloud Bigtable instance
   * indicated by the given options, and using any other specified customizations.
   *
   * <p>Clones the given {@link BigtableOptions} builder so that any further changes
   * will have no effect on the returned {@link BigtableIO.Read}.
   *
   * <p>Does not modify this object.
   */
  public Read withBigtableOptions(BigtableOptions.Builder optionsBuilder) {
    checkNotNull(optionsBuilder, "optionsBuilder");
    // TODO: is there a better way to clone a Builder? Want it to be immune from user changes.
    BigtableOptions options = optionsBuilder.build();
    RetryOptions retryOptions = options.getRetryOptions();
    // Set data channel count to one because there is only 1 scanner in this session
    // Use retryOptionsToBuilder because absent in Bigtable library
    // TODO: replace with RetryOptions.toBuilder() when added to Bigtable library
    // Set batch size because of bug (incorrect initialization) in Bigtable library
    // TODO: remove setRetryOptions when fixed in Bigtable library
    BigtableOptions.Builder clonedBuilder = options.toBuilder()
        .setDataChannelCount(1)
        .setRetryOptions(
            retryOptionsToBuilder(retryOptions)
                .setStreamingBatchSize(Math.min(retryOptions.getStreamingBatchSize(),
                    retryOptions.getStreamingBufferSize() / 2))
                .build());
    // Tag the connection with this SDK's user agent before freezing the options.
    BigtableOptions optionsWithAgent = clonedBuilder.setUserAgent(getUserAgent()).build();
    return new Read(optionsWithAgent, tableId, keyRange, filter, bigtableService);
  }
  /**
   * Returns a new {@link BigtableIO.Read} that will filter the rows read from Cloud Bigtable
   * using the given row filter.
   *
   * <p>Does not modify this object.
   */
  public Read withRowFilter(RowFilter filter) {
    checkNotNull(filter, "filter");
    return new Read(options, tableId, keyRange, filter, bigtableService);
  }
  /**
   * Returns a new {@link BigtableIO.Read} that will read only rows in the specified range.
   *
   * <p>Does not modify this object.
   */
  public Read withKeyRange(ByteKeyRange keyRange) {
    checkNotNull(keyRange, "keyRange");
    return new Read(options, tableId, keyRange, filter, bigtableService);
  }
  /**
   * Returns a new {@link BigtableIO.Read} that will read from the specified table.
   *
   * <p>Does not modify this object.
   */
  public Read withTableId(String tableId) {
    checkNotNull(tableId, "tableId");
    return new Read(options, tableId, keyRange, filter, bigtableService);
  }
  /**
   * Returns the Google Cloud Bigtable instance being read from, and other parameters.
   */
  public BigtableOptions getBigtableOptions() {
    return options;
  }
  /**
   * Returns the range of keys that will be read from the table. By default, returns
   * {@link ByteKeyRange#ALL_KEYS} to scan the entire table.
   */
  public ByteKeyRange getKeyRange() {
    return keyRange;
  }
  /**
   * Returns the table being read from.
   */
  public String getTableId() {
    return tableId;
  }
  @Override
  public PCollection<Row> apply(PBegin input) {
    // The service is constructed lazily through the SerializableFunction: getBigtableService
    // is invoked with the pipeline options at execution time, keeping this transform
    // serializable.
    BigtableSource source =
        new BigtableSource(new SerializableFunction<PipelineOptions, BigtableService>() {
          @Override
          public BigtableService apply(PipelineOptions options) {
            return getBigtableService(options);
          }
        }, tableId, filter, keyRange, null);
    return input.getPipeline().apply(org.apache.beam.sdk.io.Read.from(source));
  }
  @Override
  public void validate(PBegin input) {
    checkArgument(options != null, "BigtableOptions not specified");
    checkArgument(!tableId.isEmpty(), "Table ID not specified");
    try {
      checkArgument(
          getBigtableService(input.getPipeline().getOptions()).tableExists(tableId),
          "Table %s does not exist",
          tableId);
    } catch (IOException e) {
      // An I/O failure while probing the table is non-fatal at validation time:
      // log it and proceed rather than blocking pipeline construction.
      logger.warn("Error checking whether table {} exists; proceeding.", tableId, e);
    }
  }
  @Override
  public void populateDisplayData(DisplayData.Builder builder) {
    super.populateDisplayData(builder);
    builder.add(DisplayData.item("tableId", tableId)
        .withLabel("Table ID"));
    if (options != null) {
      builder.add(DisplayData.item("bigtableOptions", options.toString())
          .withLabel("Bigtable Options"));
    }
    // Only surface the key range if the user narrowed it from the default full scan.
    builder.addIfNotDefault(
        DisplayData.item("keyRange", keyRange.toString()), ByteKeyRange.ALL_KEYS.toString());
    if (filter != null) {
      builder.add(DisplayData.item("rowFilter", filter.toString())
          .withLabel("Table Row Filter"));
    }
  }
  @Override
  public String toString() {
    return MoreObjects.toStringHelper(Read.class)
        .add("options", options)
        .add("tableId", tableId)
        .add("keyRange", keyRange)
        .add("filter", filter)
        .toString();
  }
  /////////////////////////////////////////////////////////////////////////////////////////
  /**
   * Used to define the Cloud Bigtable instance and any options for the networking layer.
   * Cannot actually be {@code null} at validation time, but may start out {@code null} while
   * source is being built.
   */
  @Nullable private final BigtableOptions options;
  // Table to read; never null (enforced in the constructor) but may be empty until configured.
  private final String tableId;
  // Row-key range to scan; defaults to ByteKeyRange.ALL_KEYS.
  private final ByteKeyRange keyRange;
  // Optional server-side row filter; null means no filtering.
  @Nullable private final RowFilter filter;
  // Injectable service implementation; non-null only when supplied via withBigtableService.
  @Nullable private final BigtableService bigtableService;
  private Read(
      @Nullable BigtableOptions options,
      String tableId,
      ByteKeyRange keyRange,
      @Nullable RowFilter filter,
      @Nullable BigtableService bigtableService) {
    this.options = options;
    this.tableId = checkNotNull(tableId, "tableId");
    this.keyRange = checkNotNull(keyRange, "keyRange");
    this.filter = filter;
    this.bigtableService = bigtableService;
  }
  /**
   * Returns a new {@link BigtableIO.Read} that will read using the given Cloud Bigtable
   * service implementation.
   *
   * <p>This is used for testing.
   *
   * <p>Does not modify this object.
   */
  Read withBigtableService(BigtableService bigtableService) {
    checkNotNull(bigtableService, "bigtableService");
    return new Read(options, tableId, keyRange, filter, bigtableService);
  }
  /**
   * Helper function that either returns the mock Bigtable service supplied by
   * {@link #withBigtableService} or creates and returns an implementation that talks to
   * {@code Cloud Bigtable}.
   *
   * <p>Also populate the credentials option from {@link GcpOptions#getGcpCredential()} if the
   * default credentials are being used on {@link BigtableOptions}.
   */
  @VisibleForTesting
  BigtableService getBigtableService(PipelineOptions pipelineOptions) {
    // A service injected for testing takes precedence over building a real one.
    if (bigtableService != null) {
      return bigtableService;
    }
    BigtableOptions.Builder clonedOptions = options.toBuilder();
    if (options.getCredentialOptions().getCredentialType() == CredentialType.DefaultCredentials) {
      // Replace the library's default credentials with the pipeline's GCP credential.
      clonedOptions.setCredentialOptions(
          CredentialOptions.credential(
              pipelineOptions.as(GcpOptions.class).getGcpCredential()));
    }
    return new BigtableServiceImpl(clonedOptions.build());
  }
}
/**
* A {@link PTransform} that writes to Google Cloud Bigtable. See the class-level Javadoc on
* {@link BigtableIO} for more information.
*
* @see BigtableIO
*/
@Experimental
public static class Write
extends PTransform<PCollection<KV<ByteString, Iterable<Mutation>>>, PDone> {
/**
 * Used to define the Cloud Bigtable instance and any options for the networking layer.
 * Cannot actually be {@code null} at validation time, but may start out {@code null} while
 * source is being built.
 */
@Nullable private final BigtableOptions options;
// Destination table; never null (enforced in the constructor) but may be empty until configured.
private final String tableId;
// Injectable service implementation; non-null only when supplied for testing.
@Nullable private final BigtableService bigtableService;
// Private: instances are built via BigtableIO.write() and the with* methods.
private Write(
    @Nullable BigtableOptions options,
    String tableId,
    @Nullable BigtableService bigtableService) {
  this.options = options;
  this.tableId = checkNotNull(tableId, "tableId");
  this.bigtableService = bigtableService;
}
/**
 * Returns a new {@link BigtableIO.Write} that will write to the Cloud Bigtable instance
 * indicated by the given options, and using any other specified customizations.
 *
 * <p>Does not modify this object.
 */
public Write withBigtableOptions(BigtableOptions options) {
  // checkNotNull returns its argument, so validation and conversion can be fused;
  // the builder overload performs the defensive clone.
  return withBigtableOptions(checkNotNull(options, "options").toBuilder());
}
/**
 * Returns a new {@link BigtableIO.Write} that will write to the Cloud Bigtable instance
 * indicated by the given options, and using any other specified customizations.
 *
 * <p>Clones the given {@link BigtableOptions} builder so that any further changes
 * will have no effect on the returned {@link BigtableIO.Write}.
 *
 * <p>Does not modify this object.
 */
public Write withBigtableOptions(BigtableOptions.Builder optionsBuilder) {
  checkNotNull(optionsBuilder, "optionsBuilder");
  // TODO: is there a better way to clone a Builder? Want it to be immune from user changes.
  BigtableOptions options = optionsBuilder.build();
  RetryOptions retryOptions = options.getRetryOptions();
  // Set useBulkApi to true for enabling bulk writes
  // Use retryOptionsToBuilder because absent in Bigtable library
  // TODO: replace with RetryOptions.toBuilder() when added to Bigtable library
  // Set batch size because of bug (incorrect initialization) in Bigtable library
  // TODO: remove setRetryOptions when fixed in Bigtable library
  BigtableOptions.Builder clonedBuilder = options.toBuilder()
      .setBulkOptions(
          options.getBulkOptions().toBuilder()
              .setUseBulkApi(true)
              .build())
      .setRetryOptions(
          retryOptionsToBuilder(retryOptions)
              .setStreamingBatchSize(Math.min(retryOptions.getStreamingBatchSize(),
                  retryOptions.getStreamingBufferSize() / 2))
              .build());
  // Tag the connection with this SDK's user agent before freezing the options.
  BigtableOptions optionsWithAgent = clonedBuilder.setUserAgent(getUserAgent()).build();
  return new Write(optionsWithAgent, tableId, bigtableService);
}
/**
* Returns a new {@link BigtableIO.Write} that will write to the specified table.
*
* <p>Does not modify this object.
*/
public Write withTableId(String tableId) {
checkNotNull(tableId, "tableId");
return new Write(options, tableId, bigtableService);
}
/**
* Returns the Google Cloud Bigtable instance being written to, and other parameters.
*/
public BigtableOptions getBigtableOptions() {
return options;
}
/**
* Returns the table being written to.
*/
public String getTableId() {
return tableId;
}
@Override
public PDone apply(PCollection<KV<ByteString, Iterable<Mutation>>> input) {
input.apply(ParDo.of(new BigtableWriterFn(tableId,
new SerializableFunction<PipelineOptions, BigtableService>() {
@Override
public BigtableService apply(PipelineOptions options) {
return getBigtableService(options);
}
})));
return PDone.in(input.getPipeline());
}
@Override
public void validate(PCollection<KV<ByteString, Iterable<Mutation>>> input) {
checkArgument(options != null, "BigtableOptions not specified");
checkArgument(!tableId.isEmpty(), "Table ID not specified");
try {
checkArgument(
getBigtableService(input.getPipeline().getOptions()).tableExists(tableId),
"Table %s does not exist",
tableId);
} catch (IOException e) {
logger.warn("Error checking whether table {} exists; proceeding.", tableId, e);
}
}
/**
* Returns a new {@link BigtableIO.Write} that will write using the given Cloud Bigtable
* service implementation.
*
* <p>This is used for testing.
*
* <p>Does not modify this object.
*/
Write withBigtableService(BigtableService bigtableService) {
checkNotNull(bigtableService, "bigtableService");
return new Write(options, tableId, bigtableService);
}
@Override
public void populateDisplayData(DisplayData.Builder builder) {
super.populateDisplayData(builder);
builder.add(DisplayData.item("tableId", tableId)
.withLabel("Table ID"));
if (options != null) {
builder.add(DisplayData.item("bigtableOptions", options.toString())
.withLabel("Bigtable Options"));
}
}
@Override
public String toString() {
return MoreObjects.toStringHelper(Write.class)
.add("options", options)
.add("tableId", tableId)
.toString();
}
/**
* Helper function that either returns the mock Bigtable service supplied by
* {@link #withBigtableService} or creates and returns an implementation that talks to
* {@code Cloud Bigtable}.
*
* <p>Also populate the credentials option from {@link GcpOptions#getGcpCredential()} if the
* default credentials are being used on {@link BigtableOptions}.
*/
@VisibleForTesting
BigtableService getBigtableService(PipelineOptions pipelineOptions) {
if (bigtableService != null) {
return bigtableService;
}
BigtableOptions.Builder clonedOptions = options.toBuilder();
if (options.getCredentialOptions().getCredentialType() == CredentialType.DefaultCredentials) {
clonedOptions.setCredentialOptions(
CredentialOptions.credential(
pipelineOptions.as(GcpOptions.class).getGcpCredential()));
}
return new BigtableServiceImpl(clonedOptions.build());
}
private class BigtableWriterFn extends DoFn<KV<ByteString, Iterable<Mutation>>, Void> {
public BigtableWriterFn(String tableId,
SerializableFunction<PipelineOptions, BigtableService> bigtableServiceFactory) {
this.tableId = checkNotNull(tableId, "tableId");
this.bigtableServiceFactory =
checkNotNull(bigtableServiceFactory, "bigtableServiceFactory");
this.failures = new ConcurrentLinkedQueue<>();
}
@StartBundle
public void startBundle(Context c) throws IOException {
if (bigtableWriter == null) {
bigtableWriter = bigtableServiceFactory.apply(
c.getPipelineOptions()).openForWriting(tableId);
}
recordsWritten = 0;
}
@ProcessElement
public void processElement(ProcessContext c) throws Exception {
checkForFailures();
Futures.addCallback(
bigtableWriter.writeRecord(c.element()), new WriteExceptionCallback(c.element()));
++recordsWritten;
}
@FinishBundle
public void finishBundle(Context c) throws Exception {
bigtableWriter.flush();
checkForFailures();
logger.info("Wrote {} records", recordsWritten);
}
@Teardown
public void tearDown() throws Exception {
bigtableWriter.close();
bigtableWriter = null;
}
@Override
public void populateDisplayData(DisplayData.Builder builder) {
builder.delegate(Write.this);
}
///////////////////////////////////////////////////////////////////////////////
private final String tableId;
private final SerializableFunction<PipelineOptions, BigtableService> bigtableServiceFactory;
private BigtableService.Writer bigtableWriter;
private long recordsWritten;
private final ConcurrentLinkedQueue<BigtableWriteException> failures;
/**
* If any write has asynchronously failed, fail the bundle with a useful error.
*/
private void checkForFailures() throws IOException {
// Note that this function is never called by multiple threads and is the only place that
// we remove from failures, so this code is safe.
if (failures.isEmpty()) {
return;
}
StringBuilder logEntry = new StringBuilder();
int i = 0;
for (; i < 10 && !failures.isEmpty(); ++i) {
BigtableWriteException exc = failures.remove();
logEntry.append("\n").append(exc.getMessage());
if (exc.getCause() != null) {
logEntry.append(": ").append(exc.getCause().getMessage());
}
}
String message =
String.format(
"At least %d errors occurred writing to Bigtable. First %d errors: %s",
i + failures.size(),
i,
logEntry.toString());
logger.error(message);
throw new IOException(message);
}
private class WriteExceptionCallback implements FutureCallback<MutateRowResponse> {
private final KV<ByteString, Iterable<Mutation>> value;
public WriteExceptionCallback(KV<ByteString, Iterable<Mutation>> value) {
this.value = value;
}
@Override
public void onFailure(Throwable cause) {
failures.add(new BigtableWriteException(value, cause));
}
@Override
public void onSuccess(MutateRowResponse produced) {}
}
}
}
//////////////////////////////////////////////////////////////////////////////////////////
/** Disallow construction of utility class: {@code BigtableIO} exposes only static members. */
private BigtableIO() {}
/**
 * A {@link BoundedSource} that reads {@link Row}s from a single Cloud Bigtable table,
 * optionally restricted by a {@link RowFilter} and limited to a {@link ByteKeyRange}.
 * Splitting is driven by the Bigtable "sample row keys" API.
 */
static class BigtableSource extends BoundedSource<Row> {
  public BigtableSource(
      SerializableFunction<PipelineOptions, BigtableService> serviceFactory,
      String tableId,
      @Nullable RowFilter filter,
      ByteKeyRange range,
      @Nullable Long estimatedSizeBytes) {
    this.serviceFactory = serviceFactory;
    this.tableId = tableId;
    this.filter = filter;
    this.range = range;
    this.estimatedSizeBytes = estimatedSizeBytes;
  }

  @Override
  public String toString() {
    return MoreObjects.toStringHelper(BigtableSource.class)
        .add("tableId", tableId)
        .add("filter", filter)
        .add("range", range)
        .add("estimatedSizeBytes", estimatedSizeBytes)
        .toString();
  }

  ////// Private state and internal implementation details //////
  // Produces the (possibly mock) Bigtable service from worker-side PipelineOptions.
  private final SerializableFunction<PipelineOptions, BigtableService> serviceFactory;
  // Table to read; must be non-empty (checked in validate()).
  private final String tableId;
  // Optional server-side row filter; null means no filtering.
  @Nullable private final RowFilter filter;
  // Key range this source scans.
  private final ByteKeyRange range;
  // Cached size estimate; null until computed by getEstimatedSizeBytes().
  @Nullable private Long estimatedSizeBytes;
  // NOTE(review): this field appears unused in this excerpt — samples are re-fetched via
  // getSampleRowKeys() on each call; confirm whether it can be removed or should cache them.
  @Nullable private transient List<SampleRowKeysResponse> sampleRowKeys;

  /** Returns a copy of this source whose scan range starts at {@code startKey}. */
  protected BigtableSource withStartKey(ByteKey startKey) {
    checkNotNull(startKey, "startKey");
    return new BigtableSource(
        serviceFactory, tableId, filter, range.withStartKey(startKey), estimatedSizeBytes);
  }

  /** Returns a copy of this source whose scan range ends at {@code endKey}. */
  protected BigtableSource withEndKey(ByteKey endKey) {
    checkNotNull(endKey, "endKey");
    return new BigtableSource(
        serviceFactory, tableId, filter, range.withEndKey(endKey), estimatedSizeBytes);
  }

  /** Returns a copy of this source carrying the given cached size estimate. */
  protected BigtableSource withEstimatedSizeBytes(Long estimatedSizeBytes) {
    checkNotNull(estimatedSizeBytes, "estimatedSizeBytes");
    return new BigtableSource(serviceFactory, tableId, filter, range, estimatedSizeBytes);
  }

  /**
   * Makes an API call to the Cloud Bigtable service that gives information about tablet key
   * boundaries and estimated sizes. We can use these samples to ensure that splits are on
   * different tablets, and possibly generate sub-splits within tablets.
   */
  private List<SampleRowKeysResponse> getSampleRowKeys(PipelineOptions pipelineOptions)
      throws IOException {
    return serviceFactory.apply(pipelineOptions).getSampleRowKeys(this);
  }

  @Override
  public List<BigtableSource> splitIntoBundles(
      long desiredBundleSizeBytes, PipelineOptions options) throws Exception {
    // Update the desiredBundleSizeBytes in order to limit the
    // number of splits to maximumNumberOfSplits.
    long maximumNumberOfSplits = 4000;
    long sizeEstimate = getEstimatedSizeBytes(options);
    desiredBundleSizeBytes =
        Math.max(sizeEstimate / maximumNumberOfSplits, desiredBundleSizeBytes);
    // Delegate to testable helper.
    return splitIntoBundlesBasedOnSamples(desiredBundleSizeBytes, getSampleRowKeys(options));
  }

  /** Helper that splits this source into bundles based on Cloud Bigtable sampled row keys. */
  private List<BigtableSource> splitIntoBundlesBasedOnSamples(
      long desiredBundleSizeBytes, List<SampleRowKeysResponse> sampleRowKeys) {
    // There are no regions, or no samples available. Just scan the entire range.
    if (sampleRowKeys.isEmpty()) {
      logger.info("Not splitting source {} because no sample row keys are available.", this);
      return Collections.singletonList(this);
    }
    logger.info(
        "About to split into bundles of size {} with sampleRowKeys length {} first element {}",
        desiredBundleSizeBytes,
        sampleRowKeys.size(),
        sampleRowKeys.get(0));
    // Loop through all sampled responses and generate splits from the ones that overlap the
    // scan range. The main complication is that we must track the end range of the previous
    // sample to generate good ranges.
    ByteKey lastEndKey = ByteKey.EMPTY;
    long lastOffset = 0;
    ImmutableList.Builder<BigtableSource> splits = ImmutableList.builder();
    for (SampleRowKeysResponse response : sampleRowKeys) {
      ByteKey responseEndKey = ByteKey.of(response.getRowKey());
      long responseOffset = response.getOffsetBytes();
      // Samples are expected in non-decreasing offset order; a violation is a bad response.
      checkState(
          responseOffset >= lastOffset,
          "Expected response byte offset %s to come after the last offset %s",
          responseOffset,
          lastOffset);
      if (!range.overlaps(ByteKeyRange.of(lastEndKey, responseEndKey))) {
        // This region does not overlap the scan, so skip it.
        lastOffset = responseOffset;
        lastEndKey = responseEndKey;
        continue;
      }
      // Calculate the beginning of the split as the larger of startKey and the end of the last
      // split. Unspecified start is smallest key so is correctly treated as earliest key.
      ByteKey splitStartKey = lastEndKey;
      if (splitStartKey.compareTo(range.getStartKey()) < 0) {
        splitStartKey = range.getStartKey();
      }
      // Calculate the end of the split as the smaller of endKey and the end of this sample. Note
      // that range.containsKey handles the case when range.getEndKey() is empty.
      ByteKey splitEndKey = responseEndKey;
      if (!range.containsKey(splitEndKey)) {
        splitEndKey = range.getEndKey();
      }
      // We know this region overlaps the desired key range, and we know a rough estimate of its
      // size. Split the key range into bundle-sized chunks and then add them all as splits.
      long sampleSizeBytes = responseOffset - lastOffset;
      List<BigtableSource> subSplits =
          splitKeyRangeIntoBundleSizedSubranges(
              sampleSizeBytes,
              desiredBundleSizeBytes,
              ByteKeyRange.of(splitStartKey, splitEndKey));
      splits.addAll(subSplits);
      // Move to the next region.
      lastEndKey = responseEndKey;
      lastOffset = responseOffset;
    }
    // We must add one more region after the end of the samples if both these conditions hold:
    // 1. we did not scan to the end yet (lastEndKey is concrete, not 0-length).
    // 2. we want to scan to the end (endKey is empty) or farther (lastEndKey < endKey).
    if (!lastEndKey.isEmpty()
        && (range.getEndKey().isEmpty() || lastEndKey.compareTo(range.getEndKey()) < 0)) {
      splits.add(this.withStartKey(lastEndKey).withEndKey(range.getEndKey()));
    }
    List<BigtableSource> ret = splits.build();
    // NOTE(review): if every sample were skipped and no trailing region added, ret would be
    // empty and ret.get(0) would throw IndexOutOfBoundsException — confirm unreachable.
    logger.info("Generated {} splits. First split: {}", ret.size(), ret.get(0));
    return ret;
  }

  @Override
  public long getEstimatedSizeBytes(PipelineOptions options) throws IOException {
    // Delegate to testable helper. The result is cached in estimatedSizeBytes.
    if (estimatedSizeBytes == null) {
      estimatedSizeBytes = getEstimatedSizeBytesBasedOnSamples(getSampleRowKeys(options));
    }
    return estimatedSizeBytes;
  }

  /**
   * Computes the estimated size in bytes based on the total size of all samples that overlap
   * the key range this source will scan.
   */
  private long getEstimatedSizeBytesBasedOnSamples(List<SampleRowKeysResponse> samples) {
    long estimatedSizeBytes = 0;
    long lastOffset = 0;
    ByteKey currentStartKey = ByteKey.EMPTY;
    // Compute the total estimated size as the size of each sample that overlaps the scan range.
    // TODO: In future, Bigtable service may provide finer grained APIs, e.g., to sample given a
    // filter or to sample on a given key range.
    for (SampleRowKeysResponse response : samples) {
      ByteKey currentEndKey = ByteKey.of(response.getRowKey());
      long currentOffset = response.getOffsetBytes();
      if (!currentStartKey.isEmpty() && currentStartKey.equals(currentEndKey)) {
        // Skip an empty region.
        lastOffset = currentOffset;
        continue;
      } else if (range.overlaps(ByteKeyRange.of(currentStartKey, currentEndKey))) {
        estimatedSizeBytes += currentOffset - lastOffset;
      }
      currentStartKey = currentEndKey;
      lastOffset = currentOffset;
    }
    return estimatedSizeBytes;
  }

  /**
   * Cloud Bigtable returns query results ordered by key.
   */
  @Override
  public boolean producesSortedKeys(PipelineOptions options) throws Exception {
    return true;
  }

  /** Opens a {@link BigtableReader} over this source using the worker-side service. */
  @Override
  public BoundedReader<Row> createReader(PipelineOptions options) throws IOException {
    return new BigtableReader(this, serviceFactory.apply(options));
  }

  @Override
  public void validate() {
    checkArgument(!tableId.isEmpty(), "tableId cannot be empty");
  }

  @Override
  public void populateDisplayData(DisplayData.Builder builder) {
    super.populateDisplayData(builder);
    builder.add(DisplayData.item("tableId", tableId)
        .withLabel("Table ID"));
    if (filter != null) {
      builder.add(DisplayData.item("rowFilter", filter.toString())
          .withLabel("Table Row Filter"));
    }
  }

  @Override
  public Coder<Row> getDefaultOutputCoder() {
    return ProtoCoder.of(Row.class);
  }

  /** Helper that splits the specified range in this source into bundles. */
  private List<BigtableSource> splitKeyRangeIntoBundleSizedSubranges(
      long sampleSizeBytes, long desiredBundleSizeBytes, ByteKeyRange range) {
    // Catch the trivial cases. Split is small enough already, or this is the last region.
    logger.debug(
        "Subsplit for sampleSizeBytes {} and desiredBundleSizeBytes {}",
        sampleSizeBytes,
        desiredBundleSizeBytes);
    if (sampleSizeBytes <= desiredBundleSizeBytes) {
      return Collections.singletonList(
          this.withStartKey(range.getStartKey()).withEndKey(range.getEndKey()));
    }
    checkArgument(
        sampleSizeBytes > 0, "Sample size %s bytes must be greater than 0.", sampleSizeBytes);
    checkArgument(
        desiredBundleSizeBytes > 0,
        "Desired bundle size %s bytes must be greater than 0.",
        desiredBundleSizeBytes);
    // Round up so every bundle is at most desiredBundleSizeBytes.
    int splitCount = (int) Math.ceil(((double) sampleSizeBytes) / (desiredBundleSizeBytes));
    List<ByteKey> splitKeys = range.split(splitCount);
    ImmutableList.Builder<BigtableSource> splits = ImmutableList.builder();
    Iterator<ByteKey> keys = splitKeys.iterator();
    ByteKey prev = keys.next();
    // Each consecutive key pair becomes one sub-split with a proportional size estimate.
    while (keys.hasNext()) {
      ByteKey next = keys.next();
      splits.add(
          this
              .withStartKey(prev)
              .withEndKey(next)
              .withEstimatedSizeBytes(sampleSizeBytes / splitCount));
      prev = next;
    }
    return splits.build();
  }

  /** Returns the key range scanned by this source. */
  public ByteKeyRange getRange() {
    return range;
  }

  /** Returns the row filter, or {@code null} if none was set. */
  public RowFilter getRowFilter() {
    return filter;
  }

  /** Returns the table this source reads from. */
  public String getTableId() {
    return tableId;
  }
}
/**
 * A {@link BoundedReader} over a {@link BigtableSource}, supporting dynamic work rebalancing
 * via {@link ByteKeyRangeTracker} and {@link #splitAtFraction}.
 */
private static class BigtableReader extends BoundedReader<Row> {
  // Thread-safety: source is protected via synchronization and is only accessed or modified
  // inside a synchronized block (or constructor, which is the same).
  private BigtableSource source;
  private BigtableService service;
  private BigtableService.Reader reader;
  private final ByteKeyRangeTracker rangeTracker;
  private long recordsReturned;

  public BigtableReader(BigtableSource source, BigtableService service) {
    this.source = source;
    this.service = service;
    rangeTracker = ByteKeyRangeTracker.of(source.getRange());
  }

  @Override
  public boolean start() throws IOException {
    reader = service.createReader(getCurrentSource());
    // Precedence is ((start && claim) || markDone): if the first record cannot be produced or
    // claimed, the tracker is marked done.
    boolean hasRecord =
        reader.start()
            && rangeTracker.tryReturnRecordAt(true, ByteKey.of(reader.getCurrentRow().getKey()))
        || rangeTracker.markDone();
    if (hasRecord) {
      ++recordsReturned;
    }
    return hasRecord;
  }

  @Override
  public synchronized BigtableSource getCurrentSource() {
    return source;
  }

  @Override
  public boolean advance() throws IOException {
    boolean hasRecord =
        reader.advance()
            && rangeTracker.tryReturnRecordAt(true, ByteKey.of(reader.getCurrentRow().getKey()))
        || rangeTracker.markDone();
    if (hasRecord) {
      ++recordsReturned;
    }
    return hasRecord;
  }

  @Override
  public Row getCurrent() throws NoSuchElementException {
    return reader.getCurrentRow();
  }

  @Override
  public void close() throws IOException {
    logger.info("Closing reader after reading {} records.", recordsReturned);
    if (reader != null) {
      reader.close();
      reader = null;
    }
  }

  @Override
  public final Double getFractionConsumed() {
    return rangeTracker.getFractionConsumed();
  }

  @Override
  public final long getSplitPointsConsumed() {
    return rangeTracker.getSplitPointsConsumed();
  }

  @Override
  public final synchronized BigtableSource splitAtFraction(double fraction) {
    ByteKey splitKey;
    try {
      splitKey = rangeTracker.getRange().interpolateKey(fraction);
    } catch (IllegalArgumentException e) {
      // Fixed: this message previously used String.format-style "%s" placeholders, which the
      // logger does not interpolate; use "{}" like every other log call in this file.
      logger.info(
          "{}: Failed to interpolate key for fraction {}.", rangeTracker.getRange(), fraction);
      return null;
    }
    logger.debug(
        "Proposing to split {} at fraction {} (key {})", rangeTracker, fraction, splitKey);
    BigtableSource primary = source.withEndKey(splitKey);
    BigtableSource residual = source.withStartKey(splitKey);
    // Only commit the split if the tracker accepts the position.
    if (!rangeTracker.trySplitAtPosition(splitKey)) {
      return null;
    }
    this.source = primary;
    return residual;
  }
}
/**
 * An exception that puts information about the failed record being written in its message.
 */
static class BigtableWriteException extends IOException {
  public BigtableWriteException(KV<ByteString, Iterable<Mutation>> record, Throwable cause) {
    // Equivalent to the formatted message "Error mutating row %s with mutations %s":
    // concatenation with String.valueOf yields the identical text.
    super(
        "Error mutating row "
            + record.getKey().toStringUtf8()
            + " with mutations "
            + record.getValue(),
        cause);
  }
}
/**
 * A helper function to produce a Cloud Bigtable user agent string.
 */
private static String getUserAgent() {
  ReleaseInfo releaseInfo = ReleaseInfo.getReleaseInfo();
  String specVersion = System.getProperty("java.specification.version");
  // TODO: get the Bigtable client version directly from the jar instead of hard-coding it.
  String bigtableClientVersion = "0.3.0";
  return String.format(
      "%s/%s (%s); %s",
      releaseInfo.getName(),
      releaseInfo.getVersion(),
      specVersion,
      bigtableClientVersion);
}
/**
 * A helper function to convert a {@link RetryOptions} into an equivalent
 * {@link RetryOptions.Builder}, copying every setting field by field.
 */
private static RetryOptions.Builder retryOptionsToBuilder(RetryOptions options) {
  RetryOptions.Builder copy = new RetryOptions.Builder();
  copy.setEnableRetries(options.enableRetries());
  copy.setInitialBackoffMillis(options.getInitialBackoffMillis());
  copy.setBackoffMultiplier(options.getBackoffMultiplier());
  // "Elasped" (sic) is the spelling of the accessor in the Bigtable client API.
  copy.setMaxElapsedBackoffMillis(options.getMaxElaspedBackoffMillis());
  copy.setStreamingBufferSize(options.getStreamingBufferSize());
  copy.setStreamingBatchSize(options.getStreamingBatchSize());
  copy.setReadPartialRowTimeoutMillis(options.getReadPartialRowTimeoutMillis());
  copy.setMaxScanTimeoutRetries(options.getMaxScanTimeoutRetries());
  copy.setAllowRetriesWithoutTimestamp(options.allowRetriesWithoutTimestamp());
  // Carry over the set of retryable status codes one code at a time.
  for (Status.Code code : Status.Code.values()) {
    if (options.isRetryable(code)) {
      copy.addStatusToRetryOn(code);
    }
  }
  return copy;
}
}
| |
/*
* Copyright 2016 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.api.server.spi.swagger;
import static com.google.api.server.spi.config.model.EndpointsFlag.MAP_SCHEMA_FORCE_JSON_MAP_SCHEMA;
import static com.google.api.server.spi.config.model.EndpointsFlag.MAP_SCHEMA_IGNORE_UNSUPPORTED_KEY_TYPES;
import static com.google.api.server.spi.config.model.EndpointsFlag.MAP_SCHEMA_SUPPORT_ARRAYS_VALUES;
import static com.google.common.truth.Truth.assertThat;
import com.google.api.server.spi.Constant;
import com.google.api.server.spi.IoUtil;
import com.google.api.server.spi.ServiceContext;
import com.google.api.server.spi.ServiceException;
import com.google.api.server.spi.TypeLoader;
import com.google.api.server.spi.config.AnnotationBoolean;
import com.google.api.server.spi.config.Api;
import com.google.api.server.spi.config.ApiConfigLoader;
import com.google.api.server.spi.config.ApiIssuer;
import com.google.api.server.spi.config.ApiIssuerAudience;
import com.google.api.server.spi.config.ApiMethod;
import com.google.api.server.spi.config.ApiMethod.HttpMethod;
import com.google.api.server.spi.config.Named;
import com.google.api.server.spi.config.annotationreader.ApiConfigAnnotationReader;
import com.google.api.server.spi.config.model.ApiConfig;
import com.google.api.server.spi.response.BadRequestException;
import com.google.api.server.spi.response.ConflictException;
import com.google.api.server.spi.response.NotFoundException;
import com.google.api.server.spi.swagger.SwaggerGenerator.SwaggerContext;
import com.google.api.server.spi.testing.AbsoluteCommonPathEndpoint;
import com.google.api.server.spi.testing.AbsolutePathEndpoint;
import com.google.api.server.spi.testing.ArrayEndpoint;
import com.google.api.server.spi.testing.EnumEndpoint;
import com.google.api.server.spi.testing.FooCommonParamsEndpoint;
import com.google.api.server.spi.testing.FooDescriptionEndpoint;
import com.google.api.server.spi.testing.FooEndpoint;
import com.google.api.server.spi.testing.LimitMetricsEndpoint;
import com.google.api.server.spi.testing.MapEndpoint;
import com.google.api.server.spi.testing.MapEndpointInvalid;
import com.google.api.server.spi.testing.MultiResourceEndpoint.NoResourceEndpoint;
import com.google.api.server.spi.testing.MultiResourceEndpoint.Resource1Endpoint;
import com.google.api.server.spi.testing.MultiResourceEndpoint.Resource2Endpoint;
import com.google.api.server.spi.testing.MultiVersionEndpoint.Version1Endpoint;
import com.google.api.server.spi.testing.MultiVersionEndpoint.Version2Endpoint;
import com.google.api.server.spi.testing.OptionalEndpoint;
import com.google.api.server.spi.testing.RequiredPropertiesEndpoint;
import com.google.api.server.spi.testing.SpecialCharsEndpoint;
import com.google.api.server.spi.testing.ResponseStatusEndpoint;
import com.google.api.server.spi.testing.ValidationEndpoint;
import com.google.common.collect.ImmutableList;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import io.swagger.models.Swagger;
import io.swagger.util.Json;
/**
* Tests for {@link SwaggerGenerator}.
*/
@RunWith(JUnit4.class)
public class SwaggerGeneratorTest {
// Generator under test.
private final SwaggerGenerator generator = new SwaggerGenerator();
// Default context shared by most tests in this class.
private final SwaggerContext context = new SwaggerContext()
    .setScheme("https")
    .setHostname("swagger-test.appspot.com")
    .setBasePath("/api");
// Swagger's preconfigured Jackson mapper.
private final ObjectMapper mapper = Json.mapper();
// Rebuilt before every test in setUp().
private ApiConfigLoader configLoader;
/** Builds a fresh {@link ApiConfigLoader} before each test. */
@Before
public void setUp() throws Exception {
  TypeLoader loader = new TypeLoader(getClass().getClassLoader());
  ApiConfigAnnotationReader reader =
      new ApiConfigAnnotationReader(loader.getAnnotationTypes());
  configLoader = new ApiConfigLoader(new ApiConfig.Factory(), loader, reader);
}
@Test
public void testWriteSwagger_FooEndpoint() throws Exception {
  // Generate a Swagger document for FooEndpoint and compare against the golden file.
  ApiConfig apiConfig =
      configLoader.loadConfiguration(ServiceContext.create(), FooEndpoint.class);
  Swagger expected = readExpectedAsSwagger("foo_endpoint.swagger");
  Swagger actual = generator.writeSwagger(ImmutableList.of(apiConfig), context);
  checkSwagger(expected, actual);
}
@Test
public void testWriteSwagger_FooEndpointCustomTemplates() throws Exception {
  ApiConfig apiConfig =
      configLoader.loadConfiguration(ServiceContext.create(), FooEndpoint.class);
  // Context with custom tag and operationId templates instead of the defaults.
  SwaggerContext templatedContext = new SwaggerContext()
      .setTagTemplate("${ApiName}${ApiVersion}")
      .setOperationIdTemplate("${apiName}-${apiVersion}-${method}");
  Swagger actual = generator.writeSwagger(ImmutableList.of(apiConfig), templatedContext);
  checkSwagger(readExpectedAsSwagger("foo_endpoint_custom_templates.swagger"), actual);
}
@Test
public void testWriteSwagger_FooEndpointParameterCombineParamSamePath() throws Exception {
  ApiConfig apiConfig = configLoader.loadConfiguration(
      ServiceContext.create(), FooCommonParamsEndpoint.class);
  // Enable merging of parameters common to all operations on the same path.
  SwaggerContext combineContext = context.setCombineCommonParametersInSamePath(true);
  Swagger actual = generator.writeSwagger(ImmutableList.of(apiConfig), combineContext);
  checkSwagger(readExpectedAsSwagger("foo_endpoint_combine_params_same_path.swagger"), actual);
}
@Test
public void testWriteSwagger_FooEndpointParameterExtractParamRef() throws Exception {
  ApiConfig apiConfig = configLoader.loadConfiguration(
      ServiceContext.create(), FooCommonParamsEndpoint.class);
  // Enable extraction of repeated parameters into shared $ref definitions.
  SwaggerContext refContext = context.setExtractCommonParametersAsRefs(true);
  Swagger actual = generator.writeSwagger(ImmutableList.of(apiConfig), refContext);
  checkSwagger(readExpectedAsSwagger("foo_endpoint_extract_param_refs.swagger"), actual);
}
@Test
public void testWriteSwagger_FooEndpointParameterCombineAllParam() throws Exception {
  ApiConfig apiConfig = configLoader.loadConfiguration(
      ServiceContext.create(), FooCommonParamsEndpoint.class);
  // Enable both parameter optimizations together.
  SwaggerContext combinedContext = context
      .setExtractCommonParametersAsRefs(true)
      .setCombineCommonParametersInSamePath(true);
  Swagger actual = generator.writeSwagger(ImmutableList.of(apiConfig), combinedContext);
  checkSwagger(readExpectedAsSwagger("foo_endpoint_combine_all_params.swagger"), actual);
}
@Test
public void testWriteSwagger_FooEndpointDefaultContext() throws Exception {
  // Same endpoint, but generated with an unconfigured (all-defaults) context.
  ApiConfig apiConfig =
      configLoader.loadConfiguration(ServiceContext.create(), FooEndpoint.class);
  Swagger actual = generator.writeSwagger(ImmutableList.of(apiConfig), new SwaggerContext());
  checkSwagger(readExpectedAsSwagger("foo_endpoint_default_context.swagger"), actual);
}
@Test
public void testWriteSwagger_FooEndpointWithApiName() throws Exception {
  // An explicit API name on the context overrides the annotation-derived name.
  SwaggerContext namedContext = new SwaggerContext().setApiName("customApiName");
  Swagger actual = getSwagger(FooEndpoint.class, namedContext);
  checkSwagger(readExpectedAsSwagger("foo_endpoint_api_name.swagger"), actual);
}
@Test
public void testWriteSwagger_EnumEndpoint() throws Exception {
  Swagger actual = getSwagger(EnumEndpoint.class, new SwaggerContext());
  checkSwagger(readExpectedAsSwagger("enum_endpoint.swagger"), actual);
}
@Test
public void testWriteSwagger_ArrayEndpoint() throws Exception {
  Swagger actual = getSwagger(ArrayEndpoint.class, new SwaggerContext());
  checkSwagger(readExpectedAsSwagger("array_endpoint.swagger"), actual);
}
@Test
public void testWriteSwagger_MapEndpoint() throws Exception {
  Swagger actual = getSwagger(MapEndpoint.class, new SwaggerContext());
  checkSwagger(readExpectedAsSwagger("map_endpoint.swagger"), actual);
}
@Test
public void testWriteSwagger_MapEndpoint_Legacy() throws Exception {
  String flag = MAP_SCHEMA_FORCE_JSON_MAP_SCHEMA.systemPropertyName;
  System.setProperty(flag, "");
  try {
    Swagger actual = getSwagger(MapEndpoint.class, new SwaggerContext());
    checkSwagger(readExpectedAsSwagger("map_endpoint_legacy.swagger"), actual);
  } finally {
    // Always restore global state so later tests see the default behavior.
    System.clearProperty(flag);
  }
}
@Test
public void testWriteDiscovery_MapEndpoint_InvalidKeyType() throws Exception {
  try {
    getSwagger(MapEndpointInvalid.class, new SwaggerContext());
    Assert.fail("Should have failed to generate schema for invalid key type");
  } catch (IllegalArgumentException expected) {
    // Unsupported map key types must be rejected.
  }
}
@Test
public void testWriteDiscovery_MapEndpoint_InvalidKeyType_ignore() throws Exception {
  String flag = MAP_SCHEMA_IGNORE_UNSUPPORTED_KEY_TYPES.systemPropertyName;
  System.setProperty(flag, "true");
  try {
    // Must not throw when unsupported key types are configured to be ignored.
    getSwagger(MapEndpointInvalid.class, new SwaggerContext());
  } finally {
    System.clearProperty(flag);
  }
}
@Test
public void testWriteSwagger_MapEndpoint_WithArrayValue() throws Exception {
  String flag = MAP_SCHEMA_SUPPORT_ARRAYS_VALUES.systemPropertyName;
  System.setProperty(flag, "TRUE");
  try {
    Swagger actual = getSwagger(MapEndpoint.class, new SwaggerContext());
    checkSwagger(readExpectedAsSwagger("map_endpoint_with_array.swagger"), actual);
  } finally {
    System.clearProperty(flag);
  }
}
@Test
public void testWriteSwagger_ThirdPartyAuthEndpoint() throws Exception {
  ApiConfig apiConfig = configLoader.loadConfiguration(
      ServiceContext.create(), ThirdPartyAuthEndpoint.class);
  Swagger actual = generator.writeSwagger(ImmutableList.of(apiConfig), context);
  checkSwagger(readExpectedAsSwagger("third_party_auth.swagger"), actual);
}
@Test
public void testWriteSwagger_GoogleAuthEndpoint() throws Exception {
  ApiConfig apiConfig = configLoader.loadConfiguration(
      ServiceContext.create(), GoogleAuthEndpoint.class);
  Swagger actual = generator.writeSwagger(ImmutableList.of(apiConfig), context);
  checkSwagger(readExpectedAsSwagger("google_auth.swagger"), actual);
}
@Test
public void testWriteSwagger_MultipleScopes() throws Exception {
  ApiConfig apiConfig = configLoader.loadConfiguration(
      ServiceContext.create(), MultipleScopesEndpoint.class);
  Swagger actual = generator.writeSwagger(ImmutableList.of(apiConfig), context);
  checkSwagger(readExpectedAsSwagger("multiple_scopes.swagger"), actual);
}
@Test
public void testWriteSwagger_ApiKeys() throws Exception {
  ApiConfig apiConfig = configLoader.loadConfiguration(
      ServiceContext.create(), ApiKeysEndpoint.class);
  Swagger actual = generator.writeSwagger(ImmutableList.of(apiConfig), context);
  checkSwagger(readExpectedAsSwagger("api_keys.swagger"), actual);
}
@Test
public void testWriteSwagger_AbsolutePathEndpoint() throws Exception {
  Swagger actual = getSwagger(AbsolutePathEndpoint.class, new SwaggerContext());
  checkSwagger(readExpectedAsSwagger("absolute_path_endpoint.swagger"), actual);
}
@Test
public void testWriteSwagger_AbsoluteCommonPathEndpoint() throws Exception {
  Swagger actual = getSwagger(AbsoluteCommonPathEndpoint.class, new SwaggerContext());
  checkSwagger(readExpectedAsSwagger("absolute_common_path_endpoint.swagger"), actual);
}
@Test
public void testWriteSwagger_LimitMetricsEndpoint() throws Exception {
  Swagger actual = getSwagger(LimitMetricsEndpoint.class, new SwaggerContext());
  checkSwagger(readExpectedAsSwagger("limit_metrics_endpoint.swagger"), actual);
}
@Test
public void testWriteSwagger_FooEndpointWithDescription() throws Exception {
ApiConfig config = configLoader.loadConfiguration(ServiceContext.create(), FooDescriptionEndpoint.class);
Swagger swagger = generator.writeSwagger(ImmutableList.of(config), context);
Swagger expected = readExpectedAsSwagger("foo_with_description_endpoint.swagger");
checkSwagger(expected, swagger);
}
@Test
public void testWriteSwagger_RequiredPropertiesEndpoint() throws Exception {
ApiConfig config = configLoader.loadConfiguration(ServiceContext.create(), RequiredPropertiesEndpoint.class);
Swagger swagger = generator.writeSwagger(ImmutableList.of(config), context);
Swagger expected = readExpectedAsSwagger("required_parameters_endpoint.swagger");
checkSwagger(expected, swagger);
}
@Test
public void testWriteSwagger_MultiResourceEndpoint() throws Exception {
ServiceContext serviceContext = ServiceContext.create();
ImmutableList<ApiConfig> configs = ImmutableList.of(
configLoader.loadConfiguration(serviceContext, NoResourceEndpoint.class),
configLoader.loadConfiguration(serviceContext, Resource1Endpoint.class),
configLoader.loadConfiguration(serviceContext, Resource2Endpoint.class));
Swagger swagger = generator.writeSwagger(configs, context);
Swagger expected = readExpectedAsSwagger("multi_resource_endpoint.swagger");
checkSwagger(expected, swagger);
}
@Test
public void testWriteSwagger_MultiVersionEndpoint() throws Exception {
ServiceContext serviceContext = ServiceContext.create();
ImmutableList<ApiConfig> configs = ImmutableList.of(
configLoader.loadConfiguration(serviceContext, Version1Endpoint.class),
configLoader.loadConfiguration(serviceContext, Version2Endpoint.class));
Swagger swagger = generator.writeSwagger(configs, context);
Swagger expected = readExpectedAsSwagger("multi_version_endpoint.swagger");
checkSwagger(expected, swagger);
}
@Test
public void testWriteSwagger_ErrorAsDefaultResponse() throws Exception {
ApiConfig config = configLoader.loadConfiguration(ServiceContext.create(), ExceptionEndpoint.class);
Swagger swagger = generator.writeSwagger(ImmutableList.of(config), context
.setAddGoogleJsonErrorAsDefaultResponse(true));
Swagger expected = readExpectedAsSwagger("error_codes_default_response.swagger");
checkSwagger(expected, swagger);
}
@Test
public void testWriteSwagger_ServiceExceptionErrorCodes() throws Exception {
ApiConfig config = configLoader.loadConfiguration(ServiceContext.create(), ExceptionEndpoint.class);
Swagger swagger = generator.writeSwagger(ImmutableList.of(config), context
.setAddErrorCodesForServiceExceptions(true));
Swagger expected = readExpectedAsSwagger("error_codes_service_exceptions.swagger");
checkSwagger(expected, swagger);
}
@Test
public void testWriteSwagger_ResponseStatus() throws Exception {
ApiConfig config = configLoader.loadConfiguration(ServiceContext.create(), ResponseStatusEndpoint.class);
Swagger swagger = generator.writeSwagger(ImmutableList.of(config), context);
Swagger expected = readExpectedAsSwagger("response_status.swagger");
checkSwagger(expected, swagger);
}
@Test
public void testWriteSwagger_AllErrors() throws Exception {
ApiConfig config = configLoader.loadConfiguration(ServiceContext.create(), ExceptionEndpoint.class);
Swagger swagger = generator.writeSwagger(ImmutableList.of(config), context
.setAddGoogleJsonErrorAsDefaultResponse(true)
.setAddErrorCodesForServiceExceptions(true));
Swagger expected = readExpectedAsSwagger("error_codes_all.swagger");
checkSwagger(expected, swagger);
}
@Test
public void testWriteSwagger_Optional() throws Exception {
ApiConfig config = configLoader.loadConfiguration(ServiceContext.create(), OptionalEndpoint.class);
Swagger swagger = generator.writeSwagger(ImmutableList.of(config), context);
Swagger expected = readExpectedAsSwagger("optional_endpoint.swagger");
checkSwagger(expected, swagger);
}
@Test
public void testEquivalentPathsNotAccepted() {
try {
ApiConfig config = configLoader.loadConfiguration(ServiceContext.create(), EquivalentPathsEndpoint.class);
generator.writeSwagger(ImmutableList.of(config), context);
} catch (Exception e) {
assertThat(e).isInstanceOf(IllegalStateException.class);
assertThat(e).hasMessageThat().contains("Equivalent paths found");
}
}
@Test
public void testWriteSwagger_SpecialChars() throws Exception {
ApiConfig config = configLoader.loadConfiguration(ServiceContext.create(), SpecialCharsEndpoint.class);
Swagger swagger = generator.writeSwagger(ImmutableList.of(config), context
.setExtractCommonParametersAsRefs(true));
Swagger expected = readExpectedAsSwagger("special_chars.swagger");
checkSwagger(expected, swagger);
}
@Test
public void testWriteSwagger_ValidationEndpoint() throws Exception {
Swagger swagger = getSwagger(ValidationEndpoint.class, new SwaggerContext());
Swagger expected = readExpectedAsSwagger("validation_endpoint.swagger");
checkSwagger(expected, swagger);
}
private Swagger getSwagger(Class<?> serviceClass, SwaggerContext context)
throws Exception {
ApiConfig config = configLoader.loadConfiguration(ServiceContext.create(), serviceClass);
return generator.writeSwagger(ImmutableList.of(config), context);
}
private Swagger readExpectedAsSwagger(String file) throws Exception {
String expectedString = IoUtil.readResourceFile(SwaggerGeneratorTest.class, file);
return mapper.readValue(expectedString, Swagger.class);
}
  /**
   * Asserts that {@code actual} is a structurally valid Swagger document and that it
   * matches the {@code expected} golden document.
   */
  private void checkSwagger(Swagger expected, Swagger actual) {
    SwaggerSubject.assertThat(actual).isValid();
    SwaggerSubject.assertThat(actual).isSameAs(expected);
  }
  // Test fixture: API authenticated by third-party issuers. "auth0" has a JWKS URI,
  // "nojwks" deliberately has none; "auth0" gets an API-level audience which
  // authOverride() replaces at method level while noOverride() inherits it.
  @Api(name = "thirdparty", version = "v1",
      issuers = {
          @ApiIssuer(name = "auth0", issuer = "https://test.auth0.com/authorize",
              jwksUri = "https://test.auth0.com/.wellknown/jwks.json"),
          @ApiIssuer(name = "nojwks", issuer = "https://nojwks.com")
      },
      issuerAudiences = {
          @ApiIssuerAudience(name = "auth0", audiences = "auth0audapi")
      })
  private static class ThirdPartyAuthEndpoint {
    // Overrides the API-level "auth0" audience with a method-specific one.
    @ApiMethod(
        issuerAudiences = {
            @ApiIssuerAudience(name = "auth0", audiences = "auth0audmethod")
        }
    )
    public void authOverride() { }
    // Inherits the API-level issuer audiences unchanged.
    public void noOverride() { }
  }
  // Test fixture: extends the third-party fixture and adds one method that uses the
  // Google ID token issuer alias with its own audience.
  private static class GoogleAuthEndpoint extends ThirdPartyAuthEndpoint {
    @ApiMethod(
        issuerAudiences = {
            @ApiIssuerAudience(name = Constant.GOOGLE_ID_TOKEN_ALT, audiences = "googleaud")
        }
    )
    public void googleAuth() { }
  }
  // Test fixture: API-level apiKeyRequired=TRUE, exercised with a method that opts
  // out, a method that inherits the setting, and a method combining the API key
  // requirement with issuer-based auth.
  @Api(name = "apikeys", version = "v1",
      issuers = {
          @ApiIssuer(name = "auth0", issuer = "https://test.auth0.com/authorize",
              jwksUri = "https://test.auth0.com/.wellknown/jwks.json")
      },
      apiKeyRequired = AnnotationBoolean.TRUE)
  private static class ApiKeysEndpoint {
    // Explicitly disables the API-level key requirement.
    @ApiMethod(apiKeyRequired = AnnotationBoolean.FALSE)
    public void overrideApiKeySetting() { }
    // Inherits apiKeyRequired=TRUE from the @Api annotation.
    @ApiMethod
    public void inheritApiKeySetting() { }
    // API key requirement combined with issuer audiences.
    @ApiMethod(
        issuerAudiences = {
            @ApiIssuerAudience(name = "auth0", audiences = "auth0audmethod")
        })
    public void apiKeyWithAuth() { }
  }
  // Test fixture: API-level OAuth scope and audience, with methods that inherit,
  // override the scope (known and unknown values), or override the audience.
  @Api(name = "multipleScopes",
      version = "v1",
      audiences = {"audience"},
      scopes = "https://mail.google.com/")
  private static class MultipleScopesEndpoint {
    // Inherits the API-level scope and audience.
    @ApiMethod
    public void noOverride() { }
    // Replaces the scope with the standard email scope constant.
    @ApiMethod(scopes = Constant.API_EMAIL_SCOPE)
    public void scopeOverride() { }
    // Uses a scope string the generator does not recognize.
    @ApiMethod(scopes = "unknownScope")
    public void unknownScope() { }
    // Replaces the API-level audience.
    @ApiMethod(audiences = {"audience2"})
    public void overrideAudience() { }
  }
  // Test fixture for the error-code generation tests: methods with no throws clause,
  // a single ServiceException, a specific subclass, multiple subclasses, and an
  // exception type unknown to the framework.
  @Api(name = "exceptions", version = "v1")
  private static class ExceptionEndpoint {
    @ApiMethod
    public void doesNotThrow() { }
    @ApiMethod
    public void throwsServiceException() throws ServiceException { }
    @ApiMethod
    public void throwsNotFoundException() throws NotFoundException { }
    @ApiMethod
    public void throwsMultipleExceptions() throws BadRequestException, ConflictException { }
    // Not a ServiceException subclass; should not map to a documented error code.
    @ApiMethod
    public void throwsUnknownException() throws IllegalStateException { }
  }
  // Test fixture: two paths that differ only in the path-parameter name, which the
  // generator must reject as equivalent (see testEquivalentPathsNotAccepted).
  @Api(name = "equivalentPaths", version = "v1")
  private static class EquivalentPathsEndpoint {
    @ApiMethod(path = "foo/{id}", httpMethod = HttpMethod.GET)
    public void path1(@Named("id") String id) { }
    @ApiMethod(path = "foo/{fooId}", httpMethod = HttpMethod.POST)
    public void path2(@Named("fooId") String fooId) { }
  }
}
| |
/**
* Copyright 2015 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS-IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.apphosting.vmruntime.jetty9;
import com.google.appengine.api.utils.SystemProperty;
import com.google.apphosting.api.ApiProxy;
import com.google.apphosting.vmruntime.VmApiProxyDelegate;
import com.google.apphosting.vmruntime.VmApiProxyEnvironment;
import com.google.apphosting.vmruntime.VmRuntimeUtils;
import org.apache.commons.httpclient.Header;
import org.apache.commons.httpclient.HttpClient;
import org.apache.commons.httpclient.methods.GetMethod;
import org.junit.Ignore;
import javax.servlet.http.HttpServletResponse;
import java.io.*;
import java.net.HttpURLConnection;
import java.net.URL;
import java.util.Arrays;
import java.util.List;
/**
* Misc individual Jetty9 vmengines tests.
*
*/
/**
 * Misc individual Jetty9 vmengines tests.
 */
public class VmRuntimeJettyKitchenSinkTest extends VmRuntimeTestBase {

  /**
   * Creates the static fixture files (small.txt, big.txt, huge.txt) next to WEB-INF
   * and selects the sessions-disabled deployment descriptor before starting the runtime.
   *
   * <p>Bug fix: the fixture writers were previously closed only on the success path and
   * leaked when a write threw; they are now opened in try-with-resources.
   */
  @Override
  protected void setUp() throws Exception {
    appengineWebXml = "WEB-INF/sessions-disabled-appengine-web.xml";
    // Locate WEB-INF: either the working directory is the webapp itself, or we are in
    // the module root and the exploded webapp lives under target/.
    File webinf = new File("./WEB-INF");
    if (!webinf.exists() || !webinf.isDirectory()) {
      webinf = new File("./target/webapps/testwebapp/WEB-INF");
      if (!webinf.exists() || !webinf.isDirectory()) {
        System.err.println(webinf.getAbsolutePath());
        throw new IllegalStateException(
            "Incorrect working directory " + new File(".").getAbsoluteFile().getCanonicalPath());
      }
    }
    File small = new File(webinf.getParentFile(), "small.txt");
    if (!small.exists()) {
      try (Writer out = new OutputStreamWriter(new FileOutputStream(small))) {
        out.write("zero\n");
        out.write("one\n");
        out.write("two\n");
      }
    }
    File big = new File(webinf.getParentFile(), "big.txt");
    if (!big.exists()) {
      try (Writer out = new OutputStreamWriter(new FileOutputStream(big))) {
        for (int i = 0; i < 4096; i++) {
          out.write(Integer.toString(i));
          out.write("\n");
        }
      }
    }
    File huge = new File(webinf.getParentFile(), "huge.txt");
    if (!huge.exists()) {
      try (Writer out = new OutputStreamWriter(new FileOutputStream(huge))) {
        for (int i = 0; i < 16 * 1024 * 1024; i++) {
          out.write(Integer.toString(i));
          out.write("\n");
        }
      }
    }
    super.setUp();
  }

  /**
   * Test that non compiled jsp files can be served.
   *
   * @throws Exception
   */
  public void testJspNotCompiled() throws Exception {
    int iter = 0;
    int end = 6;
    String[] lines
        = fetchUrl(createUrl(String.format("/hello_not_compiled.jsp?start=%d&end=%d", iter, end)));
    String iterationFormat = "<h2>Iteration %d</h2>";
    for (String line : lines) {
      System.out.println(line);
      if (!line.contains("Iteration")) {
        continue;
      }
      // assertEquals takes (expected, actual); the original call had them swapped,
      // which produced a misleading failure message.
      assertEquals(String.format(iterationFormat, iter++), line.trim());
    }
    assertEquals(end + 1, iter);
  }

  /**
   * Tests that mapping a servlet to / works.
   *
   * @throws Exception
   */
  public void testWelcomeServlet() throws Exception {
    String[] lines = fetchUrl(createUrl("/"));
    assertTrue(Arrays.asList(lines).contains("Hello, World!"));
  }

  /** Verifies the small static fixture file is served line-by-line. */
  public void testSmallTxt() throws Exception {
    String[] lines = fetchUrl(createUrl("/small.txt"));
    assertEquals("zero", lines[0]);
    assertEquals("one", lines[1]);
    assertEquals("two", lines[2]);
  }

  /** Verifies the 4096-line static fixture file is served completely. */
  public void testBigTxt() throws Exception {
    String[] lines = fetchUrl(createUrl("/big.txt"));
    assertEquals("0", lines[0]);
    assertEquals("4095", lines[4095]);
  }

  // Disabled (renamed from test*): exercises the max-response-size limit with a
  // ~16M-line file; expected to fail with HTTP 500.
  public void ignore_testHugeTxt() throws Exception {
    System.err.println("Expect: java.io.IOException: Max response size exceeded.");
    HttpURLConnection connection = (HttpURLConnection) createUrl("/huge.txt").openConnection();
    connection.connect();
    assertEquals(500, connection.getResponseCode());
  }

  /**
   * Tests that app.yaml is protected
   */
  public void testAppYamlHidden() throws Exception {
    HttpURLConnection connection = (HttpURLConnection) createUrl("/app.yaml").openConnection();
    connection.connect();
    assertEquals(404, connection.getResponseCode());
  }

  /**
   * Test that the API Proxy was configured by the VmRuntimeFilter.
   *
   * @throws Exception
   */
  public void testApiProxyInstall() throws Exception {
    assertNotNull(ApiProxy.getDelegate());
    assertEquals(VmApiProxyDelegate.class.getCanonicalName(),
        ApiProxy.getDelegate().getClass().getCanonicalName());
  }

  /**
   * Test that the thread local environment is set up on each request.
   *
   * @throws Exception
   */
  public void testEnvironmentInstall() throws Exception {
    String[] lines = fetchUrl(createUrl("/CurrentEnvironmentAccessor"));
    List<String> expectedLines = Arrays.asList(
        "testpartition~google.com:test-project",
        "testbackend",
        "testversion.0");
    assertEquals(expectedLines, Arrays.asList(lines));
  }

  /**
   * Test that the health check servlet was loaded and responds with "ok" with
   * the proper version provided.
   *
   * @throws Exception
   */
  public void testHealthOK() throws Exception {
    String[] lines = fetchUrl(createUrl("/_ah/health"));
    assertEquals(1, lines.length);
    assertEquals("ok", lines[0].trim());
  }

  /**
   * Verifies that a request using the async sleep API blocks until the API call
   * completes, and that the wait time is reported in the async-API-wait header.
   */
  public void testAsyncRequests_WaitUntilDone() throws Exception {
    long sleepTime = 2000;
    FakeableVmApiProxyDelegate fakeApiProxy = new FakeableVmApiProxyDelegate();
    ApiProxy.setDelegate(fakeApiProxy);
    HttpClient httpClient = new HttpClient();
    httpClient.getHttpConnectionManager().getParams().setConnectionTimeout(5000);
    GetMethod get = new GetMethod(createUrl("/sleep").toString());
    get.addRequestHeader("Use-Async-Sleep-Api", "true");
    get.addRequestHeader("Sleep-Time", Long.toString(sleepTime));
    long startTime = System.currentTimeMillis();
    int httpCode = httpClient.executeMethod(get);
    assertEquals(200, httpCode);
    Header vmApiWaitTime = get.getResponseHeader(VmRuntimeUtils.ASYNC_API_WAIT_HEADER);
    assertNotNull(vmApiWaitTime);
    assertTrue(Integer.parseInt(vmApiWaitTime.getValue()) > 0);
    long elapsed = System.currentTimeMillis() - startTime;
    assertTrue(elapsed >= sleepTime);
  }

  /**
   * Test that all AppEngine specific system properties are set up when the
   * VmRuntimeFilter is initialized.
   *
   * @throws Exception
   */
  public void testSystemProperties() throws Exception {
    String[] lines = fetchUrl(createUrl("/printSystemProperties"));
    assertEquals(7, lines.length);
    assertEquals("sysprop1 value", lines[0]);
    assertEquals("sysprop2 value", lines[1]);
    assertEquals("null", lines[2]);
    assertEquals(SystemProperty.Environment.Value.Production.name(), lines[3]);
    assertTrue(lines[4].startsWith("Google App Engine/"));
    assertEquals(PROJECT, lines[5]);
    assertEquals(VERSION + ".0", lines[6]);
  }

  /**
   * Test that the warmup handler is installed.
   */
  public void testWarmup() throws Exception {
    String[] lines = fetchUrl(createUrl("/_ah/warmup")); // fetchUrl() fails on non-OK return codes.
    assertEquals(0, lines.length);
  }

  /**
   * Test that sessions are disabled. Disabling sessions means that the default HashSessionManager
   * is being used, which keeps sessions in memory only. Enabling sessions uses the appengine
   * SessionManager which will use Datastore and memcache as persistent backing stores.
   *
   * @throws Exception
   */
  public void testSessions() throws Exception {
    for (int i = 1; i <= 5; i++) {
      String[] lines = fetchUrl(createUrl("/count?type=session"));
      assertEquals(1, lines.length);
      assertEquals("1", lines[0]); // We're not passing in any session cookie so each request is a fresh session.
    }
  }

  /** Without the HTTPS header the request must be reported as non-SSL over plain http. */
  public void testSsl_NoSSL() throws Exception {
    HttpClient httpClient = new HttpClient();
    httpClient.getHttpConnectionManager().getParams().setConnectionTimeout(30000);
    GetMethod get = new GetMethod(createUrl("/test-ssl").toString());
    int httpCode = httpClient.executeMethod(get);
    assertEquals(200, httpCode);
    String expected = "false:http:http://localhost:" + port + "/test-ssl";
    assertEquals(expected, get.getResponseBodyAsString());
  }

  /** With the HTTPS header set, the request must be reported as SSL-terminated. */
  public void testSsl_WithSSL() throws Exception {
    HttpClient httpClient = new HttpClient();
    httpClient.getHttpConnectionManager().getParams().setConnectionTimeout(30000);
    GetMethod get = new GetMethod(createUrl("/test-ssl").toString());
    get.addRequestHeader(VmApiProxyEnvironment.HTTPS_HEADER, "on");
    int httpCode = httpClient.executeMethod(get);
    assertEquals(200, httpCode);
    assertEquals("true:https:https://localhost/test-ssl", get.getResponseBodyAsString());
  }

  /** Requests arriving on the host IP (untrusted inbound) should still be served. */
  public void testWithUntrustedInboundIp() throws Exception {
    HttpClient httpClient = new HttpClient();
    httpClient.getHttpConnectionManager().getParams().setConnectionTimeout(30000);
    GetMethod get = new GetMethod(createUrlForHostIP("/test-ssl").toString());
    int httpCode = httpClient.executeMethod(get);
    assertEquals(200, httpCode);
  }

  /** Fetches {@code url} and returns only the HTTP response code. */
  protected int fetchResponseCode(URL url) throws IOException {
    HttpURLConnection connection = (HttpURLConnection) url.openConnection();
    connection.connect();
    return connection.getResponseCode();
  }

  /**
   * Verifies that hitting /_ah/stop puts the health check into lameduck mode
   * (502 Bad Gateway) after it initially reported healthy.
   */
  public void testShutDown() throws Exception {
    int code = fetchResponseCode(createUrl("/_ah/health"));
    assertEquals(HttpServletResponse.SC_OK, code);
    // Send a request to /_ah/stop to trigger lameduck.
    String[] lines = fetchUrl(createUrl("/_ah/stop"));
    assertEquals(1, lines.length);
    assertEquals("ok", lines[0].trim());
    code = fetchResponseCode(createUrl("/_ah/health"));
    assertEquals(HttpServletResponse.SC_BAD_GATEWAY, code);
  }
}
| |
package org.hisp.dhis.datavalue;
/*
* Copyright (c) 2004-2015, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import java.util.Collection;
import java.util.HashSet;
import org.hisp.dhis.DhisSpringTest;
import org.hisp.dhis.dataelement.DataElement;
import org.hisp.dhis.dataelement.DataElementCategoryOptionCombo;
import org.hisp.dhis.dataelement.DataElementCategoryService;
import org.hisp.dhis.dataelement.DataElementService;
import org.hisp.dhis.organisationunit.OrganisationUnit;
import org.hisp.dhis.organisationunit.OrganisationUnitService;
import org.hisp.dhis.period.Period;
import org.hisp.dhis.period.PeriodService;
import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired;
/**
* @author Kristian Nordal
* @version $Id: DataValueServiceTest.java 5715 2008-09-17 14:05:28Z larshelg $
*/
public class DataValueServiceTest
extends DhisSpringTest
{
@Autowired
private DataElementCategoryService categoryService;
@Autowired
private DataElementService dataElementService;
@Autowired
private DataValueService dataValueService;
@Autowired
private PeriodService periodService;
@Autowired
private OrganisationUnitService organisationUnitService;
// -------------------------------------------------------------------------
// Supporting data
// -------------------------------------------------------------------------
private DataElement dataElementA;
private DataElement dataElementB;
private DataElement dataElementC;
private DataElement dataElementD;
private DataElementCategoryOptionCombo optionCombo;
private Period periodA;
private Period periodB;
private Period periodC;
private Period periodD;
private OrganisationUnit sourceA;
private OrganisationUnit sourceB;
private OrganisationUnit sourceC;
private OrganisationUnit sourceD;
// -------------------------------------------------------------------------
// Set up/tear down
// -------------------------------------------------------------------------
    @Override
    public void setUpTest()
        throws Exception
    {
        // ---------------------------------------------------------------------
        // Add supporting data
        // ---------------------------------------------------------------------

        // Four data elements (A-D): the "what" dimension of the data values.
        dataElementA = createDataElement( 'A' );
        dataElementB = createDataElement( 'B' );
        dataElementC = createDataElement( 'C' );
        dataElementD = createDataElement( 'D' );
        dataElementService.addDataElement( dataElementA );
        dataElementService.addDataElement( dataElementB );
        dataElementService.addDataElement( dataElementC );
        dataElementService.addDataElement( dataElementD );
        // Four consecutive one-day periods (days 5-6, 6-7, 7-8, 8-9): the "when".
        periodA = createPeriod( getDay( 5 ), getDay( 6 ) );
        periodB = createPeriod( getDay( 6 ), getDay( 7 ) );
        periodC = createPeriod( getDay( 7 ), getDay( 8 ) );
        periodD = createPeriod( getDay( 8 ), getDay( 9 ) );
        // Four organisation units (A-D): the "where".
        sourceA = createOrganisationUnit( 'A' );
        sourceB = createOrganisationUnit( 'B' );
        sourceC = createOrganisationUnit( 'C' );
        sourceD = createOrganisationUnit( 'D' );
        organisationUnitService.addOrganisationUnit( sourceA );
        organisationUnitService.addOrganisationUnit( sourceB );
        organisationUnitService.addOrganisationUnit( sourceC );
        organisationUnitService.addOrganisationUnit( sourceD );
        // Default category option combo shared by all data values in these tests.
        optionCombo = categoryService.getDefaultDataElementCategoryOptionCombo();
    }
// -------------------------------------------------------------------------
// Basic DataValue
// -------------------------------------------------------------------------
@Test
public void testAddDataValue()
throws Exception
{
DataValue dataValueA = new DataValue( dataElementA, periodA, sourceA, optionCombo, optionCombo );
dataValueA.setValue( "1" );
DataValue dataValueB = new DataValue( dataElementB, periodA, sourceA, optionCombo, optionCombo );
dataValueB.setValue( "2" );
DataValue dataValueC = new DataValue( dataElementC, periodC, sourceA, optionCombo, optionCombo );
dataValueC.setValue( "3" );
dataValueService.addDataValue( dataValueA );
dataValueService.addDataValue( dataValueB );
dataValueService.addDataValue( dataValueC );
dataValueA = dataValueService.getDataValue( dataElementA, periodA, sourceA, optionCombo );
assertNotNull( dataValueA );
assertNotNull( dataValueA.getCreated() );
assertEquals( sourceA.getId(), dataValueA.getSource().getId() );
assertEquals( dataElementA, dataValueA.getDataElement() );
assertEquals( periodA, dataValueA.getPeriod() );
assertEquals( "1", dataValueA.getValue() );
dataValueB = dataValueService.getDataValue( dataElementB, periodA, sourceA, optionCombo );
assertNotNull( dataValueB );
assertNotNull( dataValueB.getCreated() );
assertEquals( sourceA.getId(), dataValueB.getSource().getId() );
assertEquals( dataElementB, dataValueB.getDataElement() );
assertEquals( periodA, dataValueB.getPeriod() );
assertEquals( "2", dataValueB.getValue() );
dataValueC = dataValueService.getDataValue( dataElementC, periodC, sourceA, optionCombo );
assertNotNull( dataValueC );
assertNotNull( dataValueC.getCreated() );
assertEquals( sourceA.getId(), dataValueC.getSource().getId() );
assertEquals( dataElementC, dataValueC.getDataElement() );
assertEquals( periodC, dataValueC.getPeriod() );
assertEquals( "3", dataValueC.getValue() );
}
@Test
public void testUpdataDataValue()
throws Exception
{
DataValue dataValueA = new DataValue( dataElementA, periodA, sourceA, optionCombo, optionCombo );
dataValueA.setValue( "1" );
DataValue dataValueB = new DataValue( dataElementB, periodA, sourceB, optionCombo, optionCombo );
dataValueB.setValue( "2" );
dataValueService.addDataValue( dataValueA );
dataValueService.addDataValue( dataValueB );
assertNotNull( dataValueService.getDataValue( dataElementA, periodA, sourceA, optionCombo ) );
assertNotNull( dataValueService.getDataValue( dataElementB, periodA, sourceB, optionCombo ) );
dataValueA.setValue( "5" );
dataValueService.updateDataValue( dataValueA );
dataValueA = dataValueService.getDataValue( dataElementA, periodA, sourceA, optionCombo );
assertNotNull( dataValueA );
assertEquals( "5", dataValueA.getValue() );
dataValueB = dataValueService.getDataValue( dataElementB, periodA, sourceB, optionCombo );
assertNotNull( dataValueB );
assertEquals( "2", dataValueB.getValue() );
}
@Test
public void testDeleteAndGetDataValue()
throws Exception
{
DataValue dataValueA = new DataValue( dataElementA, periodA, sourceA, optionCombo, optionCombo );
dataValueA.setValue( "1" );
DataValue dataValueB = new DataValue( dataElementB, periodA, sourceA, optionCombo, optionCombo );
dataValueB.setValue( "2" );
DataValue dataValueC = new DataValue( dataElementC, periodC, sourceD, optionCombo, optionCombo );
dataValueC.setValue( "3" );
DataValue dataValueD = new DataValue( dataElementD, periodC, sourceB, optionCombo, optionCombo );
dataValueD.setValue( "4" );
dataValueService.addDataValue( dataValueA );
dataValueService.addDataValue( dataValueB );
dataValueService.addDataValue( dataValueC );
dataValueService.addDataValue( dataValueD );
assertNotNull( dataValueService.getDataValue( dataElementA, periodA, sourceA, optionCombo ) );
assertNotNull( dataValueService.getDataValue( dataElementB, periodA, sourceA, optionCombo ) );
assertNotNull( dataValueService.getDataValue( dataElementC, periodC, sourceD, optionCombo ) );
assertNotNull( dataValueService.getDataValue( dataElementD, periodC, sourceB, optionCombo ) );
dataValueService.deleteDataValue( dataValueA );
assertNull( dataValueService.getDataValue( dataElementA, periodA, sourceA, optionCombo ) );
assertNotNull( dataValueService.getDataValue( dataElementB, periodA, sourceA, optionCombo ) );
assertNotNull( dataValueService.getDataValue( dataElementC, periodC, sourceD, optionCombo ) );
assertNotNull( dataValueService.getDataValue( dataElementD, periodC, sourceB, optionCombo ) );
dataValueService.deleteDataValue( dataValueB );
assertNull( dataValueService.getDataValue( dataElementA, periodA, sourceA, optionCombo ) );
assertNull( dataValueService.getDataValue( dataElementB, periodA, sourceA, optionCombo ) );
assertNotNull( dataValueService.getDataValue( dataElementC, periodC, sourceD, optionCombo ) );
assertNotNull( dataValueService.getDataValue( dataElementD, periodC, sourceB, optionCombo ) );
dataValueService.deleteDataValue( dataValueC );
assertNull( dataValueService.getDataValue( dataElementA, periodA, sourceA, optionCombo ) );
assertNull( dataValueService.getDataValue( dataElementB, periodA, sourceA, optionCombo ) );
assertNull( dataValueService.getDataValue( dataElementC, periodC, sourceD, optionCombo ) );
assertNotNull( dataValueService.getDataValue( dataElementD, periodC, sourceB, optionCombo ) );
dataValueService.deleteDataValue( dataValueD );
assertNull( dataValueService.getDataValue( dataElementA, periodA, sourceA, optionCombo ) );
assertNull( dataValueService.getDataValue( dataElementB, periodA, sourceA, optionCombo ) );
assertNull( dataValueService.getDataValue( dataElementC, periodC, sourceD, optionCombo ) );
assertNull( dataValueService.getDataValue( dataElementD, periodC, sourceB, optionCombo ) );
}
@Test
public void testDeleteDataValuesBySource()
throws Exception
{
DataValue dataValueA = new DataValue( dataElementA, periodA, sourceA, optionCombo, optionCombo );
dataValueA.setValue( "1" );
DataValue dataValueB = new DataValue( dataElementB, periodA, sourceA, optionCombo, optionCombo );
dataValueB.setValue( "2" );
DataValue dataValueC = new DataValue( dataElementC, periodC, sourceD, optionCombo, optionCombo );
dataValueC.setValue( "3" );
DataValue dataValueD = new DataValue( dataElementD, periodC, sourceB, optionCombo, optionCombo );
dataValueD.setValue( "4" );
dataValueService.addDataValue( dataValueA );
dataValueService.addDataValue( dataValueB );
dataValueService.addDataValue( dataValueC );
dataValueService.addDataValue( dataValueD );
assertNotNull( dataValueService.getDataValue( dataElementA, periodA, sourceA, optionCombo ) );
assertNotNull( dataValueService.getDataValue( dataElementB, periodA, sourceA, optionCombo ) );
assertNotNull( dataValueService.getDataValue( dataElementC, periodC, sourceD, optionCombo ) );
assertNotNull( dataValueService.getDataValue( dataElementD, periodC, sourceB, optionCombo ) );
dataValueService.deleteDataValuesBySource( sourceA );
assertNull( dataValueService.getDataValue( dataElementA, periodA, sourceA, optionCombo ) );
assertNull( dataValueService.getDataValue( dataElementB, periodA, sourceA, optionCombo ) );
assertNotNull( dataValueService.getDataValue( dataElementC, periodC, sourceD, optionCombo ) );
assertNotNull( dataValueService.getDataValue( dataElementD, periodC, sourceB, optionCombo ) );
dataValueService.deleteDataValuesBySource( sourceB );
assertNull( dataValueService.getDataValue( dataElementA, periodA, sourceA, optionCombo ) );
assertNull( dataValueService.getDataValue( dataElementB, periodA, sourceA, optionCombo ) );
assertNotNull( dataValueService.getDataValue( dataElementC, periodC, sourceD, optionCombo ) );
assertNull( dataValueService.getDataValue( dataElementD, periodC, sourceB, optionCombo ) );
dataValueService.deleteDataValuesBySource( sourceC );
assertNull( dataValueService.getDataValue( dataElementA, periodA, sourceA, optionCombo ) );
assertNull( dataValueService.getDataValue( dataElementB, periodA, sourceA, optionCombo ) );
assertNotNull( dataValueService.getDataValue( dataElementC, periodC, sourceD, optionCombo ) );
assertNull( dataValueService.getDataValue( dataElementD, periodC, sourceB, optionCombo ) );
dataValueService.deleteDataValuesBySource( sourceD );
assertNull( dataValueService.getDataValue( dataElementA, periodA, sourceA, optionCombo ) );
assertNull( dataValueService.getDataValue( dataElementB, periodA, sourceA, optionCombo ) );
assertNull( dataValueService.getDataValue( dataElementC, periodC, sourceD, optionCombo ) );
assertNull( dataValueService.getDataValue( dataElementD, periodC, sourceB, optionCombo ) );
}
@Test
public void testDeleteDataValuesByDataElement()
    throws Exception
{
    // One value per data element: A and B share ( periodA, sourceA ),
    // C is stored under ( periodC, sourceD ) and D under ( periodC, sourceB ).
    DataValue dataValueA = new DataValue( dataElementA, periodA, sourceA, optionCombo, optionCombo );
    dataValueA.setValue( "1" );
    DataValue dataValueB = new DataValue( dataElementB, periodA, sourceA, optionCombo, optionCombo );
    dataValueB.setValue( "2" );
    DataValue dataValueC = new DataValue( dataElementC, periodC, sourceD, optionCombo, optionCombo );
    dataValueC.setValue( "3" );
    DataValue dataValueD = new DataValue( dataElementD, periodC, sourceB, optionCombo, optionCombo );
    dataValueD.setValue( "4" );

    dataValueService.addDataValue( dataValueA );
    dataValueService.addDataValue( dataValueB );
    dataValueService.addDataValue( dataValueC );
    dataValueService.addDataValue( dataValueD );

    // Sanity check: all four values are retrievable before any deletion.
    assertNotNull( dataValueService.getDataValue( dataElementA, periodA, sourceA, optionCombo ) );
    assertNotNull( dataValueService.getDataValue( dataElementB, periodA, sourceA, optionCombo ) );
    assertNotNull( dataValueService.getDataValue( dataElementC, periodC, sourceD, optionCombo ) );
    assertNotNull( dataValueService.getDataValue( dataElementD, periodC, sourceB, optionCombo ) );

    // Delete element by element and verify that exactly the values of the
    // deleted element disappear while all other values remain untouched.
    dataValueService.deleteDataValuesByDataElement( dataElementA );
    assertNull( dataValueService.getDataValue( dataElementA, periodA, sourceA, optionCombo ) );
    assertNotNull( dataValueService.getDataValue( dataElementB, periodA, sourceA, optionCombo ) );
    assertNotNull( dataValueService.getDataValue( dataElementC, periodC, sourceD, optionCombo ) );
    assertNotNull( dataValueService.getDataValue( dataElementD, periodC, sourceB, optionCombo ) );

    dataValueService.deleteDataValuesByDataElement( dataElementB );
    assertNull( dataValueService.getDataValue( dataElementA, periodA, sourceA, optionCombo ) );
    assertNull( dataValueService.getDataValue( dataElementB, periodA, sourceA, optionCombo ) );
    assertNotNull( dataValueService.getDataValue( dataElementC, periodC, sourceD, optionCombo ) );
    assertNotNull( dataValueService.getDataValue( dataElementD, periodC, sourceB, optionCombo ) );

    dataValueService.deleteDataValuesByDataElement( dataElementC );
    assertNull( dataValueService.getDataValue( dataElementA, periodA, sourceA, optionCombo ) );
    assertNull( dataValueService.getDataValue( dataElementB, periodA, sourceA, optionCombo ) );
    assertNull( dataValueService.getDataValue( dataElementC, periodC, sourceD, optionCombo ) );
    assertNotNull( dataValueService.getDataValue( dataElementD, periodC, sourceB, optionCombo ) );

    dataValueService.deleteDataValuesByDataElement( dataElementD );
    assertNull( dataValueService.getDataValue( dataElementA, periodA, sourceA, optionCombo ) );
    assertNull( dataValueService.getDataValue( dataElementB, periodA, sourceA, optionCombo ) );
    assertNull( dataValueService.getDataValue( dataElementC, periodC, sourceD, optionCombo ) );
    assertNull( dataValueService.getDataValue( dataElementD, periodC, sourceB, optionCombo ) );
}
// -------------------------------------------------------------------------
// Collections of DataValues
// -------------------------------------------------------------------------
@Test
public void testGetAllDataValues()
    throws Exception
{
    // Persist four distinct values and verify that getAllDataValues()
    // returns every one of them.
    DataValue valueA = new DataValue( dataElementA, periodA, sourceA, optionCombo, optionCombo );
    valueA.setValue( "1" );
    DataValue valueB = new DataValue( dataElementB, periodA, sourceA, optionCombo, optionCombo );
    valueB.setValue( "2" );
    DataValue valueC = new DataValue( dataElementC, periodC, sourceD, optionCombo, optionCombo );
    valueC.setValue( "3" );
    DataValue valueD = new DataValue( dataElementD, periodC, sourceB, optionCombo, optionCombo );
    valueD.setValue( "4" );

    for ( DataValue value : new DataValue[] { valueA, valueB, valueC, valueD } )
    {
        dataValueService.addDataValue( value );
    }

    Collection<DataValue> allValues = dataValueService.getAllDataValues();

    assertNotNull( allValues );
    assertEquals( 4, allValues.size() );
}
@Test
public void testGetDataValuesSourcePeriod()
    throws Exception
{
    // Two values under ( sourceA, periodA ), one under ( sourceD, periodC )
    // and one under ( sourceB, periodC ).
    DataValue dataValueA = new DataValue( dataElementA, periodA, sourceA, optionCombo, optionCombo );
    dataValueA.setValue( "1" );
    DataValue dataValueB = new DataValue( dataElementB, periodA, sourceA, optionCombo, optionCombo );
    dataValueB.setValue( "2" );
    DataValue dataValueC = new DataValue( dataElementC, periodC, sourceD, optionCombo, optionCombo );
    dataValueC.setValue( "3" );
    DataValue dataValueD = new DataValue( dataElementD, periodC, sourceB, optionCombo, optionCombo );
    dataValueD.setValue( "4" );

    dataValueService.addDataValue( dataValueA );
    dataValueService.addDataValue( dataValueB );
    dataValueService.addDataValue( dataValueC );
    dataValueService.addDataValue( dataValueD );

    // sourceA/periodA holds the values of data elements A and B.
    Collection<DataValue> dataValues = dataValueService.getDataValues( sourceA, periodA );
    assertNotNull( dataValues );
    assertEquals( 2, dataValues.size() );

    // sourceB/periodC holds only the value of data element D.
    dataValues = dataValueService.getDataValues( sourceB, periodC );
    assertNotNull( dataValues );
    assertEquals( 1, dataValues.size() );

    // No values were registered for sourceB in periodD.
    dataValues = dataValueService.getDataValues( sourceB, periodD );
    assertNotNull( dataValues );
    assertEquals( 0, dataValues.size() );
}
@Test
public void testGetDataValuesSourceDataElement()
    throws Exception
{
    // One value per data element; only A's and B's values live under sourceA.
    DataValue dataValueA = new DataValue( dataElementA, periodA, sourceA, optionCombo, optionCombo );
    dataValueA.setValue( "1" );
    DataValue dataValueB = new DataValue( dataElementB, periodA, sourceA, optionCombo, optionCombo );
    dataValueB.setValue( "2" );
    DataValue dataValueC = new DataValue( dataElementC, periodC, sourceD, optionCombo, optionCombo );
    dataValueC.setValue( "3" );
    DataValue dataValueD = new DataValue( dataElementD, periodC, sourceB, optionCombo, optionCombo );
    dataValueD.setValue( "4" );

    dataValueService.addDataValue( dataValueA );
    dataValueService.addDataValue( dataValueB );
    dataValueService.addDataValue( dataValueC );
    dataValueService.addDataValue( dataValueD );

    // sourceA has exactly one value for dataElementA and one for dataElementB.
    Collection<DataValue> dataValues = dataValueService.getDataValues( sourceA, dataElementA );
    assertNotNull( dataValues );
    assertEquals( 1, dataValues.size() );

    dataValues = dataValueService.getDataValues( sourceA, dataElementB );
    assertNotNull( dataValues );
    assertEquals( 1, dataValues.size() );

    // dataElementC's value was stored under sourceD, so sourceA yields none.
    dataValues = dataValueService.getDataValues( sourceA, dataElementC );
    assertNotNull( dataValues );
    assertEquals( 0, dataValues.size() );
}
@Test
public void testGetDataValuesSourcesDataElement()
    throws Exception
{
    // Note: dataValueD deliberately reuses dataElementA (under sourceB) so
    // that dataElementA has values at two different sources.
    DataValue dataValueA = new DataValue( dataElementA, periodA, sourceA, optionCombo, optionCombo );
    dataValueA.setValue( "1" );
    DataValue dataValueB = new DataValue( dataElementB, periodA, sourceA, optionCombo, optionCombo );
    dataValueB.setValue( "2" );
    DataValue dataValueC = new DataValue( dataElementC, periodC, sourceD, optionCombo, optionCombo );
    dataValueC.setValue( "3" );
    DataValue dataValueD = new DataValue( dataElementA, periodC, sourceB, optionCombo, optionCombo );
    dataValueD.setValue( "4" );

    dataValueService.addDataValue( dataValueA );
    dataValueService.addDataValue( dataValueB );
    dataValueService.addDataValue( dataValueC );
    dataValueService.addDataValue( dataValueD );

    // Query across sources A and B only.
    Collection<OrganisationUnit> sources = new HashSet<>();
    sources.add( sourceA );
    sources.add( sourceB );

    // dataElementA has one value at sourceA and one at sourceB.
    Collection<DataValue> dataValues = dataValueService.getDataValues( sources, dataElementA );
    assertNotNull( dataValues );
    assertEquals( 2, dataValues.size() );

    // dataElementB has a single value, at sourceA.
    dataValues = dataValueService.getDataValues( sources, dataElementB );
    assertNotNull( dataValues );
    assertEquals( 1, dataValues.size() );

    // dataElementC's only value is at sourceD, outside the queried sources.
    dataValues = dataValueService.getDataValues( sources, dataElementC );
    assertNotNull( dataValues );
    assertEquals( 0, dataValues.size() );
}
@Test
public void testGetDataValuesSourcePeriodDataElements()
    throws Exception
{
    // dataValueD reuses dataElementA under ( periodC, sourceB ); only data
    // elements A and B are included in the queried element set below.
    DataValue dataValueA = new DataValue( dataElementA, periodA, sourceA, optionCombo, optionCombo );
    dataValueA.setValue( "1" );
    DataValue dataValueB = new DataValue( dataElementB, periodA, sourceA, optionCombo, optionCombo );
    dataValueB.setValue( "2" );
    DataValue dataValueC = new DataValue( dataElementC, periodC, sourceD, optionCombo, optionCombo );
    dataValueC.setValue( "3" );
    DataValue dataValueD = new DataValue( dataElementA, periodC, sourceB, optionCombo, optionCombo );
    dataValueD.setValue( "4" );

    dataValueService.addDataValue( dataValueA );
    dataValueService.addDataValue( dataValueB );
    dataValueService.addDataValue( dataValueC );
    dataValueService.addDataValue( dataValueD );

    Collection<DataElement> dataElements = new HashSet<>();
    dataElements.add( dataElementA );
    dataElements.add( dataElementB );

    // ( sourceA, periodA ) holds the values of both queried elements.
    Collection<DataValue> dataValues = dataValueService.getDataValues( sourceA, periodA, dataElements );
    assertNotNull( dataValues );
    assertEquals( 2, dataValues.size() );

    // ( sourceB, periodC ) holds only dataValueD for dataElementA.
    dataValues = dataValueService.getDataValues( sourceB, periodC, dataElements );
    assertNotNull( dataValues );
    assertEquals( 1, dataValues.size() );

    // ( sourceD, periodC ) holds only dataElementC's value, which is not
    // part of the queried element set.
    dataValues = dataValueService.getDataValues( sourceD, periodC, dataElements );
    assertNotNull( dataValues );
    assertEquals( 0, dataValues.size() );
}
@Test
public void testGetDataValuesDataElementPeriodsSources()
    throws Exception
{
    // Three values for dataElementA spread over periods A/B and sources A-C,
    // plus one value for dataElementB outside the queried sources.
    DataValue dataValueA = new DataValue( dataElementA, periodA, sourceB, optionCombo, optionCombo );
    dataValueA.setValue( "1" );
    DataValue dataValueB = new DataValue( dataElementA, periodB, sourceA, optionCombo, optionCombo );
    dataValueB.setValue( "2" );
    DataValue dataValueC = new DataValue( dataElementA, periodA, sourceC, optionCombo, optionCombo );
    dataValueC.setValue( "3" );
    DataValue dataValueD = new DataValue( dataElementB, periodB, sourceD, optionCombo, optionCombo );
    dataValueD.setValue( "4" );

    dataValueService.addDataValue( dataValueA );
    dataValueService.addDataValue( dataValueB );
    dataValueService.addDataValue( dataValueC );
    dataValueService.addDataValue( dataValueD );

    Collection<Period> periods = new HashSet<>();
    periods.add( periodA );
    periods.add( periodB );

    Collection<OrganisationUnit> sources = new HashSet<>();
    sources.add( sourceA );
    sources.add( sourceB );

    Collection<DataValue> dataValues = dataValueService.getDataValues( dataElementA, periods, sources );

    // Only values A and B match dataElementA within the queried periods and
    // sources (value C is at sourceC, value D belongs to dataElementB).
    // Expected value first, per JUnit convention and the rest of this class.
    assertEquals( 2, dataValues.size() );
    assertTrue( dataValues.contains( dataValueA ) );
    assertTrue( dataValues.contains( dataValueB ) );
}
@Test
public void testGetDataValuesOptionComboDataElementPeriodsSources()
    throws Exception
{
    // Same fixture as testGetDataValuesDataElementPeriodsSources, but the
    // query additionally constrains on the category option combo.
    DataValue dataValueA = new DataValue( dataElementA, periodA, sourceB, optionCombo, optionCombo );
    dataValueA.setValue( "1" );
    DataValue dataValueB = new DataValue( dataElementA, periodB, sourceA, optionCombo, optionCombo );
    dataValueB.setValue( "2" );
    DataValue dataValueC = new DataValue( dataElementA, periodA, sourceC, optionCombo, optionCombo );
    dataValueC.setValue( "3" );
    DataValue dataValueD = new DataValue( dataElementB, periodB, sourceD, optionCombo, optionCombo );
    dataValueD.setValue( "4" );

    dataValueService.addDataValue( dataValueA );
    dataValueService.addDataValue( dataValueB );
    dataValueService.addDataValue( dataValueC );
    dataValueService.addDataValue( dataValueD );

    Collection<Period> periods = new HashSet<>();
    periods.add( periodA );
    periods.add( periodB );

    Collection<OrganisationUnit> sources = new HashSet<>();
    sources.add( sourceA );
    sources.add( sourceB );

    Collection<DataValue> dataValues = dataValueService.getDataValues( dataElementA, optionCombo, periods, sources );

    // Only values A and B match; expected value first, per JUnit convention
    // and the assertion style used throughout this class.
    assertEquals( 2, dataValues.size() );
    assertTrue( dataValues.contains( dataValueA ) );
    assertTrue( dataValues.contains( dataValueB ) );
}
}
| |
//%LICENSE////////////////////////////////////////////////////////////////
//
// Licensed to The Open Group (TOG) under one or more contributor license
// agreements. Refer to the OpenPegasusNOTICE.txt file distributed with
// this work for additional information regarding copyright ownership.
// Each contributor licenses this file to you under the OpenPegasus Open
// Source License; you may not use this file except in compliance with the
// License.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the "Software"),
// to deal in the Software without restriction, including without limitation
// the rights to use, copy, modify, merge, publish, distribute, sublicense,
// and/or sell copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
// IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
// TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
// SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
//////////////////////////////////////////////////////////////////////////
package Client;
import org.pegasus.jmpi.CIMClass;
import org.pegasus.jmpi.CIMDataType;
import org.pegasus.jmpi.CIMDateTime;
import org.pegasus.jmpi.CIMObjectPath;
import org.pegasus.jmpi.UnsignedInt8;
import org.pegasus.jmpi.UnsignedInt16;
import org.pegasus.jmpi.UnsignedInt32;
import org.pegasus.jmpi.UnsignedInt64;
import org.pegasus.jmpi.CIMException;
import org.pegasus.jmpi.CIMClient;
/**
 * Verifies that the CIM data type reported by the server for each property of
 * JMPIExpInstance_TestPropertyTypes matches the expected type, array-ness and
 * array size.
 */
public class testCIMDataType
{
    private boolean DEBUG = false;

    /**
     * This returns the group name.
     *
     * @return String "class" testcase belongs in.
     */
    public String getGroup ()
    {
        return "CIMDataType";
    }

    public void setDebug (boolean fDebug)
    {
        DEBUG = fDebug;
    }

    /**
     * Harness entry point.
     *
     * @param args      command line arguments; "debug" enables tracing
     * @param cimClient connection to the CIM server
     * @return true if the testcase passed
     */
    public boolean main (String args[], CIMClient cimClient)
    {
        for (int i = 0; i < args.length; i++)
        {
            if (args[i].equalsIgnoreCase ("debug"))
            {
                setDebug (true);
            }
        }

        // The former fExecuted flag was never set, so the testcase always
        // ran unconditionally; call it directly.
        return testCIMDataType (cimClient);
    }

    private boolean testCIMDataType (CIMClient client)
    {
        if (client == null)
        {
            System.out.println ("FAILURE: testCIMDataType: client == null");

            return false;
        }

        // -----

        CIMObjectPath cop = new CIMObjectPath ("JMPIExpInstance_TestPropertyTypes",
                                               "root/SampleProvider");
        CIMClass      cc  = null;

        if (DEBUG)
        {
            System.out.println ("testCIMDataType: cop = " + cop);
        }

        try
        {
            cc = client.getClass (cop,
                                  true,   // propagated
                                  true,   // includeQualifiers
                                  true,   // includeClassOrigin
                                  null);  // propertyList
        }
        catch (CIMException e)
        {
            System.out.println ("FAILURE: testCIMDataType: client.getClass: caught " + e);

            return false;
        }

        if (DEBUG)
        {
            System.out.println ("testCIMDataType: cc = " + cc);
        }

        if (cc == null)
        {
            System.out.println ("FAILURE: testCIMDataType: cc == null");

            return false;
        }

        // -----

        // Property names and their expected CIM data types. Array properties
        // follow the "PropertyArray<type>" naming convention and reuse the
        // scalar type constants; array-ness is derived from the name below,
        // which replaces the former hand-maintained parallel boolean and
        // size arrays (every array property is unbounded, i.e. size 0).
        String aszPropertyNames[] = {
            "PropertyString",
            "PropertyUint8",
            "PropertyUint16",
            "PropertyUint32",
            "PropertyUint64",
            "PropertySint8",
            "PropertySint16",
            "PropertySint32",
            "PropertySint64",
            "PropertyBoolean",
            "PropertyReal32",
            "PropertyReal64",
            "PropertyDatetime",
            "PropertyChar16",
            "PropertyArrayUint8",
            "PropertyArrayUint16",
            "PropertyArrayUint32",
            "PropertyArrayUint64",
            "PropertyArraySint8",
            "PropertyArraySint16",
            "PropertyArraySint32",
            "PropertyArraySint64",
            "PropertyArrayBoolean",
            "PropertyArrayReal32",
            "PropertyArrayReal64",
            "PropertyArrayDatetime",
            "PropertyArrayChar16"
        };
        int aiDataTypes[] = {
            CIMDataType.STRING,
            CIMDataType.UINT8,
            CIMDataType.UINT16,
            CIMDataType.UINT32,
            CIMDataType.UINT64,
            CIMDataType.SINT8,
            CIMDataType.SINT16,
            CIMDataType.SINT32,
            CIMDataType.SINT64,
            CIMDataType.BOOLEAN,
            CIMDataType.REAL32,
            CIMDataType.REAL64,
            CIMDataType.DATETIME,
            CIMDataType.CHAR16,
            CIMDataType.UINT8,
            CIMDataType.UINT16,
            CIMDataType.UINT32,
            CIMDataType.UINT64,
            CIMDataType.SINT8,
            CIMDataType.SINT16,
            CIMDataType.SINT32,
            CIMDataType.SINT64,
            CIMDataType.BOOLEAN,
            CIMDataType.REAL32,
            CIMDataType.REAL64,
            CIMDataType.DATETIME,
            CIMDataType.CHAR16
        };

        if (aszPropertyNames.length != aiDataTypes.length)
        {
            System.out.println ("FAILURE: testCIMDataType: arrays not the same length (1)");

            return false;
        }

        for (int i = 0; i < aszPropertyNames.length; i++)
        {
            CIMDataType cdt      = cc.getProperty (aszPropertyNames[i]).getType ();
            boolean     fIsArray = aszPropertyNames[i].startsWith ("PropertyArray");

            if (cdt.getType () != aiDataTypes[i])
            {
                System.out.println ("FAILURE: testCIMDataType: cdt.getType () for " + aszPropertyNames[i]);

                return false;
            }
            if (cdt.isArrayType () != fIsArray)
            {
                System.out.println ("FAILURE: testCIMDataType: cdt.isArrayType () for " + aszPropertyNames[i]);

                return false;
            }
            // All properties (scalar or unbounded array) report size 0.
            if (cdt.getSize () != 0)
            {
                System.out.println ("FAILURE: testCIMDataType: cdt.getSize () for " + aszPropertyNames[i]);

                return false;
            }
        }

        // -----

        System.out.println ("SUCCESS: testCIMDataType");

        return true;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.jackrabbit.oak.commons.junit;
import static org.slf4j.Logger.ROOT_LOGGER_NAME;

import java.util.ArrayList;
import java.util.List;

import org.slf4j.LoggerFactory;

import ch.qos.logback.classic.Level;
import ch.qos.logback.classic.Logger;
import ch.qos.logback.classic.LoggerContext;
import ch.qos.logback.classic.spi.ILoggingEvent;
import ch.qos.logback.core.Appender;
import ch.qos.logback.core.AppenderBase;

import com.google.common.collect.Lists;
/**
* The LogCustomizer allows to enable log level for a specific logger and/or
* filter the received logs this logger on a dedicated log level
*
* <pre>
* public class ConflictResolutionTest {
*
* private final LogCustomizer customLogs = LogCustomizer
* .forLogger(
* "org.apache.jackrabbit.oak.plugins.commit.MergingNodeStateDiff")
* .enable(Level.DEBUG).create();
*
* @Before
* public void setup() throws RepositoryException {
* customLogs.starting();
* }
*
* @After
* public void after() {
* customLogs.finished();
* }
*
* @Test
* public void test() {
* List<String> myLogs = customLogs.getLogs();
* assertTrue(myLogs.size() == 1);
* }
*
* }
* </pre>
*/
public class LogCustomizer {

    /** Creates a builder for a customizer attached to the root logger. */
    public static LogCustomizerBuilder forRootLogger() {
        return forLogger(ROOT_LOGGER_NAME);
    }

    /** Creates a builder for a customizer attached to the named logger. */
    public static LogCustomizerBuilder forLogger(String name) {
        return new LogCustomizerBuilder(name);
    }

    /** Creates a builder for a customizer attached to the given class's logger. */
    public static LogCustomizerBuilder forLogger(Class<?> clazz) {
        return new LogCustomizerBuilder(clazz.getName());
    }

    /**
     * Fluent builder collecting the level and message constraints before the
     * {@link LogCustomizer} is created.
     */
    public static class LogCustomizerBuilder {
        private final String name;
        private Level enableLevel;
        private Level filterLevel;
        private String matchExactMessage;
        private String matchContainsMessage;
        private String matchRegexMessage;

        private LogCustomizerBuilder(String name) {
            this.name = name;
        }

        /** Forces the logger to the given level while the customizer is active. */
        public LogCustomizerBuilder enable(Level level) {
            this.enableLevel = level;
            return this;
        }

        /** Same as {@link #enable(Level)} accepting an slf4j level. */
        public LogCustomizerBuilder enable(org.slf4j.event.Level level) {
            this.enableLevel = fromSlf4jLevel(level);
            return this;
        }

        /** Records only events at or above the given level. */
        public LogCustomizerBuilder filter(Level level) {
            this.filterLevel = level;
            return this;
        }

        /** Same as {@link #filter(Level)} accepting an slf4j level. */
        public LogCustomizerBuilder filter(org.slf4j.event.Level level) {
            this.filterLevel = fromSlf4jLevel(level);
            return this;
        }

        /** Records only events whose formatted message equals the given text. */
        public LogCustomizerBuilder exactlyMatches(String message) {
            this.matchExactMessage = message;
            return this;
        }

        /** Records only events whose formatted message contains the given text. */
        public LogCustomizerBuilder contains(String message) {
            this.matchContainsMessage = message;
            return this;
        }

        /** Records only events whose formatted message matches the given regex. */
        public LogCustomizerBuilder matchesRegex(String message) {
            this.matchRegexMessage = message;
            return this;
        }

        /** Builds the customizer with the constraints collected so far. */
        public LogCustomizer create() {
            return new LogCustomizer(name, enableLevel, filterLevel, matchExactMessage, matchContainsMessage, matchRegexMessage);
        }

        // Maps an slf4j level onto the corresponding logback level.
        private static Level fromSlf4jLevel(org.slf4j.event.Level level) {
            switch (level) {
            case DEBUG:
                return Level.DEBUG;
            case ERROR:
                return Level.ERROR;
            case INFO:
                return Level.INFO;
            case TRACE:
                return Level.TRACE;
            case WARN:
                return Level.WARN;
            default:
                throw new IllegalArgumentException("Log level not supported: " + level);
            }
        }
    }

    private final Logger logger;
    // Plain ArrayList from the JDK; the Guava factory added nothing here.
    private final List<String> logs = new ArrayList<>();
    private final Level enableLevel;
    private final Level originalLevel;
    private final Appender<ILoggingEvent> customLogger;

    private LogCustomizer(String name, Level enableLevel,
            final Level filterLevel,
            final String matchExactMessage, final String matchContainsMessage, final String matchRegexMessage) {
        this.logger = getLogger(name);
        if (enableLevel != null) {
            this.enableLevel = enableLevel;
            // Remember the level in effect now so finished() can restore it.
            this.originalLevel = logger.getLevel();
        } else {
            this.enableLevel = null;
            this.originalLevel = null;
        }
        customLogger = new AppenderBase<ILoggingEvent>() {
            @Override
            protected void append(ILoggingEvent e) {
                // Level gate: with no filter level every event qualifies.
                boolean logLevelOk = false;
                if (filterLevel == null) {
                    logLevelOk = true;
                } else if (e.getLevel().isGreaterOrEqual(filterLevel)) {
                    logLevelOk = true;
                }
                if (logLevelOk) {
                    // Message gates: all configured matchers must agree.
                    boolean messageMatchOk = true;
                    String message = e.getFormattedMessage();
                    if (messageMatchOk && matchExactMessage != null && !matchExactMessage.equals(message)) {
                        messageMatchOk = false;
                    }
                    if (messageMatchOk && matchContainsMessage != null && !message.contains(matchContainsMessage)) {
                        messageMatchOk = false;
                    }
                    if (messageMatchOk && matchRegexMessage != null && !message.matches(matchRegexMessage)) {
                        messageMatchOk = false;
                    }
                    if (messageMatchOk) {
                        // Reuse the message formatted above instead of
                        // calling getFormattedMessage() a second time.
                        logs.add(message);
                    }
                }
            }
        };
    }

    private static Logger getLogger(String name) {
        return ((LoggerContext) LoggerFactory.getILoggerFactory())
                .getLogger(name);
    }

    /**
     * @return the messages recorded since {@link #starting()} was called
     */
    public List<String> getLogs() {
        return logs;
    }

    /**
     * Activates the customizer: raises the logger level if requested and
     * attaches the recording appender.
     */
    public void starting() {
        customLogger.start();
        if (enableLevel != null) {
            logger.setLevel(enableLevel);
        }
        logger.addAppender(customLogger);
    }

    /**
     * Deactivates the customizer: restores the original logger level,
     * detaches the appender and discards the recorded messages.
     */
    public void finished() {
        if (originalLevel != null) {
            logger.setLevel(originalLevel);
        }
        logger.detachAppender(customLogger);
        customLogger.stop();
        logs.clear();
    }
}
| |
/*
* Copyright 2015 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License, version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at:
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package io.netty.handler.codec.http2;
import org.junit.Before;
import org.junit.Test;
import org.mockito.ArgumentCaptor;
import org.mockito.MockitoAnnotations;
import org.mockito.verification.VerificationMode;
import static io.netty.handler.codec.http2.Http2CodecUtil.DEFAULT_PRIORITY_WEIGHT;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.Mockito.any;
import static org.mockito.Mockito.anyInt;
import static org.mockito.Mockito.atLeastOnce;
import static org.mockito.Mockito.atMost;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.eq;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.reset;
import static org.mockito.Mockito.same;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
public class WeightedFairQueueByteDistributorTest extends AbstractWeightedFairQueueByteDistributorDependencyTest {
private static final int STREAM_A = 1;
private static final int STREAM_B = 3;
private static final int STREAM_C = 5;
private static final int STREAM_D = 7;
private static final int STREAM_E = 9;
private static final int ALLOCATION_QUANTUM = 100;
@Before
public void setup() throws Http2Exception {
    MockitoAnnotations.initMocks(this);

    // Assume we always write all the allocated bytes.
    doAnswer(writeAnswer(false)).when(writer).write(any(Http2Stream.class), anyInt());

    // -1 selects the distributor constructor without a state-only size bound.
    setup(-1);
}
/**
 * Builds a fresh client-side connection holding streams A-D, where C and D
 * are children of A, and installs the distributor under test.
 *
 * @param maxStateOnlySize maximum number of state-only entries the
 *        distributor may retain; a negative value uses the default.
 */
private void setup(int maxStateOnlySize) throws Http2Exception {
    connection = new DefaultHttp2Connection(false);
    distributor = maxStateOnlySize >= 0 ? new WeightedFairQueueByteDistributor(connection, maxStateOnlySize)
                                        : new WeightedFairQueueByteDistributor(connection);
    distributor.allocationQuantum(ALLOCATION_QUANTUM);

    // Resulting tree: 0 <- {A, B}, A <- {C, D}.
    connection.local().createStream(STREAM_A, false);
    connection.local().createStream(STREAM_B, false);
    Http2Stream streamC = connection.local().createStream(STREAM_C, false);
    Http2Stream streamD = connection.local().createStream(STREAM_D, false);
    setPriority(streamC.id(), STREAM_A, DEFAULT_PRIORITY_WEIGHT, false);
    setPriority(streamD.id(), STREAM_A, DEFAULT_PRIORITY_WEIGHT, false);
}
/**
 * In this test, we block B such that it has no frames. We distribute enough bytes for all streams and stream B
 * should be preserved in the priority queue structure until it has no "active" children, but it should not be
 * doubly added to stream 0.
 *
 * <pre>
 * 0
 * |
 * A
 * |
 * [B]
 * |
 * C
 * |
 * D
 * </pre>
 *
 * After the write:
 * <pre>
 * 0
 * </pre>
 */
// NOTE(review): "Dobule" in the method name is a typo for "Double"; left
// as-is to avoid changing the test's public name.
@Test
public void writeWithNonActiveStreamShouldNotDobuleAddToPriorityQueue() throws Http2Exception {
    initState(STREAM_A, 400, true);
    initState(STREAM_B, 500, true);
    initState(STREAM_C, 600, true);
    initState(STREAM_D, 700, true);

    // Restructure the tree into the chain 0 <- A <- B <- C <- D.
    setPriority(STREAM_B, STREAM_A, DEFAULT_PRIORITY_WEIGHT, true);
    setPriority(STREAM_D, STREAM_C, DEFAULT_PRIORITY_WEIGHT, true);

    // Block B, but it should still remain in the queue/tree structure.
    initState(STREAM_B, 0, false);

    // Get the streams before the write, because they may be closed.
    Http2Stream streamA = stream(STREAM_A);
    Http2Stream streamB = stream(STREAM_B);
    Http2Stream streamC = stream(STREAM_C);
    Http2Stream streamD = stream(STREAM_D);

    // From here on, writes close the stream once its data is exhausted.
    reset(writer);
    doAnswer(writeAnswer(true)).when(writer).write(any(Http2Stream.class), anyInt());

    // A, C and D are fully written; blocked B is never touched.
    assertFalse(write(400 + 600 + 700));
    assertEquals(400, captureWrites(streamA));
    verifyNeverWrite(streamB);
    assertEquals(600, captureWrites(streamC));
    assertEquals(700, captureWrites(streamD));
}
@Test
public void bytesUnassignedAfterProcessing() throws Http2Exception {
    initState(STREAM_A, 1, true);
    initState(STREAM_B, 2, true);
    initState(STREAM_C, 3, true);
    initState(STREAM_D, 4, true);

    // 10 bytes covers all pending data (1 + 2 + 3 + 4), so nothing remains.
    assertFalse(write(10));
    verifyWrite(STREAM_A, 1);
    verifyWrite(STREAM_B, 2);
    verifyWrite(STREAM_C, 3);
    verifyWrite(STREAM_D, 4);

    // A second distribution must not assign the already-written bytes again.
    assertFalse(write(10));
    verifyAnyWrite(STREAM_A, 1);
    verifyAnyWrite(STREAM_B, 1);
    verifyAnyWrite(STREAM_C, 1);
    verifyAnyWrite(STREAM_D, 1);
}
@Test
public void connectionErrorForWriterException() throws Http2Exception {
    initState(STREAM_A, 1, true);
    initState(STREAM_B, 2, true);
    initState(STREAM_C, 3, true);
    initState(STREAM_D, 4, true);

    // Make the writer throw when stream C is written.
    Exception fakeException = new RuntimeException("Fake exception");
    doThrow(fakeException).when(writer).write(same(stream(STREAM_C)), eq(3));

    try {
        write(10);
        fail("Expected an exception");
    } catch (Http2Exception e) {
        // A writer failure must surface as a connection-level INTERNAL_ERROR
        // carrying the original exception as its cause, not a stream error.
        assertFalse(Http2Exception.isStreamError(e));
        assertEquals(Http2Error.INTERNAL_ERROR, e.error());
        assertSame(fakeException, e.getCause());
    }

    // Distribution order is unspecified, so the other streams may or may not
    // have been written before the failure.
    verifyWrite(atMost(1), STREAM_A, 1);
    verifyWrite(atMost(1), STREAM_B, 2);
    verifyWrite(STREAM_C, 3);
    verifyWrite(atMost(1), STREAM_D, 4);

    // Restore a working writer; the next distribution must complete fully.
    doAnswer(writeAnswer(false)).when(writer).write(same(stream(STREAM_C)), eq(3));
    assertFalse(write(10));
    verifyWrite(STREAM_A, 1);
    verifyWrite(STREAM_B, 2);
    verifyWrite(times(2), STREAM_C, 3);
    verifyWrite(STREAM_D, 4);
}
/**
 * In this test, we verify that each stream is allocated a minimum chunk size. When bytes
 * run out, the remaining streams will be next in line for the next iteration.
 */
@Test
public void minChunkShouldBeAllocatedPerStream() throws Http2Exception {
    // Re-assign weights.
    setPriority(STREAM_A, 0, (short) 50, false);
    setPriority(STREAM_B, 0, (short) 200, false);
    setPriority(STREAM_C, STREAM_A, (short) 100, false);
    setPriority(STREAM_D, STREAM_A, (short) 100, false);

    // Update the streams: each has exactly one allocation quantum pending.
    initState(STREAM_A, ALLOCATION_QUANTUM, true);
    initState(STREAM_B, ALLOCATION_QUANTUM, true);
    initState(STREAM_C, ALLOCATION_QUANTUM, true);
    initState(STREAM_D, ALLOCATION_QUANTUM, true);

    // Only write 3 * chunkSize, so that we'll only write to the first 3 streams.
    int written = 3 * ALLOCATION_QUANTUM;
    assertTrue(write(written));
    assertEquals(ALLOCATION_QUANTUM, captureWrites(STREAM_A));
    assertEquals(ALLOCATION_QUANTUM, captureWrites(STREAM_B));
    assertEquals(ALLOCATION_QUANTUM, captureWrites(STREAM_C));
    verifyWrite(atMost(1), STREAM_D, 0);

    // Now write again and verify that the last stream is written to.
    assertFalse(write(ALLOCATION_QUANTUM));
    assertEquals(ALLOCATION_QUANTUM, captureWrites(STREAM_A));
    assertEquals(ALLOCATION_QUANTUM, captureWrites(STREAM_B));
    assertEquals(ALLOCATION_QUANTUM, captureWrites(STREAM_C));
    assertEquals(ALLOCATION_QUANTUM, captureWrites(STREAM_D));
}
/**
 * In this test, we verify that the highest priority frame which has 0 bytes to send, but an empty frame is able
 * to send that empty frame.
 *
 * <pre>
 * 0
 * / \
 * A B
 * / \
 * C D
 * </pre>
 *
 * After the tree shift:
 *
 * <pre>
 * 0
 * |
 * A
 * |
 * B
 * / \
 * C D
 * </pre>
 */
@Test
public void emptyFrameAtHeadIsWritten() throws Http2Exception {
    // Streams A-C have empty frames pending; only D carries payload bytes.
    initState(STREAM_A, 0, true);
    initState(STREAM_B, 0, true);
    initState(STREAM_C, 0, true);
    initState(STREAM_D, 10, true);

    // Make B exclusively dependent on A, shifting the tree as diagrammed above.
    setPriority(STREAM_B, STREAM_A, DEFAULT_PRIORITY_WEIGHT, true);

    // The empty frames are flushed and D receives all 10 bytes.
    assertFalse(write(10));
    verifyWrite(STREAM_A, 0);
    verifyWrite(STREAM_B, 0);
    verifyWrite(STREAM_C, 0);
    verifyWrite(STREAM_D, 10);
}
/**
 * In this test, we block A which allows bytes to be written by C and D. Here's a view of the tree (stream A is
 * blocked).
 *
 * <pre>
 * 0
 * / \
 * [A] B
 * / \
 * C D
 * </pre>
 */
@Test
public void blockedStreamNoDataShouldSpreadDataToChildren() throws Http2Exception {
    // A never registers state, so it is blocked and must never be written.
    blockedStreamShouldSpreadDataToChildren(false);
}
/**
 * In this test, we block A and also give it an empty data frame to send.
 * All bytes should be delegated to by C and D. Here's a view of the tree (stream A is blocked).
 *
 * <pre>
 * 0
 * / \
 * [A](0) B
 * / \
 * C D
 * </pre>
 */
@Test
public void blockedStreamWithDataAndNotAllowedToSendShouldSpreadDataToChildren() throws Http2Exception {
    // A cannot stream: it has pending (empty) data but hasFrame=false.
    initState(STREAM_A, 0, true, false);
    blockedStreamShouldSpreadDataToChildren(false);
}
/**
 * In this test, we allow A to send, but expect the flow controller will only write to the stream 1 time.
 * This is because we give the stream a chance to write its empty frame 1 time, and the stream will not
 * be written to again until a update stream is called.
 *
 * <pre>
 * 0
 * / \
 * A B
 * / \
 * C D
 * </pre>
 */
@Test
public void streamWithZeroFlowControlWindowAndDataShouldWriteOnlyOnce() throws Http2Exception {
    // A may send its empty frame exactly once.
    initState(STREAM_A, 0, true, true);
    blockedStreamShouldSpreadDataToChildren(true);

    // Make sure if we call update stream again, A should write 1 more time.
    initState(STREAM_A, 0, true, true);
    assertFalse(write(1));
    verifyWrite(times(2), STREAM_A, 0);

    // Try to write again, but since no initState A should not write again
    assertFalse(write(1));
    verifyWrite(times(2), STREAM_A, 0);
}
/**
 * Shared body for the blocked-stream tests above. B, C and D each have 10 pending bytes while
 * A has none; successive {@code write} calls should drain B first and then spread the bytes
 * that would have gone to A across its children C and D.
 *
 * @param streamAShouldWriteZero whether A is expected to be written exactly once with an empty frame
 *        (true when A was given a 0-byte frame it is allowed to send; false when A must never be written)
 */
private void blockedStreamShouldSpreadDataToChildren(boolean streamAShouldWriteZero) throws Http2Exception {
initState(STREAM_B, 10, true);
initState(STREAM_C, 10, true);
initState(STREAM_D, 10, true);
// Write up to 10 bytes.
assertTrue(write(10));
if (streamAShouldWriteZero) {
verifyWrite(STREAM_A, 0);
} else {
verifyNeverWrite(STREAM_A);
}
// C and D may each be flushed once with an empty frame, but receive no payload yet.
verifyWrite(atMost(1), STREAM_C, 0);
verifyWrite(atMost(1), STREAM_D, 0);
// B is entirely written
verifyWrite(STREAM_B, 10);
// Now test that writes get delegated from A (which is blocked) to its children
assertTrue(write(5));
if (streamAShouldWriteZero) {
verifyWrite(times(1), STREAM_A, 0);
} else {
verifyNeverWrite(STREAM_A);
}
verifyWrite(STREAM_D, 5);
verifyWrite(atMost(1), STREAM_C, 0);
assertTrue(write(5));
if (streamAShouldWriteZero) {
verifyWrite(times(1), STREAM_A, 0);
} else {
verifyNeverWrite(STREAM_A);
}
// After 10 + 5 + 5 bytes total, C and D together have received 10 of them.
assertEquals(10, captureWrites(STREAM_C) + captureWrites(STREAM_D));
assertTrue(write(5));
// Everything is drained now, so the final distribute reports no data remaining.
assertFalse(write(5));
if (streamAShouldWriteZero) {
verifyWrite(times(1), STREAM_A, 0);
} else {
verifyNeverWrite(STREAM_A);
}
verifyWrite(times(2), STREAM_C, 5);
verifyWrite(times(2), STREAM_D, 5);
}
/**
 * In this test, we block B which allows all bytes to be written by A. A should not share the data with its children
 * since it's not blocked.
 *
 * <pre>
 * 0
 * / \
 * A [B]
 * / \
 * C D
 * </pre>
 */
@Test
public void childrenShouldNotSendDataUntilParentBlocked() throws Http2Exception {
// B cannot stream (it is given no state at all).
initState(STREAM_A, 10, true);
initState(STREAM_C, 10, true);
initState(STREAM_D, 10, true);
// Write up to 10 bytes.
assertTrue(write(10));
// A is assigned all of the bytes.
verifyWrite(STREAM_A, 10);
verifyNeverWrite(STREAM_B);
// The children may at most be flushed once with an empty frame — no payload.
verifyWrite(atMost(1), STREAM_C, 0);
verifyWrite(atMost(1), STREAM_D, 0);
}
/**
 * In this test, we block B which allows all bytes to be written by A. Once A is complete, it will spill over the
 * remaining of its portion to its children.
 *
 * <pre>
 * 0
 * / \
 * A [B]
 * / \
 * C D
 * </pre>
 */
@Test
public void parentShouldWaterFallDataToChildren() throws Http2Exception {
// B cannot stream.
initState(STREAM_A, 5, true);
initState(STREAM_C, 10, true);
initState(STREAM_D, 10, true);
// Write up to 10 bytes.
assertTrue(write(10));
// A only has 5 bytes, so the remaining 5 spill over to its first child C.
verifyWrite(STREAM_A, 5);
verifyNeverWrite(STREAM_B);
verifyWrite(STREAM_C, 5);
verifyNeverWrite(STREAM_D);
// 15 more bytes drains everything that is left (5 for C, 10 for D).
assertFalse(write(15));
verifyAnyWrite(STREAM_A, 1);
verifyNeverWrite(STREAM_B);
verifyWrite(times(2), STREAM_C, 5);
verifyWrite(STREAM_D, 10);
}
/**
 * In this test, we verify re-prioritizing a stream. We start out with B blocked:
 *
 * <pre>
 * 0
 * / \
 * A [B]
 * / \
 * C D
 * </pre>
 *
 * We then re-prioritize D so that it's directly off of the connection and verify that A and D split the written
 * bytes between them.
 *
 * <pre>
 * 0
 * /|\
 * / | \
 * A [B] D
 * /
 * C
 * </pre>
 */
@Test
public void reprioritizeShouldAdjustOutboundFlow() throws Http2Exception {
// B cannot stream.
initState(STREAM_A, 10, true);
initState(STREAM_C, 10, true);
initState(STREAM_D, 10, true);
// Re-prioritize D as a direct child of the connection.
setPriority(STREAM_D, 0, DEFAULT_PRIORITY_WEIGHT, false);
assertTrue(write(10));
verifyWrite(STREAM_A, 10);
verifyNeverWrite(STREAM_B);
verifyNeverWrite(STREAM_C);
// D may be flushed once with an empty frame in the first pass.
verifyWrite(atMost(1), STREAM_D, 0);
// 20 more bytes drains C (via B's subtree) and D completely.
assertFalse(write(20));
verifyAnyWrite(STREAM_A, 1);
verifyNeverWrite(STREAM_B);
verifyWrite(STREAM_C, 10);
verifyWrite(STREAM_D, 10);
}
/**
 * Test that the maximum allowed amount the flow controller allows to be sent is always fully allocated if
 * the streams have at least this much data to send. See https://github.com/netty/netty/issues/4266.
 * <pre>
 * 0
 * / | \
 * / | \
 * A(0) B(0) C(0)
 * /
 * D(> allowed to send in 1 allocation attempt)
 * </pre>
 */
@Test
public void unstreamableParentsShouldFeedHungryChildren() throws Http2Exception {
// Setup the priority tree.
setPriority(STREAM_A, 0, (short) 32, false);
setPriority(STREAM_B, 0, (short) 16, false);
setPriority(STREAM_C, 0, (short) 16, false);
setPriority(STREAM_D, STREAM_A, (short) 16, false);
final int writableBytes = 100;
// Send enough so it can not be completely written out
final int expectedUnsentAmount = 1;
initState(STREAM_D, writableBytes + expectedUnsentAmount, true);
// D must be allowed to consume the full allocation even though its parent A has no data.
assertTrue(write(writableBytes));
verifyWrite(STREAM_D, writableBytes);
// The single remaining byte is written in a second pass.
assertFalse(write(expectedUnsentAmount));
verifyWrite(STREAM_D, expectedUnsentAmount);
}
/**
 * In this test, we root all streams at the connection, and then verify that data is split appropriately based on
 * weight (all available data is the same).
 *
 * <pre>
 * 0
 * / / \ \
 * A B C D
 * </pre>
 */
@Test
public void writeShouldPreferHighestWeight() throws Http2Exception {
// Root the streams at the connection and assign weights.
setPriority(STREAM_A, 0, (short) 50, false);
setPriority(STREAM_B, 0, (short) 200, false);
setPriority(STREAM_C, 0, (short) 100, false);
setPriority(STREAM_D, 0, (short) 100, false);
initState(STREAM_A, 1000, true);
initState(STREAM_B, 1000, true);
initState(STREAM_C, 1000, true);
initState(STREAM_D, 1000, true);
// Set allocation quantum to 1 so it is easier to see the ratio of total bytes written between each stream.
distributor.allocationQuantum(1);
assertTrue(write(1000));
// Totals follow the 50:200:100:100 weight ratio of the 1000 bytes written.
assertEquals(100, captureWrites(STREAM_A));
assertEquals(450, captureWrites(STREAM_B));
assertEquals(225, captureWrites(STREAM_C));
assertEquals(225, captureWrites(STREAM_D));
}
/**
 * In this test, we root all streams at the connection, block streams C and D, and then verify that data is
 * prioritized toward stream B which has a higher weight than stream A.
 * <p>
 * We also verify that the amount that is written is not uniform, and not always the allocation quantum.
 *
 * <pre>
 * 0
 * / / \ \
 * A B [C] [D]
 * </pre>
 */
@Test
public void writeShouldFavorPriority() throws Http2Exception {
// Root the streams at the connection and assign weights.
setPriority(STREAM_A, 0, (short) 50, false);
setPriority(STREAM_B, 0, (short) 200, false);
setPriority(STREAM_C, 0, (short) 100, false);
setPriority(STREAM_D, 0, (short) 100, false);
initState(STREAM_A, 1000, true);
initState(STREAM_B, 1000, true);
// C and D have data but are not allowed to stream (last flag = false).
initState(STREAM_C, 1000, false);
initState(STREAM_D, 1000, false);
// Set allocation quantum to 1 so it is easier to see the ratio of total bytes written between each stream.
distributor.allocationQuantum(1);
// First 100 bytes: split 20/80 between A and B per the 50:200 weights.
assertTrue(write(100));
assertEquals(20, captureWrites(STREAM_A));
// A is dribbled out 1 byte at a time (the quantum), B in larger chunks.
verifyWrite(times(20), STREAM_A, 1);
assertEquals(80, captureWrites(STREAM_B));
verifyWrite(times(0), STREAM_B, 1);
verifyNeverWrite(STREAM_C);
verifyNeverWrite(STREAM_D);
assertTrue(write(100));
assertEquals(40, captureWrites(STREAM_A));
verifyWrite(times(40), STREAM_A, 1);
assertEquals(160, captureWrites(STREAM_B));
verifyWrite(atMost(1), STREAM_B, 1);
verifyNeverWrite(STREAM_C);
verifyNeverWrite(STREAM_D);
// B only has 1000 bytes total, so this pass finishes B while A keeps trickling.
assertTrue(write(1050));
assertEquals(250, captureWrites(STREAM_A));
verifyWrite(times(250), STREAM_A, 1);
assertEquals(1000, captureWrites(STREAM_B));
verifyWrite(atMost(2), STREAM_B, 1);
verifyNeverWrite(STREAM_C);
verifyNeverWrite(STREAM_D);
// With B done, A gets its remaining 750 bytes in a single write.
assertFalse(write(750));
assertEquals(1000, captureWrites(STREAM_A));
verifyWrite(times(1), STREAM_A, 750);
assertEquals(1000, captureWrites(STREAM_B));
verifyWrite(times(0), STREAM_B, 0);
verifyNeverWrite(STREAM_C);
verifyNeverWrite(STREAM_D);
}
/**
 * In this test, we root all streams at the connection, and then verify that data is split equally among the stream,
 * since they all have the same weight.
 *
 * <pre>
 * 0
 * / / \ \
 * A B C D
 * </pre>
 */
@Test
public void samePriorityShouldDistributeBasedOnData() throws Http2Exception {
// Root the streams at the connection with the same weights.
setPriority(STREAM_A, 0, DEFAULT_PRIORITY_WEIGHT, false);
setPriority(STREAM_B, 0, DEFAULT_PRIORITY_WEIGHT, false);
setPriority(STREAM_C, 0, DEFAULT_PRIORITY_WEIGHT, false);
setPriority(STREAM_D, 0, DEFAULT_PRIORITY_WEIGHT, false);
initState(STREAM_A, 400, true);
initState(STREAM_B, 500, true);
// C has nothing to send; the other three share the bytes evenly.
initState(STREAM_C, 0, true);
initState(STREAM_D, 700, true);
// Set allocation quantum to 1 so it is easier to see the ratio of total bytes written between each stream.
distributor.allocationQuantum(1);
assertTrue(write(999));
// 999 bytes split three ways: 333 each for A, B and D; C gets only its empty frame.
assertEquals(333, captureWrites(STREAM_A));
assertEquals(333, captureWrites(STREAM_B));
verifyWrite(times(1), STREAM_C, 0);
assertEquals(333, captureWrites(STREAM_D));
}
/**
 * In this test, we call distribute with 0 bytes and verify that all streams with 0 bytes are written.
 *
 * <pre>
 * 0
 * / \
 * A B
 * / \
 * C D
 * </pre>
 *
 * After the tree shift:
 *
 * <pre>
 * 0
 * |
 * [A]
 * |
 * B
 * / \
 * C D
 * </pre>
 */
@Test
public void zeroDistributeShouldWriteAllZeroFrames() throws Http2Exception {
// A has pending data but cannot stream; B, C and D have empty frames ready.
initState(STREAM_A, 400, false);
initState(STREAM_B, 0, true);
initState(STREAM_C, 0, true);
initState(STREAM_D, 0, true);
setPriority(STREAM_B, STREAM_A, DEFAULT_PRIORITY_WEIGHT, true);
// Even with a 0-byte budget, every stream holding an empty frame is written exactly once.
assertFalse(write(0));
verifyNeverWrite(STREAM_A);
verifyWrite(STREAM_B, 0);
verifyAnyWrite(STREAM_B, 1);
verifyWrite(STREAM_C, 0);
verifyAnyWrite(STREAM_C, 1);
verifyWrite(STREAM_D, 0);
verifyAnyWrite(STREAM_D, 1);
}
/**
 * In this test, we call distribute with 100 bytes which is the total amount eligible to be written, and also have
 * streams with 0 bytes to write. All of these streams should be written with a single call to distribute.
 *
 * <pre>
 * 0
 * / \
 * A B
 * / \
 * C D
 * </pre>
 *
 * After the tree shift:
 *
 * <pre>
 * 0
 * |
 * [A]
 * |
 * B
 * / \
 * C D
 * </pre>
 */
@Test
public void nonZeroDistributeShouldWriteAllZeroFramesIfAllEligibleDataIsWritten() throws Http2Exception {
// A has data but cannot stream; B has exactly the eligible 100 bytes.
initState(STREAM_A, 400, false);
initState(STREAM_B, 100, true);
initState(STREAM_C, 0, true);
initState(STREAM_D, 0, true);
setPriority(STREAM_B, STREAM_A, DEFAULT_PRIORITY_WEIGHT, true);
// One call both drains B and flushes C's and D's empty frames.
assertFalse(write(100));
verifyNeverWrite(STREAM_A);
verifyWrite(STREAM_B, 100);
verifyAnyWrite(STREAM_B, 1);
verifyWrite(STREAM_C, 0);
verifyAnyWrite(STREAM_C, 1);
verifyWrite(STREAM_D, 0);
verifyAnyWrite(STREAM_D, 1);
}
/**
 * In this test, we shift the priority tree and verify priority bytes for each subtree are correct
 *
 * <pre>
 * 0
 * / \
 * A B
 * / \
 * C D
 * </pre>
 *
 * After the tree shift:
 *
 * <pre>
 * 0
 * |
 * A
 * |
 * B
 * / \
 * C D
 * </pre>
 */
@Test
public void bytesDistributedWithRestructureShouldBeCorrect() throws Http2Exception {
initState(STREAM_A, 400, true);
initState(STREAM_B, 500, true);
initState(STREAM_C, 600, true);
initState(STREAM_D, 700, true);
// Move B under A exclusively, pushing C and D beneath B.
setPriority(STREAM_B, STREAM_A, DEFAULT_PRIORITY_WEIGHT, true);
// First 500 bytes: A is drained (400) and the remainder spills into B (100).
assertTrue(write(500));
assertEquals(400, captureWrites(STREAM_A));
verifyWrite(STREAM_B, 100);
verifyNeverWrite(STREAM_C);
verifyNeverWrite(STREAM_D);
// Next 400 bytes finish B; C and D may only be flushed with empty frames so far.
assertTrue(write(400));
assertEquals(400, captureWrites(STREAM_A));
assertEquals(500, captureWrites(STREAM_B));
verifyWrite(atMost(1), STREAM_C, 0);
verifyWrite(atMost(1), STREAM_D, 0);
// Final pass drains C and D completely.
assertFalse(write(1300));
assertEquals(400, captureWrites(STREAM_A));
assertEquals(500, captureWrites(STREAM_B));
assertEquals(600, captureWrites(STREAM_C));
assertEquals(700, captureWrites(STREAM_D));
}
/**
 * In this test, we add a node to the priority tree and verify
 *
 * <pre>
 * 0
 * / \
 * A B
 * / \
 * C D
 * </pre>
 *
 * After the tree shift:
 *
 * <pre>
 * 0
 * / \
 * A B
 * |
 * E
 * / \
 * C D
 * </pre>
 */
@Test
public void bytesDistributedWithAdditionShouldBeCorrect() throws Http2Exception {
// Create stream E and insert it exclusively under A, above C and D.
Http2Stream streamE = connection.local().createStream(STREAM_E, false);
setPriority(streamE.id(), STREAM_A, DEFAULT_PRIORITY_WEIGHT, true);
// Send a bunch of data on each stream.
initState(STREAM_A, 400, true);
initState(STREAM_B, 500, true);
initState(STREAM_C, 600, true);
initState(STREAM_D, 700, true);
initState(STREAM_E, 900, true);
// First 900 bytes drain A and B; their children stay untouched.
assertTrue(write(900));
assertEquals(400, captureWrites(STREAM_A));
assertEquals(500, captureWrites(STREAM_B));
verifyNeverWrite(STREAM_C);
verifyNeverWrite(STREAM_D);
verifyWrite(atMost(1), STREAM_E, 0);
// Next 900 bytes drain E (now the sole child of A with data assigned).
assertTrue(write(900));
assertEquals(400, captureWrites(STREAM_A));
assertEquals(500, captureWrites(STREAM_B));
verifyWrite(atMost(1), STREAM_C, 0);
verifyWrite(atMost(1), STREAM_D, 0);
assertEquals(900, captureWrites(STREAM_E));
// Final pass drains C and D.
assertFalse(write(1301));
assertEquals(400, captureWrites(STREAM_A));
assertEquals(500, captureWrites(STREAM_B));
assertEquals(600, captureWrites(STREAM_C));
assertEquals(700, captureWrites(STREAM_D));
assertEquals(900, captureWrites(STREAM_E));
}
/**
 * In this test, we close an internal stream in the priority tree.
 *
 * <pre>
 * 0
 * / \
 * A B
 * / \
 * C D
 * </pre>
 *
 * After the close:
 * <pre>
 * 0
 * / | \
 * C D B
 * </pre>
 */
@Test
public void bytesDistributedShouldBeCorrectWithInternalStreamClose() throws Http2Exception {
initState(STREAM_A, 400, true);
initState(STREAM_B, 500, true);
initState(STREAM_C, 600, true);
initState(STREAM_D, 700, true);
// Closing A promotes its children C and D to the connection level.
stream(STREAM_A).close();
assertTrue(write(500));
// A never gets any bytes; the 500 are shared among B, C and D.
verifyNeverWrite(STREAM_A);
assertEquals(500, captureWrites(STREAM_B) + captureWrites(STREAM_C) + captureWrites(STREAM_D));
// The remaining budget drains B, C and D to their totals.
assertFalse(write(1300));
verifyNeverWrite(STREAM_A);
assertEquals(500, captureWrites(STREAM_B));
assertEquals(600, captureWrites(STREAM_C));
assertEquals(700, captureWrites(STREAM_D));
}
/**
 * In this test, we close a leaf stream in the priority tree and verify distribution.
 *
 * <pre>
 * 0
 * / \
 * A B
 * / \
 * C D
 * </pre>
 *
 * After the close:
 * <pre>
 * 0
 * / \
 * A B
 * |
 * D
 * </pre>
 */
@Test
public void bytesDistributedShouldBeCorrectWithLeafStreamClose() throws Http2Exception {
initState(STREAM_A, 400, true);
initState(STREAM_B, 500, true);
initState(STREAM_C, 600, true);
initState(STREAM_D, 700, true);
// Closing leaf C removes it from the tree; its pending 600 bytes are discarded.
stream(STREAM_C).close();
assertTrue(write(900));
assertEquals(400, captureWrites(STREAM_A));
assertEquals(500, captureWrites(STREAM_B));
verifyNeverWrite(STREAM_C);
verifyWrite(atMost(1), STREAM_D, 0);
// D receives its full 700 bytes once A and B are drained.
assertFalse(write(700));
assertEquals(400, captureWrites(STREAM_A));
assertEquals(500, captureWrites(STREAM_B));
verifyNeverWrite(STREAM_C);
assertEquals(700, captureWrites(STREAM_D));
}
@Test
public void activeStreamDependentOnNewNonActiveStreamGetsQuantum() throws Http2Exception {
// NOTE(review): setup(0) presumably re-creates the distributor with an allocation
// quantum of 0 — confirm against the setup() definition earlier in this class.
setup(0);
initState(STREAM_D, 700, true);
// Make D depend on E, which has no state of its own (non-active parent).
setPriority(STREAM_D, STREAM_E, DEFAULT_PRIORITY_WEIGHT, true);
// D must still receive the full 700 bytes despite its inactive parent.
assertFalse(write(700));
assertEquals(700, captureWrites(STREAM_D));
}
@Test
public void streamWindowLargerThanIntDoesNotInfiniteLoop() throws Http2Exception {
// Pending bytes exceed Integer.MAX_VALUE (a long), which previously could overflow
// int arithmetic in the distributor and loop forever.
initState(STREAM_A, Integer.MAX_VALUE + 1L, true, true);
assertTrue(write(Integer.MAX_VALUE));
verifyWrite(STREAM_A, Integer.MAX_VALUE);
// One byte remains after the first maximal write.
assertFalse(write(1));
verifyWrite(STREAM_A, 1);
}
/**
 * Asks the distributor to allocate up to {@code numBytes} across the streams, invoking the
 * mocked {@code writer} for each stream. Returns the distributor's result (from the usage in
 * the tests above: {@code true} while streams still have unsent data afterwards).
 */
private boolean write(int numBytes) throws Http2Exception {
return distributor.distribute(numBytes, writer);
}
/** Verifies the writer was invoked (at least the default single time) for the stream with exactly {@code numBytes}. */
private void verifyWrite(int streamId, int numBytes) {
verify(writer).write(same(stream(streamId)), eq(numBytes));
}
/** Same as {@code verifyWrite(int, int)} but with an explicit Mockito verification mode (e.g. times/atMost). */
private void verifyWrite(VerificationMode mode, int streamId, int numBytes) {
verify(writer, mode).write(same(stream(streamId)), eq(numBytes));
}
/** Verifies the writer was invoked exactly {@code times} times for the stream, with any byte count. */
private void verifyAnyWrite(int streamId, int times) {
verify(writer, times(times)).write(same(stream(streamId)), anyInt());
}
/** Verifies the writer was never invoked for the stream identified by {@code streamId}. */
private void verifyNeverWrite(int streamId) {
verifyNeverWrite(stream(streamId));
}
/** Verifies the writer was never invoked for the given stream, with any byte count. */
private void verifyNeverWrite(Http2Stream stream) {
verify(writer, never()).write(same(stream), anyInt());
}
/** Convenience overload: total bytes written for the stream identified by {@code streamId}. */
private int captureWrites(int streamId) {
return captureWrites(stream(streamId));
}
/**
 * Returns the total number of bytes passed to {@code writer.write(...)} for the given stream
 * across all invocations (fails the test if the stream was never written).
 */
private int captureWrites(Http2Stream stream) {
ArgumentCaptor<Integer> writtenBytes = ArgumentCaptor.forClass(Integer.class);
verify(writer, atLeastOnce()).write(same(stream), writtenBytes.capture());
// Sum every captured per-call byte count into the stream's grand total.
return writtenBytes.getAllValues().stream().mapToInt(Integer::intValue).sum();
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.runtime.executiongraph;
import org.apache.flink.api.common.JobID;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.runtime.clusterframework.types.ResourceID;
import org.apache.flink.runtime.concurrent.ComponentMainThreadExecutorServiceAdapter;
import org.apache.flink.runtime.deployment.ResultPartitionDeploymentDescriptor;
import org.apache.flink.runtime.executiongraph.utils.SimpleAckingTaskManagerGateway;
import org.apache.flink.runtime.io.network.partition.JobMasterPartitionTracker;
import org.apache.flink.runtime.io.network.partition.NoOpJobMasterPartitionTracker;
import org.apache.flink.runtime.io.network.partition.ResultPartitionID;
import org.apache.flink.runtime.io.network.partition.ResultPartitionType;
import org.apache.flink.runtime.io.network.partition.TestingJobMasterPartitionTracker;
import org.apache.flink.runtime.jobgraph.DistributionPattern;
import org.apache.flink.runtime.jobgraph.IntermediateResultPartitionID;
import org.apache.flink.runtime.jobgraph.JobGraph;
import org.apache.flink.runtime.jobgraph.JobGraphTestUtils;
import org.apache.flink.runtime.jobgraph.JobVertex;
import org.apache.flink.runtime.jobgraph.JobVertexID;
import org.apache.flink.runtime.jobmanager.slots.TaskManagerGateway;
import org.apache.flink.runtime.scheduler.SchedulerBase;
import org.apache.flink.runtime.scheduler.SchedulerTestingUtils;
import org.apache.flink.runtime.scheduler.TestingPhysicalSlot;
import org.apache.flink.runtime.scheduler.TestingPhysicalSlotProvider;
import org.apache.flink.runtime.shuffle.NettyShuffleMaster;
import org.apache.flink.runtime.shuffle.PartitionDescriptor;
import org.apache.flink.runtime.shuffle.ProducerDescriptor;
import org.apache.flink.runtime.shuffle.ShuffleDescriptor;
import org.apache.flink.runtime.shuffle.ShuffleMaster;
import org.apache.flink.runtime.taskmanager.LocalTaskManagerLocation;
import org.apache.flink.runtime.taskmanager.TaskManagerLocation;
import org.apache.flink.runtime.testtasks.NoOpInvokable;
import org.apache.flink.util.TestLogger;
import org.junit.ClassRule;
import org.junit.Test;
import javax.annotation.Nonnull;
import java.util.Collection;
import java.util.Collections;
import java.util.Optional;
import java.util.Queue;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.CompletableFuture;
import java.util.function.Consumer;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.equalTo;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
/**
 * Tests for the {@link Execution} covering the lifecycle of the result partitions it produces:
 * when partitions are released via the {@link TaskManagerGateway} / {@link ShuffleMaster}, and
 * when the {@link JobMasterPartitionTracker} stops tracking (and optionally releases) them,
 * depending on which state transition terminates the execution.
 */
public class ExecutionPartitionLifecycleTest extends TestLogger {
@ClassRule
public static final TestingComponentMainThreadExecutor.Resource EXECUTOR_RESOURCE =
new TestingComponentMainThreadExecutor.Resource();
// Populated by setupExecutionGraphAndStartRunningJob(...) before each scenario runs:
// the producer's execution attempt under test.
private Execution execution;
// Deployment descriptor of the single partition produced by the producer vertex.
private ResultPartitionDeploymentDescriptor descriptor;
// Resource ID of the (local) task manager the producer was "deployed" to.
private ResourceID taskExecutorResourceId;
// ID of the job built for the scenario.
private JobID jobId;
@Test
public void testPartitionReleaseOnFinishWhileCanceling() throws Exception {
testPartitionReleaseOnStateTransitionsAfterRunning(
Execution::cancel, Execution::markFinished);
}
@Test
public void testPartitionReleaseOnCancelWhileFinished() throws Exception {
testPartitionReleaseOnStateTransitionsAfterRunning(
Execution::markFinished, Execution::cancel);
}
@Test
public void testPartitionReleaseOnSuspendWhileFinished() throws Exception {
testPartitionReleaseOnStateTransitionsAfterRunning(
Execution::markFinished, Execution::suspend);
}
/**
 * Runs a PIPELINED job, applies two state transitions to the producer execution, and asserts
 * that the partition is released (both on the task manager gateway and externally on the
 * shuffle master) only after the second transition, not the first.
 */
private void testPartitionReleaseOnStateTransitionsAfterRunning(
Consumer<Execution> stateTransition1, Consumer<Execution> stateTransition2)
throws Exception {
final SimpleAckingTaskManagerGateway taskManagerGateway =
new SimpleAckingTaskManagerGateway();
// Completed when the TM gateway is asked to release partitions for a job.
final CompletableFuture<Tuple2<JobID, Collection<ResultPartitionID>>>
releasePartitionsCallFuture = new CompletableFuture<>();
taskManagerGateway.setReleasePartitionsConsumer(
((jobID, partitionIds) ->
releasePartitionsCallFuture.complete(Tuple2.of(jobID, partitionIds))));
final TestingShuffleMaster testingShuffleMaster = new TestingShuffleMaster();
setupExecutionGraphAndStartRunningJob(
ResultPartitionType.PIPELINED,
NoOpJobMasterPartitionTracker.INSTANCE,
taskManagerGateway,
testingShuffleMaster);
// The first transition alone must NOT trigger a release.
stateTransition1.accept(execution);
assertFalse(releasePartitionsCallFuture.isDone());
// The second (terminal) transition must.
stateTransition2.accept(execution);
assertTrue(releasePartitionsCallFuture.isDone());
final Tuple2<JobID, Collection<ResultPartitionID>> releasePartitionsCall =
releasePartitionsCallFuture.get();
assertEquals(jobId, releasePartitionsCall.f0);
assertThat(
releasePartitionsCall.f1,
contains(descriptor.getShuffleDescriptor().getResultPartitionID()));
// The shuffle master must also have been told to release the partition externally.
assertEquals(1, testingShuffleMaster.externallyReleasedPartitions.size());
assertEquals(
descriptor.getShuffleDescriptor(),
testingShuffleMaster.externallyReleasedPartitions.poll());
}
// Expected tracker interaction after a state transition in the tracking tests below.
private enum PartitionReleaseResult {
NONE,
STOP_TRACKING,
STOP_TRACKING_AND_RELEASE
}
@Test
public void testPartitionTrackedAndNotReleasedWhenFinished() throws Exception {
testPartitionTrackingForStateTransition(
Execution::markFinished, PartitionReleaseResult.NONE);
}
@Test
public void testPartitionNotTrackedAndNotReleasedWhenCanceledByTM() throws Exception {
testPartitionTrackingForStateTransition(
execution -> {
execution.cancel();
execution.completeCancelling(
Collections.emptyMap(), new IOMetrics(0, 0, 0, 0), false);
},
PartitionReleaseResult.STOP_TRACKING);
}
@Test
public void testPartitionNotTrackedAndReleasedWhenCanceledByJM() throws Exception {
testPartitionTrackingForStateTransition(
execution -> {
execution.cancel();
execution.completeCancelling();
},
PartitionReleaseResult.STOP_TRACKING_AND_RELEASE);
}
@Test
public void testPartitionNotTrackedAndNotReleasedWhenFailedByTM() throws Exception {
testPartitionTrackingForStateTransition(
execution ->
execution.markFailed(
new Exception("Test exception"),
false,
Collections.emptyMap(),
new IOMetrics(0, 0, 0, 0),
false,
true),
PartitionReleaseResult.STOP_TRACKING);
}
@Test
public void testPartitionNotTrackedAndReleasedWhenFailedByJM() throws Exception {
testPartitionTrackingForStateTransition(
execution -> execution.markFailed(new Exception("Test exception")),
PartitionReleaseResult.STOP_TRACKING_AND_RELEASE);
}
/**
 * Runs a BLOCKING job, asserts that the producer's partition is tracked on deployment, applies
 * the given state transition, and checks that the partition tracker saw exactly the expected
 * stop-tracking / stop-tracking-and-release interaction (or none).
 */
private void testPartitionTrackingForStateTransition(
final Consumer<Execution> stateTransition,
final PartitionReleaseResult partitionReleaseResult)
throws Exception {
// Futures completed by the testing tracker when the corresponding call happens.
CompletableFuture<Tuple2<ResourceID, ResultPartitionDeploymentDescriptor>>
partitionStartTrackingFuture = new CompletableFuture<>();
CompletableFuture<Collection<ResultPartitionID>> partitionStopTrackingFuture =
new CompletableFuture<>();
CompletableFuture<Collection<ResultPartitionID>> partitionStopTrackingAndReleaseFuture =
new CompletableFuture<>();
final TestingJobMasterPartitionTracker partitionTracker =
new TestingJobMasterPartitionTracker();
partitionTracker.setStartTrackingPartitionsConsumer(
(resourceID, resultPartitionDeploymentDescriptor) ->
partitionStartTrackingFuture.complete(
Tuple2.of(resourceID, resultPartitionDeploymentDescriptor)));
partitionTracker.setStopTrackingPartitionsConsumer(partitionStopTrackingFuture::complete);
partitionTracker.setStopTrackingAndReleasePartitionsConsumer(
partitionStopTrackingAndReleaseFuture::complete);
setupExecutionGraphAndStartRunningJob(
ResultPartitionType.BLOCKING,
partitionTracker,
new SimpleAckingTaskManagerGateway(),
NettyShuffleMaster.INSTANCE);
// Tracking must begin when the job starts running, for our TM and our descriptor.
Tuple2<ResourceID, ResultPartitionDeploymentDescriptor> startTrackingCall =
partitionStartTrackingFuture.get();
assertThat(startTrackingCall.f0, equalTo(taskExecutorResourceId));
assertThat(startTrackingCall.f1, equalTo(descriptor));
stateTransition.accept(execution);
switch (partitionReleaseResult) {
case NONE:
assertFalse(partitionStopTrackingFuture.isDone());
assertFalse(partitionStopTrackingAndReleaseFuture.isDone());
break;
case STOP_TRACKING:
assertTrue(partitionStopTrackingFuture.isDone());
assertFalse(partitionStopTrackingAndReleaseFuture.isDone());
final Collection<ResultPartitionID> stopTrackingCall =
partitionStopTrackingFuture.get();
assertEquals(
Collections.singletonList(
descriptor.getShuffleDescriptor().getResultPartitionID()),
stopTrackingCall);
break;
case STOP_TRACKING_AND_RELEASE:
assertFalse(partitionStopTrackingFuture.isDone());
assertTrue(partitionStopTrackingAndReleaseFuture.isDone());
final Collection<ResultPartitionID> stopTrackingAndReleaseCall =
partitionStopTrackingAndReleaseFuture.get();
assertEquals(
Collections.singletonList(
descriptor.getShuffleDescriptor().getResultPartitionID()),
stopTrackingAndReleaseCall);
break;
}
}
/**
 * Builds a two-vertex (producer -> consumer, ALL_TO_ALL) batch job with the given partition
 * type, tracker, gateway and shuffle master, starts scheduling it on the main thread, moves
 * the producer's execution to RUNNING, and records the fields the test assertions use
 * (execution, descriptor, taskExecutorResourceId, jobId).
 */
private void setupExecutionGraphAndStartRunningJob(
ResultPartitionType resultPartitionType,
JobMasterPartitionTracker partitionTracker,
TaskManagerGateway taskManagerGateway,
ShuffleMaster<?> shuffleMaster)
throws Exception {
final JobVertex producerVertex = createNoOpJobVertex();
final JobVertex consumerVertex = createNoOpJobVertex();
consumerVertex.connectNewDataSetAsInput(
producerVertex, DistributionPattern.ALL_TO_ALL, resultPartitionType);
final TaskManagerLocation taskManagerLocation = new LocalTaskManagerLocation();
// Every slot request is answered with a slot on our single local task manager.
final TestingPhysicalSlotProvider physicalSlotProvider =
TestingPhysicalSlotProvider.create(
(resourceProfile) ->
CompletableFuture.completedFuture(
TestingPhysicalSlot.builder()
.withTaskManagerGateway(taskManagerGateway)
.withTaskManagerLocation(taskManagerLocation)
.build()));
final JobGraph jobGraph = JobGraphTestUtils.batchJobGraph(producerVertex, consumerVertex);
final SchedulerBase scheduler =
SchedulerTestingUtils.newSchedulerBuilder(
jobGraph, ComponentMainThreadExecutorServiceAdapter.forMainThread())
.setExecutionSlotAllocatorFactory(
SchedulerTestingUtils.newSlotSharingExecutionSlotAllocatorFactory(
physicalSlotProvider))
.setShuffleMaster(shuffleMaster)
.setPartitionTracker(partitionTracker)
.build();
final ExecutionGraph executionGraph = scheduler.getExecutionGraph();
final ExecutionJobVertex executionJobVertex =
executionGraph.getJobVertex(producerVertex.getID());
final ExecutionVertex executionVertex = executionJobVertex.getTaskVertices()[0];
execution = executionVertex.getCurrentExecutionAttempt();
scheduler.startScheduling();
// Drive the execution into RUNNING so the later transitions are valid.
execution.switchToRecovering();
execution.switchToRunning();
final IntermediateResultPartitionID expectedIntermediateResultPartitionId =
executionJobVertex.getProducedDataSets()[0].getPartitions()[0].getPartitionId();
descriptor =
execution
.getResultPartitionDeploymentDescriptor(
expectedIntermediateResultPartitionId)
.get();
taskExecutorResourceId = taskManagerLocation.getResourceID();
jobId = executionGraph.getJobID();
}
/** Creates a vertex whose task does nothing, as producer/consumer placeholder. */
@Nonnull
private JobVertex createNoOpJobVertex() {
final JobVertex jobVertex = new JobVertex("Test vertex", new JobVertexID());
jobVertex.setInvokableClass(NoOpInvokable.class);
return jobVertex;
}
/**
 * Shuffle master stub that records every externally released shuffle descriptor so tests can
 * assert exactly which partitions were released.
 */
private static class TestingShuffleMaster implements ShuffleMaster<ShuffleDescriptor> {
final Queue<ShuffleDescriptor> externallyReleasedPartitions = new ArrayBlockingQueue<>(4);
@Override
public CompletableFuture<ShuffleDescriptor> registerPartitionWithProducer(
PartitionDescriptor partitionDescriptor, ProducerDescriptor producerDescriptor) {
return CompletableFuture.completedFuture(
new ShuffleDescriptor() {
@Override
public ResultPartitionID getResultPartitionID() {
return new ResultPartitionID(
partitionDescriptor.getPartitionId(),
producerDescriptor.getProducerExecutionId());
}
@Override
public Optional<ResourceID> storesLocalResourcesOn() {
return Optional.of(producerDescriptor.getProducerLocation());
}
});
}
@Override
public void releasePartitionExternally(ShuffleDescriptor shuffleDescriptor) {
externallyReleasedPartitions.add(shuffleDescriptor);
}
}
}
| |
/*
* Copyright 2008-2009 LinkedIn, Inc
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package voldemort.xml;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.Reader;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.xml.XMLConstants;
import javax.xml.transform.Source;
import javax.xml.transform.stream.StreamSource;
import javax.xml.validation.Schema;
import javax.xml.validation.SchemaFactory;
import javax.xml.validation.Validator;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.jdom.Document;
import org.jdom.Element;
import org.jdom.JDOMException;
import org.jdom.input.SAXBuilder;
import org.jdom.output.Format;
import org.jdom.output.XMLOutputter;
import org.jdom.transform.JDOMSource;
import org.xml.sax.SAXException;
import voldemort.client.RoutingTier;
import voldemort.routing.RoutingStrategyType;
import voldemort.serialization.Compression;
import voldemort.serialization.SerializerDefinition;
import voldemort.store.StoreDefinition;
import voldemort.store.StoreDefinitionBuilder;
import voldemort.store.StoreUtils;
import voldemort.store.slop.strategy.HintedHandoffStrategyType;
import voldemort.store.system.SystemStoreConstants;
import voldemort.store.views.ViewStorageConfiguration;
import voldemort.utils.Utils;
import com.google.common.collect.Lists;
/**
* Parses a stores.xml file
*
*
*/
public class StoreDefinitionsMapper {
// --- Top-level element names in stores.xml ---
public final static String STORES_ELMT = "stores";
public final static String STORE_ELMT = "store";
// --- Per-store descriptive/metadata elements ---
public final static String STORE_DESCRIPTION_ELMT = "description";
public final static String STORE_OWNERS_ELMT = "owners";
public final static String STORE_NAME_ELMT = "name";
public final static String STORE_PERSISTENCE_ELMT = "persistence";
// --- Serializer configuration elements ---
public final static String STORE_KEY_SERIALIZER_ELMT = "key-serializer";
public final static String STORE_VALUE_SERIALIZER_ELMT = "value-serializer";
public final static String STORE_TRANSFORM_SERIALIZER_ELMT = "transforms-serializer";
public final static String STORE_SERIALIZATION_TYPE_ELMT = "type";
public final static String STORE_SERIALIZATION_META_ELMT = "schema-info";
// --- Compression configuration elements ---
public final static String STORE_COMPRESSION_ELMT = "compression";
public final static String STORE_COMPRESSION_TYPE_ELMT = "type";
public final static String STORE_COMPRESSION_OPTIONS_ELMT = "options";
// --- Routing / replication / quorum configuration elements ---
public final static String STORE_ROUTING_TIER_ELMT = "routing";
public final static String STORE_REPLICATION_FACTOR_ELMT = "replication-factor";
public final static String STORE_REQUIRED_WRITES_ELMT = "required-writes";
public final static String STORE_PREFERRED_WRITES_ELMT = "preferred-writes";
public final static String STORE_REQUIRED_READS_ELMT = "required-reads";
public final static String STORE_PREFERRED_READS_ELMT = "preferred-reads";
// --- Retention policy elements ---
public final static String STORE_RETENTION_POLICY_ELMT = "retention-days";
public final static String STORE_RETENTION_FREQ_ELMT = "retention-frequency";
public final static String STORE_RETENTION_SCAN_THROTTLE_RATE_ELMT = "retention-scan-throttle-rate";
public final static String STORE_ROUTING_STRATEGY = "routing-strategy";
// --- Zone-aware configuration elements ---
public final static String STORE_ZONE_ID_ELMT = "zone-id";
public final static String STORE_ZONE_REPLICATION_FACTOR_ELMT = "zone-replication-factor";
public final static String STORE_ZONE_COUNT_READS = "zone-count-reads";
public final static String STORE_ZONE_COUNT_WRITES = "zone-count-writes";
// --- Hinted handoff configuration elements ---
public final static String HINTED_HANDOFF_STRATEGY = "hinted-handoff-strategy";
public final static String HINT_PREFLIST_SIZE = "hint-preflist-size";
// --- View store elements ---
public final static String VIEW_ELMT = "view";
public final static String VIEW_TARGET_ELMT = "view-of";
public final static String VIEW_TRANS_ELMT = "view-class";
public final static String VIEW_SERIALIZER_FACTORY_ELMT = "view-serializer-factory";
// --- Attributes ---
private final static String STORE_VERSION_ATTR = "version";
private final static String STORE_MEMORY_FOOTPRINT = "memory-footprint";
private static final Logger logger = Logger.getLogger(StoreDefinitionsMapper.class.getName());
// Compiled stores.xsd schema, built once in the constructor and reused for validation.
private final Schema schema;
public StoreDefinitionsMapper() {
try {
SchemaFactory factory = SchemaFactory.newInstance(XMLConstants.W3C_XML_SCHEMA_NS_URI);
Source source = new StreamSource(StoreDefinitionsMapper.class.getResourceAsStream("stores.xsd"));
this.schema = factory.newSchema(source);
} catch(SAXException e) {
throw new MappingException(e);
}
}
public List<StoreDefinition> readStoreList(File f) throws IOException {
FileReader reader = null;
try {
reader = new FileReader(f);
return readStoreList(reader);
} finally {
if(reader != null)
reader.close();
}
}
public List<StoreDefinition> readStoreList(Reader input) {
return readStoreList(input, true);
}
public List<StoreDefinition> readStoreList(Reader input, boolean verifySchema) {
try {
SAXBuilder builder = new SAXBuilder();
Document doc = builder.build(input);
if(verifySchema) {
Validator validator = schema.newValidator();
validator.validate(new JDOMSource(doc));
}
Element root = doc.getRootElement();
if(!root.getName().equals(STORES_ELMT))
throw new MappingException("Invalid root element: "
+ doc.getRootElement().getName());
List<StoreDefinition> stores = new ArrayList<StoreDefinition>();
for(Object store: root.getChildren(STORE_ELMT))
stores.add(readStore((Element) store));
for(Object view: root.getChildren(VIEW_ELMT))
stores.add(readView((Element) view, stores));
return stores;
} catch(JDOMException e) {
throw new MappingException(e);
} catch(SAXException e) {
throw new MappingException(e);
} catch(IOException e) {
throw new MappingException(e);
}
}
public static StoreDefinition readStore(Reader input) {
SAXBuilder builder = new SAXBuilder();
try {
Document doc = builder.build(input);
Element root = doc.getRootElement();
return readStore(root);
} catch(JDOMException e) {
throw new MappingException(e);
} catch(IOException e) {
throw new MappingException(e);
}
}
@SuppressWarnings("unchecked")
private static StoreDefinition readStore(Element store) {
String name = store.getChildText(STORE_NAME_ELMT);
String storeType = store.getChildText(STORE_PERSISTENCE_ELMT);
String description = store.getChildText(STORE_DESCRIPTION_ELMT);
String ownerText = store.getChildText(STORE_OWNERS_ELMT);
List<String> owners = Lists.newArrayList();
if(ownerText != null) {
for(String owner: Utils.COMMA_SEP.split(ownerText.trim()))
if(owner.trim().length() > 0)
owners.add(owner);
}
int replicationFactor = Integer.parseInt(store.getChildText(STORE_REPLICATION_FACTOR_ELMT));
HashMap<Integer, Integer> zoneReplicationFactor = null;
Element zoneReplicationFactorNode = store.getChild(STORE_ZONE_REPLICATION_FACTOR_ELMT);
if(zoneReplicationFactorNode != null) {
zoneReplicationFactor = new HashMap<Integer, Integer>();
for(Element node: (List<Element>) zoneReplicationFactorNode.getChildren(STORE_REPLICATION_FACTOR_ELMT)) {
int zone = Integer.parseInt(node.getAttribute(STORE_ZONE_ID_ELMT).getValue());
int repFactor = Integer.parseInt(node.getText());
zoneReplicationFactor.put(zone, repFactor);
}
}
String zoneCountReadsStr = store.getChildText(STORE_ZONE_COUNT_READS);
Integer zoneCountReads = null;
if(zoneCountReadsStr != null)
zoneCountReads = Integer.parseInt(zoneCountReadsStr);
String zoneCountWritesStr = store.getChildText(STORE_ZONE_COUNT_WRITES);
Integer zoneCountWrites = null;
if(zoneCountWritesStr != null)
zoneCountWrites = Integer.parseInt(zoneCountWritesStr);
int requiredReads = Integer.parseInt(store.getChildText(STORE_REQUIRED_READS_ELMT));
int requiredWrites = Integer.parseInt(store.getChildText(STORE_REQUIRED_WRITES_ELMT));
String preferredReadsStr = store.getChildText(STORE_PREFERRED_READS_ELMT);
Integer preferredReads = null;
if(preferredReadsStr != null)
preferredReads = Integer.parseInt(preferredReadsStr);
String preferredWritesStr = store.getChildText(STORE_PREFERRED_WRITES_ELMT);
Integer preferredWrites = null;
if(preferredWritesStr != null)
preferredWrites = Integer.parseInt(preferredWritesStr);
SerializerDefinition keySerializer = readSerializer(store.getChild(STORE_KEY_SERIALIZER_ELMT));
if(keySerializer.getAllSchemaInfoVersions().size() > 1)
throw new MappingException("Only a single schema is allowed for the store key.");
SerializerDefinition valueSerializer = readSerializer(store.getChild(STORE_VALUE_SERIALIZER_ELMT));
RoutingTier routingTier = RoutingTier.fromDisplay(store.getChildText(STORE_ROUTING_TIER_ELMT));
String routingStrategyType = (null != store.getChildText(STORE_ROUTING_STRATEGY)) ? store.getChildText(STORE_ROUTING_STRATEGY)
: RoutingStrategyType.CONSISTENT_STRATEGY;
Element retention = store.getChild(STORE_RETENTION_POLICY_ELMT);
Integer retentionPolicyDays = null;
Integer retentionThrottleRate = null;
Integer retentionFreqDays = null;
if(retention != null) {
int retentionDays = Integer.parseInt(retention.getText());
if(retentionDays > 0) {
retentionPolicyDays = retentionDays;
Element throttleRate = store.getChild(STORE_RETENTION_SCAN_THROTTLE_RATE_ELMT);
if(throttleRate != null)
retentionThrottleRate = Integer.parseInt(throttleRate.getText());
Element retentionFreqDaysElement = store.getChild(STORE_RETENTION_FREQ_ELMT);
if(retentionFreqDaysElement != null)
retentionFreqDays = Integer.parseInt(retentionFreqDaysElement.getText());
} else {
logger.error("Invalid retention policy days set. Should be greater than zero. ignoring value "
+ retentionDays);
}
}
if(routingStrategyType.compareTo(RoutingStrategyType.ZONE_STRATEGY) == 0
&& !SystemStoreConstants.isSystemStore(name)) {
if(zoneCountReads == null || zoneCountWrites == null || zoneReplicationFactor == null) {
throw new MappingException("Have not set one of the following correctly for store '"
+ name
+ "' - "
+ STORE_ZONE_COUNT_READS
+ ", "
+ STORE_ZONE_COUNT_WRITES
+ ", "
+ STORE_ZONE_REPLICATION_FACTOR_ELMT);
}
}
HintedHandoffStrategyType hintedHandoffStrategy = null;
if(store.getChildText(HINTED_HANDOFF_STRATEGY) != null)
hintedHandoffStrategy = HintedHandoffStrategyType.fromDisplay(store.getChildText(HINTED_HANDOFF_STRATEGY));
String hintPrefListSizeStr = store.getChildText(HINT_PREFLIST_SIZE);
Integer hintPrefListSize = (null != hintPrefListSizeStr) ? Integer.parseInt(hintPrefListSizeStr)
: null;
String memoryFootprintStr = store.getChildText(STORE_MEMORY_FOOTPRINT);
long memoryFootprintMB = 0;
if(memoryFootprintStr != null)
memoryFootprintMB = Long.parseLong(memoryFootprintStr);
return new StoreDefinitionBuilder().setName(name)
.setType(storeType)
.setDescription(description)
.setOwners(owners)
.setKeySerializer(keySerializer)
.setValueSerializer(valueSerializer)
.setRoutingPolicy(routingTier)
.setRoutingStrategyType(routingStrategyType)
.setReplicationFactor(replicationFactor)
.setPreferredReads(preferredReads)
.setRequiredReads(requiredReads)
.setPreferredWrites(preferredWrites)
.setRequiredWrites(requiredWrites)
.setRetentionPeriodDays(retentionPolicyDays)
.setRetentionScanThrottleRate(retentionThrottleRate)
.setRetentionFrequencyDays(retentionFreqDays)
.setZoneReplicationFactor(zoneReplicationFactor)
.setZoneCountReads(zoneCountReads)
.setZoneCountWrites(zoneCountWrites)
.setHintedHandoffStrategy(hintedHandoffStrategy)
.setHintPrefListSize(hintPrefListSize)
.setMemoryFootprintMB(memoryFootprintMB)
.build();
}
private StoreDefinition readView(Element store, List<StoreDefinition> stores) {
String name = store.getChildText(STORE_NAME_ELMT);
String targetName = store.getChildText(VIEW_TARGET_ELMT);
String description = store.getChildText(STORE_DESCRIPTION_ELMT);
String ownerText = store.getChildText(STORE_OWNERS_ELMT);
List<String> owners = Lists.newArrayList();
if(ownerText != null) {
for(String owner: Utils.COMMA_SEP.split(ownerText.trim()))
if(owner.trim().length() > 0)
owners.add(owner);
}
StoreDefinition target = StoreUtils.getStoreDef(stores, targetName);
if(target == null)
throw new MappingException("View \"" + name + "\" has target store \"" + targetName
+ "\" but no such store exists");
int requiredReads = getChildWithDefault(store,
STORE_REQUIRED_READS_ELMT,
target.getRequiredReads());
int preferredReads = getChildWithDefault(store,
STORE_PREFERRED_READS_ELMT,
target.getRequiredReads());
int requiredWrites = getChildWithDefault(store,
STORE_REQUIRED_WRITES_ELMT,
target.getRequiredReads());
int preferredWrites = getChildWithDefault(store,
STORE_PREFERRED_WRITES_ELMT,
target.getRequiredReads());
Integer zoneCountReads = getChildWithDefault(store,
STORE_ZONE_COUNT_READS,
target.getZoneCountReads());
Integer zoneCountWrites = getChildWithDefault(store,
STORE_ZONE_COUNT_WRITES,
target.getZoneCountWrites());
String viewSerializerFactoryName = null;
if(store.getChildText(VIEW_SERIALIZER_FACTORY_ELMT) != null) {
viewSerializerFactoryName = store.getChild(VIEW_SERIALIZER_FACTORY_ELMT).getText();
}
SerializerDefinition keySerializer = target.getKeySerializer();
SerializerDefinition valueSerializer = target.getValueSerializer();
if(store.getChild(STORE_VALUE_SERIALIZER_ELMT) != null)
valueSerializer = readSerializer(store.getChild(STORE_VALUE_SERIALIZER_ELMT));
SerializerDefinition transformSerializer = target.getTransformsSerializer();
if(store.getChild(STORE_TRANSFORM_SERIALIZER_ELMT) != null)
transformSerializer = readSerializer(store.getChild(STORE_TRANSFORM_SERIALIZER_ELMT));
RoutingTier routingTier = null;
if(store.getChildText(STORE_ROUTING_TIER_ELMT) != null) {
routingTier = RoutingTier.fromDisplay(store.getChildText(STORE_ROUTING_TIER_ELMT));
} else {
routingTier = target.getRoutingPolicy();
}
String viewClass = store.getChildText(VIEW_TRANS_ELMT);
return new StoreDefinitionBuilder().setName(name)
.setViewOf(targetName)
.setType(ViewStorageConfiguration.TYPE_NAME)
.setDescription(description)
.setOwners(owners)
.setRoutingPolicy(routingTier)
.setRoutingStrategyType(target.getRoutingStrategyType())
.setKeySerializer(keySerializer)
.setValueSerializer(valueSerializer)
.setTransformsSerializer(transformSerializer)
.setReplicationFactor(target.getReplicationFactor())
.setZoneReplicationFactor(target.getZoneReplicationFactor())
.setPreferredReads(preferredReads)
.setRequiredReads(requiredReads)
.setPreferredWrites(preferredWrites)
.setRequiredWrites(requiredWrites)
.setZoneCountReads(zoneCountReads)
.setZoneCountWrites(zoneCountWrites)
.setView(viewClass)
.setSerializerFactory(viewSerializerFactoryName)
.build();
}
public static SerializerDefinition readSerializer(Element elmt) {
String name = elmt.getChild(STORE_SERIALIZATION_TYPE_ELMT).getText();
boolean hasVersion = true;
Map<Integer, String> schemaInfosByVersion = new HashMap<Integer, String>();
for(Object schemaInfo: elmt.getChildren(STORE_SERIALIZATION_META_ELMT)) {
Element schemaInfoElmt = (Element) schemaInfo;
String versionStr = schemaInfoElmt.getAttributeValue(STORE_VERSION_ATTR);
int version;
if(versionStr == null) {
version = 0;
} else if(versionStr.equals("none")) {
version = 0;
hasVersion = false;
} else {
version = Integer.parseInt(versionStr);
}
String info = schemaInfoElmt.getText();
String previous = schemaInfosByVersion.put(version, info);
if(previous != null)
throw new MappingException("Duplicate version " + version
+ " found in schema info.");
}
if(!hasVersion && schemaInfosByVersion.size() > 1)
throw new IllegalArgumentException("Specified multiple schemas AND version=none, which is not permitted.");
Element compressionElmt = elmt.getChild(STORE_COMPRESSION_ELMT);
Compression compression = null;
if(compressionElmt != null)
compression = new Compression(compressionElmt.getChildText("type"),
compressionElmt.getChildText("options"));
return new SerializerDefinition(name, schemaInfosByVersion, hasVersion, compression);
}
public String writeStoreList(List<StoreDefinition> stores) {
Element root = new Element(STORES_ELMT);
for(StoreDefinition def: stores) {
if(def.isView())
root.addContent(viewToElement(def));
else
root.addContent(storeToElement(def));
}
XMLOutputter serializer = new XMLOutputter(Format.getPrettyFormat());
return serializer.outputString(root);
}
public String writeStore(StoreDefinition store) {
XMLOutputter serializer = new XMLOutputter(Format.getPrettyFormat());
if(store.isView())
return serializer.outputString(viewToElement(store));
else
return serializer.outputString(storeToElement(store));
}
private Element storeToElement(StoreDefinition storeDefinition) {
Element store = new Element(STORE_ELMT);
store.addContent(new Element(STORE_NAME_ELMT).setText(storeDefinition.getName()));
store.addContent(new Element(STORE_PERSISTENCE_ELMT).setText(storeDefinition.getType()));
if(storeDefinition.getDescription() != null)
store.addContent(new Element(STORE_DESCRIPTION_ELMT).setText(storeDefinition.getDescription()));
if(storeDefinition.getOwners() != null && storeDefinition.getOwners().size() > 0) {
String ownersText = StringUtils.join(storeDefinition.getOwners().toArray(), ", ");
store.addContent(new Element(STORE_OWNERS_ELMT).setText(ownersText));
}
store.addContent(new Element(STORE_ROUTING_STRATEGY).setText(storeDefinition.getRoutingStrategyType()));
store.addContent(new Element(STORE_ROUTING_TIER_ELMT).setText(storeDefinition.getRoutingPolicy()
.toDisplay()));
store.addContent(new Element(STORE_REPLICATION_FACTOR_ELMT).setText(Integer.toString(storeDefinition.getReplicationFactor())));
HashMap<Integer, Integer> zoneReplicationFactor = storeDefinition.getZoneReplicationFactor();
if(zoneReplicationFactor != null) {
Element zoneReplicationFactorNode = new Element(STORE_ZONE_REPLICATION_FACTOR_ELMT);
for(Integer zone: zoneReplicationFactor.keySet()) {
zoneReplicationFactorNode.addContent(new Element(STORE_REPLICATION_FACTOR_ELMT).setText(Integer.toString(zoneReplicationFactor.get(zone)))
.setAttribute(STORE_ZONE_ID_ELMT,
Integer.toString(zone)));
}
store.addContent(zoneReplicationFactorNode);
}
if(storeDefinition.hasPreferredReads())
store.addContent(new Element(STORE_PREFERRED_READS_ELMT).setText(Integer.toString(storeDefinition.getPreferredReads())));
store.addContent(new Element(STORE_REQUIRED_READS_ELMT).setText(Integer.toString(storeDefinition.getRequiredReads())));
if(storeDefinition.hasPreferredWrites())
store.addContent(new Element(STORE_PREFERRED_WRITES_ELMT).setText(Integer.toString(storeDefinition.getPreferredWrites())));
store.addContent(new Element(STORE_REQUIRED_WRITES_ELMT).setText(Integer.toString(storeDefinition.getRequiredWrites())));
if(storeDefinition.hasZoneCountReads())
store.addContent(new Element(STORE_ZONE_COUNT_READS).setText(Integer.toString(storeDefinition.getZoneCountReads())));
if(storeDefinition.hasZoneCountWrites())
store.addContent(new Element(STORE_ZONE_COUNT_WRITES).setText(Integer.toString(storeDefinition.getZoneCountWrites())));
if(storeDefinition.hasHintedHandoffStrategyType())
store.addContent(new Element(HINTED_HANDOFF_STRATEGY).setText(storeDefinition.getHintedHandoffStrategyType()
.toDisplay()));
if(storeDefinition.hasHintPreflistSize())
store.addContent(new Element(HINT_PREFLIST_SIZE).setText(Integer.toString(storeDefinition.getHintPrefListSize())));
Element keySerializer = new Element(STORE_KEY_SERIALIZER_ELMT);
addSerializer(keySerializer, storeDefinition.getKeySerializer());
store.addContent(keySerializer);
Element valueSerializer = new Element(STORE_VALUE_SERIALIZER_ELMT);
addSerializer(valueSerializer, storeDefinition.getValueSerializer());
store.addContent(valueSerializer);
if(storeDefinition.hasRetentionPeriod())
store.addContent(new Element(STORE_RETENTION_POLICY_ELMT).setText(Integer.toString(storeDefinition.getRetentionDays())));
if(storeDefinition.hasRetentionScanThrottleRate())
store.addContent(new Element(STORE_RETENTION_SCAN_THROTTLE_RATE_ELMT).setText(Integer.toString(storeDefinition.getRetentionScanThrottleRate())));
if(storeDefinition.hasMemoryFootprint()) {
store.addContent(new Element(STORE_MEMORY_FOOTPRINT).setText(Long.toString(storeDefinition.getMemoryFootprintMB())));
}
return store;
}
private Element viewToElement(StoreDefinition storeDefinition) {
Element store = new Element(VIEW_ELMT);
store.addContent(new Element(STORE_NAME_ELMT).setText(storeDefinition.getName()));
store.addContent(new Element(VIEW_TARGET_ELMT).setText(storeDefinition.getViewTargetStoreName()));
if(storeDefinition.getDescription() != null)
store.addContent(new Element(STORE_DESCRIPTION_ELMT).setText(storeDefinition.getDescription()));
if(storeDefinition.getOwners() != null && storeDefinition.getOwners().size() > 0) {
String ownersText = StringUtils.join(storeDefinition.getOwners().toArray(), ", ");
store.addContent(new Element(STORE_OWNERS_ELMT).setText(ownersText));
}
if(storeDefinition.getValueTransformation() == null)
throw new MappingException("View " + storeDefinition.getName()
+ " has no defined transformation class.");
store.addContent(new Element(VIEW_TRANS_ELMT).setText(storeDefinition.getValueTransformation()));
store.addContent(new Element(STORE_ROUTING_TIER_ELMT).setText(storeDefinition.getRoutingPolicy()
.toDisplay()));
if(storeDefinition.hasPreferredReads())
store.addContent(new Element(STORE_PREFERRED_READS_ELMT).setText(Integer.toString(storeDefinition.getPreferredReads())));
store.addContent(new Element(STORE_REQUIRED_READS_ELMT).setText(Integer.toString(storeDefinition.getRequiredReads())));
if(storeDefinition.hasPreferredWrites())
store.addContent(new Element(STORE_PREFERRED_WRITES_ELMT).setText(Integer.toString(storeDefinition.getPreferredWrites())));
store.addContent(new Element(STORE_REQUIRED_WRITES_ELMT).setText(Integer.toString(storeDefinition.getRequiredWrites())));
if(storeDefinition.hasZoneCountReads())
store.addContent(new Element(STORE_ZONE_COUNT_READS).setText(Integer.toString(storeDefinition.getZoneCountReads())));
if(storeDefinition.hasZoneCountWrites())
store.addContent(new Element(STORE_ZONE_COUNT_WRITES).setText(Integer.toString(storeDefinition.getZoneCountWrites())));
Element valueSerializer = new Element(STORE_VALUE_SERIALIZER_ELMT);
addSerializer(valueSerializer, storeDefinition.getValueSerializer());
store.addContent(valueSerializer);
Element transformsSerializer = new Element(STORE_TRANSFORM_SERIALIZER_ELMT);
if(storeDefinition.getTransformsSerializer() != null) {
addSerializer(transformsSerializer, storeDefinition.getTransformsSerializer());
store.addContent(transformsSerializer);
}
Element serializerFactory = new Element(VIEW_SERIALIZER_FACTORY_ELMT);
if(storeDefinition.getSerializerFactory() != null) {
serializerFactory.setText(storeDefinition.getSerializerFactory());
store.addContent(serializerFactory);
}
return store;
}
public static void addSerializer(Element parent, SerializerDefinition def) {
parent.addContent(new Element(STORE_SERIALIZATION_TYPE_ELMT).setText(def.getName()));
if(def.hasSchemaInfo()) {
for(Map.Entry<Integer, String> entry: def.getAllSchemaInfoVersions().entrySet()) {
Element schemaElmt = new Element(STORE_SERIALIZATION_META_ELMT);
if(def.hasVersion())
schemaElmt.setAttribute(STORE_VERSION_ATTR, Integer.toString(entry.getKey()));
else
schemaElmt.setAttribute(STORE_VERSION_ATTR, "none");
schemaElmt.setText(entry.getValue());
parent.addContent(schemaElmt);
}
}
if(def.hasCompression()) {
Compression compression = def.getCompression();
Element compressionElmt = new Element(STORE_COMPRESSION_ELMT);
Element type = new Element(STORE_COMPRESSION_TYPE_ELMT);
type.setText(compression.getType());
compressionElmt.addContent(type);
String optionsText = compression.getOptions();
if(optionsText != null) {
Element options = new Element(STORE_COMPRESSION_OPTIONS_ELMT);
options.setText(optionsText);
compressionElmt.addContent(options);
}
parent.addContent(compressionElmt);
}
}
public Integer getChildWithDefault(Element elmt, String property, Integer defaultVal) {
if(elmt.getChildText(property) == null)
return defaultVal;
else
return Integer.parseInt(elmt.getChildText(property));
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.web.api.dto.status;
import io.swagger.annotations.ApiModelProperty;
import javax.xml.bind.annotation.XmlType;
/**
 * The status of this NiFi controller: thread counts, queue totals, component
 * run-state counts, remote-port counts, and versioned-process-group sync
 * counts.
 */
@XmlType(name = "controllerStatus")
public class ControllerStatusDTO implements Cloneable {

    private Integer activeThreadCount = 0;
    private Integer terminatedThreadCount = 0;
    private String queued;
    private Integer flowFilesQueued = 0;
    private Long bytesQueued = 0L;
    private Integer runningCount = 0;
    private Integer stoppedCount = 0;
    private Integer invalidCount = 0;
    private Integer disabledCount = 0;
    private Integer activeRemotePortCount = 0;
    private Integer inactiveRemotePortCount = 0;
    private Integer upToDateCount;
    private Integer locallyModifiedCount;
    private Integer staleCount;
    private Integer locallyModifiedAndStaleCount;
    private Integer syncFailureCount;

    /**
     * The active thread count.
     *
     * @return The active thread count
     */
    @ApiModelProperty("The number of active threads in the NiFi.")
    public Integer getActiveThreadCount() {
        return activeThreadCount;
    }

    public void setActiveThreadCount(Integer activeThreadCount) {
        this.activeThreadCount = activeThreadCount;
    }

    /**
     * The terminated thread count.
     *
     * @return The terminated thread count
     */
    @ApiModelProperty("The number of terminated threads in the NiFi.")
    public Integer getTerminatedThreadCount() {
        return terminatedThreadCount;
    }

    public void setTerminatedThreadCount(Integer terminatedThreadCount) {
        this.terminatedThreadCount = terminatedThreadCount;
    }

    /**
     * @return queue for the controller (human-readable summary string)
     */
    @ApiModelProperty("The number of flowfiles queued in the NiFi.")
    public String getQueued() {
        return queued;
    }

    public void setQueued(String queued) {
        this.queued = queued;
    }

    /**
     * @return number of running components in this controller
     */
    @ApiModelProperty("The number of running components in the NiFi.")
    public Integer getRunningCount() {
        return runningCount;
    }

    public void setRunningCount(Integer runningCount) {
        this.runningCount = runningCount;
    }

    /**
     * @return number of stopped components in this controller
     */
    @ApiModelProperty("The number of stopped components in the NiFi.")
    public Integer getStoppedCount() {
        return stoppedCount;
    }

    public void setStoppedCount(Integer stoppedCount) {
        this.stoppedCount = stoppedCount;
    }

    /**
     * @return number of invalid components in this controller
     */
    @ApiModelProperty("The number of invalid components in the NiFi.")
    public Integer getInvalidCount() {
        return invalidCount;
    }

    public void setInvalidCount(Integer invalidCount) {
        this.invalidCount = invalidCount;
    }

    /**
     * @return number of disabled components in this controller
     */
    @ApiModelProperty("The number of disabled components in the NiFi.")
    public Integer getDisabledCount() {
        return disabledCount;
    }

    public void setDisabledCount(Integer disabledCount) {
        this.disabledCount = disabledCount;
    }

    /**
     * @return number of active remote ports in this controller
     */
    @ApiModelProperty("The number of active remote ports in the NiFi.")
    public Integer getActiveRemotePortCount() {
        return activeRemotePortCount;
    }

    public void setActiveRemotePortCount(Integer activeRemotePortCount) {
        this.activeRemotePortCount = activeRemotePortCount;
    }

    /**
     * @return number of inactive remote ports in this controller
     */
    @ApiModelProperty("The number of inactive remote ports in the NiFi.")
    public Integer getInactiveRemotePortCount() {
        return inactiveRemotePortCount;
    }

    public void setInactiveRemotePortCount(Integer inactiveRemotePortCount) {
        this.inactiveRemotePortCount = inactiveRemotePortCount;
    }

    @ApiModelProperty("The number of FlowFiles queued across the entire flow")
    public Integer getFlowFilesQueued() {
        return flowFilesQueued;
    }

    public void setFlowFilesQueued(Integer flowFilesQueued) {
        this.flowFilesQueued = flowFilesQueued;
    }

    @ApiModelProperty("The size of the FlowFiles queued across the entire flow")
    public Long getBytesQueued() {
        return bytesQueued;
    }

    public void setBytesQueued(Long bytesQueued) {
        this.bytesQueued = bytesQueued;
    }

    @ApiModelProperty("The number of up to date versioned process groups in the NiFi.")
    public Integer getUpToDateCount() {
        return upToDateCount;
    }

    public void setUpToDateCount(Integer upToDateCount) {
        this.upToDateCount = upToDateCount;
    }

    @ApiModelProperty("The number of locally modified versioned process groups in the NiFi.")
    public Integer getLocallyModifiedCount() {
        return locallyModifiedCount;
    }

    public void setLocallyModifiedCount(Integer locallyModifiedCount) {
        this.locallyModifiedCount = locallyModifiedCount;
    }

    @ApiModelProperty("The number of stale versioned process groups in the NiFi.")
    public Integer getStaleCount() {
        return staleCount;
    }

    public void setStaleCount(Integer staleCount) {
        this.staleCount = staleCount;
    }

    @ApiModelProperty("The number of locally modified and stale versioned process groups in the NiFi.")
    public Integer getLocallyModifiedAndStaleCount() {
        return locallyModifiedAndStaleCount;
    }

    public void setLocallyModifiedAndStaleCount(Integer locallyModifiedAndStaleCount) {
        this.locallyModifiedAndStaleCount = locallyModifiedAndStaleCount;
    }

    @ApiModelProperty("The number of versioned process groups in the NiFi that are unable to sync to a registry.")
    public Integer getSyncFailureCount() {
        return syncFailureCount;
    }

    public void setSyncFailureCount(Integer syncFailureCount) {
        this.syncFailureCount = syncFailureCount;
    }

    /**
     * Returns a field-by-field copy of this DTO.
     */
    @Override
    public ControllerStatusDTO clone() {
        final ControllerStatusDTO other = new ControllerStatusDTO();
        other.setActiveThreadCount(getActiveThreadCount());
        other.setTerminatedThreadCount(getTerminatedThreadCount());
        other.setQueued(getQueued());
        other.setFlowFilesQueued(getFlowFilesQueued());
        other.setBytesQueued(getBytesQueued());
        other.setRunningCount(getRunningCount());
        other.setStoppedCount(getStoppedCount());
        other.setInvalidCount(getInvalidCount());
        other.setDisabledCount(getDisabledCount());
        other.setActiveRemotePortCount(getActiveRemotePortCount());
        other.setInactiveRemotePortCount(getInactiveRemotePortCount());
        other.setUpToDateCount(getUpToDateCount());
        other.setLocallyModifiedCount(getLocallyModifiedCount());
        other.setStaleCount(getStaleCount());
        other.setLocallyModifiedAndStaleCount(getLocallyModifiedAndStaleCount());
        // FIX: previously setStaleCount was duplicated here and
        // syncFailureCount was never copied into the clone.
        other.setSyncFailureCount(getSyncFailureCount());
        return other;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.arrow.vector.ipc;
import static org.junit.Assert.assertEquals;
import java.io.File;
import java.io.IOException;
import org.apache.arrow.memory.BufferAllocator;
import org.apache.arrow.vector.FieldVector;
import org.apache.arrow.vector.UInt1Vector;
import org.apache.arrow.vector.UInt4Vector;
import org.apache.arrow.vector.UInt8Vector;
import org.apache.arrow.vector.VectorSchemaRoot;
import org.apache.arrow.vector.complex.StructVector;
import org.apache.arrow.vector.complex.impl.ComplexWriterImpl;
import org.apache.arrow.vector.complex.writer.BaseWriter;
import org.apache.arrow.vector.dictionary.DictionaryProvider;
import org.apache.arrow.vector.dictionary.DictionaryProvider.MapDictionaryProvider;
import org.apache.arrow.vector.types.pojo.Schema;
import org.apache.arrow.vector.util.Validator;
import org.junit.Assert;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class TestJSONFile extends BaseFileTest {
private static final Logger LOGGER = LoggerFactory.getLogger(TestJSONFile.class);
@Test
public void testNoBatches() throws IOException {
File file = new File("target/no_batches.json");
try (BufferAllocator originalVectorAllocator =
allocator.newChildAllocator("original vectors", 0, Integer.MAX_VALUE);
StructVector parent = StructVector.empty("parent", originalVectorAllocator)) {
BaseWriter.ComplexWriter writer = new ComplexWriterImpl("root", parent);
BaseWriter.StructWriter rootWriter = writer.rootAsStruct();
rootWriter.integer("int");
rootWriter.uInt1("uint1");
rootWriter.bigInt("bigInt");
rootWriter.float4("float");
JsonFileWriter jsonWriter = new JsonFileWriter(file, JsonFileWriter.config().pretty(true));
jsonWriter.start(new VectorSchemaRoot(parent.getChild("root")).getSchema(), null);
jsonWriter.close();
}
// read
try (
BufferAllocator readerAllocator = allocator.newChildAllocator("reader", 0, Integer.MAX_VALUE);
JsonFileReader reader = new JsonFileReader(file, readerAllocator)
) {
Schema schema = reader.start();
LOGGER.debug("reading schema: " + schema);
}
}
@Test
public void testWriteRead() throws IOException {
File file = new File("target/mytest.json");
int count = COUNT;
// write
try (BufferAllocator originalVectorAllocator =
allocator.newChildAllocator("original vectors", 0, Integer.MAX_VALUE);
StructVector parent = StructVector.empty("parent", originalVectorAllocator)) {
writeData(count, parent);
writeJSON(file, new VectorSchemaRoot(parent.getChild("root")), null);
}
// read
try (
BufferAllocator readerAllocator = allocator.newChildAllocator("reader", 0, Integer.MAX_VALUE);
JsonFileReader reader = new JsonFileReader(file, readerAllocator)
) {
Schema schema = reader.start();
LOGGER.debug("reading schema: " + schema);
// initialize vectors
try (VectorSchemaRoot root = reader.read();) {
validateContent(count, root);
}
}
}
@Test
public void testWriteReadComplexJSON() throws IOException {
File file = new File("target/mytest_complex.json");
int count = COUNT;
// write
try (
BufferAllocator originalVectorAllocator = allocator.newChildAllocator("original vectors", 0, Integer.MAX_VALUE);
StructVector parent = StructVector.empty("parent", originalVectorAllocator)) {
writeComplexData(count, parent);
writeJSON(file, new VectorSchemaRoot(parent.getChild("root")), null);
}
// read
try (
BufferAllocator readerAllocator = allocator.newChildAllocator("reader", 0, Integer.MAX_VALUE);
JsonFileReader reader = new JsonFileReader(file, readerAllocator);
) {
Schema schema = reader.start();
LOGGER.debug("reading schema: " + schema);
// initialize vectors
try (VectorSchemaRoot root = reader.read();) {
validateComplexContent(count, root);
}
}
}
/**
 * Writes complex test data to JSON after validating it in memory.
 */
@Test
public void testWriteComplexJSON() throws IOException {
    File file = new File("target/mytest_write_complex.json");
    int count = COUNT;
    try (
        BufferAllocator vectorAllocator = allocator.newChildAllocator("original vectors", 0, Integer.MAX_VALUE);
        StructVector parent = StructVector.empty("parent", vectorAllocator)) {
        writeComplexData(count, parent);
        // FIX: manage the root with try-with-resources instead of leaving it
        // unclosed (consistent with the other tests in this class).
        try (VectorSchemaRoot root = new VectorSchemaRoot(parent.getChild("root"))) {
            validateComplexContent(root.getRowCount(), root);
            writeJSON(file, root, null);
        }
    }
}
/**
 * Writes the given root (and optional dictionaries) to {@code file} as
 * pretty-printed Arrow JSON.
 *
 * @param file     destination file
 * @param root     data to serialize
 * @param provider dictionary provider, or null when no dictionaries are used
 * @throws IOException on write failure
 */
public void writeJSON(File file, VectorSchemaRoot root, DictionaryProvider provider) throws IOException {
    // FIX: try-with-resources guarantees the writer is closed even if
    // start() or write() throws; the original leaked it on failure.
    try (JsonFileWriter writer = new JsonFileWriter(file, JsonFileWriter.config().pretty(true))) {
        writer.start(root.getSchema(), provider);
        writer.write(root);
    }
}
/**
 * Round-trips union-typed data through the JSON file format and compares
 * the read-back root against the original with the Validator.
 */
@Test
public void testWriteReadUnionJSON() throws IOException {
    File file = new File("target/mytest_write_union.json");
    int count = COUNT;
    try (
        BufferAllocator vectorAllocator = allocator.newChildAllocator("original vectors", 0, Integer.MAX_VALUE);
        StructVector parent = StructVector.empty("parent", vectorAllocator)) {
        writeUnionData(count, parent);
        printVectors(parent.getChildrenFromFields());
        try (VectorSchemaRoot root = new VectorSchemaRoot(parent.getChild("root"))) {
            validateUnionData(count, root);
            writeJSON(file, root, null);
            // read
            try (BufferAllocator readerAllocator = allocator.newChildAllocator("reader", 0, Integer.MAX_VALUE);
                 // FIX: the reader was never closed; manage it with try-with-resources
                 JsonFileReader reader = new JsonFileReader(file, readerAllocator)) {
                Schema schema = reader.start();
                LOGGER.debug("reading schema: " + schema);
                try (VectorSchemaRoot rootFromJson = reader.read()) {
                    validateUnionData(count, rootFromJson);
                    Validator.compareVectorSchemaRoot(root, rootFromJson);
                }
            }
        }
    }
}
/**
 * Round-trips date/time-typed data through the JSON file format.
 */
@Test
public void testWriteReadDateTimeJSON() throws IOException {
    File file = new File("target/mytest_datetime.json");
    int count = COUNT;
    // write
    try (
        BufferAllocator vectorAllocator = allocator.newChildAllocator("original vectors", 0, Integer.MAX_VALUE);
        StructVector parent = StructVector.empty("parent", vectorAllocator)) {
        writeDateTimeData(count, parent);
        printVectors(parent.getChildrenFromFields());
        // FIX: the original built two separate VectorSchemaRoots over the same
        // child and closed neither; build one and close it.
        try (VectorSchemaRoot root = new VectorSchemaRoot(parent.getChild("root"))) {
            validateDateTimeContent(count, root);
            writeJSON(file, root, null);
        }
    }
    // read
    try (
        BufferAllocator readerAllocator = allocator.newChildAllocator("reader", 0, Integer.MAX_VALUE);
        JsonFileReader reader = new JsonFileReader(file, readerAllocator)
    ) {
        Schema schema = reader.start();
        LOGGER.debug("reading schema: " + schema);
        // initialize vectors
        try (VectorSchemaRoot root = reader.read()) {
            validateDateTimeContent(count, root);
        }
    }
}
/**
 * Round-trips flat dictionary-encoded vectors through the JSON file format:
 * writes the encoded root plus its dictionaries, then reads them back and
 * validates both sides.
 */
@Test
public void testWriteReadDictionaryJSON() throws IOException {
File file = new File("target/mytest_dictionary.json");
// write
try (
BufferAllocator vectorAllocator = allocator.newChildAllocator("original vectors", 0, Integer.MAX_VALUE)
) {
MapDictionaryProvider provider = new MapDictionaryProvider();
try (VectorSchemaRoot root = writeFlatDictionaryData(vectorAllocator, provider)) {
printVectors(root.getFieldVectors());
validateFlatDictionary(root, provider);
writeJSON(file, root, provider);
}
// Need to close dictionary vectors
// (closing the root above does not release them; they are tracked by the provider)
for (long id : provider.getDictionaryIds()) {
provider.lookup(id).getVector().close();
}
}
// read
try (
BufferAllocator readerAllocator = allocator.newChildAllocator("reader", 0, Integer.MAX_VALUE);
JsonFileReader reader = new JsonFileReader(file, readerAllocator)
) {
Schema schema = reader.start();
LOGGER.debug("reading schema: " + schema);
// initialize vectors
// the reader itself serves as the dictionary provider for validation
try (VectorSchemaRoot root = reader.read();) {
validateFlatDictionary(root, reader);
}
}
}
/**
 * Round-trips nested (list-of-dictionary-encoded-values) data through the
 * JSON file format and validates both the written and read-back roots.
 */
@Test
public void testWriteReadNestedDictionaryJSON() throws IOException {
File file = new File("target/mytest_dict_nested.json");
// data being written:
// [['foo', 'bar'], ['foo'], ['bar']] -> [[0, 1], [0], [1]]
// write
try (
BufferAllocator vectorAllocator = allocator.newChildAllocator("original vectors", 0, Integer.MAX_VALUE)
) {
MapDictionaryProvider provider = new MapDictionaryProvider();
try (VectorSchemaRoot root = writeNestedDictionaryData(vectorAllocator, provider)) {
printVectors(root.getFieldVectors());
validateNestedDictionary(root, provider);
writeJSON(file, root, provider);
}
// Need to close dictionary vectors
// (closing the root above does not release them; they are tracked by the provider)
for (long id : provider.getDictionaryIds()) {
provider.lookup(id).getVector().close();
}
}
// read
try (
BufferAllocator readerAllocator = allocator.newChildAllocator("reader", 0, Integer.MAX_VALUE);
JsonFileReader reader = new JsonFileReader(file, readerAllocator)
) {
Schema schema = reader.start();
LOGGER.debug("reading schema: " + schema);
// initialize vectors
// the reader itself serves as the dictionary provider for validation
try (VectorSchemaRoot root = reader.read();) {
validateNestedDictionary(root, reader);
}
}
}
/**
 * Round-trips decimal-typed data through the JSON file format.
 */
@Test
public void testWriteReadDecimalJSON() throws IOException {
    File jsonFile = new File("target/mytest_decimal.json");
    // write
    try (BufferAllocator writeAllocator =
             allocator.newChildAllocator("original vectors", 0, Integer.MAX_VALUE);
         VectorSchemaRoot root = writeDecimalData(writeAllocator)) {
        printVectors(root.getFieldVectors());
        validateDecimalData(root);
        writeJSON(jsonFile, root, null);
    }
    // read
    try (BufferAllocator readAllocator =
             allocator.newChildAllocator("reader", 0, Integer.MAX_VALUE);
         JsonFileReader jsonReader = new JsonFileReader(jsonFile, readAllocator)) {
        Schema schema = jsonReader.start();
        LOGGER.debug("reading schema: " + schema);
        try (VectorSchemaRoot root = jsonReader.read()) {
            validateDecimalData(root);
        }
    }
}
/**
 * Reads the struct example from the integration-test documentation fixtures
 * and checks that the nullable struct vector reports the expected value count.
 */
@Test
public void testSetStructLength() throws IOException {
File file = new File("../../docs/source/format/integration_json_examples/struct.json");
// fall back to the alternate relative path when the test runs from a
// different working directory
if (!file.exists()) {
file = new File("../docs/source/format/integration_json_examples/struct.json");
}
try (
BufferAllocator readerAllocator = allocator.newChildAllocator("reader", 0, Integer.MAX_VALUE);
JsonFileReader reader = new JsonFileReader(file, readerAllocator)
) {
Schema schema = reader.start();
LOGGER.debug("reading schema: " + schema);
// initialize vectors
try (VectorSchemaRoot root = reader.read();) {
FieldVector vector = root.getVector("struct_nullable");
// the fixture contains 7 rows; reading must set the value count accordingly
Assert.assertEquals(7, vector.getValueCount());
}
}
}
/**
 * Round-trips variable-length binary data through the JSON file format.
 */
@Test
public void testWriteReadVarBinJSON() throws IOException {
    File file = new File("target/mytest_varbin.json");
    int count = COUNT;
    // write
    try (
        BufferAllocator vectorAllocator = allocator.newChildAllocator("original vectors", 0, Integer.MAX_VALUE);
        StructVector parent = StructVector.empty("parent", vectorAllocator)) {
        writeVarBinaryData(count, parent);
        // FIX: the original built two separate VectorSchemaRoots over the same
        // child and closed neither; build one and close it.
        try (VectorSchemaRoot root = new VectorSchemaRoot(parent.getChild("root"))) {
            validateVarBinary(count, root);
            writeJSON(file, root, null);
        }
    }
    // read
    try (BufferAllocator readerAllocator = allocator.newChildAllocator("reader", 0, Integer.MAX_VALUE);
         JsonFileReader reader = new JsonFileReader(file, readerAllocator)) {
        Schema schema = reader.start();
        LOGGER.debug("reading schema: " + schema);
        // initialize vectors
        try (VectorSchemaRoot root = reader.read()) {
            validateVarBinary(count, root);
        }
    }
}
/**
 * Round-trips map-typed data through the JSON file format.
 */
@Test
public void testWriteReadMapJSON() throws IOException {
    File jsonFile = new File("target/mytest_map.json");
    // write
    try (BufferAllocator writeAllocator =
             allocator.newChildAllocator("original vectors", 0, Integer.MAX_VALUE);
         VectorSchemaRoot root = writeMapData(writeAllocator)) {
        printVectors(root.getFieldVectors());
        validateMapData(root);
        writeJSON(jsonFile, root, null);
    }
    // read
    try (BufferAllocator readAllocator =
             allocator.newChildAllocator("reader", 0, Integer.MAX_VALUE);
         JsonFileReader jsonReader = new JsonFileReader(jsonFile, readAllocator)) {
        Schema schema = jsonReader.start();
        LOGGER.debug("reading schema: " + schema);
        try (VectorSchemaRoot root = jsonReader.read()) {
            validateMapData(root);
        }
    }
}
/**
 * Round-trips null-typed data through the JSON file format.
 */
@Test
public void testWriteReadNullJSON() throws IOException {
    File jsonFile = new File("target/mytest_null.json");
    int rows = 10;
    // write
    try (BufferAllocator writeAllocator =
             allocator.newChildAllocator("original vectors", 0, Integer.MAX_VALUE);
         VectorSchemaRoot root = writeNullData(rows)) {
        printVectors(root.getFieldVectors());
        validateNullData(root, rows);
        writeJSON(jsonFile, root, null);
    }
    // read
    try (BufferAllocator readAllocator =
             allocator.newChildAllocator("reader", 0, Integer.MAX_VALUE);
         JsonFileReader jsonReader = new JsonFileReader(jsonFile, readAllocator)) {
        Schema schema = jsonReader.start();
        LOGGER.debug("reading schema: " + schema);
        try (VectorSchemaRoot root = jsonReader.read()) {
            validateNullData(root, rows);
        }
    }
}
/**
 * Verifies that the unsigned integer vectors round-trip raw two's-complement
 * bit patterns without sign overflow: values written via set() must be
 * recovered unchanged through the *NoOverflow accessors.
 */
@Test
public void testNoOverFlowWithUINT() {
try (final UInt8Vector uInt8Vector = new UInt8Vector("uint8", allocator);
final UInt4Vector uInt4Vector = new UInt4Vector("uint4", allocator);
final UInt1Vector uInt1Vector = new UInt1Vector("uint1", allocator)) {
// 64-bit: negative longs exercise the high bit of the unsigned range
long[] longValues = new long[]{Long.MIN_VALUE, Long.MAX_VALUE, -1L};
uInt8Vector.allocateNew(3);
uInt8Vector.setValueCount(3);
for (int i = 0; i < longValues.length; i++) {
uInt8Vector.set(i, longValues[i]);
long readValue = uInt8Vector.getObjectNoOverflow(i).longValue();
assertEquals(readValue, longValues[i]);
}
// 32-bit: same check through the static getNoOverflow on the data buffer
int[] intValues = new int[]{Integer.MIN_VALUE, Integer.MAX_VALUE, -1};
uInt4Vector.allocateNew(3);
uInt4Vector.setValueCount(3);
for (int i = 0; i < intValues.length; i++) {
uInt4Vector.set(i, intValues[i]);
int actualValue = (int) UInt4Vector.getNoOverflow(uInt4Vector.getDataBuffer(), i);
assertEquals(intValues[i], actualValue);
}
// 8-bit: same check for single-byte unsigned values
byte[] byteValues = new byte[]{Byte.MIN_VALUE, Byte.MAX_VALUE, -1};
uInt1Vector.allocateNew(3);
uInt1Vector.setValueCount(3);
for (int i = 0; i < byteValues.length; i++) {
uInt1Vector.set(i, byteValues[i]);
byte actualValue = (byte) UInt1Vector.getNoOverflow(uInt1Vector.getDataBuffer(), i);
assertEquals(byteValues[i], actualValue);
}
}
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.search.Query;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.query.QueryShardContext;
/**
* A mapper that indexes the field names of a document under <code>_field_names</code>. This mapper is typically useful in order
* to have fast <code>exists</code> and <code>missing</code> queries/filters.
*
* Added in Elasticsearch 1.3.
*/
public class FieldNamesFieldMapper extends MetadataFieldMapper {

    public static final String NAME = "_field_names";

    public static final String CONTENT_TYPE = "_field_names";

    /**
     * Default settings for the {@code _field_names} field: indexed with
     * docs-only postings, keyword-analyzed, not tokenized, not stored,
     * norms omitted.
     */
    public static class Defaults {
        public static final String NAME = FieldNamesFieldMapper.NAME;

        public static final MappedFieldType FIELD_TYPE = new FieldNamesFieldType();

        static {
            FIELD_TYPE.setIndexOptions(IndexOptions.DOCS);
            FIELD_TYPE.setTokenized(false);
            FIELD_TYPE.setStored(false);
            FIELD_TYPE.setOmitNorms(true);
            FIELD_TYPE.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER);
            FIELD_TYPE.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER);
            FIELD_TYPE.setName(NAME);
            FIELD_TYPE.freeze();
        }
    }

    /** Builder for the metadata mapper; reuses an existing field type when one is supplied. */
    public static class Builder extends MetadataFieldMapper.Builder<Builder, FieldNamesFieldMapper> {

        public Builder(MappedFieldType existing) {
            super(Defaults.NAME, existing == null ? Defaults.FIELD_TYPE : existing, Defaults.FIELD_TYPE);
        }

        @Override
        @Deprecated
        public Builder index(boolean index) {
            return super.index(index);
        }

        @Override
        public FieldNamesFieldMapper build(BuilderContext context) {
            setupFieldType(context);
            // _field_names never uses doc values
            fieldType.setHasDocValues(false);
            // NOTE(review): removed an unused local cast of fieldType to
            // FieldNamesFieldType that served no purpose.
            return new FieldNamesFieldMapper(fieldType, context.indexSettings());
        }
    }

    public static class TypeParser implements MetadataFieldMapper.TypeParser {
        @Override
        public MetadataFieldMapper.Builder<?,?> parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
            return new Builder(parserContext.mapperService().fullName(NAME));
        }

        @Override
        public MetadataFieldMapper getDefault(MappedFieldType fieldType, ParserContext context) {
            final Settings indexSettings = context.mapperService().getIndexSettings().getSettings();
            if (fieldType != null) {
                // reuse the already-registered field type
                return new FieldNamesFieldMapper(indexSettings, fieldType);
            } else {
                // no existing type: build a fresh mapper from defaults
                return parse(NAME, Collections.emptyMap(), context)
                        .build(new BuilderContext(indexSettings, new ContentPath(1)));
            }
        }
    }

    /**
     * Field type for {@code _field_names}. Direct querying of this field is
     * no longer supported; both {@code exists} and {@code term} queries throw.
     */
    public static final class FieldNamesFieldType extends TermBasedFieldType {

        public FieldNamesFieldType() {
        }

        protected FieldNamesFieldType(FieldNamesFieldType ref) {
            super(ref);
        }

        @Override
        public FieldNamesFieldType clone() {
            return new FieldNamesFieldType(this);
        }

        @Override
        public String typeName() {
            return CONTENT_TYPE;
        }

        @Override
        public Query existsQuery(QueryShardContext context) {
            throw new UnsupportedOperationException("Cannot run exists query on _field_names");
        }

        @Override
        public Query termQuery(Object value, QueryShardContext context) {
            throw new UnsupportedOperationException("Terms query on _field_names is no longer supported");
        }
    }

    private FieldNamesFieldMapper(Settings indexSettings, MappedFieldType existing) {
        this(existing.clone(), indexSettings);
    }

    private FieldNamesFieldMapper(MappedFieldType fieldType, Settings indexSettings) {
        super(NAME, null, fieldType, Defaults.FIELD_TYPE, indexSettings);
    }

    @Override
    public FieldNamesFieldType fieldType() {
        return (FieldNamesFieldType) super.fieldType();
    }

    @Override
    public void preParse(ParseContext context) {
    }

    @Override
    public void postParse(ParseContext context) throws IOException {
    }

    @Override
    public void parse(ParseContext context) throws IOException {
        // Adding values to the _field_names field is handled by the mappers for each field type
    }

    /**
     * Lazily yields every dot-separated prefix of {@code fullPath}:
     * for "a.b.c" it produces "a", "a.b", "a.b.c".
     */
    static Iterable<String> extractFieldNames(final String fullPath) {
        return new Iterable<String>() {
            @Override
            public Iterator<String> iterator() {
                return new Iterator<String>() {

                    // index of the next '.' (or end of string) bounding the current prefix
                    int endIndex = nextEndIndex(0);

                    private int nextEndIndex(int index) {
                        while (index < fullPath.length() && fullPath.charAt(index) != '.') {
                            index += 1;
                        }
                        return index;
                    }

                    @Override
                    public boolean hasNext() {
                        // endIndex moves past fullPath.length() once the last prefix is emitted
                        return endIndex <= fullPath.length();
                    }

                    @Override
                    public String next() {
                        final String result = fullPath.substring(0, endIndex);
                        endIndex = nextEndIndex(endIndex + 1);
                        return result;
                    }

                    @Override
                    public void remove() {
                        throw new UnsupportedOperationException();
                    }
                };
            }
        };
    }

    @Override
    protected void parseCreateField(ParseContext context, List<IndexableField> fields) throws IOException {
        for (ParseContext.Document document : context) {
            final List<String> paths = new ArrayList<>(document.getFields().size());
            String previousPath = ""; // used as a sentinel - field names can't be empty
            for (IndexableField field : document.getFields()) {
                final String path = field.name();
                if (path.equals(previousPath)) {
                    // Sometimes mappers create multiple Lucene fields, eg. one for indexing,
                    // one for doc values and one for storing. Deduplicating is not required
                    // for correctness but this simple check helps save utf-8 conversions and
                    // gives Lucene fewer values to deal with.
                    continue;
                }
                paths.add(path);
                previousPath = path;
            }
            // index every dot-separated prefix of each collected path
            for (String path : paths) {
                for (String fieldName : extractFieldNames(path)) {
                    if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) {
                        document.add(new Field(fieldType().name(), fieldName, fieldType()));
                    }
                }
            }
        }
    }

    @Override
    protected String contentType() {
        return CONTENT_TYPE;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        boolean includeDefaults = params.paramAsBoolean("include_defaults", false);
        if (includeDefaults == false) {
            // everything on this mapper is a default, so emit nothing
            return builder;
        }
        builder.startObject(NAME);
        // FIX: removed the redundant re-check of includeDefaults (it is
        // always true past the early return above).
        builder.field("enabled", true);
        builder.endObject();
        return builder;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.impl.engine;
import java.util.Objects;
import org.apache.camel.CamelContext;
import org.apache.camel.CamelContextAware;
import org.apache.camel.Exchange;
import org.apache.camel.NamedNode;
import org.apache.camel.NamedRoute;
import org.apache.camel.Route;
import org.apache.camel.spi.ExchangeFormatter;
import org.apache.camel.spi.Tracer;
import org.apache.camel.support.CamelContextHelper;
import org.apache.camel.support.PatternHelper;
import org.apache.camel.support.builder.ExpressionBuilder;
import org.apache.camel.support.processor.DefaultExchangeFormatter;
import org.apache.camel.support.service.ServiceSupport;
import org.apache.camel.util.StringHelper;
import org.apache.camel.util.URISupport;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.apache.camel.support.LoggerHelper.getLineNumberLoggerName;
/**
* Default {@link Tracer} implementation that will log traced messages to the logger named
* <tt>org.apache.camel.Tracing</tt>.
*/
public class DefaultTracer extends ServiceSupport implements CamelContextAware, Tracer {

    private static final String TRACING_OUTPUT = "%-4.4s [%-12.12s] [%-33.33s]";

    // use a fixed logger name so easy to spot
    private static final Logger LOG = LoggerFactory.getLogger("org.apache.camel.Tracing");

    private String tracingFormat = TRACING_OUTPUT;
    private CamelContext camelContext;
    private boolean enabled = true;
    private boolean standby;
    private long traceCounter;
    private ExchangeFormatter exchangeFormatter;
    private String tracePattern;
    private transient String[] patterns;
    private boolean traceBeforeAndAfterRoute = true;

    public DefaultTracer() {
        // single-line, headerless formatter keeps the trace output compact
        DefaultExchangeFormatter formatter = new DefaultExchangeFormatter();
        formatter.setShowExchangeId(true);
        formatter.setShowExchangePattern(false);
        formatter.setMultiline(false);
        formatter.setShowHeaders(false);
        formatter.setStyle(DefaultExchangeFormatter.OutputStyle.Default);
        setExchangeFormatter(formatter);
    }

    @Override
    public CamelContext getCamelContext() {
        return camelContext;
    }

    @Override
    public void setCamelContext(CamelContext camelContext) {
        this.camelContext = camelContext;
    }

    @Override
    public void traceBeforeNode(NamedNode node, Exchange exchange) {
        if (shouldTrace(node)) {
            traceCounter++;
            String routeId = ExpressionBuilder.routeIdExpression().evaluate(exchange, String.class);

            // we need to avoid leak the sensible information here
            // the sanitizeUri takes a very long time for very long string and the format cuts this to
            // 33 characters, anyway. Cut this to 50 characters. This will give enough space for removing
            // characters in the sanitizeUri method and will be reasonably fast
            String label = URISupport.sanitizeUri(StringHelper.limitLength(node.getLabel(), 50));

            StringBuilder sb = new StringBuilder();
            sb.append(String.format(tracingFormat, " ", routeId, label));
            sb.append(" ");
            String data = exchangeFormatter.format(exchange);
            sb.append(data);
            String out = sb.toString();
            dumpTrace(out, node);
        }
    }

    @Override
    public void traceAfterNode(NamedNode node, Exchange exchange) {
        // noop
    }

    @Override
    public void traceBeforeRoute(NamedRoute route, Exchange exchange) {
        if (!traceBeforeAndAfterRoute) {
            return;
        }

        // we need to avoid leak the sensible information here
        // the sanitizeUri takes a very long time for very long string and the format cuts this to
        // 33 characters, anyway. Cut this to 50 characters. This will give enough space for removing
        // characters in the sanitizeUri method and will be reasonably fast
        String uri = route.getEndpointUrl();
        // FIX: the closing "]" was previously concatenated INSIDE the
        // sanitizeUri argument; it belongs outside the sanitized portion.
        String label = "from[" + URISupport.sanitizeUri(StringHelper.limitLength(uri, 50)) + "]";

        // the arrow has a * if its a new exchange that is starting
        boolean original = route.getRouteId().equals(exchange.getFromRouteId());
        String arrow = original ? "*-->" : "--->";

        // NOTE(review): removed dead locals that captured the route input's
        // line number and location but were never used.

        StringBuilder sb = new StringBuilder();
        sb.append(String.format(tracingFormat, arrow, route.getRouteId(), label));
        sb.append(" ");
        String data = exchangeFormatter.format(exchange);
        sb.append(data);
        String out = sb.toString();
        dumpTrace(out, route);
    }

    @Override
    public void traceAfterRoute(Route route, Exchange exchange) {
        // noop
    }

    @Override
    public void traceAfterRoute(NamedRoute route, Exchange exchange) {
        if (!traceBeforeAndAfterRoute) {
            return;
        }

        // we need to avoid leak the sensible information here
        // the sanitizeUri takes a very long time for very long string and the format cuts this to
        // 33 characters, anyway. Cut this to 50 characters. This will give enough space for removing
        // characters in the sanitizeUri method and will be reasonably fast
        String uri = route.getEndpointUrl();
        // FIX: same misplaced-parenthesis fix as in traceBeforeRoute.
        String label = "from[" + URISupport.sanitizeUri(StringHelper.limitLength(uri, 50)) + "]";

        // the arrow has a * if its an exchange that is done
        boolean original = route.getRouteId().equals(exchange.getFromRouteId());
        String arrow = original ? "*<--" : "<---";

        StringBuilder sb = new StringBuilder();
        sb.append(String.format(tracingFormat, arrow, route.getRouteId(), label));
        sb.append(" ");
        String data = exchangeFormatter.format(exchange);
        sb.append(data);
        String out = sb.toString();
        dumpTrace(out, route);
    }

    @Override
    public boolean shouldTrace(NamedNode definition) {
        if (!enabled) {
            return false;
        }
        boolean pattern = true;
        if (patterns != null) {
            pattern = shouldTracePattern(definition);
        }
        if (LOG.isTraceEnabled()) {
            LOG.trace("Should trace evaluated {} -> pattern: {}", definition.getId(), pattern);
        }
        return pattern;
    }

    @Override
    public long getTraceCounter() {
        return traceCounter;
    }

    @Override
    public void resetTraceCounter() {
        traceCounter = 0;
    }

    @Override
    public boolean isEnabled() {
        return enabled;
    }

    @Override
    public void setEnabled(boolean enabled) {
        this.enabled = enabled;
    }

    @Override
    public boolean isStandby() {
        return standby;
    }

    @Override
    public void setStandby(boolean standby) {
        this.standby = standby;
    }

    @Override
    public String getTracePattern() {
        return tracePattern;
    }

    @Override
    public void setTracePattern(String tracePattern) {
        this.tracePattern = tracePattern;
        if (tracePattern != null) {
            // the pattern can have multiple nodes separated by comma
            this.patterns = tracePattern.split(",");
        } else {
            this.patterns = null;
        }
    }

    @Override
    public boolean isTraceBeforeAndAfterRoute() {
        return traceBeforeAndAfterRoute;
    }

    @Override
    public void setTraceBeforeAndAfterRoute(boolean traceBeforeAndAfterRoute) {
        this.traceBeforeAndAfterRoute = traceBeforeAndAfterRoute;
    }

    @Override
    public ExchangeFormatter getExchangeFormatter() {
        return exchangeFormatter;
    }

    @Override
    public void setExchangeFormatter(ExchangeFormatter exchangeFormatter) {
        this.exchangeFormatter = exchangeFormatter;
    }

    /**
     * Logs the trace line, preferring a source-line-number logger name for the
     * node when one is available, otherwise the fixed tracing logger.
     */
    protected void dumpTrace(String out, Object node) {
        String name = getLineNumberLoggerName(node);
        if (name != null) {
            Logger log = LoggerFactory.getLogger(name);
            log.info(out);
        } else {
            LOG.info(out);
        }
    }

    /**
     * Returns true when the node id or its route id matches any configured
     * trace pattern.
     */
    protected boolean shouldTracePattern(NamedNode definition) {
        for (String pattern : patterns) {
            // match either route id, or node id
            String id = definition.getId();
            // use matchPattern method from endpoint helper that has a good matcher we use in Camel
            if (PatternHelper.matchPattern(id, pattern)) {
                return true;
            }
            String routeId = CamelContextHelper.getRouteId(definition);
            if (routeId != null && !Objects.equals(routeId, id)) {
                if (PatternHelper.matchPattern(routeId, pattern)) {
                    return true;
                }
            }
        }
        // not matched the pattern
        return false;
    }

    @Override
    protected void doStart() throws Exception {
        // honour a custom tracing format configured on the context
        if (getCamelContext().getTracingLoggingFormat() != null) {
            tracingFormat = getCamelContext().getTracingLoggingFormat();
        }
    }

    @Override
    protected void doStop() throws Exception {
        // noop
    }
}
| |
package com.dailystudio.dataobject;
import java.util.ArrayList;
import java.util.List;
import com.dailystudio.development.Logger;
import com.dailystudio.utils.ClassNameUtils;
import android.content.ContentValues;
import android.content.Context;
import android.content.Intent;
import android.database.Cursor;
public class DatabaseObject {
// Schema versions: VERSION_LATEST matches any column version,
// VERSION_START is the first (oldest) revision.
public static final int VERSION_LATEST = Column.VERSION_LATEST;
public static final int VERSION_START = Column.VERSION_1;
// Prefix shared by all generated CREATE TABLE statements.
private final static String TABLE_SQL_BASE = "CREATE TABLE IF NOT EXISTS ";
// Column-name -> value storage for this object's current row.
private ContentValues mValues;
// Schema (column definitions) describing this object.
protected Template mTemplate;
protected Context mContext;
// Schema version bound at construction; columns newer than this are ignored.
protected final int mVersion;
/**
 * Creates an object bound to the start (oldest) schema version.
 */
public DatabaseObject(Context context) {
this(context, VERSION_START);
}
/**
 * Creates an object bound to a specific schema version. Columns declared
 * with a newer version are skipped by all value accessors.
 */
public DatabaseObject(Context context, int version) {
mContext = context;
initMembers();
mVersion = version;
}
// Allocates the value store and a default (empty) template.
private void initMembers() {
mValues = new ContentValues();
mTemplate = new Template();
}
/**
 * Replaces this object's template; a null argument is ignored.
 */
protected void setTemplate(Template templ) {
    if (templ != null) {
        mTemplate = templ;
    }
}
/** Returns the template (schema) of this object. */
public Template getTemplate() {
    return this.mTemplate;
}
/** Returns the raw column-value store backing this object. */
public final ContentValues getValues() {
    return this.mValues;
}
/**
 * Lists the template columns, within this object's version, that currently
 * hold a value. Returns null when the template, value store or column list
 * is unavailable or empty.
 */
public final List<Column> listNonEmptyColumns() {
    if (mTemplate == null || mValues == null) {
        return null;
    }
    List<Column> columns = mTemplate.listColumns();
    if (columns == null || columns.size() <= 0) {
        return null;
    }
    List<Column> nonEmpty = new ArrayList<Column>();
    for (Column col : columns) {
        if (col.getVerion() > mVersion) {
            // column introduced after this object's schema version
            continue;
        }
        if (mValues.containsKey(col.getName())) {
            nonEmpty.add(col);
        }
    }
    return nonEmpty;
}
/**
 * Sets a value by column name; silently ignores null arguments or a
 * missing template.
 */
public void setValue(String colName, Object value) {
    if (colName == null || value == null || mTemplate == null) {
        return;
    }
    setValue(mTemplate.getColumn(colName), value);
}
/**
 * Sets a value on a column after validating that the column belongs to the
 * template, matches the value's type and is within this object's version.
 */
public void setValue(Column column, Object value) {
    if (column == null || value == null || mTemplate == null) {
        return;
    }
    if (!mTemplate.containsColumn(column)) {
        Logger.warn("no such column(name: %s)", column.getName());
        return;
    }
    if (!column.matchColumnType(value)) {
        Logger.warn("unmatched column(type: %s)", column.getType());
        return;
    }
    final int colVer = column.getVerion();
    if (colVer > mVersion) {
        Logger.warn("column(name: %s, ver: %d) is NOT match object version %d",
                column.getName(), colVer,
                mVersion);
        return;
    }
    if (mValues != null) {
        column.setValue(mValues, value);
    }
}
/**
 * Reads a value by column name; returns null for a null name or missing
 * template.
 */
public Object getValue(String colName) {
    if (colName == null || mTemplate == null) {
        return null;
    }
    return getValue(mTemplate.getColumn(colName));
}
/**
 * Reads a value from a column after validating template membership and the
 * column's version against this object's version.
 */
public Object getValue(Column column) {
    if (column == null || mTemplate == null) {
        return null;
    }
    if (!mTemplate.containsColumn(column)) {
        Logger.warn("no such column(name: %s)", column.getName());
        Logger.debug("columns: [%s]", mTemplate.listColumns());
        return null;
    }
    final int colVer = column.getVerion();
    if (colVer > mVersion) {
        Logger.warn("column(name: %s, ver: %d) is NOT match object version %d",
                column.getName(), colVer,
                mVersion);
        return null;
    }
    return (mValues == null) ? null : column.getValue(mValues);
}
/** Reads an int value by column name; returns 0 when unavailable. */
public int getIntegerValue(String colName) {
    if (colName == null || mTemplate == null) {
        return 0;
    }
    return getIntegerValue(mTemplate.getColumn(colName));
}
/** Reads an int value from a column; returns 0 when unavailable or non-Integer. */
public int getIntegerValue(Column column) {
    if (column == null) {
        return 0;
    }
    Object val = getValue(column);
    return (val instanceof Integer) ? ((Integer) val).intValue() : 0;
}
/** Reads a long value by column name; returns 0 when unavailable. */
public long getLongValue(String colName) {
    if (colName == null || mTemplate == null) {
        return 0L;
    }
    return getLongValue(mTemplate.getColumn(colName));
}
/** Reads a long value from a column; returns 0 when unavailable or non-Long. */
public long getLongValue(Column column) {
    if (column == null) {
        return 0L;
    }
    Object val = getValue(column);
    return (val instanceof Long) ? ((Long) val).longValue() : 0L;
}
/** Reads a double value by column name; returns 0.0 when unavailable. */
public double getDoubleValue(String colName) {
    if (colName == null || mTemplate == null) {
        return 0.0;
    }
    return getDoubleValue(mTemplate.getColumn(colName));
}
/** Reads a double value from a column; returns 0.0 when unavailable or non-Double. */
public double getDoubleValue(Column column) {
    if (column == null) {
        return 0.0;
    }
    Object val = getValue(column);
    return (val instanceof Double) ? ((Double) val).doubleValue() : 0.0;
}
/** Reads a text value by column name; returns null when unavailable. */
public String getTextValue(String colName) {
    if (colName == null || mTemplate == null) {
        return null;
    }
    return getTextValue(mTemplate.getColumn(colName));
}
/** Reads a text value from a column; returns null when unavailable or non-String. */
public String getTextValue(Column column) {
    if (column == null) {
        return null;
    }
    Object val = getValue(column);
    return (val instanceof String) ? (String) val : null;
}
/** Reads a blob value by column name; returns null when unavailable. */
public byte[] getBlobValue(String colName) {
    if (colName == null || mTemplate == null) {
        return null;
    }
    return getBlobValue(mTemplate.getColumn(colName));
}
/** Reads a blob value from a column; returns null when unavailable or not a byte[]. */
public byte[] getBlobValue(Column column) {
    if (column == null) {
        return null;
    }
    Object val = getValue(column);
    return (val instanceof byte[]) ? (byte[]) val : null;
}
/** Returns true when this object holds no column values at all. */
public boolean isEmpty() {
    return mValues == null || mValues.size() <= 0;
}
/**
 * Packs this object's values (within its version) into a new Intent as
 * extras; returns null when the value store, template or column list is
 * unavailable.
 */
public Intent convertToIntent() {
    if (mValues == null || mTemplate == null) {
        return null;
    }
    List<Column> columns = mTemplate.listColumns();
    if (columns == null) {
        return null;
    }
    Intent intent = new Intent();
    for (Column column : columns) {
        if (column.getVerion() <= mVersion) {
            column.attachValueTo(intent, mValues);
        }
    }
    return intent;
}
/**
 * Populates this object's values from the current row of a Cursor, one
 * column at a time, skipping columns newer than this object's version.
 */
public void fillValuesFromCursor(Cursor c) {
    if (c == null || mValues == null || mTemplate == null) {
        return;
    }
    List<Column> columns = mTemplate.listColumns();
    if (columns == null) {
        return;
    }
    for (Column column : columns) {
        if (column.getVerion() <= mVersion) {
            column.parseValueFrom(c, mValues);
        }
    }
}
/**
 * Builds a SQL selection string ("name = value AND ...") from every column
 * that currently holds a convertible value. Returns "" when nothing is set.
 */
public String toSQLSelectionString() {
    if (isEmpty() || mTemplate == null) {
        return "";
    }
    final List<Column> columns = mTemplate.listColumns();
    if (columns == null || columns.size() <= 0) {
        return "";
    }
    // Quoting is delegated to Column.convertValueToString(): wrapping numeric
    // values in quotes here would break comparisons against runtime-computed
    // columns (e.g. (longa - longb) > '0').
    final List<String> parts = new ArrayList<String>();
    for (Column column : columns) {
        Object value = getValue(column);
        if (value == null) {
            continue;
        }
        String valStr = column.convertValueToString(value);
        if (valStr == null) {
            continue;
        }
        parts.add(String.format("%s = %s",
                column.getName(),
                valStr));
    }
    if (parts.isEmpty()) {
        return "";
    }
    // join with " AND "
    StringBuilder builder = new StringBuilder(parts.get(0));
    for (int i = 1; i < parts.size(); i++) {
        builder.append(" AND ").append(parts.get(i));
    }
    return builder.toString();
}
/**
 * Hook invoked when no projection is available.
 *
 * @return always null here; presumably interpreted by callers as
 *         "no projection / select all" — TODO confirm against call sites
 */
public String handleNullProjection() {
return null;
}
/**
 * @return the version gate of this object; columns whose version exceeds it
 *         are skipped by the SQL/Intent conversion methods
 */
public int getVersion() {
return mVersion;
}
/**
 * Builds the SQL projection (array of column names) for this object,
 * limited to columns whose version is supported.
 *
 * <p>Fix: columns newer than {@code mVersion} previously left {@code null}
 * holes in the returned array (the array was sized to the full column list
 * but skipped entries were never filled). Unsupported columns are now
 * filtered out entirely, matching {@code toSQLTableCreationString}.</p>
 *
 * @return the projection, or null when no usable column exists
 */
public String[] toSQLProjection() {
    if (mTemplate == null) {
        return null;
    }
    final List<Column> columns = mTemplate.listColumns();
    if (columns == null || columns.size() <= 0) {
        return null;
    }
    final List<String> names = new ArrayList<String>();
    for (final Column column : columns) {
        if (column.getVerion() > mVersion) {
            continue;
        }
        names.add(column.getName());
    }
    if (names.size() <= 0) {
        return null;
    }
    return names.toArray(new String[names.size()]);
}
/**
 * Generates the CREATE TABLE statement for this object's class, using only
 * columns supported by this object's version.
 *
 * @return the SQL statement, or null when the table name, template, or
 *         usable columns cannot be resolved
 */
public String toSQLTableCreationString() {
    final String table = classToTable(getClass());
    if (table == null) {
        return null;
    }
    final Template tmpl = getTemplate();
    if (tmpl == null) {
        return null;
    }
    final List<Column> columns = tmpl.listColumns();
    if (columns == null) {
        return null;
    }
    // Only columns at or below this object's version take part in the schema.
    final List<Column> supported = new ArrayList<Column>();
    for (final Column col : columns) {
        if (col.getVerion() <= mVersion) {
            supported.add(col);
        }
    }
    final int size = supported.size();
    if (size <= 0) {
        return null;
    }
    final StringBuilder sqlBuilder = new StringBuilder(TABLE_SQL_BASE);
    sqlBuilder.append(table).append(" ( ");
    for (int i = 0; i < size; i++) {
        sqlBuilder.append(supported.get(i).toString());
        sqlBuilder.append(i == size - 1 ? " );" : ", ");
    }
    return sqlBuilder.toString();
}
/**
 * Maps a DatabaseObject class to its SQL table name by flattening the class
 * name ('.' and '$' both become '_').
 *
 * @param klass the class to map
 * @return the table name, or null when the class name is unavailable
 */
public static String classToTable(Class<? extends DatabaseObject> klass) {
    final String className = ClassNameUtils.getClassName(klass);
    return (className == null)
            ? null
            : className.replace('.', '_').replace('$', '_');
}
/**
 * Maps a DatabaseObject class to its database file name of the form
 * {@code <package>.<flattened-class-name>.db}.
 *
 * @param klass the class to map
 * @return the database name, or null when the package or class name is
 *         unavailable
 */
public static String classToDatabase(Class<? extends DatabaseObject> klass) {
    String pkgName = ClassNameUtils.getPackageName(klass);
    if (pkgName == null) {
        return null;
    }
    // Reuse classToTable() so the name-flattening rule lives in one place
    // instead of being duplicated here.
    String table = classToTable(klass);
    if (table == null) {
        return null;
    }
    return String.format("%s.%s.db", pkgName, table);
}
}
| |
package com.bernard.beaconportal.activities.preferences;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import android.content.SharedPreferences;
import com.bernard.beaconportal.activities.Account;
import com.bernard.beaconportal.activities.Account.FolderMode;
import com.bernard.beaconportal.activities.Account.SortType;
import com.bernard.beaconportal.activities.MAIL;
import com.bernard.beaconportal.activities.R;
import com.bernard.beaconportal.activities.crypto.Apg;
import com.bernard.beaconportal.activities.mail.store.StorageManager;
import com.bernard.beaconportal.activities.preferences.Settings.BooleanSetting;
import com.bernard.beaconportal.activities.preferences.Settings.ColorSetting;
import com.bernard.beaconportal.activities.preferences.Settings.EnumSetting;
import com.bernard.beaconportal.activities.preferences.Settings.IntegerRangeSetting;
import com.bernard.beaconportal.activities.preferences.Settings.InvalidSettingValueException;
import com.bernard.beaconportal.activities.preferences.Settings.PseudoEnumSetting;
import com.bernard.beaconportal.activities.preferences.Settings.SettingsDescription;
import com.bernard.beaconportal.activities.preferences.Settings.SettingsUpgrader;
import com.bernard.beaconportal.activities.preferences.Settings.StringSetting;
import com.bernard.beaconportal.activities.preferences.Settings.V;
/**
 * Registry of all account-level settings used for settings import/export.
 *
 * <p>{@link #SETTINGS} maps each setting name to the per-version
 * {@link SettingsDescription}s that define its default value and
 * validation; {@link #UPGRADERS} maps a settings version to the upgrader
 * that migrates older data to it (none are currently registered).</p>
 *
 * <p>Fix: {@code DeletePolicySetting.mMapping} is now {@code final},
 * consistent with every other setting class in this file.</p>
 */
public class AccountSettings {
    public static final Map<String, TreeMap<Integer, SettingsDescription>> SETTINGS;
    public static final Map<Integer, SettingsUpgrader> UPGRADERS;

    static {
        Map<String, TreeMap<Integer, SettingsDescription>> s =
                new LinkedHashMap<String, TreeMap<Integer, SettingsDescription>>();

        /*
         * When adding new settings here, be sure to increment
         * Settings.VERSION and use that for whatever you add here.
         */
        s.put("alwaysBcc", Settings.versions(new V(11, new StringSetting(""))));
        s.put("alwaysShowCcBcc",
                Settings.versions(new V(13, new BooleanSetting(false))));
        s.put("archiveFolderName",
                Settings.versions(new V(1, new StringSetting("Archive"))));
        s.put("autoExpandFolderName",
                Settings.versions(new V(1, new StringSetting("INBOX"))));
        s.put("automaticCheckIntervalMinutes", Settings.versions(new V(1,
                new IntegerResourceSetting(-1,
                        R.array.account_settings_check_frequency_values))));
        s.put("chipColor",
                Settings.versions(new V(1, new ColorSetting(0xFF0000FF))));
        s.put("cryptoApp",
                Settings.versions(new V(1, new StringSetting(Apg.NAME))));
        s.put("cryptoAutoEncrypt",
                Settings.versions(new V(3, new BooleanSetting(false))));
        s.put("cryptoAutoSignature",
                Settings.versions(new V(1, new BooleanSetting(false))));
        s.put("defaultQuotedTextShown", Settings.versions(new V(1,
                new BooleanSetting(Account.DEFAULT_QUOTED_TEXT_SHOWN))));
        s.put("deletePolicy", Settings.versions(new V(1,
                new DeletePolicySetting(Account.DELETE_POLICY_NEVER))));
        s.put("displayCount", Settings.versions(new V(1,
                new IntegerResourceSetting(MAIL.DEFAULT_VISIBLE_LIMIT,
                        R.array.account_settings_display_count_values))));
        s.put("draftsFolderName",
                Settings.versions(new V(1, new StringSetting("Drafts"))));
        s.put("expungePolicy", Settings.versions(new V(1,
                new StringResourceSetting(Account.EXPUNGE_IMMEDIATELY,
                        R.array.account_setup_expunge_policy_values))));
        s.put("folderDisplayMode", Settings.versions(new V(1,
                new EnumSetting<FolderMode>(FolderMode.class,
                        FolderMode.NOT_SECOND_CLASS))));
        s.put("folderPushMode", Settings.versions(new V(1,
                new EnumSetting<FolderMode>(FolderMode.class,
                        FolderMode.FIRST_CLASS))));
        s.put("folderSyncMode", Settings.versions(new V(1,
                new EnumSetting<FolderMode>(FolderMode.class,
                        FolderMode.FIRST_CLASS))));
        s.put("folderTargetMode", Settings.versions(new V(1,
                new EnumSetting<FolderMode>(FolderMode.class,
                        FolderMode.NOT_SECOND_CLASS))));
        s.put("goToUnreadMessageSearch",
                Settings.versions(new V(1, new BooleanSetting(false))));
        s.put("idleRefreshMinutes", Settings.versions(new V(1,
                new IntegerResourceSetting(24,
                        R.array.idle_refresh_period_values))));
        s.put("inboxFolderName",
                Settings.versions(new V(1, new StringSetting("INBOX"))));
        s.put("led", Settings.versions(new V(1, new BooleanSetting(true))));
        s.put("ledColor",
                Settings.versions(new V(1, new ColorSetting(0xFF0000FF))));
        s.put("localStorageProvider",
                Settings.versions(new V(1, new StorageProviderSetting())));
        s.put("markMessageAsReadOnView",
                Settings.versions(new V(7, new BooleanSetting(true))));
        s.put("maxPushFolders", Settings.versions(new V(1,
                new IntegerRangeSetting(0, 100, 10))));
        s.put("maximumAutoDownloadMessageSize",
                Settings.versions(new V(
                        1,
                        new IntegerResourceSetting(
                                32768,
                                R.array.account_settings_autodownload_message_size_values))));
        s.put("maximumPolledMessageAge", Settings.versions(new V(1,
                new IntegerResourceSetting(-1,
                        R.array.account_settings_message_age_values))));
        s.put("messageFormat", Settings.versions(new V(1,
                new EnumSetting<Account.MessageFormat>(
                        Account.MessageFormat.class,
                        Account.DEFAULT_MESSAGE_FORMAT))));
        s.put("messageFormatAuto", Settings.versions(new V(2,
                new BooleanSetting(Account.DEFAULT_MESSAGE_FORMAT_AUTO))));
        s.put("messageReadReceipt", Settings.versions(new V(1,
                new BooleanSetting(Account.DEFAULT_MESSAGE_READ_RECEIPT))));
        s.put("notificationUnreadCount",
                Settings.versions(new V(1, new BooleanSetting(true))));
        s.put("notifyMailCheck",
                Settings.versions(new V(1, new BooleanSetting(false))));
        s.put("notifyNewMail",
                Settings.versions(new V(1, new BooleanSetting(false))));
        s.put("notifySelfNewMail",
                Settings.versions(new V(1, new BooleanSetting(true))));
        s.put("pushPollOnConnect",
                Settings.versions(new V(1, new BooleanSetting(true))));
        s.put("quotePrefix", Settings.versions(new V(1, new StringSetting(
                Account.DEFAULT_QUOTE_PREFIX))));
        s.put("quoteStyle", Settings.versions(new V(1,
                new EnumSetting<Account.QuoteStyle>(Account.QuoteStyle.class,
                        Account.DEFAULT_QUOTE_STYLE))));
        s.put("replyAfterQuote", Settings.versions(new V(1, new BooleanSetting(
                Account.DEFAULT_REPLY_AFTER_QUOTE))));
        s.put("ring", Settings.versions(new V(1, new BooleanSetting(true))));
        s.put("ringtone", Settings.versions(new V(1, new RingtoneSetting(
                "content://settings/system/notification_sound"))));
        s.put("searchableFolders", Settings.versions(new V(1,
                new EnumSetting<Account.Searchable>(Account.Searchable.class,
                        Account.Searchable.ALL))));
        s.put("sentFolderName",
                Settings.versions(new V(1, new StringSetting("Sent"))));
        s.put("sortTypeEnum", Settings.versions(new V(9,
                new EnumSetting<SortType>(SortType.class,
                        Account.DEFAULT_SORT_TYPE))));
        s.put("sortAscending", Settings.versions(new V(9, new BooleanSetting(
                Account.DEFAULT_SORT_ASCENDING))));
        s.put("showPicturesEnum",
                Settings.versions(new V(1,
                        new EnumSetting<Account.ShowPictures>(
                                Account.ShowPictures.class,
                                Account.ShowPictures.NEVER))));
        s.put("signatureBeforeQuotedText",
                Settings.versions(new V(1, new BooleanSetting(false))));
        s.put("spamFolderName",
                Settings.versions(new V(1, new StringSetting("Spam"))));
        s.put("stripSignature", Settings.versions(new V(2, new BooleanSetting(
                Account.DEFAULT_STRIP_SIGNATURE))));
        s.put("subscribedFoldersOnly",
                Settings.versions(new V(1, new BooleanSetting(false))));
        s.put("syncRemoteDeletions",
                Settings.versions(new V(1, new BooleanSetting(true))));
        s.put("trashFolderName",
                Settings.versions(new V(1, new StringSetting("Trash"))));
        s.put("useCompression.MOBILE",
                Settings.versions(new V(1, new BooleanSetting(true))));
        s.put("useCompression.OTHER",
                Settings.versions(new V(1, new BooleanSetting(true))));
        s.put("useCompression.WIFI",
                Settings.versions(new V(1, new BooleanSetting(true))));
        s.put("vibrate", Settings.versions(new V(1, new BooleanSetting(false))));
        s.put("vibratePattern", Settings.versions(new V(1,
                new IntegerResourceSetting(0,
                        R.array.account_settings_vibrate_pattern_values))));
        s.put("vibrateTimes", Settings.versions(new V(1,
                new IntegerResourceSetting(5,
                        R.array.account_settings_vibrate_times_label))));
        s.put("allowRemoteSearch",
                Settings.versions(new V(18, new BooleanSetting(true))));
        s.put("remoteSearchNumResults",
                Settings.versions(new V(
                        18,
                        new IntegerResourceSetting(
                                Account.DEFAULT_REMOTE_SEARCH_NUM_RESULTS,
                                R.array.account_settings_remote_search_num_results_values))));
        s.put("remoteSearchFullText",
                Settings.versions(new V(18, new BooleanSetting(false))));

        SETTINGS = Collections.unmodifiableMap(s);

        // No account-settings upgraders are registered yet.
        Map<Integer, SettingsUpgrader> u = new HashMap<Integer, SettingsUpgrader>();
        UPGRADERS = Collections.unmodifiableMap(u);
    }

    /**
     * Validates imported settings against {@link #SETTINGS}.
     *
     * @param version version the settings were exported with
     * @param importedSettings raw name/value pairs from the import source
     * @param useDefaultValues whether to fill missing entries with defaults
     * @return validated name/value map with typed values
     */
    public static Map<String, Object> validate(int version,
            Map<String, String> importedSettings, boolean useDefaultValues) {
        return Settings.validate(version, SETTINGS, importedSettings,
                useDefaultValues);
    }

    /**
     * Upgrades validated settings in place to the current version.
     *
     * @return the set of setting names that were removed during the upgrade
     */
    public static Set<String> upgrade(int version,
            Map<String, Object> validatedSettings) {
        return Settings
                .upgrade(version, UPGRADERS, SETTINGS, validatedSettings);
    }

    /**
     * Converts typed setting values back to their string representation.
     */
    public static Map<String, String> convert(Map<String, Object> settings) {
        return Settings.convert(settings, SETTINGS);
    }

    /**
     * Extracts all stored settings for one account from shared preferences.
     *
     * @param storage preference store to read from
     * @param uuid account identifier; keys are stored as "<uuid>.<setting>"
     * @return map of setting name to stored string value (absent keys omitted)
     */
    public static Map<String, String> getAccountSettings(
            SharedPreferences storage, String uuid) {
        Map<String, String> result = new HashMap<String, String>();
        String prefix = uuid + ".";
        for (String key : SETTINGS.keySet()) {
            String value = storage.getString(prefix + key, null);
            if (value != null) {
                result.put(key, value);
            }
        }
        return result;
    }

    /**
     * An integer resource setting.
     *
     * <p>
     * Basically a {@link PseudoEnumSetting} that is initialized from a
     * resource array containing integer strings.
     * </p>
     */
    public static class IntegerResourceSetting extends
            PseudoEnumSetting<Integer> {
        private final Map<Integer, String> mMapping;

        public IntegerResourceSetting(int defaultValue, int resId) {
            super(defaultValue);
            Map<Integer, String> mapping = new HashMap<Integer, String>();
            String[] values = MAIL.app.getResources().getStringArray(resId);
            for (String value : values) {
                int intValue = Integer.parseInt(value);
                mapping.put(intValue, value);
            }
            mMapping = Collections.unmodifiableMap(mapping);
        }

        @Override
        protected Map<Integer, String> getMapping() {
            return mMapping;
        }

        @Override
        public Object fromString(String value)
                throws InvalidSettingValueException {
            try {
                return Integer.parseInt(value);
            } catch (NumberFormatException e) {
                throw new InvalidSettingValueException();
            }
        }
    }

    /**
     * A string resource setting.
     *
     * <p>
     * Basically a {@link PseudoEnumSetting} that is initialized from a
     * resource array. Unlike {@link IntegerResourceSetting}, values must be
     * members of the resource array to be accepted.
     * </p>
     */
    public static class StringResourceSetting extends PseudoEnumSetting<String> {
        private final Map<String, String> mMapping;

        public StringResourceSetting(String defaultValue, int resId) {
            super(defaultValue);
            Map<String, String> mapping = new HashMap<String, String>();
            String[] values = MAIL.app.getResources().getStringArray(resId);
            for (String value : values) {
                mapping.put(value, value);
            }
            mMapping = Collections.unmodifiableMap(mapping);
        }

        @Override
        protected Map<String, String> getMapping() {
            return mMapping;
        }

        @Override
        public Object fromString(String value)
                throws InvalidSettingValueException {
            if (!mMapping.containsKey(value)) {
                throw new InvalidSettingValueException();
            }
            return value;
        }
    }

    /**
     * The notification ringtone setting.
     */
    public static class RingtoneSetting extends SettingsDescription {
        public RingtoneSetting(String defaultValue) {
            super(defaultValue);
        }

        @Override
        public Object fromString(String value) {
            // TODO: add validation
            return value;
        }
    }

    /**
     * The storage provider setting. Valid values are the provider ids known
     * to {@link StorageManager} at validation time.
     */
    public static class StorageProviderSetting extends SettingsDescription {
        public StorageProviderSetting() {
            super(null);
        }

        @Override
        public Object getDefaultValue() {
            return StorageManager.getInstance(MAIL.app).getDefaultProviderId();
        }

        @Override
        public Object fromString(String value) {
            StorageManager storageManager = StorageManager
                    .getInstance(MAIL.app);
            Map<String, String> providers = storageManager
                    .getAvailableProviders();
            if (providers.containsKey(value)) {
                return value;
            }
            throw new RuntimeException("Validation failed");
        }
    }

    /**
     * The delete policy setting.
     */
    public static class DeletePolicySetting extends PseudoEnumSetting<Integer> {
        // final for immutability, consistent with the other setting classes.
        private final Map<Integer, String> mMapping;

        public DeletePolicySetting(int defaultValue) {
            super(defaultValue);
            Map<Integer, String> mapping = new HashMap<Integer, String>();
            mapping.put(Account.DELETE_POLICY_NEVER, "NEVER");
            mapping.put(Account.DELETE_POLICY_ON_DELETE, "DELETE");
            mapping.put(Account.DELETE_POLICY_MARK_AS_READ, "MARK_AS_READ");
            mMapping = Collections.unmodifiableMap(mapping);
        }

        @Override
        protected Map<Integer, String> getMapping() {
            return mMapping;
        }

        @Override
        public Object fromString(String value)
                throws InvalidSettingValueException {
            try {
                Integer deletePolicy = Integer.parseInt(value);
                if (mMapping.containsKey(deletePolicy)) {
                    return deletePolicy;
                }
            } catch (NumberFormatException e) { /* do nothing */
            }
            throw new InvalidSettingValueException();
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.drill.exec.store.sas;
import com.epam.parso.Column;
import com.epam.parso.SasFileProperties;
import com.epam.parso.SasFileReader;
import com.epam.parso.impl.SasFileReaderImpl;
import org.apache.drill.common.AutoCloseables;
import org.apache.drill.common.exceptions.CustomErrorContext;
import org.apache.drill.common.exceptions.UserException;
import org.apache.drill.common.types.TypeProtos.DataMode;
import org.apache.drill.common.types.TypeProtos.MinorType;
import org.apache.drill.exec.physical.impl.scan.file.FileScanFramework;
import org.apache.drill.exec.physical.impl.scan.framework.ManagedReader;
import org.apache.drill.exec.physical.resultSet.ResultSetLoader;
import org.apache.drill.exec.physical.resultSet.RowSetLoader;
import org.apache.drill.exec.record.MaterializedField;
import org.apache.drill.exec.record.metadata.ColumnMetadata;
import org.apache.drill.exec.record.metadata.MetadataUtils;
import org.apache.drill.exec.record.metadata.SchemaBuilder;
import org.apache.drill.exec.record.metadata.TupleMetadata;
import org.apache.drill.exec.vector.accessor.ScalarWriter;
import org.apache.hadoop.mapred.FileSplit;
import org.apache.parquet.Strings;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.io.InputStream;
import java.time.Instant;
import java.time.LocalDate;
import java.time.LocalTime;
import java.time.ZoneOffset;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
public class SasBatchReader implements ManagedReader<FileScanFramework.FileSchemaNegotiator> {
private static final Logger logger = LoggerFactory.getLogger(SasBatchReader.class);
private final int maxRecords;
private final List<SasColumnWriter> writerList;
private FileSplit split;
private InputStream fsStream;
private SasFileReader sasFileReader;
private CustomErrorContext errorContext;
private RowSetLoader rowWriter;
private Object[] firstRow;
private String compressionMethod;
private String fileLabel;
private String fileType;
private String osName;
private String osType;
private String sasRelease;
private String sessionEncoding;
private String serverType;
private LocalDate dateCreated;
private LocalDate dateModified;
private enum IMPLICIT_STRING_COLUMN {
COMPRESSION_METHOD("_compression_method"),
ENCODING("_encoding"),
FILE_LABEL("_file_label"),
FILE_TYPE("_file_type"),
OS_NAME("_os_name"),
OS_TYPE("_os_type"),
SAS_RELEASE("_sas_release"),
SESSION_ENCODING("_session_encoding");
private final String fieldName;
IMPLICIT_STRING_COLUMN(String fieldName) {
this.fieldName = fieldName;
}
public String getFieldName() {
return fieldName;
}
}
private enum IMPLICIT_DATE_COLUMN {
CREATED_DATE("_date_created"),
MODIFIED_DATE("_date_modified");
private final String fieldName;
IMPLICIT_DATE_COLUMN(String fieldName) {
this.fieldName = fieldName;
}
public String getFieldName() {
return fieldName;
}
}
public static class SasReaderConfig {
protected final SasFormatPlugin plugin;
public SasReaderConfig(SasFormatPlugin plugin) {
this.plugin = plugin;
}
}
public SasBatchReader(int maxRecords) {
this.maxRecords = maxRecords;
writerList = new ArrayList<>();
}
@Override
public boolean open(FileScanFramework.FileSchemaNegotiator negotiator) {
split = negotiator.split();
errorContext = negotiator.parentErrorContext();
openFile(negotiator);
TupleMetadata schema;
if (negotiator.hasProvidedSchema()) {
schema = negotiator.providedSchema();
} else {
schema = buildSchema();
}
schema = addImplicitColumnsToSchema(schema);
negotiator.tableSchema(schema, true);
ResultSetLoader loader = negotiator.build();
rowWriter = loader.writer();
buildWriterList(schema);
return true;
}
private void openFile(FileScanFramework.FileSchemaNegotiator negotiator) {
try {
fsStream = negotiator.fileSystem().openPossiblyCompressedStream(split.getPath());
sasFileReader = new SasFileReaderImpl(fsStream);
firstRow = sasFileReader.readNext();
} catch (IOException e) {
throw UserException
.dataReadError(e)
.message("Unable to open SAS File %s", split.getPath())
.addContext(e.getMessage())
.addContext(errorContext)
.build(logger);
}
}
private TupleMetadata buildSchema() {
SchemaBuilder builder = new SchemaBuilder();
List<Column> columns = sasFileReader.getColumns();
int counter = 0;
for (Column column : columns) {
String fieldName = column.getName();
try {
MinorType type = null;
if (firstRow[counter] != null) {
type = getType(firstRow[counter].getClass().getSimpleName());
if (type == MinorType.BIGINT && !column.getFormat().isEmpty()) {
logger.debug("Found possible time");
type = MinorType.TIME;
}
} else {
// If the first row is null
String columnType = column.getType().getSimpleName();
type = getType(columnType);
}
builder.addNullable(fieldName, type);
} catch (Exception e) {
throw UserException.dataReadError()
.message("Error with column type: " + firstRow[counter].getClass().getSimpleName())
.addContext(errorContext)
.build(logger);
}
counter++;
}
return builder.buildSchema();
}
private void buildWriterList(TupleMetadata schema) {
int colIndex = 0;
for (MaterializedField field : schema.toFieldList()) {
String fieldName = field.getName();
MinorType type = field.getType().getMinorType();
if (type == MinorType.FLOAT8) {
writerList.add(new DoubleSasColumnWriter(colIndex, fieldName, rowWriter));
} else if (type == MinorType.BIGINT) {
writerList.add(new BigIntSasColumnWriter(colIndex, fieldName, rowWriter));
} else if (type == MinorType.DATE) {
writerList.add(new DateSasColumnWriter(colIndex, fieldName, rowWriter));
} else if (type == MinorType.TIME) {
writerList.add(new TimeSasColumnWriter(colIndex, fieldName, rowWriter));
} else if (type == MinorType.VARCHAR){
writerList.add(new StringSasColumnWriter(colIndex, fieldName, rowWriter));
} else {
throw UserException.dataReadError()
.message(fieldName + " is an unparsable data type: " + type.name() + ". The SAS reader does not support this data type.")
.addContext(errorContext)
.build(logger);
}
colIndex++;
}
}
private MinorType getType(String simpleType) {
switch (simpleType) {
case "String":
return MinorType.VARCHAR;
case "Numeric":
case "Double":
return MinorType.FLOAT8;
case "Long":
return MinorType.BIGINT;
case "Date":
return MinorType.DATE;
default:
throw UserException.dataReadError()
.message("SAS Reader does not support data type: " + simpleType)
.addContext(errorContext)
.build(logger);
}
}
private TupleMetadata addImplicitColumnsToSchema(TupleMetadata schema) {
SchemaBuilder builder = new SchemaBuilder();
ColumnMetadata colSchema;
builder.addAll(schema);
SasFileProperties fileProperties = sasFileReader.getSasFileProperties();
// Add String Metadata columns
for (IMPLICIT_STRING_COLUMN name : IMPLICIT_STRING_COLUMN.values()) {
colSchema = MetadataUtils.newScalar(name.getFieldName(), MinorType.VARCHAR, DataMode.OPTIONAL);
colSchema.setBooleanProperty(ColumnMetadata.EXCLUDE_FROM_WILDCARD, true);
builder.add(colSchema);
}
// Add Date Column Names
for (IMPLICIT_DATE_COLUMN name : IMPLICIT_DATE_COLUMN.values()) {
colSchema = MetadataUtils.newScalar(name.getFieldName(), MinorType.DATE, DataMode.OPTIONAL);
colSchema.setBooleanProperty(ColumnMetadata.EXCLUDE_FROM_WILDCARD, true);
builder.add(colSchema);
}
populateMetadata(fileProperties);
return builder.build();
}
@Override
public boolean next() {
while (!rowWriter.isFull()) {
if (!processNextRow()) {
return false;
}
}
return true;
}
@Override
public void close() {
AutoCloseables.closeSilently(fsStream);
}
private boolean processNextRow() {
if (rowWriter.limitReached(maxRecords)) {
return false;
}
Object[] row;
try {
// Process first row
if (firstRow != null) {
row = firstRow;
firstRow = null;
} else {
row = sasFileReader.readNext();
}
if (row == null) {
return false;
}
rowWriter.start();
for (int i = 0; i < row.length; i++) {
writerList.get(i).load(row);
}
// Write Metadata
writeMetadata(row.length);
rowWriter.save();
} catch (IOException e) {
throw UserException.dataReadError()
.message("Error reading SAS file: " + e.getMessage())
.addContext(errorContext)
.build(logger);
}
return true;
}
private void populateMetadata(SasFileProperties fileProperties) {
compressionMethod = fileProperties.getCompressionMethod();
fileLabel = fileProperties.getFileLabel();
fileType = fileProperties.getFileType();
osName = fileProperties.getOsName();
osType = fileProperties.getOsType();
sasRelease = fileProperties.getSasRelease();
sessionEncoding = fileProperties.getSessionEncoding();
serverType = fileProperties.getServerType();
dateCreated = convertDateToLocalDate(fileProperties.getDateCreated());
dateModified = convertDateToLocalDate(fileProperties.getDateCreated());
}
private void writeMetadata(int startIndex) {
((StringSasColumnWriter)writerList.get(startIndex)).load(compressionMethod);
((StringSasColumnWriter)writerList.get(startIndex+1)).load(fileLabel);
((StringSasColumnWriter)writerList.get(startIndex+2)).load(fileType);
((StringSasColumnWriter)writerList.get(startIndex+3)).load(osName);
((StringSasColumnWriter)writerList.get(startIndex+4)).load(osType);
((StringSasColumnWriter)writerList.get(startIndex+5)).load(sasRelease);
((StringSasColumnWriter)writerList.get(startIndex+6)).load(sessionEncoding);
((StringSasColumnWriter)writerList.get(startIndex+7)).load(serverType);
((DateSasColumnWriter)writerList.get(startIndex+8)).load(dateCreated);
((DateSasColumnWriter)writerList.get(startIndex+9)).load(dateModified);
}
private static LocalDate convertDateToLocalDate(Date date) {
return Instant.ofEpochMilli(date.toInstant().toEpochMilli())
.atZone(ZoneOffset.ofHours(0))
.toLocalDate();
}
public abstract static class SasColumnWriter {
final String columnName;
final ScalarWriter writer;
final int columnIndex;
public SasColumnWriter(int columnIndex, String columnName, ScalarWriter writer) {
this.columnIndex = columnIndex;
this.columnName = columnName;
this.writer = writer;
}
public abstract void load (Object[] row);
}
public static class StringSasColumnWriter extends SasColumnWriter {
StringSasColumnWriter (int columnIndex, String columnName, RowSetLoader rowWriter) {
super(columnIndex, columnName, rowWriter.scalar(columnName));
}
@Override
public void load(Object[] row) {
if (row[columnIndex] != null) {
writer.setString((String) row[columnIndex]);
}
}
public void load (String value) {
if (!Strings.isNullOrEmpty(value)) {
writer.setString(value);
}
}
}
public static class BigIntSasColumnWriter extends SasColumnWriter {
BigIntSasColumnWriter (int columnIndex, String columnName, RowSetLoader rowWriter) {
super(columnIndex, columnName, rowWriter.scalar(columnName));
}
@Override
public void load(Object[] row) {
writer.setLong((Long) row[columnIndex]);
}
}
public static class DateSasColumnWriter extends SasColumnWriter {
DateSasColumnWriter (int columnIndex, String columnName, RowSetLoader rowWriter) {
super(columnIndex, columnName, rowWriter.scalar(columnName));
}
@Override
public void load(Object[] row) {
LocalDate value = convertDateToLocalDate((Date)row[columnIndex]);
writer.setDate(value);
}
public void load(LocalDate date) {
writer.setDate(date);
}
}
public static class TimeSasColumnWriter extends SasColumnWriter {
TimeSasColumnWriter (int columnIndex, String columnName, RowSetLoader rowWriter) {
super(columnIndex, columnName, rowWriter.scalar(columnName));
}
@Override
public void load(Object[] row) {
int seconds = ((Long)row[columnIndex]).intValue();
LocalTime value = LocalTime.parse(formatSeconds(seconds));
writer.setTime(value);
}
private String formatSeconds(int timeInSeconds)
{
int hours = timeInSeconds / 3600;
int secondsLeft = timeInSeconds - hours * 3600;
int minutes = secondsLeft / 60;
int seconds = secondsLeft - minutes * 60;
StringBuilder formattedTime = new StringBuilder();
if (hours < 10) {
formattedTime.append("0");
}
formattedTime.append(hours).append(":");
if (minutes < 10) {
formattedTime.append("0");
}
formattedTime.append(minutes).append(":");
if (seconds < 10) {
formattedTime.append("0");
}
formattedTime.append(seconds);
return formattedTime.toString();
}
}
public static class DoubleSasColumnWriter extends SasColumnWriter {
DoubleSasColumnWriter (int columnIndex, String columnName, RowSetLoader rowWriter) {
super(columnIndex, columnName, rowWriter.scalar(columnName));
}
@Override
public void load(Object[] row) {
// The SAS reader does something strange with zeros. For whatever reason, even if the
// field is a floating point number, the value is returned as a long. This causes class
// cast exceptions.
if (row[columnIndex].equals(0L)) {
writer.setDouble(0.0);
} else {
writer.setDouble((Double) row[columnIndex]);
}
}
}
}
| |
/*
* Copyright 2012 - 2014 Future Interface
* This software is licensed under the terms of the MIT license.
*
*/
/*
* Copyright (C) 2008 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.fi.tech;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import javax.microedition.khronos.egl.EGL10;
import javax.microedition.khronos.egl.EGL11;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.egl.EGLContext;
import javax.microedition.khronos.egl.EGLDisplay;
import javax.microedition.khronos.egl.EGLSurface;
import javax.microedition.khronos.opengles.GL10;
import com.fi.tech.GLWallpaperService.GLEngine;
import com.fi.tech.gesture.Paper3D;
import android.service.wallpaper.WallpaperService;
import android.util.Log;
import android.view.MotionEvent;
import android.view.SurfaceHolder;
// Original code provided by Robert Green
// http://www.rbgrn.net/content/354-glsurfaceview-adapted-3d-live-wallpapers
// Original code provided by Robert Green
// http://www.rbgrn.net/content/354-glsurfaceview-adapted-3d-live-wallpapers
/**
 * Live-wallpaper service hosting a single shared GL render thread; each
 * wallpaper instance is served by a {@link GLEngine}.
 *
 * <p>Fix: the {@code InterruptedException} from the post-touch
 * {@code Thread.sleep(1)} was silently swallowed; the interrupt status is
 * now restored so callers can still observe it.</p>
 */
public class GLWallpaperService extends WallpaperService
{
    public static final String SHARED_PREFS_NAME = "GLWallpaperSettings";
    // Counts created engines for log tagging. NOTE(review): not synchronized;
    // presumably engines are only created on the main thread - confirm.
    protected static int mEngineCounter = 0;
    // Single render thread shared by every engine of this service instance.
    public GLThread mThread = null;
    static public EGLConfigChooser mEGLConfigChooser = new ConfigChooser(false);
    static public EGLContextFactory mEGLContextFactory = new DefaultContextFactory();
    static public EGLWindowSurfaceFactory mEGLWindowSurfaceFactory = new DefaultWindowSurfaceFactory();

    @Override
    public Engine onCreateEngine()
    {
        return new GLEngine();
    }

    // FIXME: Better to move this function out of GLWallpaperService and put it with other GL helper functions.
    /** Drains and logs every pending EGL error under the given prompt. */
    public static void checkEglError(String prompt, EGL10 egl)
    {
        int error;
        while ((error = egl.eglGetError()) != EGL10.EGL_SUCCESS)
        {
            Log.e("Paper3D", String.format("%s: EGL error: 0x%x", prompt, error));
        }
    }

    public class GLEngine extends Engine
    {
        private int mEngineID;
        private PContext mContext;

        /**
         * Translates the Android touch event into the native touch model and
         * forwards it to the render thread.
         */
        @Override
        public void onTouchEvent(final MotionEvent event)
        {
            final int pointerCount = event.getPointerCount();
            final int cursorIndex;
            final int cursorState;
            final int state;
            int cursorIndexLocal = -1; /* Match all pointers. */
            int action = event.getAction();
            if (action == MotionEvent.ACTION_DOWN)
            {
                cursorState = 0;
                state = 0;
            }
            else if (action == MotionEvent.ACTION_MOVE)
            {
                cursorState = 2;
                state = 1;
            }
            else if ((action & MotionEvent.ACTION_POINTER_DOWN) == MotionEvent.ACTION_POINTER_DOWN)
            {
                cursorState = 0;
                cursorIndexLocal = event.findPointerIndex(action & MotionEvent.ACTION_POINTER_ID_MASK);
                state = 1;
            }
            else if ((action & MotionEvent.ACTION_POINTER_UP) == MotionEvent.ACTION_POINTER_UP)
            {
                cursorState = 3;
                cursorIndexLocal = event.findPointerIndex(action & MotionEvent.ACTION_POINTER_ID_MASK);
                state = 1;
            }
            else
            {
                cursorState = 3;
                state = 2;
            }
            cursorIndex = cursorIndexLocal;
            queueEvent(new Runnable()
            {
                public void run() {
                    if (mContext != null)
                    {
                        long touchEvent = mContext.touchEvent(pointerCount, state);
                        for (int i = 0; i < pointerCount; ++i)
                        {
                            int touchState = 1;
                            // Apply the action's state to the matching pointer
                            // (or to all pointers when cursorIndex is -1).
                            if ((cursorIndex < 0) || (cursorIndex == i))
                            {
                                touchState = cursorState;
                            }
                            mContext.touchCursor(
                                touchEvent,
                                i,
                                event.getPointerId(i),
                                event.getX(i),
                                event.getY(i),
                                event.getPressure(i),
                                event.getSize(i),
                                touchState,
                                event.getEventTime());
                        }
                    }
                }
            });
            try
            {
                Thread.sleep(1);
            }
            catch (InterruptedException e) {
                // BUG FIX: don't swallow the interrupt - restore the flag so
                // the caller can still observe the interruption.
                Thread.currentThread().interrupt();
            }
            super.onTouchEvent(event);
        }

        public GLEngine()
        {
            super();
            mEngineID = ++mEngineCounter;
            // The thread is created if it has not. There is only one thread inside
            // GLWallpaperService instance.
            if (mThread == null)
            {
                mThread = new GLThread();
                mThread.start();
            }
            mContext = null;
        }

        @Override
        public void onVisibilityChanged(boolean visible)
        {
            if (visible)
            {
                onResume();
            }
            else
            {
                onPause();
            }
            super.onVisibilityChanged(visible);
        }

        @Override
        public void onCreate(SurfaceHolder surfaceHolder)
        {
            super.onCreate(surfaceHolder);
            Log.d("GLEngine"+mEngineID, "GLEngine.onCreate()");
            setTouchEventsEnabled(true);
        }

        @Override
        public void onDestroy()
        {
            Log.d("GLEngine"+mEngineID, "GLEngine.onDestroy()");
            super.onDestroy();
        }

        @Override
        public void onSurfaceChanged(SurfaceHolder holder, int format, int width, int height)
        {
            Log.d("GLEngine"+mEngineID, "onSurfaceChanged()");
            mThread.onWindowResize(this, holder, width, height);
            super.onSurfaceChanged(holder, format, width, height);
        }

        @Override
        public void onSurfaceCreated(SurfaceHolder holder)
        {
            Log.d("GLEngine"+mEngineID, "onSurfaceCreated()");
            mThread.onSurfaceCreated(holder, this);
            super.onSurfaceCreated(holder);
        }

        @Override
        public void onSurfaceDestroyed(SurfaceHolder holder)
        {
            Log.d("GLEngine"+mEngineID, "onSurfaceDestroyed()");
            super.onSurfaceDestroyed(holder);
        }

        public void onPause()
        {
            Log.i("Paper3D", "GLEngine(" + mEngineID + "): onPause()");
            mThread.onPause(this);
        }

        public void onResume()
        {
            Log.i("Paper3D", "GLEngine(" + mEngineID + "): onResume()");
            mThread.onResume(this);
        }

        /** Posts a runnable onto the shared render thread. */
        public void queueEvent(Runnable r)
        {
            mThread.queueEvent(r);
        }

        public void setContext(PContext context)
        {
            mContext = context;
        }
    }
}
class EngineRenderContext
{
private static EGL10 mEgl;
private static EGLDisplay mEglDisplay;
private static EGLConfig mEglConfig;
private static EGLContext mEglContext;
private static PContext mContext;
private EGLSurface mEglSurface;
private SurfaceHolder mSurf;
private int mWidth;
private int mHeight;
private boolean mActive;
private boolean mPaused;
private boolean mSurfaceChanged;
private boolean mSizeChanged;
public static void start()
{
Log.i("Paper3D", "EngineRenderContext: start()");
if (mEglDisplay == null || mEgl == null || mEglContext == null)
{
mEgl = (EGL10) EGLContext.getEGL();
mEglDisplay = mEgl.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY);
if (mEglConfig == null)
{
int[] version = new int[2];
mEgl.eglInitialize(mEglDisplay, version);
mEglConfig = GLWallpaperService.mEGLConfigChooser.chooseConfig(mEgl, mEglDisplay);
}
mEglContext = GLWallpaperService.mEGLContextFactory.createContext(mEgl, mEglDisplay, mEglConfig);
if (mEglContext == null || mEglContext == EGL10.EGL_NO_CONTEXT)
{
throw new RuntimeException("Failed to create a GL context.");
}
mContext = new PContext(Paper3D.CONTEXT_NAME);
}
else
{
Log.e("Paper3D", "EngineRenderContext: Start multiple times.");
}
}
public static void finish()
{
Log.i("Paper3D", "EngineRenderContext::finish()");
if (mContext != null)
{
mContext.uninitialize();
mContext = null;
}
if (mEglContext != null)
{
GLWallpaperService.mEGLContextFactory.destroyContext(mEgl, mEglDisplay, mEglContext);
}
if (mEglDisplay != null)
{
mEgl.eglTerminate(mEglDisplay);
}
mEglDisplay = null;
mEgl = null;
mEglConfig = null;
mEglContext = null;
}
public EngineRenderContext()
{
mEglSurface = null;
mSurf = null;
mActive = false;
mPaused = false;
mSurfaceChanged = false;
mSizeChanged = false;
mWidth = 0;
mHeight = 0;
}
public void changeSurface(SurfaceHolder holder, int width, int height)
{
if (mActive)
{
throw new RuntimeException("Cannot change surface when render is activity");
}
if (holder != mSurf)
{
mSurf = holder;
mSurfaceChanged = true;
}
if (mWidth != width || mHeight != height)
{
mWidth = width;
mHeight = height;
mSizeChanged = true;
}
destroySurface();
}
public void activate()
{
if (mActive)
{
throw new RuntimeException("Activate called when active.");
}
if (mEglContext == null)
{
throw new RuntimeException("No EGL context is available.");
}
if (mSurf == null)
{
throw new RuntimeException("No native surface is available.");
}
// Delay to create context/surface
if (mEglSurface == null)
{
createSurface();
}
mSurfaceChanged = true;
if (!mEgl.eglMakeCurrent(mEglDisplay, mEglSurface, mEglSurface, mEglContext))
{
throw new RuntimeException("eglMakeCurrent failed.");
}
mActive = true;
}
public void deactivate()
{
if (!mActive)
{
throw new RuntimeException("Deactive called when inactive.");
}
if (!mEgl.eglMakeCurrent(mEglDisplay, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_CONTEXT))
{
throw new RuntimeException("eglMakeCurrent failed.");
}
mActive = false;
}
private void createSurface()
{
if (mActive)
{
throw new RuntimeException("createSurface() called when active.");
}
if (mEglSurface != null)
{
throw new RuntimeException("createSurface() called when mEglSurface is not null.");
}
mEglSurface = GLWallpaperService.mEGLWindowSurfaceFactory.createWindowSurface(mEgl, mEglDisplay, mEglConfig, mSurf);
if (mEglSurface == null || mEglSurface == EGL10.EGL_NO_SURFACE)
{
throw new RuntimeException("createWindowSurface failed");
}
}
private void destroySurface()
{
if (mActive)
{
throw new RuntimeException("destroySurface() called when active.");
}
if (mEglSurface != null)
{
GLWallpaperService.mEGLWindowSurfaceFactory.destroySurface(mEgl, mEglDisplay, mEglSurface);
mEglSurface = null;
}
}
public void pause()
{
mPaused = true;
}
public void resume()
{
mPaused = false;
}
public boolean swap()
{
if (!mActive)
{
new RuntimeException("swap() called but inactive.");
}
if (!available())
{
new RuntimeException("swap() called but inavailable.");
}
mEgl.eglSwapBuffers(mEglDisplay, mEglSurface);
return mEgl.eglGetError() != EGL11.EGL_CONTEXT_LOST;
}
public boolean active()
{
return mActive;
}
public boolean available()
{
return mSurf != null && !mPaused;
}
public void onSurfaceChanged(GL10 gl)
{
if (!mContext.isInitialized())
{
if (!mContext.initialize(mWidth, mHeight))
{
mContext = null;
throw new RuntimeException("Failed to create a Paper3D context.");
}
}
mContext.resize(mWidth, mHeight);
}
public void onDrawFrame(GL10 gl)
{
mContext.update();
}
public boolean popSizeChanged()
{
boolean ret = mSizeChanged;
mSizeChanged = false;
return ret;
}
public boolean popSurfaceChanged()
{
boolean ret = mSurfaceChanged;
mSurfaceChanged = false;
return ret;
}
public boolean popSurfaceOrSizeChanged()
{
boolean ret = mSizeChanged || mSurfaceChanged;
popSizeChanged();
popSurfaceChanged();
return ret;
}
public int surfaceWidth()
{
return mWidth;
}
public int surfaceHeight()
{
return mHeight;
}
public PContext getContext()
{
return mContext;
}
}
class GLThread extends Thread
{
private ArrayList<Runnable> mEventQueue = new ArrayList<Runnable>();
private ArrayList<Runnable> mCommandQueue = new ArrayList<Runnable>();
private static int MAX_EVENT_COUNT_PER_UPDATE = 100;
private HashMap<GLEngine, EngineRenderContext> renderContexts = new HashMap<GLEngine, EngineRenderContext>();
private EngineRenderContext activeRenderContext = null;
private final GLThreadManager sGLThreadManager = new GLThreadManager();
private GLThread mEglOwner;
// Once the thread is started, all accesses to the following member
// variables are protected by the sGLThreadManager monitor
public boolean mDone;
private boolean mEventsWaiting;
private boolean mCommandsWaiting;
GLThread()
{
super();
mDone = false;
}
@Override
public void run()
{
setName("GLThread " + getId());
try
{
guardedRun();
}
catch (InterruptedException e)
{
// fall thru and exit normally
}
}
private void guardedRun() throws InterruptedException
{
EngineRenderContext.start();
try
{
GL10 gl = null;
// This is our main activity thread's loop, we go until asked to quit.
while (!isDone())
{
// FIXME: eventsWaiting and commandsWaiting are not needed here as it might block the event handling.
boolean eventsWaiting = true;
boolean commandsWaiting = true;
synchronized (sGLThreadManager)
{
eventsWaiting = mEventsWaiting;
commandsWaiting = mCommandsWaiting;
}
if(true)
{
Runnable r = null;
while ((r = getCommand()) != null)
{
r.run();
}
}
// Try activate an engine.
// FIXME: why is render context now unrelated to Engine.
if (activeRenderContext == null)
{
Iterator<EngineRenderContext> iter = renderContexts.values().iterator();
EngineRenderContext renderContext;
while (iter.hasNext())
{
renderContext = iter.next();
if (renderContext.available())
{
activateContext(renderContext);
break;
}
}
}
if (PNativeLibrary.getInstance().isInitialized())
{
Runnable r = null;
int eventCounter = 0;
while ((r = getEvent()) != null)
{
if (eventCounter == 0)
{
Log.i("Paper3D", "GLThread: Process event ...");
}
try
{
r.run();
}
catch (Exception e)
{
}
++eventCounter;
// Limit the event processing time in order to avoid
// blocking other incoming events.
if (eventCounter >= MAX_EVENT_COUNT_PER_UPDATE)
{
break;
}
}
if (eventCounter > 0)
{
Log.i("Paper3D", "GLThread: Event was processed, count: " + eventCounter);
}
}
if (activeRenderContext != null)
{
if (activeRenderContext.popSurfaceOrSizeChanged())
{
Log.i("Paper3D", "GLThread: Surface has been changed.");
activeRenderContext.onSurfaceChanged(gl);
}
activeRenderContext.onDrawFrame(gl);
activeRenderContext.swap();
Thread.sleep(10);
}
else
{
synchronized (sGLThreadManager)
{
sGLThreadManager.wait();
}
}
}
}
finally
{
synchronized (sGLThreadManager)
{
EngineRenderContext.finish();
}
}
}
private boolean isDone()
{
return mDone;
}
private EngineRenderContext getEngineRenderContext(GLEngine engine)
{
EngineRenderContext renderContext;
if (renderContexts.containsKey(engine))
{
renderContext = (EngineRenderContext)renderContexts.get(engine);
}
else
{
renderContext = new EngineRenderContext();
engine.setContext(renderContext.getContext());
renderContexts.put(engine, renderContext);
}
return renderContext;
}
private void activateContext(EngineRenderContext context)
{
deactiveCurrentContext();
activeRenderContext = context;
activeRenderContext.activate();
}
private void deactiveCurrentContext()
{
if (activeRenderContext != null)
{
activeRenderContext.deactivate();
}
activeRenderContext = null;
}
public void onSurfaceCreated(final SurfaceHolder holder, final GLEngine engine)
{
Log.i("Paper3D", "GLThread: surfaceCreated()");
queueCommand( new Runnable()
{
@Override
public void run()
{
Log.i("Paper3D", "GLThread: SurfaceCreated on " + engine);
EngineRenderContext context = getEngineRenderContext(engine);
if (context.active())
{
deactiveCurrentContext();
}
context.changeSurface(holder, holder.getSurfaceFrame().width(), holder.getSurfaceFrame().height());
}
});
synchronized(sGLThreadManager)
{
sGLThreadManager.notifyAll();
}
}
public void onWindowResize(final GLEngine engine, final SurfaceHolder surf, final int width, final int height)
{
Log.i("Paper3D", "GLThread: onWindowResize()");
queueCommand( new Runnable()
{
@Override
public void run()
{
Log.i("Paper3D", "GLThread: Size Changed on " + engine);
EngineRenderContext context = getEngineRenderContext(engine);
if (context.active())
{
deactiveCurrentContext();
}
context.changeSurface(surf, width, height);
}
});
synchronized(sGLThreadManager)
{
sGLThreadManager.notifyAll();
}
}
public void surfaceDestroyed(final GLEngine engine) {
Log.i("Paper3D", "GLThread: surfaceDestroyed()");
queueCommand( new Runnable()
{
@Override
public void run()
{
Log.i("Paper3D", "GLThread: SurfaceDestroyed on " + engine);
EngineRenderContext context = getEngineRenderContext(engine);
if(context.active())
{
deactiveCurrentContext();
}
context.changeSurface(null, 0, 0);
}
});
synchronized(sGLThreadManager)
{
sGLThreadManager.notifyAll();
}
}
public void onPause(final GLEngine engine)
{
Log.i("Paper3D", "GLThread: onPause()");
queueCommand( new Runnable()
{
@Override
public void run()
{
Log.i("Paper3D", "GLThread: Pause on " + engine);
EngineRenderContext context = getEngineRenderContext(engine);
if(context.active()) {
deactiveCurrentContext();
}
context.pause();
}
});
synchronized(sGLThreadManager)
{
sGLThreadManager.notifyAll();
}
}
public void onResume(final GLEngine engine)
{
Log.i("Paper3D", "GLThread: onResume()");
queueCommand( new Runnable() {
@Override
public void run()
{
Log.i("Paper3D", "GLThread: Resume on " + engine);
EngineRenderContext context = getEngineRenderContext(engine);
context.resume();
}
});
synchronized(sGLThreadManager)
{
sGLThreadManager.notifyAll();
}
}
public void requestExitAndWait()
{
Log.i("Paper3D", "GLThread: requestExitAndWait()");
synchronized (sGLThreadManager)
{
mDone = true;
sGLThreadManager.notifyAll();
}
try
{
join();
}
catch (InterruptedException ex)
{
Thread.currentThread().interrupt();
}
}
public void queueEvent(Runnable r)
{
synchronized (this)
{
mEventQueue.add(r);
synchronized (sGLThreadManager)
{
mEventsWaiting = true;
sGLThreadManager.notifyAll();
}
}
}
public void queueCommand(Runnable r)
{
synchronized (this)
{
mCommandQueue.add(r);
synchronized (sGLThreadManager)
{
mCommandsWaiting = true;
sGLThreadManager.notifyAll();
}
}
}
private Runnable getCommand()
{
synchronized (this)
{
if (mCommandQueue.size() > 0)
{
return mCommandQueue.remove(0);
}
}
return null;
}
private Runnable getEvent()
{
synchronized (this)
{
if (mEventQueue.size() > 0)
{
return mEventQueue.remove(0);
}
}
return null;
}
private class GLThreadManager
{
}
}
interface EGLConfigChooser
{
EGLConfig chooseConfig(EGL10 egl, EGLDisplay display);
}
class ConfigChooser implements EGLConfigChooser
{
private int[] configAttribs;
/**
* @param antialias
*/
public ConfigChooser(boolean antialias)
{
if (antialias)
{
int[] conf = { EGL10.EGL_SAMPLES, 4, EGL10.EGL_SAMPLE_BUFFERS,
1, EGL10.EGL_RED_SIZE, EGL10.EGL_DONT_CARE,
EGL10.EGL_GREEN_SIZE, EGL10.EGL_DONT_CARE,
EGL10.EGL_BLUE_SIZE, EGL10.EGL_DONT_CARE,
EGL10.EGL_DEPTH_SIZE, 16, EGL10.EGL_RENDERABLE_TYPE, 4,
EGL10.EGL_STENCIL_SIZE, 8, EGL10.EGL_NONE };
configAttribs = conf;
}
else
{
int[] conf = { EGL10.EGL_SAMPLES, 0, EGL10.EGL_SAMPLE_BUFFERS,
0, EGL10.EGL_RED_SIZE, EGL10.EGL_DONT_CARE,
EGL10.EGL_GREEN_SIZE, EGL10.EGL_DONT_CARE,
EGL10.EGL_BLUE_SIZE, EGL10.EGL_DONT_CARE,
EGL10.EGL_DEPTH_SIZE, 16, EGL10.EGL_RENDERABLE_TYPE, 4,
EGL10.EGL_STENCIL_SIZE, 8, EGL10.EGL_NONE };
configAttribs = conf;
}
}
/**
* @param egl
* @param display
* @return
*/
public EGLConfig chooseConfig(EGL10 egl, EGLDisplay display) {
int[] num_config = new int[1];
EGLConfig[] config = new EGLConfig[1];
egl.eglChooseConfig(display, configAttribs, config, 1, num_config);
return config[0];
}
}
// ////////////////////////////////////////////////////////////////////////////////////////////////////////
abstract class BaseConfigChooser implements EGLConfigChooser
{
public BaseConfigChooser(int[] configSpec)
{
mConfigSpec = configSpec;
}
public EGLConfig chooseConfig(EGL10 egl, EGLDisplay display)
{
int[] num_config = new int[1];
egl.eglChooseConfig(display, mConfigSpec, null, 0, num_config);
int numConfigs = num_config[0];
if (numConfigs <= 0) {
throw new IllegalArgumentException("No configs match configSpec");
}
EGLConfig[] configs = new EGLConfig[numConfigs];
egl.eglChooseConfig(display, mConfigSpec, configs, numConfigs,
num_config);
EGLConfig config = chooseConfig(egl, display, configs);
if (config == null)
{
throw new IllegalArgumentException("No config chosen");
}
return config;
}
abstract EGLConfig chooseConfig(EGL10 egl, EGLDisplay display, EGLConfig[] configs);
protected int[] mConfigSpec;
public static class ComponentSizeChooser extends BaseConfigChooser
{
public ComponentSizeChooser(int redSize, int greenSize, int blueSize,
int alphaSize, int depthSize, int stencilSize)
{
super(new int[]
{
EGL10.EGL_RED_SIZE, redSize,
EGL10.EGL_GREEN_SIZE, greenSize, EGL10.EGL_BLUE_SIZE,
blueSize, EGL10.EGL_ALPHA_SIZE, alphaSize,
EGL10.EGL_DEPTH_SIZE, depthSize, EGL10.EGL_STENCIL_SIZE,
stencilSize, EGL10.EGL_NONE
});
mValue = new int[1];
mRedSize = redSize;
mGreenSize = greenSize;
mBlueSize = blueSize;
mAlphaSize = alphaSize;
mDepthSize = depthSize;
mStencilSize = stencilSize;
}
@Override
public EGLConfig chooseConfig(EGL10 egl, EGLDisplay display, EGLConfig[] configs)
{
EGLConfig closestConfig = null;
int closestDistance = 1000;
for (EGLConfig config : configs)
{
int d = findConfigAttrib(egl, display, config, EGL10.EGL_DEPTH_SIZE, 0);
int s = findConfigAttrib(egl, display, config, EGL10.EGL_STENCIL_SIZE, 0);
if (d >= mDepthSize && s >= mStencilSize)
{
int r = findConfigAttrib(egl, display, config, EGL10.EGL_RED_SIZE, 0);
int g = findConfigAttrib(egl, display, config, EGL10.EGL_GREEN_SIZE, 0);
int b = findConfigAttrib(egl, display, config, EGL10.EGL_BLUE_SIZE, 0);
int a = findConfigAttrib(egl, display, config, EGL10.EGL_ALPHA_SIZE, 0);
int distance = Math.abs(r - mRedSize)
+ Math.abs(g - mGreenSize)
+ Math.abs(b - mBlueSize)
+ Math.abs(a - mAlphaSize);
if (distance < closestDistance)
{
closestDistance = distance;
closestConfig = config;
}
}
}
return closestConfig;
}
private int findConfigAttrib(EGL10 egl, EGLDisplay display,
EGLConfig config, int attribute, int defaultValue)
{
if (egl.eglGetConfigAttrib(display, config, attribute, mValue))
{
return mValue[0];
}
return defaultValue;
}
private int[] mValue;
// Subclasses can adjust these values:
protected int mRedSize;
protected int mGreenSize;
protected int mBlueSize;
protected int mAlphaSize;
protected int mDepthSize;
protected int mStencilSize;
}
/**
* This class will choose a supported surface as close to RGB565 as
* possible, with or without a depth buffer.
*
*/
public static class SimpleEGLConfigChooser extends ComponentSizeChooser
{
public SimpleEGLConfigChooser(boolean withDepthBuffer)
{
super(4, 4, 4, 0, withDepthBuffer ? 16 : 0, 0);
// Adjust target values. This way we'll accept a 4444 or
// 555 buffer if there's no 565 buffer available.
mRedSize = 5;
mGreenSize = 6;
mBlueSize = 5;
}
}
}
/**
* An interface for customizing the eglCreateWindowSurface and eglDestroySurface
* calls.
*
*
* This interface must be implemented by clients wishing to call
* {@link GLWallpaperService#setEGLWindowSurfaceFactory(EGLWindowSurfaceFactory)}
*/
interface EGLWindowSurfaceFactory
{
EGLSurface createWindowSurface(EGL10 egl, EGLDisplay display,
EGLConfig config, Object nativeWindow);
void destroySurface(EGL10 egl, EGLDisplay display, EGLSurface surface);
}
class DefaultWindowSurfaceFactory implements EGLWindowSurfaceFactory
{
public EGLSurface createWindowSurface(EGL10 egl, EGLDisplay display,
EGLConfig config, Object nativeWindow)
{
// this is a bit of a hack to work around Droid init problems - if you
// don't have this, it'll get hung up on orientation changes
EGLSurface eglSurface = null;
while (eglSurface == null)
{
try
{
eglSurface = egl.eglCreateWindowSurface(display, config, nativeWindow, null);
}
catch (Throwable t)
{
}
finally
{
if (eglSurface == null)
{
try
{
Thread.sleep(10);
}
catch (InterruptedException t)
{
}
}
}
}
Log.i("Paper3D", "GLThread: Surface created " + eglSurface.toString() + " on " + nativeWindow.toString() );
return eglSurface;
}
public void destroySurface(EGL10 egl, EGLDisplay display, EGLSurface surface)
{
Log.i("Paper3D", "GLThread: Destory surface " + surface.toString());
egl.eglDestroySurface(display, surface);
}
}
/**
* An interface for customizing the eglCreateContext and eglDestroyContext
* calls.
*
*
* This interface must be implemented by clients wishing to call
* {@link GLWallpaperService#setEGLContextFactory(EGLContextFactory)}
*/
interface EGLContextFactory
{
EGLContext createContext(EGL10 egl, EGLDisplay display, EGLConfig eglConfig);
void destroyContext(EGL10 egl, EGLDisplay display, EGLContext context);
}
class DefaultContextFactory implements EGLContextFactory
{
private static int EGL_CONTEXT_CLIENT_VERSION = 0x3098;
public EGLContext createContext(EGL10 egl, EGLDisplay display, EGLConfig config)
{
Log.i("Paper3D", "GLThread: creating OpenGL ES 2.0 context");
int[] attrib_list = { EGL_CONTEXT_CLIENT_VERSION, 2, EGL10.EGL_NONE };
EGLContext context = egl.eglCreateContext(display, config, EGL10.EGL_NO_CONTEXT, attrib_list);
return context;
}
public void destroyContext(EGL10 egl, EGLDisplay display, EGLContext context)
{
egl.eglDestroyContext(display, context);
}
}
| |
/*
* Copyright 2010 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.uberfirebootstrap.client.widgets;
import java.util.Iterator;
import com.google.gwt.event.dom.client.ClickEvent;
import com.google.gwt.event.dom.client.ClickHandler;
import com.google.gwt.event.dom.client.KeyCodes;
import com.google.gwt.event.dom.client.KeyDownEvent;
import com.google.gwt.event.dom.client.KeyDownHandler;
import com.google.gwt.event.dom.client.MouseDownEvent;
import com.google.gwt.event.dom.client.MouseDownHandler;
import com.google.gwt.event.dom.client.MouseMoveEvent;
import com.google.gwt.event.dom.client.MouseMoveHandler;
import com.google.gwt.event.dom.client.MouseUpEvent;
import com.google.gwt.event.dom.client.MouseUpHandler;
import com.google.gwt.user.client.Command;
import com.google.gwt.user.client.DOM;
import com.google.gwt.user.client.ui.FocusPanel;
import com.google.gwt.user.client.ui.FocusWidget;
import com.google.gwt.user.client.ui.Focusable;
import com.google.gwt.user.client.ui.HasWidgets;
import com.google.gwt.user.client.ui.PopupPanel;
import com.google.gwt.user.client.ui.VerticalPanel;
import com.google.gwt.user.client.ui.Widget;
import org.kie.uberfirebootstrap.client.widgets.FormStyleLayout;
public abstract class Popup extends PopupPanel {
private boolean dragged = false;
private int dragStartX;
private int dragStartY;
private Command afterShowEvent;
private Command afterCloseEvent;
private boolean fixedLocation = false;
private PopupTitleBar titleBar;
public Popup() {
setGlassEnabled( true );
}
public void setAfterShow(Command afterShowEvent) {
this.afterShowEvent = afterShowEvent;
}
public void setAfterCloseEvent(Command afterCloseEvent) {
this.afterCloseEvent = afterCloseEvent;
}
@Override
public void show() {
clear();
if ( afterShowEvent != null ) {
afterShowEvent.execute();
}
VerticalPanel verticalPanel = new VerticalPanel();
verticalPanel.setHorizontalAlignment( VerticalPanel.ALIGN_RIGHT );
this.titleBar = new PopupTitleBar( getTitle() );
this.titleBar.closeButton.addClickHandler( new ClickHandler() {
public void onClick(ClickEvent event) {
hide();
if ( afterCloseEvent != null ) {
afterCloseEvent.execute();
}
}
} );
this.titleBar.addMouseDownHandler( new MouseDownHandler() {
public void onMouseDown(MouseDownEvent event) {
dragged = true;
dragStartX = event.getRelativeX( getElement() );
dragStartY = event.getRelativeY( getElement() );
DOM.setCapture( titleBar.getElement() );
}
} );
this.titleBar.addMouseMoveHandler( new MouseMoveHandler() {
public void onMouseMove(MouseMoveEvent event) {
if ( dragged ) {
setPopupPosition( event.getClientX() - dragStartX,
event.getClientY() - dragStartY );
}
}
} );
this.titleBar.addMouseUpHandler( new MouseUpHandler() {
public void onMouseUp(MouseUpEvent event) {
dragged = false;
DOM.releaseCapture( titleBar.getElement() );
}
} );
verticalPanel.add( titleBar );
Widget content = getContent();
content.setWidth( "100%" );
verticalPanel.add( content );
add( verticalPanel );
add( createKeyListeningFocusPanel( verticalPanel ) );
super.show();
focusFirstWidget( content );
if ( !fixedLocation ) {
center();
}
}
private FocusPanel createKeyListeningFocusPanel(VerticalPanel verticalPanel) {
FocusPanel focusPanel = new FocusPanel( verticalPanel );
focusPanel.addKeyDownHandler( new KeyDownHandler() {
public void onKeyDown(KeyDownEvent event) {
if ( event.getNativeKeyCode() == KeyCodes.KEY_ESCAPE ) {
hide();
}
}
} );
focusPanel.setStyleName( "" );
focusPanel.setFocus( true );
focusPanel.setWidth( "100%" );
return focusPanel;
}
private void focusFirstWidget(Widget content) {
if ( content instanceof FormStyleLayout) {
FormStyleLayout fsl = (FormStyleLayout) content;
Widget ow = fsl.getWidget();
if ( ow instanceof HasWidgets ) {
focusFirstWidget( (HasWidgets) ow );
}
}
}
private boolean focusFirstWidget(HasWidgets container) {
boolean bFocused = false;
Iterator<Widget> iw = container.iterator();
while ( !bFocused && iw.hasNext() ) {
Widget w = iw.next();
if ( w instanceof HasWidgets ) {
bFocused = focusFirstWidget( (HasWidgets) w );
} else if ( w instanceof Focusable ) {
((Focusable) w).setFocus( true );
bFocused = true;
break;
} else if ( w instanceof FocusWidget ) {
((FocusWidget) w).setFocus( true );
bFocused = true;
break;
}
}
return bFocused;
}
@Override
public void setPopupPosition(int left,
int top) {
super.setPopupPosition( left,
top );
if ( left != 0 && top != 0 ) {
fixedLocation = true;
}
}
/**
* This returns the height of the usable client space, excluding title bar.
*
* @return
*/
public int getClientHeight() {
return this.getWidget().getOffsetHeight() - this.titleBar.getOffsetHeight();
}
abstract public Widget getContent();
}
| |
package com.cradle.iitc_mobile;
import android.app.ActionBar;
import android.content.SharedPreferences;
import android.content.res.Resources;
import android.os.Bundle;
import android.os.Handler;
import android.preference.PreferenceManager;
import android.support.v4.app.ActionBarDrawerToggle;
import android.support.v4.widget.DrawerLayout;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemClickListener;
import android.widget.ArrayAdapter;
import android.widget.ListView;
import android.widget.TextView;
public class IITC_NavigationHelper extends ActionBarDrawerToggle implements OnItemClickListener {
// Show/hide the up arrow on the very left
// getActionBar().setDisplayHomeAsUpEnabled(enabled);
// Show/hide the activity icon/logo
// getActionBar().setDisplayShowHomeEnabled(enabled);
// Show/hide the activity title
// getActionBar().setDisplayShowTitleEnabled(enabled);
// Makes the icon/title clickable
// getActionBar().setHomeButtonEnabled(enabled);
private final IITC_Mobile mIitc;
private final ActionBar mActionBar;
private final SharedPreferences mPrefs;
private final NavigationAdapter mNavigationAdapter;
private final DrawerLayout mDrawerLayout;
private final ListView mDrawerLeft;
private final View mDrawerRight;
private final IITC_NotificationHelper mNotificationHelper;
private boolean mDesktopMode = false;
private Pane mPane = Pane.MAP;
private String mHighlighter = null;
public IITC_NavigationHelper(final IITC_Mobile iitc, final ActionBar bar) {
super(iitc, (DrawerLayout) iitc.findViewById(R.id.drawer_layout),
R.drawable.ic_drawer, R.string.drawer_open, R.string.drawer_close);
mIitc = iitc;
mActionBar = bar;
mDrawerLeft = (ListView) iitc.findViewById(R.id.left_drawer);
mDrawerRight = iitc.findViewById(R.id.right_drawer);
mDrawerLayout = (DrawerLayout) iitc.findViewById(R.id.drawer_layout);
mPrefs = PreferenceManager.getDefaultSharedPreferences(iitc);
mActionBar.setDisplayShowHomeEnabled(true); // show icon
mNavigationAdapter = new NavigationAdapter();
mDrawerLeft.setAdapter(mNavigationAdapter);
mDrawerLeft.setOnItemClickListener(this);
mDrawerLeft.setItemChecked(0, true);
mDrawerLayout.setDrawerListener(this);
mNotificationHelper = new IITC_NotificationHelper(mIitc);
onPrefChanged(); // also calls updateActionBar()
mNotificationHelper.showNotice(IITC_NotificationHelper.NOTICE_HOWTO);
}
private void updateViews() {
final int position = mNavigationAdapter.getPosition(mPane);
if (position >= 0 && position < mNavigationAdapter.getCount()) {
mDrawerLeft.setItemChecked(position, true);
} else {
mDrawerLeft.setItemChecked(mDrawerLeft.getCheckedItemPosition(), false);
}
if (mDesktopMode) {
mActionBar.setDisplayHomeAsUpEnabled(false); // Hide "up" indicator
mActionBar.setHomeButtonEnabled(false); // Make icon unclickable
mActionBar.setTitle(mIitc.getString(R.string.app_name));
mDrawerLayout.setDrawerLockMode(DrawerLayout.LOCK_MODE_LOCKED_CLOSED, mDrawerLeft);
mDrawerLayout.setDrawerLockMode(DrawerLayout.LOCK_MODE_UNLOCKED, mDrawerRight);
setDrawerIndicatorEnabled(false);
} else {
if (mIitc.isLoading()) {
mActionBar.setDisplayHomeAsUpEnabled(false); // Hide "up" indicator
mActionBar.setHomeButtonEnabled(false);// Make icon unclickable
mDrawerLayout.setDrawerLockMode(DrawerLayout.LOCK_MODE_LOCKED_CLOSED);
setDrawerIndicatorEnabled(false);
} else {
mActionBar.setDisplayHomeAsUpEnabled(true); // Show "up" indicator
mActionBar.setHomeButtonEnabled(true);// Make icon clickable
mDrawerLayout.setDrawerLockMode(DrawerLayout.LOCK_MODE_UNLOCKED);
if (mPane == Pane.MAP || mDrawerLayout.isDrawerOpen(mDrawerLeft)) {
setDrawerIndicatorEnabled(true);
} else {
setDrawerIndicatorEnabled(false);
}
}
if (mDrawerLayout.isDrawerOpen(mDrawerLeft) || mPane == Pane.MAP) {
mActionBar.setTitle(mIitc.getString(R.string.app_name));
} else {
mActionBar.setTitle(mPane.label);
}
}
final boolean mapVisible = mDesktopMode || mPane == Pane.MAP;
if ("No Highlights".equals(mHighlighter) || isDrawerOpened() || mIitc.isLoading() || !mapVisible) {
mActionBar.setSubtitle(null);
} else {
mActionBar.setSubtitle(mHighlighter);
}
}
public void addPane(final String name, final String label, final String icon) {
mNotificationHelper.showNotice(IITC_NotificationHelper.NOTICE_PANES);
final Resources res = mIitc.getResources();
final String packageName = res.getResourcePackageName(R.string.app_name);
/*
* since the package name is overridden in test builds
* we can't use context.getPackageName() to get the package name
* because the resources were processed before the package name was finally updated.
* so we have to retrieve the package name of another resource with Resources.getResourcePackageName()
* see http://www.piwai.info/renaming-android-manifest-package/
*/
final int resId = mIitc.getResources().getIdentifier(icon, "drawable", packageName);
mNavigationAdapter.add(new Pane(name, label, resId));
}
public void closeDrawers() {
mDrawerLayout.closeDrawers();
}
public Pane getPane(final String id) {
for (int i = 0; i < mNavigationAdapter.getCount(); i++) {
final Pane pane = mNavigationAdapter.getItem(i);
if (pane.name.equals(id))
return pane;
}
throw new IllegalArgumentException("Unknown pane: " + id);
}
public void hideActionBar() {
mActionBar.hide();
}
public boolean isDrawerOpened() {
return mDrawerLayout.isDrawerOpen(mDrawerLeft) || mDrawerLayout.isDrawerOpen(mDrawerRight);
}
@Override
public void onDrawerClosed(final View drawerView) {
super.onDrawerClosed(drawerView);
mIitc.getWebView().onWindowFocusChanged(true);
// delay invalidating to prevent flickering in case another drawer is opened
(new Handler()).postDelayed(new Runnable() {
@Override
public void run() {
mIitc.invalidateOptionsMenu();
updateViews();
}
}, 200);
}
@Override
public void onDrawerOpened(final View drawerView) {
super.onDrawerOpened(drawerView);
mIitc.getWebView().onWindowFocusChanged(false);
mIitc.invalidateOptionsMenu();
updateViews();
mDrawerLayout.closeDrawer(drawerView.equals(mDrawerLeft) ? mDrawerRight : mDrawerLeft);
}
@Override
public void onItemClick(final AdapterView<?> parent, final View view, final int position, final long id) {
final Pane item = mNavigationAdapter.getItem(position);
mIitc.switchToPane(item);
if (item == Pane.INFO) {
mNotificationHelper.showNotice(IITC_NotificationHelper.NOTICE_INFO);
}
mDrawerLayout.closeDrawer(mDrawerLeft);
}
public void onLoadingStateChanged() {
updateViews();
}
@Override
public boolean onOptionsItemSelected(final MenuItem item) {
if (item.getItemId() == android.R.id.home) {
mDrawerLayout.closeDrawer(mDrawerRight);
}
return super.onOptionsItemSelected(item);
}
public void onPostCreate(final Bundle savedInstanceState) {
// Sync the toggle state after onRestoreInstanceState has occurred.
syncState();
}
/** Re-reads the preferences this helper depends on and refreshes the views. */
public void onPrefChanged() {
    mDesktopMode = mPrefs.getBoolean("pref_force_desktop", false);
    updateViews();
}
/** Opens the right drawer, but only if it is not locked shut. */
public void openRightDrawer() {
    final boolean unlocked =
            mDrawerLayout.getDrawerLockMode(mDrawerRight) == DrawerLayout.LOCK_MODE_UNLOCKED;
    if (unlocked) {
        mDrawerLayout.openDrawer(mDrawerRight);
    }
}
/**
 * Returns navigation to its defaults: the map becomes the active pane and
 * the drawer shows the default pane list again.
 */
public void reset() {
    mPane = Pane.MAP;
    mNavigationAdapter.reset();
    updateViews();
}
/**
 * NOTE(review): the {@code enabled} flag is never read — this merely resets
 * the adapter to the default panes. It looks like debug-only panes were meant
 * to be added or removed based on the flag; confirm the intended behavior.
 */
public void setDebugMode(final boolean enabled) {
    mNavigationAdapter.reset();
}
/** Records the active highlighter name and refreshes the dependent views. */
public void setHighlighter(final String name) {
    mHighlighter = name;
    updateViews();
}
/** Shows the action bar (delegates to the activity's ActionBar). */
public void showActionBar() {
    mActionBar.show();
}
/**
 * Records the given pane as the active one and refreshes the UI; shows the
 * sharing notice when the info pane is entered.
 */
public void switchTo(final Pane pane) {
    mPane = pane;
    if (pane.equals(Pane.INFO)) mNotificationHelper.showNotice(IITC_NotificationHelper.NOTICE_SHARING);
    updateViews();
}
/**
 * List adapter backing the drawer's navigation list: holds the registered
 * panes and renders each as a single selectable text row with optional icon.
 */
private class NavigationAdapter extends ArrayAdapter<Pane> {
    public NavigationAdapter() {
        super(mIitc, R.layout.list_item_selectable);
        reset();
    }

    @Override
    public View getView(final int position, final View convertView, final ViewGroup parent) {
        final TextView view = (TextView) super.getView(position, convertView, parent);
        final Pane item = getItem(position);
        view.setText(item.label);
        // An icon id of 0 means "no icon"; skip setting the drawable then.
        if (item.icon != 0) {
            view.setCompoundDrawablesWithIntrinsicBounds(item.icon, 0, 0, 0);
        }
        return view;
    }

    /** Restores the default pane list (INFO, ALL, FACTION, ALERTS), in order. */
    public void reset() {
        clear();
        add(Pane.INFO);
        add(Pane.ALL);
        add(Pane.FACTION);
        add(Pane.ALERTS);
    }
}
/**
 * Descriptor for a navigation target (a "pane") shown in the drawer.
 * Identity is defined solely by {@link #name}; label and icon are
 * presentation data and do not participate in equals/hashCode.
 */
public static class Pane {
    public static final Pane ALL = new Pane("all", "All", R.drawable.ic_action_view_as_list);
    public static final Pane FACTION = new Pane("faction", "Faction", R.drawable.ic_action_cc_bcc);
    public static final Pane ALERTS = new Pane("alerts", "Alerts", R.drawable.ic_action_warning);
    public static final Pane INFO = new Pane("info", "Info", R.drawable.ic_action_about);
    public static final Pane MAP = new Pane("map", "Map", R.drawable.ic_action_map);

    /** Drawable resource id shown next to the label; 0 means "no icon". */
    private final int icon;
    /** Human-readable label shown in the drawer list. */
    public String label;
    /** Internal identifier; the sole basis for equals/hashCode. */
    public String name;

    /**
     * @param name  internal identifier (should be unique among panes)
     * @param label human-readable label
     * @param icon  drawable resource id, or 0 for none
     */
    public Pane(final String name, final String label, final int icon) {
        this.name = name;
        this.label = label;
        this.icon = icon;
    }

    @Override
    public boolean equals(final Object o) {
        // Fast path for the common identity case (panes are mostly singletons).
        if (o == this) return true;
        if (o == null) return false;
        if (o.getClass() != getClass()) return false;
        final Pane pane = (Pane) o;
        return name.equals(pane.name);
    }

    @Override
    public int hashCode() {
        return name.hashCode();
    }
}
}
| |
/*
* Copyright 2002-2004 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.validation;
import java.beans.PropertyEditor;
import java.util.Collections;
import java.util.EmptyStackException;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Stack;
import org.springframework.beans.BeanWrapper;
import org.springframework.beans.BeanWrapperImpl;
/**
* Default implementation of the Errors interface, supporting
* registration and evaluation of binding errors.
* Slightly unusual, as it <i>is</i> an exception.
*
* <p>This is mainly a framework-internal class. Normally, application
* code will work with the Errors interface, or a DataBinder that in
* turn exposes a BindException via <code>getErrors()</code>.
*
* <p>Supports exporting a model, suitable for example for web MVC.
* Thus, it is sometimes used as parameter type instead of the Errors interface
* itself - if extracting the model makes sense in the respective context.
*
* @author Rod Johnson
* @author Juergen Hoeller
* @see #getModel
* @see DataBinder#getErrors
*/
public class BindException extends Exception implements Errors {

    /**
     * Prefix for the name of the Errors instance in a model,
     * followed by the object name.
     */
    public static final String ERROR_KEY_PREFIX = BindException.class.getName() + ".";

    /** All registered errors (global ObjectErrors and FieldErrors), in registration order. */
    private final List errors = new LinkedList();

    /** Wrapper used for property access on the bound target object. */
    private final BeanWrapper beanWrapper;

    /** Name under which the target object is exposed. */
    private final String objectName;

    /** Strategy used to resolve error codes into message codes. */
    private MessageCodesResolver messageCodesResolver = new DefaultMessageCodesResolver();

    /** Current nested path; always either empty or ending with the separator. */
    private String nestedPath = "";

    /** Former nested paths, maintained by pushNestedPath/popNestedPath. */
    private final Stack nestedPathStack = new Stack();

    /**
     * Create a new BindException instance.
     * @param target target object to bind onto
     * @param name name of the target object
     * @see DefaultMessageCodesResolver
     */
    public BindException(Object target, String name) {
        this.beanWrapper = new BeanWrapperImpl(target);
        this.objectName = name;
        // nestedPath's field initializer already sets the "" default.
    }

    /**
     * Return the BeanWrapper that this instance uses.
     */
    protected BeanWrapper getBeanWrapper() {
        return beanWrapper;
    }

    /**
     * Return the wrapped target object.
     */
    public Object getTarget() {
        return this.beanWrapper.getWrappedInstance();
    }

    /** Return the name of the bound target object. */
    public String getObjectName() {
        return objectName;
    }

    /**
     * Set the strategy to use for resolving errors into message codes.
     * Default is DefaultMessageCodesResolver.
     * @see DefaultMessageCodesResolver
     */
    public void setMessageCodesResolver(MessageCodesResolver messageCodesResolver) {
        this.messageCodesResolver = messageCodesResolver;
    }

    /**
     * Return the strategy to use for resolving errors into message codes.
     */
    public MessageCodesResolver getMessageCodesResolver() {
        return messageCodesResolver;
    }

    /** Set the current nested path, discarding any previously pushed paths. */
    public void setNestedPath(String nestedPath) {
        doSetNestedPath(nestedPath);
        this.nestedPathStack.clear();
    }

    /** Return the current nested path (empty, or ending with the separator). */
    public String getNestedPath() {
        return nestedPath;
    }

    /** Save the current nested path on the stack and append the given sub-path. */
    public void pushNestedPath(String subPath) {
        this.nestedPathStack.push(this.nestedPath);
        doSetNestedPath(this.nestedPath + subPath);
    }

    /**
     * Pop the nested path that was active before the last pushNestedPath call.
     * <p>Fix: the method throws IllegalStateException, but its throws clause
     * previously declared IllegalArgumentException; the declaration now
     * matches the actual behavior (both are unchecked, so callers are
     * unaffected).
     * @throws IllegalStateException if there is no former nested path on the stack
     */
    public void popNestedPath() throws IllegalStateException {
        try {
            String formerNestedPath = (String) this.nestedPathStack.pop();
            doSetNestedPath(formerNestedPath);
        }
        catch (EmptyStackException ex) {
            throw new IllegalStateException("Cannot pop nested path: no nested path on stack");
        }
    }

    /**
     * Actually set the nested path.
     * Delegated to by setNestedPath and pushNestedPath.
     * Normalizes null to "" and guarantees a trailing separator.
     */
    protected void doSetNestedPath(String nestedPath) {
        if (nestedPath == null) {
            nestedPath = "";
        }
        if (nestedPath.length() > 0 && !nestedPath.endsWith(NESTED_PATH_SEPARATOR)) {
            nestedPath += NESTED_PATH_SEPARATOR;
        }
        this.nestedPath = nestedPath;
    }

    /**
     * Transform the given field into its full path,
     * regarding the nested path of this instance.
     */
    protected String fixedField(String field) {
        return this.nestedPath + field;
    }

    /** Register a global error with the given code and default message. */
    public void reject(String errorCode, String defaultMessage) {
        reject(errorCode, null, defaultMessage);
    }

    /** Register a global error with the given code, arguments and default message. */
    public void reject(String errorCode, Object[] errorArgs, String defaultMessage) {
        addError(new ObjectError(this.objectName, resolveMessageCodes(errorCode), errorArgs, defaultMessage));
    }

    /** Register a field error with the given code and default message. */
    public void rejectValue(String field, String errorCode, String defaultMessage) {
        rejectValue(field, errorCode, null, defaultMessage);
    }

    /** Register a field error, recording the field's current value as rejected. */
    public void rejectValue(String field, String errorCode, Object[] errorArgs, String defaultMessage) {
        String fixedField = fixedField(field);
        Object newVal = getBeanWrapper().getPropertyValue(fixedField);
        FieldError fe = new FieldError(
                this.objectName, fixedField, newVal, false,
                resolveMessageCodes(errorCode, field), errorArgs, defaultMessage);
        addError(fe);
    }

    /** Resolve the given error code into message codes for a global error. */
    protected String[] resolveMessageCodes(String errorCode) {
        return this.messageCodesResolver.resolveMessageCodes(errorCode, this.objectName);
    }

    /** Resolve the given error code into message codes for the given field. */
    protected String[] resolveMessageCodes(String errorCode, String field) {
        String fixedField = fixedField(field);
        Class fieldType = this.beanWrapper.getPropertyType(fixedField);
        return this.messageCodesResolver.resolveMessageCodes(errorCode, this.objectName, fixedField, fieldType);
    }

    /**
     * Add a FieldError to the errors list.
     * Intended to be used by subclasses like DataBinder.
     */
    protected void addError(ObjectError error) {
        this.errors.add(error);
    }

    /** Return whether any errors (global or field) have been registered. */
    public boolean hasErrors() {
        return !this.errors.isEmpty();
    }

    /** Return the total number of registered errors. */
    public int getErrorCount() {
        return this.errors.size();
    }

    /** Return an unmodifiable view of all errors, global and field alike. */
    public List getAllErrors() {
        return Collections.unmodifiableList(this.errors);
    }

    public boolean hasGlobalErrors() {
        return (getGlobalErrorCount() > 0);
    }

    public int getGlobalErrorCount() {
        return getGlobalErrors().size();
    }

    /** Return all errors that are not bound to a specific field (unmodifiable). */
    public List getGlobalErrors() {
        List result = new LinkedList();
        for (Iterator it = this.errors.iterator(); it.hasNext();) {
            Object error = it.next();
            if (!(error instanceof FieldError)) {
                result.add(error);
            }
        }
        return Collections.unmodifiableList(result);
    }

    /** Return the first global (non-field) error, or null if there is none. */
    public ObjectError getGlobalError() {
        for (Iterator it = this.errors.iterator(); it.hasNext();) {
            ObjectError objectError = (ObjectError) it.next();
            if (!(objectError instanceof FieldError)) {
                return objectError;
            }
        }
        return null;
    }

    public boolean hasFieldErrors(String field) {
        return (getFieldErrorCount(field) > 0);
    }

    public int getFieldErrorCount(String field) {
        return getFieldErrors(field).size();
    }

    /** Return all errors for the given field; supports a trailing "*" wildcard. */
    public List getFieldErrors(String field) {
        List result = new LinkedList();
        String fixedField = fixedField(field);
        for (Iterator it = this.errors.iterator(); it.hasNext();) {
            Object error = it.next();
            if (error instanceof FieldError && isMatchingFieldError(fixedField, ((FieldError) error))) {
                result.add(error);
            }
        }
        return Collections.unmodifiableList(result);
    }

    /** Return the first error registered for the given field, or null. */
    public FieldError getFieldError(String field) {
        String fixedField = fixedField(field);
        for (Iterator it = this.errors.iterator(); it.hasNext();) {
            Object error = it.next();
            if (error instanceof FieldError) {
                FieldError fe = (FieldError) error;
                if (isMatchingFieldError(fixedField, fe)) {
                    return fe;
                }
            }
        }
        return null;
    }

    /**
     * Check whether the given FieldError matches the given field.
     * @param field the field that we are looking up FieldErrors for
     * @param fieldError the candidate FieldError
     * @return whether the FieldError matches the given field
     */
    protected boolean isMatchingFieldError(String field, FieldError fieldError) {
        // Exact match, or prefix match when the requested field ends with '*'.
        return (field.equals(fieldError.getField()) ||
                (field.endsWith("*") && fieldError.getField().startsWith(field.substring(0, field.length() - 1))));
    }

    /**
     * Return the displayable value of the given field: the rejected value in
     * case of a binding error, else the current bean property value, formatted
     * by a custom PropertyEditor if one is registered.
     */
    public Object getFieldValue(String field) {
        FieldError fe = getFieldError(field);
        String fixedField = fixedField(field);
        // use rejected value in case of error, current bean property value else
        Object value = (fe != null) ? fe.getRejectedValue() : getBeanWrapper().getPropertyValue(fixedField);
        // apply custom editor, but not on binding failures like type mismatches
        if (fe == null || !fe.isBindingFailure()) {
            // NOTE(review): getCustomEditor() applies fixedField() again, so a
            // non-empty nested path is prepended twice here; passing 'field'
            // instead may be intended — confirm against DataBinder usage.
            PropertyEditor customEditor = getCustomEditor(fixedField);
            if (customEditor != null) {
                customEditor.setValue(value);
                return customEditor.getAsText();
            }
        }
        return value;
    }

    /**
     * Retrieve the custom PropertyEditor for the given field, if any.
     * @param field the field name
     * @return the custom PropertyEditor, or null
     */
    public PropertyEditor getCustomEditor(String field) {
        String fixedField = fixedField(field);
        Class type = getBeanWrapper().getPropertyType(fixedField);
        return getBeanWrapper().findCustomEditor(type, fixedField);
    }

    /**
     * Return a model Map for the obtained state, exposing an Errors
     * instance as '{@link #ERROR_KEY_PREFIX ERROR_KEY_PREFIX} + objectName'
     * and the object itself.
     * <p>Note that the Map is constructed every time you're calling this method.
     * Adding things to the map and then re-calling this method will not work.
     * <p>The attributes in the model Map returned by this method are usually
     * included in the ModelAndView for a form view that uses Spring's bind tag,
     * which needs access to the Errors instance. Spring's SimpleFormController
     * will do this for you when rendering its form or success view. When
     * building the ModelAndView yourself, you need to include the attributes
     * from the model Map returned by this method yourself.
     * @see #getObjectName
     * @see #ERROR_KEY_PREFIX
     * @see org.springframework.web.servlet.ModelAndView
     * @see org.springframework.web.servlet.tags.BindTag
     * @see org.springframework.web.servlet.mvc.SimpleFormController
     */
    public final Map getModel() {
        Map model = new HashMap();
        // errors instance, even if no errors
        model.put(ERROR_KEY_PREFIX + this.objectName, this);
        // mapping from name to target object
        model.put(this.objectName, this.beanWrapper.getWrappedInstance());
        return model;
    }

    /**
     * Returns diagnostic information about the errors held in this object.
     */
    public String getMessage() {
        StringBuffer sb = new StringBuffer("BindException: ");
        sb.append(getErrorCount()).append(" errors");
        Iterator it = this.errors.iterator();
        while (it.hasNext()) {
            sb.append("; ").append(it.next());
        }
        return sb.toString();
    }

}
| |
/*
* V.java
* Copyright (C) 2003
*
* $Id: V.java,v 1.5 2005/07/01 14:20:50 hzi Exp $
*/
/*
Copyright (C) 1997-2001 Id Software, Inc.
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*/
package org.free.jake2.client;
import java.io.IOException;
import java.nio.FloatBuffer;
import org.free.jake2.game.Cmd;
import org.free.jake2.game.cvar;
import org.free.jake2.qcommon.Com;
import org.free.jake2.qcommon.Cvar;
import org.free.jake2.qcommon.XCommand;
import org.free.jake2.sys.Timer;
import org.free.jake2.util.Math3D;
import org.free.jake2.util.Vargs;
import static org.free.jake2.Defines.*;
import static org.free.jake2.Globals.*;
/**
* V
*/
/**
 * V — per-frame view/refresh management for the client: gathers the entities,
 * particles, dynamic lights and light styles for the current frame and hands
 * them to the renderer via {@code cl.refdef}.
 */
public final class V {

    // Debug/test cvars, registered in Init().
    static cvar cl_testblend;
    static cvar cl_testparticles;
    static cvar cl_testentities;
    static cvar cl_testlights;
    static cvar cl_stats;

    // Per-frame render lists; the counts are reset in ClearScene().
    static int r_numdlights;
    static dlight_t[] r_dlights = new dlight_t[MAX_DLIGHTS];
    static int r_numentities;
    static entity_t[] r_entities = new entity_t[MAX_ENTITIES];
    static int r_numparticles;
    //static particle_t[] r_particles = new particle_t[MAX_PARTICLES];
    static lightstyle_t[] r_lightstyles = new lightstyle_t[MAX_LIGHTSTYLES];

    static {
        // Pre-allocate all pooled objects; entries are overwritten in place
        // each frame instead of being reallocated.
        for (int i = 0; i < MAX_DLIGHTS; i++) {
            r_dlights[i] = new dlight_t();
        }
        for (int i = 0; i < MAX_ENTITIES; i++) {
            r_entities[i] = new entity_t();
        }
        for (int i = 0; i < MAX_LIGHTSTYLES; i++) {
            r_lightstyles[i] = new lightstyle_t();
        }
    }

    /*
     * ==================== V_ClearScene
     *
     * Specifies the model that will be used as the world ====================
     */
    static void ClearScene() {
        r_numdlights = 0;
        r_numentities = 0;
        r_numparticles = 0;
    }

    /*
     * ===================== V_AddEntity =====================
     */
    static void AddEntity(entity_t ent) {
        // Silently drop entities beyond the fixed pool size.
        if (r_numentities >= MAX_ENTITIES) {
            return;
        }
        r_entities[r_numentities++].set(ent);
    }

    /*
     * ===================== V_AddParticle =====================
     */
    static void AddParticle(float[] org, int color, float alpha) {
        // Silently drop particles beyond the fixed pool size.
        if (r_numparticles >= MAX_PARTICLES) {
            return;
        }
        int i = r_numparticles++;

        // Pack RGBA: palette color in the low bytes, alpha in the high byte.
        int c = particle_t.colorTable[color];
        c |= (int) (alpha * 255) << 24;
        particle_t.colorArray.put(i, c);

        // Three floats (x, y, z) per particle in the shared vertex buffer.
        i *= 3;
        FloatBuffer vertexBuf = particle_t.vertexArray;
        vertexBuf.put(i++, org[0]);
        vertexBuf.put(i++, org[1]);
        vertexBuf.put(i++, org[2]);
    }

    /*
     * ===================== V_AddLight =====================
     */
    static void AddLight(float[] org, float intensity, float r, float g, float b) {
        dlight_t dl;
        // Silently drop lights beyond the fixed pool size.
        if (r_numdlights >= MAX_DLIGHTS) {
            return;
        }
        dl = r_dlights[r_numdlights++];
        Math3D.VectorCopy(org, dl.origin);
        dl.intensity = intensity;
        dl.color[0] = r;
        dl.color[1] = g;
        dl.color[2] = b;
    }

    /*
     * ===================== V_AddLightStyle =====================
     */
    static void AddLightStyle(int style, float r, float g, float b) {
        lightstyle_t ls;
        // FIX: the check was 'style > MAX_LIGHTSTYLES', which let
        // style == MAX_LIGHTSTYLES through and caused an
        // ArrayIndexOutOfBoundsException on the array access below instead of
        // the intended drop error (r_lightstyles has MAX_LIGHTSTYLES entries).
        if (style < 0 || style >= MAX_LIGHTSTYLES) {
            Com.Error(ERR_DROP, "Bad light style " + style);
        }
        ls = r_lightstyles[style];
        ls.white = r + g + b;
        ls.rgb[0] = r;
        ls.rgb[1] = g;
        ls.rgb[2] = b;
    }

    // Scratch vector reused by TestParticles to avoid per-particle allocation.
    private static final float[] origin = {0, 0, 0};

    /*
     * ================ V_TestParticles
     *
     * If cl_testparticles is set, create 4096 particles in the view
     * ================
     */
    static void TestParticles() {
        int i, j;
        float d, r, u;
        r_numparticles = 0;
        for (i = 0; i < MAX_PARTICLES; i++) {
            // Arrange particles in a regular grid in front of the view.
            d = i * 0.25f;
            r = 4 * ((i & 7) - 3.5f);
            u = 4 * (((i >> 3) & 7) - 3.5f);
            for (j = 0; j < 3; j++) {
                origin[j] = cl.refdef.vieworg[j] + cl.v_forward[j] * d
                        + cl.v_right[j] * r + cl.v_up[j] * u;
            }
            AddParticle(origin, 8, cl_testparticles.value);
        }
    }

    /*
     * ================ V_TestEntities
     *
     * If cl_testentities is set, create 32 player models ================
     */
    static void TestEntities() {
        int i, j;
        float f, r;
        entity_t ent;
        r_numentities = 32;
        //memset (r_entities, 0, sizeof(r_entities));
        for (i = 0; i < r_entities.length; i++) {
            r_entities[i].clear();
        }
        for (i = 0; i < r_numentities; i++) {
            ent = r_entities[i];
            // 4 columns x 8 rows of player models in front of the view.
            r = 64 * ((i % 4) - 1.5f);
            f = 64 * (i / 4) + 128;
            for (j = 0; j < 3; j++) {
                ent.origin[j] = cl.refdef.vieworg[j] + cl.v_forward[j] * f
                        + cl.v_right[j] * r;
            }
            ent.model = cl.baseclientinfo.model;
            ent.skin = cl.baseclientinfo.skin;
        }
    }

    /*
     * ================ V_TestLights
     *
     * If cl_testlights is set, create 32 lights models ================
     */
    static void TestLights() {
        int i, j;
        float f, r;
        dlight_t dl;
        r_numdlights = 32;
        //memset (r_dlights, 0, sizeof(r_dlights));
        for (i = 0; i < r_dlights.length; i++) {
            r_dlights[i] = new dlight_t();
        }
        for (i = 0; i < r_numdlights; i++) {
            dl = r_dlights[i];
            // 4 columns x 8 rows of lights in front of the view.
            r = 64 * ((i % 4) - 1.5f);
            f = 64 * (i / 4) + 128;
            for (j = 0; j < 3; j++) {
                dl.origin[j] = cl.refdef.vieworg[j] + cl.v_forward[j] * f
                        + cl.v_right[j] * r;
            }
            // Cycle through 6 primary/secondary colors via the low 3 bits.
            dl.color[0] = ((i % 6) + 1) & 1;
            dl.color[1] = (((i % 6) + 1) & 2) >> 1;
            dl.color[2] = (((i % 6) + 1) & 4) >> 2;
            dl.intensity = 200;
        }
    }

    /** Console command: advance the gun model one animation frame. */
    static XCommand Gun_Next_f = new XCommand() {
        public void execute() {
            gun_frame++;
            Com.Printf("frame " + gun_frame + "\n");
        }
    };

    /** Console command: step the gun model back one animation frame (min 0). */
    static XCommand Gun_Prev_f = new XCommand() {
        public void execute() {
            gun_frame--;
            if (gun_frame < 0) {
                gun_frame = 0;
            }
            Com.Printf("frame " + gun_frame + "\n");
        }
    };

    /** Console command: override the gun model, or reset it with no argument. */
    static XCommand Gun_Model_f = new XCommand() {
        public void execute() {
            if (Cmd.Argc() != 2) {
                gun_model = null;
                return;
            }
            String name = "models/" + Cmd.Argv(1) + "/tris.md2";
            gun_model = re.RegisterModel(name);
        }
    };

    /*
     * ================== V_RenderView
     *
     * ==================
     */
    static void RenderView(float stereo_separation) {
        // extern int entitycmpfnc( const entity_t *, const entity_t * );
        //
        if (cls.state != ca_active) {
            return;
        }
        if (!cl.refresh_prepped) {
            return; // still loading
        }
        if (cl_timedemo.value != 0.0f) {
            if (cl.timedemo_start == 0) {
                cl.timedemo_start = Timer.Milliseconds();
            }
            cl.timedemo_frames++;
        }
        // an invalid frame will just use the exact previous refdef
        // we can't use the old frame if the video mode has changed, though...
        if (cl.frame.valid && (cl.force_refdef || cl_paused.value == 0.0f)) {
            cl.force_refdef = false;
            V.ClearScene();
            // build a refresh entity list and calc cl.sim*
            // this also calls CL_CalcViewValues which loads
            // v_forward, etc.
            CL_ents.AddEntities();
            if (cl_testparticles.value != 0.0f) {
                TestParticles();
            }
            if (cl_testentities.value != 0.0f) {
                TestEntities();
            }
            if (cl_testlights.value != 0.0f) {
                TestLights();
            }
            if (cl_testblend.value != 0.0f) {
                cl.refdef.blend[0] = 1.0f;
                cl.refdef.blend[1] = 0.5f;
                cl.refdef.blend[2] = 0.25f;
                cl.refdef.blend[3] = 0.5f;
            }
            // offset vieworg appropriately if we're doing stereo separation
            if (stereo_separation != 0) {
                float[] tmp = new float[3];
                Math3D.VectorScale(cl.v_right, stereo_separation, tmp);
                Math3D.VectorAdd(cl.refdef.vieworg, tmp, cl.refdef.vieworg);
            }
            // never let it sit exactly on a node line, because a water plane
            // can
            // dissapear when viewed with the eye exactly on it.
            // the server protocol only specifies to 1/8 pixel, so add 1/16 in
            // each axis
            cl.refdef.vieworg[0] += 1.0 / 16;
            cl.refdef.vieworg[1] += 1.0 / 16;
            cl.refdef.vieworg[2] += 1.0 / 16;
            cl.refdef.x = scr_vrect.x;
            cl.refdef.y = scr_vrect.y;
            cl.refdef.width = scr_vrect.width;
            cl.refdef.height = scr_vrect.height;
            cl.refdef.fov_y = Math3D.CalcFov(cl.refdef.fov_x, cl.refdef.width,
                    cl.refdef.height);
            cl.refdef.time = cl.time * 0.001f;
            cl.refdef.areabits = cl.frame.areabits;
            // cl_add_* cvars allow selectively disabling parts of the scene.
            if (cl_add_entities.value == 0.0f) {
                r_numentities = 0;
            }
            if (cl_add_particles.value == 0.0f) {
                r_numparticles = 0;
            }
            if (cl_add_lights.value == 0.0f) {
                r_numdlights = 0;
            }
            if (cl_add_blend.value == 0) {
                Math3D.VectorClear(cl.refdef.blend);
            }
            cl.refdef.num_entities = r_numentities;
            cl.refdef.entities = r_entities;
            cl.refdef.num_particles = r_numparticles;
            cl.refdef.num_dlights = r_numdlights;
            cl.refdef.dlights = r_dlights;
            cl.refdef.lightstyles = r_lightstyles;
            cl.refdef.rdflags = cl.frame.playerstate.rdflags;
            // sort entities for better cache locality
            // !!! useless in Java !!!
            //Arrays.sort(cl.refdef.entities, entitycmpfnc);
        }
        re.RenderFrame(cl.refdef);
        if (cl_stats.value != 0.0f) {
            Com.Printf("ent:%i lt:%i part:%i\n", new Vargs(3).add(
                    r_numentities).add(r_numdlights).add(r_numparticles));
        }
        if (log_stats.value != 0.0f && (log_stats_file != null)) {
            try {
                log_stats_file.write(r_numentities + "," + r_numdlights + ","
                        + r_numparticles);
            } catch (IOException e) {
                // best-effort statistics logging; ignore write failures
            }
        }
        SCR.AddDirtyPoint(scr_vrect.x, scr_vrect.y);
        SCR.AddDirtyPoint(scr_vrect.x + scr_vrect.width - 1, scr_vrect.y
                + scr_vrect.height - 1);
        SCR.DrawCrosshair();
    }

    /*
     * ============= V_Viewpos_f =============
     */
    static XCommand Viewpos_f = new XCommand() {
        public void execute() {
            Com.Printf("(%i %i %i) : %i\n", new Vargs(4).add(
                    (int) cl.refdef.vieworg[0]).add((int) cl.refdef.vieworg[1]).add((int) cl.refdef.vieworg[2]).add(
                    (int) cl.refdef.viewangles[YAW]));
        }
    };

    /** Registers the view-related console commands and cvars. */
    public static void Init() {
        Cmd.AddCommand("gun_next", Gun_Next_f);
        Cmd.AddCommand("gun_prev", Gun_Prev_f);
        Cmd.AddCommand("gun_model", Gun_Model_f);
        Cmd.AddCommand("viewpos", Viewpos_f);
        crosshair = Cvar.Get("crosshair", "0", CVAR_ARCHIVE);
        cl_testblend = Cvar.Get("cl_testblend", "0", 0);
        cl_testparticles = Cvar.Get("cl_testparticles", "0", 0);
        cl_testentities = Cvar.Get("cl_testentities", "0", 0);
        cl_testlights = Cvar.Get("cl_testlights", "0", 0);
        cl_stats = Cvar.Get("cl_stats", "0", 0);
    }
}
| |
// Copyright 2016 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.runtime;
import static com.google.devtools.build.lib.events.Event.of;
import static com.google.devtools.build.lib.events.EventKind.PROGRESS;
import static com.google.devtools.build.lib.util.Preconditions.checkArgument;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Multimap;
import com.google.common.collect.Sets;
import com.google.common.eventbus.Subscribe;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.devtools.build.lib.actions.ActionExecutedEvent;
import com.google.devtools.build.lib.actions.Artifact;
import com.google.devtools.build.lib.actions.EventReportingArtifacts;
import com.google.devtools.build.lib.analysis.BuildInfoEvent;
import com.google.devtools.build.lib.analysis.NoBuildEvent;
import com.google.devtools.build.lib.analysis.extra.ExtraAction;
import com.google.devtools.build.lib.buildeventstream.AbortedEvent;
import com.google.devtools.build.lib.buildeventstream.AnnounceBuildEventTransportsEvent;
import com.google.devtools.build.lib.buildeventstream.ArtifactGroupNamer;
import com.google.devtools.build.lib.buildeventstream.BuildCompletingEvent;
import com.google.devtools.build.lib.buildeventstream.BuildEvent;
import com.google.devtools.build.lib.buildeventstream.BuildEventId;
import com.google.devtools.build.lib.buildeventstream.BuildEventStreamProtos.Aborted.AbortReason;
import com.google.devtools.build.lib.buildeventstream.BuildEventStreamProtos.BuildEventId.NamedSetOfFilesId;
import com.google.devtools.build.lib.buildeventstream.BuildEventTransport;
import com.google.devtools.build.lib.buildeventstream.BuildEventTransportClosedEvent;
import com.google.devtools.build.lib.buildeventstream.BuildEventWithConfiguration;
import com.google.devtools.build.lib.buildeventstream.BuildEventWithOrderConstraint;
import com.google.devtools.build.lib.buildeventstream.ChainableEvent;
import com.google.devtools.build.lib.buildeventstream.LastBuildEvent;
import com.google.devtools.build.lib.buildeventstream.NullConfiguration;
import com.google.devtools.build.lib.buildeventstream.ProgressEvent;
import com.google.devtools.build.lib.buildtool.BuildRequest;
import com.google.devtools.build.lib.buildtool.buildevent.BuildCompleteEvent;
import com.google.devtools.build.lib.buildtool.buildevent.BuildInterruptedEvent;
import com.google.devtools.build.lib.buildtool.buildevent.BuildStartingEvent;
import com.google.devtools.build.lib.buildtool.buildevent.NoAnalyzeEvent;
import com.google.devtools.build.lib.buildtool.buildevent.NoExecutionEvent;
import com.google.devtools.build.lib.collect.nestedset.NestedSet;
import com.google.devtools.build.lib.collect.nestedset.NestedSetView;
import com.google.devtools.build.lib.events.Event;
import com.google.devtools.build.lib.events.EventHandler;
import com.google.devtools.build.lib.events.Reporter;
import com.google.devtools.build.lib.util.Pair;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
import java.util.logging.Logger;
/**
* Listens for {@link BuildEvent}s and streams them to the provided {@link BuildEventTransport}s.
*
* <p>The streamer takes care of closing all {@link BuildEventTransport}s. It does so after having
* received a {@link BuildCompleteEvent}. Furthermore, it emits two event types to the
* {@code eventBus}. After having received the first {@link BuildEvent} it emits a
* {@link AnnounceBuildEventTransportsEvent} that contains a list of all its transports.
* Furthermore, after a transport has been closed, it emits
* a {@link BuildEventTransportClosedEvent}.
*/
public class BuildEventStreamer implements EventHandler {
  /** Sinks that every build event is forwarded to; closed when the stream ends. */
  private final Collection<BuildEventTransport> transports;
  /** Bus used to announce transports and their closing; may be null. */
  private final Reporter reporter;
  /** Events announced so far; null until the first event is posted. */
  private Set<BuildEventId> announcedEvents;
  /** Events already sent to the transports; invariant: subset of announcedEvents. */
  private final Set<BuildEventId> postedEvents = new HashSet<>();
  private final Set<BuildEventId> configurationsPosted = new HashSet<>();
  /** stdout/stderr observed before the first event; flushed on the first post. */
  private List<Pair<String, String>> bufferedStdoutStderrPairs = new ArrayList<>();
  /** Events received before a parent they depend on, keyed by the awaited event. */
  private final Multimap<BuildEventId, BuildEvent> pendingEvents = HashMultimap.create();
  /** Number of progress events emitted so far, used to chain them. */
  private int progressCount;
  private final CountingArtifactGroupNamer artifactGroupNamer = new CountingArtifactGroupNamer();
  private OutErrProvider outErrProvider;
  /** Reason attached to artificially generated AbortedEvents. */
  private AbortReason abortReason = AbortReason.UNKNOWN;
  // Will be set to true if the build was invoked through "bazel test".
  private boolean isTestCommand;
  // After a BuildCompletingEvent we might expect a whitelisted set of events. If non-null,
  // the streamer is restricted to only allow those events and fully close after having seen
  // them.
  private Set<BuildEventId> finalEventsToCome = null;
  // True, if we already closed the stream.
  private boolean closed;
  private static final Logger logger = Logger.getLogger(BuildEventStreamer.class.getName());
/**
* Provider for stdout and stderr output.
*/
public interface OutErrProvider {
/**
* Return the chunk of stdout that was produced since the last call to this function (or the
* beginning of the build, for the first call). It is the responsibility of the class
* implementing this interface to properly synchronize with simultaneously written output.
*/
String getOut();
/**
* Return the chunk of stderr that was produced since the last call to this function (or the
* beginning of the build, for the first call). It is the responsibility of the class
* implementing this interface to properly synchronize with simultaneously written output.
*/
String getErr();
}
private static class CountingArtifactGroupNamer implements ArtifactGroupNamer {
private final Map<Object, Long> reportedArtifactNames = new HashMap<>();
private long nextArtifactName;
@Override
public NamedSetOfFilesId apply(Object id) {
Long name;
synchronized (this) {
name = reportedArtifactNames.get(id);
}
if (name == null) {
return null;
}
return NamedSetOfFilesId.newBuilder().setId(name.toString()).build();
}
/**
* If the {@link NestedSetView} has no name already, return a new name for it. Return null
* otherwise.
*/
synchronized String maybeName(NestedSetView<Artifact> view) {
if (reportedArtifactNames.containsKey(view.identifier())) {
return null;
}
Long name = nextArtifactName;
nextArtifactName++;
reportedArtifactNames.put(view.identifier(), name);
return name.toString();
}
}
public BuildEventStreamer(Collection<BuildEventTransport> transports, Reporter reporter) {
checkArgument(transports.size() > 0);
this.transports = transports;
this.reporter = reporter;
this.announcedEvents = null;
this.progressCount = 0;
}
  /** Registers the source of stdout/stderr chunks attached to progress events. */
  public void registerOutErrProvider(OutErrProvider outErrProvider) {
    this.outErrProvider = outErrProvider;
  }
  /**
   * Post a new event to all transports; simultaneously keep track of the events we announce to
   * still come.
   *
   * <p>Moreover, link unannounced events to the progress stream; we only expect failure events to
   * come before their parents.
   */
  private void post(BuildEvent event) {
    BuildEvent linkEvent = null;
    BuildEventId id = event.getEventId();
    List<BuildEvent> flushEvents = null;
    boolean lastEvent = false;
    // All bookkeeping happens under the lock; actual sending happens after it.
    synchronized (this) {
      if (announcedEvents == null) {
        // First event ever posted: initialize the announcement bookkeeping.
        announcedEvents = new HashSet<>();
        // The very first event of a stream is implicitly announced by the convention that
        // a complete stream has to have at least one entry. In this way we keep the invariant
        // that the set of posted events is always a subset of the set of announced events.
        announcedEvents.add(id);
        if (!event.getChildrenEvents().contains(ProgressEvent.INITIAL_PROGRESS_UPDATE)) {
          linkEvent = ProgressEvent.progressChainIn(progressCount, event.getEventId());
          progressCount++;
          announcedEvents.addAll(linkEvent.getChildrenEvents());
          // the new first event in the stream, implicitly announced by the fact that complete
          // stream may not be empty.
          announcedEvents.add(linkEvent.getEventId());
          postedEvents.add(linkEvent.getEventId());
        }
        if (reporter != null) {
          reporter.post(new AnnounceBuildEventTransportsEvent(transports));
        }
        // Turn stdout/stderr buffered before the first event into progress
        // events, to be flushed right after the main event below.
        if (!bufferedStdoutStderrPairs.isEmpty()) {
          flushEvents = new ArrayList<>(bufferedStdoutStderrPairs.size());
          for (Pair<String, String> outErrPair : bufferedStdoutStderrPairs) {
            flushEvents.add(flushStdoutStderrEvent(outErrPair.getFirst(), outErrPair.getSecond()));
          }
        }
        bufferedStdoutStderrPairs = null;
      } else {
        if (!announcedEvents.contains(id)) {
          // Unannounced event: splice it into the stream via a progress event
          // that also carries any accumulated stdout/stderr.
          String out = null;
          String err = null;
          if (outErrProvider != null) {
            out = outErrProvider.getOut();
            err = outErrProvider.getErr();
          }
          linkEvent = ProgressEvent.progressChainIn(progressCount, id, out, err);
          progressCount++;
          announcedEvents.addAll(linkEvent.getChildrenEvents());
          postedEvents.add(linkEvent.getEventId());
        }
      }
      if (event instanceof BuildInfoEvent) {
        // The specification for BuildInfoEvent says that there may be many such events,
        // but all except the first one should be ignored.
        if (postedEvents.contains(id)) {
          return;
        }
      }
      postedEvents.add(id);
      announcedEvents.addAll(event.getChildrenEvents());
      // We keep as an invariant that postedEvents is a subset of announced events, so this is a
      // cheaper test for equality
      if (announcedEvents.size() == postedEvents.size()) {
        lastEvent = true;
      }
    }
    BuildEvent mainEvent = event;
    if (lastEvent) {
      // Wrap the event so transports know the stream completes after it.
      mainEvent = new LastBuildEvent(event);
    }
    // Send outside the lock: the link event (if any) first, then the main event.
    for (BuildEventTransport transport : transports) {
      if (linkEvent != null) {
        transport.sendBuildEvent(linkEvent, artifactGroupNamer);
      }
      transport.sendBuildEvent(mainEvent, artifactGroupNamer);
    }
    if (flushEvents != null) {
      for (BuildEvent flushEvent : flushEvents) {
        for (BuildEventTransport transport : transports) {
          transport.sendBuildEvent(flushEvent, artifactGroupNamer);
        }
      }
    }
  }
/**
 * If some events are blocked on the absence of a build_started event, generate such an event;
 * moreover, make that artificial start event announce all events blocked on it, as well as the
 * {@link BuildCompletingEvent} that caused the early end of the stream.
 */
private void clearMissingStartEvent(BuildEventId id) {
  BuildEventId startedId = BuildEventId.buildStartedId();
  if (pendingEvents.containsKey(startedId)) {
    // Children of the synthetic start event: the initial progress update, the completing
    // event itself, and everything that was waiting for the start event.
    ImmutableSet.Builder<BuildEventId> childrenBuilder = ImmutableSet.builder();
    childrenBuilder.add(ProgressEvent.INITIAL_PROGRESS_UPDATE);
    childrenBuilder.add(id);
    for (BuildEvent blocked : pendingEvents.get(startedId)) {
      childrenBuilder.add(blocked.getEventId());
    }
    buildEvent(new AbortedEvent(startedId, childrenBuilder.build(), abortReason, ""));
  }
}
/** Clear pending events by generating aborted events for all their prerequisites. */
private void clearPendingEvents() {
  // Aborting one blocking event may release (and dispatch) others, which can add or remove
  // map entries, so re-fetch an arbitrary remaining key on every iteration.
  while (!pendingEvents.isEmpty()) {
    BuildEventId blockingId = pendingEvents.keySet().iterator().next();
    buildEvent(new AbortedEvent(blockingId, abortReason, ""));
  }
}
/**
 * Clear all events that are still announced; events not naturally closed by the expected event
 * normally only occur if the build is aborted.
 */
private void clearAnnouncedEvents(Collection<BuildEventId> dontclear) {
  if (announcedEvents != null) {
    // Take an immutable snapshot of the identifiers to clear. Sets.difference() returns a
    // live VIEW over its arguments, and the post() calls below mutate both announcedEvents
    // and postedEvents; iterating the bare view would risk a ConcurrentModificationException
    // or silently skipped events. The snapshot must be taken under the lock.
    Set<BuildEventId> ids;
    synchronized (this) {
      ids = ImmutableSet.copyOf(Sets.difference(announcedEvents, postedEvents));
    }
    for (BuildEventId id : ids) {
      if (!dontclear.contains(id)) {
        post(new AbortedEvent(id, abortReason, ""));
      }
    }
  }
}
/**
 * Schedules a once-per-second progress message telling the user how long we have been
 * waiting for the build event protocol upload to finish.
 */
private ScheduledFuture<?> bepUploadWaitEvent(ScheduledExecutorService executor) {
  final long waitStartNanos = System.nanoTime();
  Runnable announceWait =
      () -> {
        long waitedSeconds = TimeUnit.NANOSECONDS.toSeconds(System.nanoTime() - waitStartNanos);
        Event waitEvt =
            of(PROGRESS, null, "Waiting for build event protocol upload: " + waitedSeconds + "s");
        if (reporter != null) {
          reporter.handle(waitEvt);
        }
      };
  return executor.scheduleAtFixedRate(announceWait, 0, 1, TimeUnit.SECONDS);
}
// Returns whether close() has already run (or is running) on this streamer.
// NOTE(review): read without holding the lock — callers get a best-effort snapshot.
public boolean isClosed() {
  return closed;
}
// Closes all transports exactly once, posting a BuildEventTransportClosedEvent per transport
// and emitting periodic "waiting for upload" progress while the closes are in flight.
private void close() {
  synchronized (this) {
    // Idempotence guard: only the first caller proceeds past this point.
    if (closed) {
      return;
    }
    closed = true;
  }
  ScheduledExecutorService executor = null;
  try {
    executor = Executors.newSingleThreadScheduledExecutor();
    List<ListenableFuture<Void>> closeFutures = new ArrayList<>(transports.size());
    for (final BuildEventTransport transport : transports) {
      ListenableFuture<Void> closeFuture = transport.close();
      // Notify listeners as each transport finishes closing.
      closeFuture.addListener(
          () -> {
            if (reporter != null) {
              reporter.post(new BuildEventTransportClosedEvent(transport));
            }
          },
          executor);
      closeFutures.add(closeFuture);
    }
    try {
      if (closeFutures.isEmpty()) {
        // Don't spam events if there is nothing to close.
        return;
      }
      // Start the once-per-second wait announcement, block until every transport has
      // closed, then stop the announcements.
      ScheduledFuture<?> f = bepUploadWaitEvent(executor);
      // Wait for all transports to close.
      Futures.allAsList(closeFutures).get();
      f.cancel(true);
    } catch (Exception e) {
      // Best-effort: a failed close is logged but must not crash the build.
      logger.severe("Failed to close a build event transport: " + e);
    }
  } finally {
    // The early return above still shuts the executor down via this finally block.
    if (executor != null) {
      executor.shutdown();
    }
  }
}
/**
 * Reports the given artifact depset view as a {@link NamedArtifactGroup} if the namer assigns
 * it a name, recursing into transitive members first so children are posted before the parent.
 */
private void maybeReportArtifactSet(NestedSetView<Artifact> view) {
  String assignedName = artifactGroupNamer.maybeName(view);
  if (assignedName == null) {
    // Unnamed sets are not reported at all.
    return;
  }
  for (NestedSetView<Artifact> member : view.transitives()) {
    maybeReportArtifactSet(member);
  }
  post(new NamedArtifactGroup(assignedName, view));
}
/** Convenience overload: wraps the raw set into a view and delegates. */
private void maybeReportArtifactSet(NestedSet<Artifact> set) {
  NestedSetView<Artifact> view = new NestedSetView<Artifact>(set);
  maybeReportArtifactSet(view);
}
/**
 * Posts a configuration event at most once per configuration id; a null configuration is
 * reported as the {@code NullConfiguration} placeholder.
 */
private void maybeReportConfiguration(BuildEvent configuration) {
  BuildEvent toPost = (configuration == null) ? new NullConfiguration() : configuration;
  BuildEventId configId = toPost.getEventId();
  synchronized (this) {
    // Deduplicate: skip configurations we have already posted.
    if (configurationsPosted.contains(configId)) {
      return;
    }
    configurationsPosted.add(configId);
  }
  post(toPost);
}
// Deliberate no-op: console events are not forwarded through this EventHandler interface;
// stdout/stderr reach the stream via flush()/outErrProvider instead.
@Override
public void handle(Event event) {}
// Records that any subsequently generated AbortedEvents are due to a user interrupt.
@Subscribe
public void buildInterrupted(BuildInterruptedEvent event) {
  abortReason = AbortReason.USER_INTERRUPTED;
}
// Records that aborts from here on are because the analysis phase was skipped.
@Subscribe
public void noAnalyze(NoAnalyzeEvent event) {
  abortReason = AbortReason.NO_ANALYZE;
}
// Records that aborts from here on are because no build was executed.
@Subscribe
public void noExecution(NoExecutionEvent event) {
  abortReason = AbortReason.NO_BUILD;
}
// Main entry point: receives every build event from the event bus, filters/buffers it as
// needed, posts it to the transports, and re-dispatches events that were blocked on it.
// Note this method recurses (via the pendingEvents loop) for freed events.
@Subscribe
public void buildEvent(BuildEvent event) {
  if (finalEventsToCome != null) {
    // The build is over; only events we still explicitly expect are let through.
    synchronized (this) {
      BuildEventId id = event.getEventId();
      if (finalEventsToCome.contains(id)) {
        finalEventsToCome.remove(id);
      } else {
        return;
      }
    }
  }
  // Drop uninteresting events, or park order-constrained events until their prerequisites
  // have been posted (bufferUntilPrerequisitesReceived stores them in pendingEvents).
  if (isActionWithoutError(event)
      || bufferUntilPrerequisitesReceived(event)
      || isVacuousTestSummary(event)) {
    return;
  }
  if (isTestCommand && event instanceof BuildCompleteEvent) {
    // In case of "bazel test" ignore the BuildCompleteEvent, as it will be followed by a
    // TestingCompleteEvent that contains the correct exit code.
    return;
  }
  if (event instanceof BuildStartingEvent) {
    // Remember whether this is "bazel test" so we can apply the special case above.
    BuildRequest buildRequest = ((BuildStartingEvent) event).getRequest();
    isTestCommand = "test".equals(buildRequest.getCommandName());
  }
  if (event instanceof BuildEventWithConfiguration) {
    // Configurations referenced by the event must be posted before the event itself.
    for (BuildEvent configuration : ((BuildEventWithConfiguration) event).getConfigurations()) {
      maybeReportConfiguration(configuration);
    }
  }
  if (event instanceof EventReportingArtifacts) {
    // Likewise, named artifact groups are posted ahead of the event that references them.
    for (NestedSet<Artifact> artifactSet :
        ((EventReportingArtifacts) event).reportedArtifacts()) {
      maybeReportArtifactSet(artifactSet);
    }
  }
  if (event instanceof BuildCompletingEvent
      && !event.getEventId().equals(BuildEventId.buildStartedId())) {
    // The stream is ending without ever having seen a start event; synthesize one.
    clearMissingStartEvent(event.getEventId());
  }
  post(event);
  // Reconsider all events blocked by the event just posted.
  Collection<BuildEvent> toReconsider = pendingEvents.removeAll(event.getEventId());
  for (BuildEvent freedEvent : toReconsider) {
    buildEvent(freedEvent);
  }
  if (event instanceof BuildCompletingEvent) {
    buildComplete(event);
  }
  if (event instanceof NoBuildEvent) {
    if (!((NoBuildEvent) event).separateFinishedEvent()) {
      buildComplete(event);
    }
  }
  if (finalEventsToCome != null && finalEventsToCome.isEmpty()) {
    // Everything we were still waiting for has arrived; shut the transports down.
    close();
  }
}
/**
 * Wraps a captured stdout/stderr pair into a progress update event and records it as
 * announced and posted. Must hold the streamer lock, hence {@code synchronized}.
 */
private synchronized BuildEvent flushStdoutStderrEvent(String out, String err) {
  BuildEvent progress = ProgressEvent.progressUpdate(progressCount, out, err);
  progressCount++;
  // Keep the announced/posted bookkeeping consistent with post().
  announcedEvents.addAll(progress.getChildrenEvents());
  postedEvents.add(progress.getEventId());
  return progress;
}
/**
 * Flushes any pending stdout/stderr to the stream: sent immediately as a progress event if
 * the stream has started, buffered for later replay otherwise.
 */
void flush() {
  BuildEvent toSend = null;
  synchronized (this) {
    String out = null;
    String err = null;
    if (outErrProvider != null) {
      out = outErrProvider.getOut();
      err = outErrProvider.getErr();
    }
    if (announcedEvents == null) {
      // Stream has not started yet; post() will replay this buffer later.
      bufferedStdoutStderrPairs.add(Pair.of(out, err));
    } else {
      toSend = flushStdoutStderrEvent(out, err);
    }
  }
  // Transport writes happen outside the lock.
  if (toSend != null) {
    for (BuildEventTransport transport : transports) {
      transport.sendBuildEvent(toSend, artifactGroupNamer);
    }
  }
}
// Returns an immutable snapshot of the transports this streamer writes to; test-only.
@VisibleForTesting
public ImmutableSet<BuildEventTransport> getTransports() {
  return ImmutableSet.copyOf(transports);
}
// Handles the end of the build: drains pending events, emits the final progress update,
// aborts everything announced-but-unposted (except this event's own children), and computes
// the set of final events still expected before the stream can be closed.
private void buildComplete(ChainableEvent event) {
  clearPendingEvents();
  String out = null;
  String err = null;
  if (outErrProvider != null) {
    out = outErrProvider.getOut();
    err = outErrProvider.getErr();
  }
  post(ProgressEvent.finalProgressUpdate(progressCount, out, err));
  clearAnnouncedEvents(event.getChildrenEvents());
  // NOTE(review): announcedEvents/postedEvents are read here without holding the lock —
  // confirm no concurrent posting can still happen at this point in the build.
  finalEventsToCome = new HashSet<>(announcedEvents);
  finalEventsToCome.removeAll(postedEvents);
  if (finalEventsToCome.isEmpty()) {
    // Nothing outstanding; the stream can be closed right away.
    close();
  }
}
/**
 * Return true, if the action is not worth being reported. This is the case, if the action
 * executed successfully and is not an ExtraAction.
 */
private static boolean isActionWithoutError(BuildEvent event) {
  if (!(event instanceof ActionExecutedEvent)) {
    return false;
  }
  ActionExecutedEvent actionEvent = (ActionExecutedEvent) event;
  boolean succeeded = actionEvent.getException() == null;
  boolean isExtraAction = actionEvent.getAction() instanceof ExtraAction;
  return succeeded && !isExtraAction;
}
/**
 * If the event declares ordering constraints and any prerequisite has not been posted yet,
 * parks the event behind that prerequisite and returns true; otherwise returns false.
 */
private boolean bufferUntilPrerequisitesReceived(BuildEvent event) {
  if (event instanceof BuildEventWithOrderConstraint) {
    for (BuildEventId required : ((BuildEventWithOrderConstraint) event).postedAfter()) {
      if (!postedEvents.contains(required)) {
        // Park the event behind the first missing prerequisite; buildEvent() re-dispatches
        // it once that prerequisite is posted.
        pendingEvents.put(required, event);
        return true;
      }
    }
  }
  return false;
}
/** Return true if the test summary contains no actual test runs. */
private boolean isVacuousTestSummary(BuildEvent event) {
  if (!(event instanceof TestSummary)) {
    return false;
  }
  return ((TestSummary) event).totalRuns() == 0;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.tinkerpop.gremlin.hadoop.groovy.plugin;
import org.apache.tinkerpop.gremlin.AbstractGremlinTest;
import org.apache.tinkerpop.gremlin.LoadGraphWith;
import org.apache.tinkerpop.gremlin.TestHelper;
import org.apache.tinkerpop.gremlin.groovy.loaders.GremlinLoader;
import org.apache.tinkerpop.gremlin.groovy.plugin.RemoteAcceptor;
import org.apache.tinkerpop.gremlin.groovy.util.SugarTestHelper;
import org.apache.tinkerpop.gremlin.groovy.util.TestableConsolePluginAcceptor;
import org.apache.tinkerpop.gremlin.hadoop.Constants;
import org.apache.tinkerpop.gremlin.hadoop.HadoopGremlinSuite;
import org.apache.tinkerpop.gremlin.process.traversal.Traversal;
import org.apache.tinkerpop.gremlin.util.Gremlin;
import org.apache.tinkerpop.gremlin.util.iterator.IteratorUtils;
import org.junit.Before;
import org.junit.Test;
import javax.tools.JavaCompiler;
import javax.tools.SimpleJavaFileObject;
import javax.tools.ToolProvider;
import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.net.URI;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.jar.Attributes;
import java.util.jar.JarEntry;
import java.util.jar.JarOutputStream;
import java.util.jar.Manifest;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
/**
 * This is a test that is meant to be used in the context of the {@link HadoopGremlinSuite} and
 * shouldn't be executed on its own.
 *
 * @author Marko A. Rodriguez (http://markorodriguez.com)
 */
public class HadoopGremlinPluginCheck extends AbstractGremlinTest {
    // Installs the Hadoop Gremlin plugin into a testable console and grabs its remote
    // acceptor so each test can submit Gremlin scripts "remotely".
    @Before
    public void setupTest() {
        try {
            this.console = new TestableConsolePluginAcceptor();
            final HadoopGremlinPlugin plugin = new HadoopGremlinPlugin();
            plugin.pluginTo(this.console);
            this.remote = (HadoopRemoteAcceptor) plugin.remoteAcceptor().get();
        } catch (final Exception e) {
            throw new IllegalStateException(e.getMessage(), e);
        }
    }

    ///////////////////

    // Remote acceptor under test (submits scripts for OLAP execution).
    private HadoopRemoteAcceptor remote;
    // Console stand-in that captures bindings and evaluation results.
    private TestableConsolePluginAcceptor console;

    // A remotely submitted traversal should evaluate against the modern graph and bind its
    // result under RemoteAcceptor.RESULT.
    @Test
    @LoadGraphWith(LoadGraphWith.GraphData.MODERN)
    public void shouldSupportRemoteTraversal() throws Exception {
        this.console.addBinding("graph", this.graph);
        this.console.addBinding("g", this.g);
        this.remote.connect(Arrays.asList("graph", "g"));
        //
        Traversal<?, ?> traversal = (Traversal<?, ?>) this.remote.submit(Arrays.asList("g.V().count()"));
        assertEquals(6L, traversal.next());
        assertFalse(traversal.hasNext());
        assertNotNull(this.console.getBindings().get(RemoteAcceptor.RESULT));
    }

    // Sugar syntax (g.V.name...) must be rejected until explicitly enabled via the
    // "useSugar" configuration, after which the same script should succeed.
    @Test
    @LoadGraphWith(LoadGraphWith.GraphData.MODERN)
    public void shouldSupportRemoteSugarTraversal() throws Exception {
        SugarTestHelper.clearRegistry(this.graphProvider);
        this.console.addBinding("graph", this.graph);
        this.console.addBinding("g", this.g);
        //
        this.remote.connect(Arrays.asList("graph", "g"));
        try {
            this.remote.submit(Arrays.asList("g.V.name.map{it.length()}.sum"));
            fail("Should not allow sugar usage");
        } catch (final Exception e) {
            // this is good
        }
        //
        this.remote.configure(Arrays.asList("useSugar", "true"));
        this.remote.connect(Arrays.asList("graph", "g"));
        Traversal<?, ?> traversal = (Traversal<?, ?>) this.remote.submit(Arrays.asList("g.V.name.map{it.length()}.sum"));
        assertEquals(28l, traversal.next());
        assertFalse(traversal.hasNext());
        assertNotNull(this.console.getBindings().get(RemoteAcceptor.RESULT));
    }

    // Group traversals with Groovy closures in the by() modulators should execute remotely
    // and return the expected groupings over the modern graph.
    @Test
    @LoadGraphWith(LoadGraphWith.GraphData.MODERN)
    public void shouldSupportRemoteGroupTraversal() throws Exception {
        SugarTestHelper.clearRegistry(this.graphProvider);
        GremlinLoader.load();
        this.console.addBinding("graph", this.graph);
        this.console.addBinding("g", this.g);
        this.remote.connect(Arrays.asList("graph"));
        //
        this.remote.connect(Arrays.asList("graph", "g"));
        // Group adjacent vertices by the second letter of their name.
        Traversal<?, Map<String, List<String>>> traversal = (Traversal<?, Map<String, List<String>>>) this.remote.submit(Arrays.asList("g.V().out().group().by{it.value('name')[1]}.by('name')"));
        Map<String, List<String>> map = traversal.next();
        assertEquals(3, map.size());
        assertEquals(1, map.get("a").size());
        assertEquals("vadas", map.get("a").get(0));
        assertEquals(1, map.get("i").size());
        assertEquals("ripple", map.get("i").get(0));
        assertEquals(4, map.get("o").size());
        assertTrue(map.get("o").contains("josh"));
        assertTrue(map.get("o").contains("lop"));
        assertNotNull(this.console.getBindings().get(RemoteAcceptor.RESULT));
        //
        // Same idea, but grouping by label with the closure in the value position.
        traversal = (Traversal<?, Map<String, List<String>>>) this.remote.submit(Arrays.asList("g.V().out().group().by(label).by{it.value('name')[1]}"));
        map = traversal.next();
        assertEquals(2, map.size());
        assertEquals(4, map.get("software").size());
        assertTrue(map.get("software").contains("o"));
        assertTrue(map.get("software").contains("i"));
        assertEquals(2, map.get("person").size());
        assertTrue(map.get("person").contains("o"));
        assertTrue(map.get("person").contains("a"));
        assertNotNull(this.console.getBindings().get(RemoteAcceptor.RESULT));
    }

    // The console should expose working "hdfs" and "fs" helpers whose ls() output looks
    // like a permission string (e.g. "rw-r--r-- ...").
    @Test
    @LoadGraphWith(LoadGraphWith.GraphData.MODERN)
    public void shouldSupportHDFSMethods() throws Exception {
        List<String> ls = (List<String>) this.console.eval("hdfs.ls()");
        for (final String line : ls) {
            assertTrue(line.startsWith("-") || line.startsWith("r") || line.startsWith("w") || line.startsWith("x"));
            assertEquals(" ", line.substring(9, 10));
        }
        ls = (List<String>) this.console.eval("fs.ls()");
        for (final String line : ls) {
            assertTrue(line.startsWith("-") || line.startsWith("r") || line.startsWith("w") || line.startsWith("x"));
            assertEquals(" ", line.substring(9, 10));
        }
    }

    // Pointing HADOOP_GREMLIN_LIBS at an empty/bogus directory must not break traversal
    // submission when jars-in-distributed-cache is enabled.
    @Test
    @LoadGraphWith(LoadGraphWith.GraphData.MODERN)
    public void shouldGracefullyHandleBadGremlinHadoopLibs() throws Exception {
        System.setProperty(Constants.HADOOP_GREMLIN_LIBS, TestHelper.makeTestDataDirectory(HadoopGremlinPluginCheck.class, "shouldGracefullyHandleBadGremlinHadoopLibs"));
        this.graph.configuration().setProperty(Constants.GREMLIN_HADOOP_JARS_IN_DISTRIBUTED_CACHE, true);
        this.console.addBinding("graph", this.graph);
        this.console.addBinding("g", this.g);
        this.remote.connect(Arrays.asList("graph", "g"));
        Traversal<?, ?> traversal = (Traversal<?, ?>) this.remote.submit(Arrays.asList("g.V()"));
        assertEquals(6, IteratorUtils.count(traversal));
        assertNotNull(this.console.getBindings().get(RemoteAcceptor.RESULT));
    }

    @Test
    @LoadGraphWith(LoadGraphWith.GraphData.MODERN)
    public void shouldSupportVariousFileSystemsInGremlinHadoopLibs() throws Exception {
        // The whole point of this test is to verify that HADOOP_GREMLIN_LIBS may contain paths with or without
        // a file system scheme prefix and that either path is properly handled. If all jar files, that were specified
        // in HADOOP_GREMLIN_LIBS, are found in the GraphComputers temporary directory after using the GraphComputer
        // (by submitting a traversal), the test is considered to be successful.
        //
        // The traversal will likely never fail, since both - Spark and Giraph - run in the same JVM during tests. This
        // is unfortunate as it doesn't allow us to verify that GraphComputers load jars properly in a distributed
        // environment. The test would fail in a distributed environment, IF loading the jars specified in
        // HADOOP_GREMLIN_LIBS wouldn't work. That is because we generate new jar files on the fly that are definitely
        // not part of any existing directory or the current classpath.
        final String testDataDirectory = TestHelper.makeTestDataDirectory(HadoopGremlinPluginCheck.class, "shouldHandleLocalGremlinHadoopLibs");
        // Two freshly compiled-and-jarred classes that cannot be on the existing classpath.
        final File jarFile1 = createJarFile(testDataDirectory + File.separator + "1", "Greeter1");
        final File jarFile2 = createJarFile(testDataDirectory + File.separator + "2", "Greeter2");
        final String graphComputerJarTargetBasePath = System.getProperty("java.io.tmpdir") + File.separator +
                "hadoop-gremlin-" + Gremlin.version() + "-libs" + File.separator;
        final File graphComputerJarTargetPath1 = new File(graphComputerJarTargetBasePath + "1" + File.separator + "Greeter1.jar");
        final File graphComputerJarTargetPath2 = new File(graphComputerJarTargetBasePath + "2" + File.separator + "Greeter2.jar");
        // Run once with plain paths and once with an explicit file:// scheme prefix.
        for (final boolean withScheme : Arrays.asList(false, true)) {
            Stream<String> hadoopGremlinLibs = Arrays.asList(jarFile1, jarFile2).stream().map(f -> f.getParentFile().getAbsolutePath());
            if (withScheme) {
                hadoopGremlinLibs = hadoopGremlinLibs.map(path -> "file://" + path);
            }
            System.setProperty(Constants.HADOOP_GREMLIN_LIBS, String.join(File.pathSeparator, hadoopGremlinLibs.collect(Collectors.toList())));
            this.graph.configuration().setProperty(Constants.GREMLIN_HADOOP_JARS_IN_DISTRIBUTED_CACHE, true);
            this.console.addBinding("graph", this.graph);
            this.console.addBinding("g", this.g);
            this.remote.connect(Arrays.asList("graph", "g"));
            // Make the generated classes visible to the local evaluation, then have the
            // traversal call into them so they must be loaded on the compute side too.
            Traversal<?, ?> traversal = (Traversal<?, ?>) this.remote.submit(Arrays.asList(
                    "ClassLoader.getSystemClassLoader().addURL('" + jarFile1.toURI().toURL() + "'.toURL());",
                    "ClassLoader.getSystemClassLoader().addURL('" + jarFile2.toURI().toURL() + "'.toURL());",
                    "g.V().choose(hasLabel('person'), " +
                            "values('name').map {Class.forName('Greeter1').hello(it.get())}, " +
                            "values('name').map {Class.forName('Greeter2').hello(it.get())})"));
            final List<String> expectedMessages = Arrays.asList("marko", "josh", "peter", "vadas").stream().
                    map(name -> "Greeter1 says: Hello " + name + "!").collect(Collectors.toList());
            expectedMessages.addAll(Arrays.asList("lop", "ripple").stream().
                    map(name -> "Greeter2 says: Hello " + name + "!").collect(Collectors.toList()));
            // Every traversal result must match exactly one expected greeting.
            while (traversal.hasNext()) {
                final String message = (String) traversal.next();
                assertTrue(expectedMessages.remove(message));
            }
            assertEquals(0, expectedMessages.size());
        }
        // The jars must have been copied into the GraphComputer's temporary libs directory.
        assertTrue(graphComputerJarTargetPath1.exists());
        assertTrue(graphComputerJarTargetPath2.exists());
        assert graphComputerJarTargetPath1.delete();
        assert graphComputerJarTargetPath2.delete();
    }

    // Compiles a tiny one-method class (hello(String)) at runtime and packages it into a jar
    // in the given directory; the .class file is removed, only the jar remains.
    private File createJarFile(final String directory, final String className) throws IOException {
        new File(directory).mkdirs();
        final File classFile = new File(directory + File.separator + className + ".class");
        final File jarFile = new File(directory + File.separator + className + ".jar");
        jarFile.deleteOnExit();
        final JavaStringObject source = new JavaStringObject(className,
                "public class " + className + " {\n" +
                        "    public static String hello(final String name) {\n" +
                        "        return \"" + className + " says: Hello \" + name + \"!\";\n" +
                        "    }\n" +
                        "}");
        final JavaCompiler compiler = ToolProvider.getSystemJavaCompiler();
        final List<String> options = Arrays.asList(
                "-d", classFile.getParentFile().getAbsolutePath()
        );
        assert compiler.getTask(null, null, null, options, null, Collections.singletonList(source)).call();
        final Manifest manifest = new Manifest();
        manifest.getMainAttributes().put(Attributes.Name.MANIFEST_VERSION, "1.0");
        try (final JarOutputStream target = new JarOutputStream(new FileOutputStream(jarFile), manifest)) {
            final JarEntry entry = new JarEntry(classFile.getName());
            entry.setTime(classFile.lastModified());
            target.putNextEntry(entry);
            try (final FileInputStream fis = new FileInputStream(classFile);
                 final BufferedInputStream in = new BufferedInputStream(fis)) {
                final byte buffer[] = new byte[1024];
                while (true) {
                    final int count = in.read(buffer);
                    if (count < 0) break;
                    target.write(buffer, 0, count);
                }
            }
            target.closeEntry();
        }
        assert classFile.delete();
        return jarFile;
    }

    // In-memory "source file" handed to the JavaCompiler: the code string plays the role of
    // the file contents.
    private static class JavaStringObject extends SimpleJavaFileObject {
        private final String code;

        JavaStringObject(final String className, final String code) {
            super(URI.create("string:///" + className.replace(".", "/") + Kind.SOURCE.extension), Kind.SOURCE);
            this.code = code;
        }

        @Override
        public CharSequence getCharContent(boolean ignoreEncodingErrors) throws IOException {
            return code;
        }
    }
}
| |
package com.matt.forgehax.asm.patches;
import com.matt.forgehax.asm.TypesHook;
import com.matt.forgehax.asm.events.DrawBlockBoundingBoxEvent;
import com.matt.forgehax.asm.utils.ASMHelper;
import com.matt.forgehax.asm.utils.asmtype.ASMMethod;
import com.matt.forgehax.asm.utils.transforming.ClassTransformer;
import com.matt.forgehax.asm.utils.transforming.Inject;
import com.matt.forgehax.asm.utils.transforming.MethodTransformer;
import com.matt.forgehax.asm.utils.transforming.RegisterMethodTransformer;
import org.objectweb.asm.tree.*;
import scala.tools.asm.Type;
import java.util.Objects;
// Bytecode transformer for Minecraft's RenderGlobal class: injects ForgeHax hook calls into
// several rendering methods by pattern-matching instruction sequences. Pattern strings use
// 'x' for opcodes that must match and '?' for wildcard positions.
public class RenderGlobalPatch extends ClassTransformer {
  public RenderGlobalPatch() {
    super(Classes.RenderGlobal);
  }

  @RegisterMethodTransformer
  private class LoadRenderers extends MethodTransformer {
    @Override
    public ASMMethod getMethod() {
      return Methods.RenderGlobal_loadRenderers;
    }

    @Inject(description = "At hook callback at end of method")
    public void inject(MethodNode main) {
      // Anchor on the final field store before the method's RETURN.
      AbstractInsnNode node =
          ASMHelper.findPattern(
              main.instructions.getFirst(),
              new int[]{PUTFIELD, 0x00, 0x00, 0x00, RETURN},
              "x???x");
      Objects.requireNonNull(node, "Find pattern failed for node");
      // Call ForgeHaxHooks.onLoadRenderers(this.viewFrustum, this.renderDispatcher).
      InsnList insnList = new InsnList();
      insnList.add(new VarInsnNode(ALOAD, 0)); // push this
      insnList.add(ASMHelper.call(GETFIELD, Fields.RenderGlobal_viewFrustum));
      insnList.add(new VarInsnNode(ALOAD, 0)); // push this
      insnList.add(ASMHelper.call(GETFIELD, Fields.RenderGlobal_renderDispatcher));
      insnList.add(ASMHelper.call(INVOKESTATIC, TypesHook.Methods.ForgeHaxHooks_onLoadRenderers));
      main.instructions.insert(node, insnList);
    }
  }

  @RegisterMethodTransformer
  private class RenderBlockLayer extends MethodTransformer {
    @Override
    public ASMMethod getMethod() {
      return Methods.RenderGlobal_renderBlockLayer;
    }

    @Inject(description = "Add hooks at the top and bottom of the method")
    public void inject(MethodNode main) {
      // Anchor for the pre-hook near the start of the method.
      AbstractInsnNode preNode =
          ASMHelper.findPattern(
              main.instructions.getFirst(),
              new int[]{
                  INVOKESTATIC,
                  0x00,
                  0x00,
                  ALOAD,
                  GETSTATIC,
                  IF_ACMPNE,
                  0x00,
                  0x00,
                  ALOAD,
                  GETFIELD,
                  GETFIELD
              },
              "x??xxx??xxx");
      // Anchor for the post-hook just before the method's ILOAD/IRETURN epilogue.
      AbstractInsnNode postNode =
          ASMHelper.findPattern(
              main.instructions.getFirst(),
              new int[]{ALOAD, GETFIELD, GETFIELD, INVOKEVIRTUAL, 0x00, 0x00, ILOAD, IRETURN},
              "xxxx??xx");
      Objects.requireNonNull(preNode, "Find pattern failed for preNode");
      Objects.requireNonNull(postNode, "Find pattern failed for postNode");
      // If the pre-hook returns non-zero, jump straight to endJump (placed in the post list),
      // skipping the original rendering body.
      LabelNode endJump = new LabelNode();
      InsnList insnPre = new InsnList();
      insnPre.add(new InsnNode(ICONST_0));
      insnPre.add(new VarInsnNode(ISTORE, 6));
      insnPre.add(new VarInsnNode(ALOAD, 1));
      insnPre.add(new VarInsnNode(DLOAD, 2));
      insnPre.add(
          ASMHelper.call(INVOKESTATIC, TypesHook.Methods.ForgeHaxHooks_onPreRenderBlockLayer));
      insnPre.add(new JumpInsnNode(IFNE, endJump));
      InsnList insnPost = new InsnList();
      insnPost.add(new VarInsnNode(ALOAD, 1));
      insnPost.add(new VarInsnNode(DLOAD, 2));
      insnPost.add(
          ASMHelper.call(INVOKESTATIC, TypesHook.Methods.ForgeHaxHooks_onPostRenderBlockLayer));
      insnPost.add(endJump);
      main.instructions.insertBefore(preNode, insnPre);
      main.instructions.insertBefore(postNode, insnPost);
    }
  }

  @RegisterMethodTransformer
  private class SetupTerrain extends MethodTransformer {
    @Override
    public ASMMethod getMethod() {
      return Methods.RenderGlobal_setupTerrain;
    }

    @Inject(description = "Add hook at the top of the method")
    public void inject(MethodNode main) {
      AbstractInsnNode node =
          ASMHelper.findPattern(
              main.instructions.getFirst(),
              new int[]{ALOAD, GETFIELD, GETFIELD, GETFIELD, ALOAD},
              "xxxxx");
      Objects.requireNonNull(node, "Find pattern failed for node");
      // Pass local 6 through the hook and store the (possibly modified) value back.
      InsnList insnPre = new InsnList();
      insnPre.add(new VarInsnNode(ALOAD, 1));
      insnPre.add(new VarInsnNode(ILOAD, 6));
      insnPre.add(ASMHelper.call(INVOKESTATIC, TypesHook.Methods.ForgeHaxHooks_onSetupTerrain));
      insnPre.add(new VarInsnNode(ISTORE, 6));
      main.instructions.insertBefore(node, insnPre);
    }

    @Inject(description = "Add or logic to this.mc.renderChunksMany flag")
    public void injectAtFlag(MethodNode main) {
      // inject at this.mc.renderChunksMany
      AbstractInsnNode node =
          ASMHelper.findPattern(
              main.instructions.getFirst(),
              new int[]{
                  ISTORE,
                  0x00,
                  0x00,
                  ALOAD,
                  IFNULL,
                  0x00,
                  0x00,
                  ICONST_0,
                  ISTORE,
                  0x00,
                  0x00,
                  NEW,
                  DUP,
                  ALOAD,
                  ALOAD,
                  ACONST_NULL,
                  CHECKCAST,
                  ICONST_0,
                  ACONST_NULL,
                  INVOKESPECIAL,
                  ASTORE
              },
              "x??xx??xx??xxxxxxxxxx");
      Objects.requireNonNull(node, "Find pattern failed for node");
      // Rewrites the flag value on the stack to:
      //   (flag != 0 && !ForgeHaxHooks.shouldDisableCaveCulling()) ? 1 : 0
      LabelNode storeLabel = new LabelNode();
      LabelNode falseLabel = new LabelNode();
      InsnList insnList = new InsnList();
      insnList.add(new JumpInsnNode(IFEQ, falseLabel));
      insnList.add(ASMHelper.call(INVOKESTATIC, TypesHook.Methods.ForgeHaxHooks_shouldDisableCaveCulling));
      insnList.add(new JumpInsnNode(IFNE, falseLabel));
      insnList.add(new InsnNode(ICONST_1));
      insnList.add(new JumpInsnNode(GOTO, storeLabel));
      insnList.add(falseLabel);
      insnList.add(new InsnNode(ICONST_0));
      insnList.add(storeLabel);
      // iload should be below here
      main.instructions.insertBefore(node, insnList);
    }
  }

  @RegisterMethodTransformer
  private class DrawBoundingBox extends MethodTransformer {
    @Override
    public ASMMethod getMethod() {
      return Methods.RenderGlobal_drawBoundingBox;
    }

    @Inject(description = "Add hook at the top of the method")
    public void inject(MethodNode main) {
      AbstractInsnNode start = main.instructions.getFirst();
      AbstractInsnNode end = ASMHelper.findPattern(start, RETURN);
      // New local slot holding the fired Pre event so its (possibly modified) color fields
      // can be read back below.
      // NOTE(review): Type here comes from scala.tools.asm (shaded ASM) while the
      // instruction nodes are org.objectweb.asm — confirm descriptors stay compatible.
      final int eventIndex =
          ASMHelper.addNewLocalVariable(
              main, "forgehax_event", Type.getDescriptor(DrawBlockBoundingBoxEvent.Pre.class));
      // Locals 12-15 are presumably the RGBA floats of the bounding box — pass them to the
      // event constructor.
      InsnList pushArgs = new InsnList();
      pushArgs.add(new VarInsnNode(FLOAD, 12));
      pushArgs.add(new VarInsnNode(FLOAD, 13));
      pushArgs.add(new VarInsnNode(FLOAD, 14));
      pushArgs.add(new VarInsnNode(FLOAD, 15));
      InsnList newEvent =
          ASMHelper.newInstance(
              Type.getInternalName(DrawBlockBoundingBoxEvent.Pre.class), "(FFFF)V", pushArgs);
      final InsnList pre = new InsnList();
      pre.add(newEvent);
      pre.add(new VarInsnNode(ASTORE, eventIndex));
      pre.add(new VarInsnNode(ALOAD, eventIndex));
      pre.add(ASMHelper.call(INVOKESTATIC, TypesHook.Methods.ForgeHaxHooks_fireEvent_v));
      // Copy the event's (listener-adjustable) color fields back into the original locals.
      pre.add(setColor(eventIndex, "red", 12));
      pre.add(setColor(eventIndex, "green", 13));
      pre.add(setColor(eventIndex, "blue", 14));
      pre.add(setColor(eventIndex, "alpha", 15));
      final InsnList post = new InsnList();
      post.add(
          ASMHelper.call(INVOKESTATIC, TypesHook.Methods.ForgeHaxHooks_onDrawBoundingBox_Post));
      main.instructions.insert(start, pre);
      main.instructions.insertBefore(end, post);
    }

    // Emits: local[colorIndex] = event.<field> (a float field of DrawBlockBoundingBoxEvent).
    private InsnList setColor(int eventIndex, String field, int colorIndex) {
      InsnList list = new InsnList();
      list.add(new VarInsnNode(ALOAD, eventIndex));
      list.add(
          new FieldInsnNode(
              GETFIELD, Type.getInternalName(DrawBlockBoundingBoxEvent.class), field, "F"));
      list.add(new VarInsnNode(FSTORE, colorIndex));
      return list;
    }
  }
}
| |
/*
* Copyright (c) 2007, Swedish Institute of Computer Science.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. Neither the name of the Institute nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE INSTITUTE AND CONTRIBUTORS ``AS IS'' AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE INSTITUTE OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
* OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
* OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
*
*/
package org.contikios.cooja.mspmote;
import java.awt.Component;
import java.io.File;
import java.io.IOException;
import java.io.PrintStream;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Hashtable;
import org.apache.log4j.Logger;
import org.jdom.Element;
import org.contikios.cooja.ContikiError;
import org.contikios.cooja.Cooja;
import org.contikios.cooja.Mote;
import org.contikios.cooja.MoteInterface;
import org.contikios.cooja.MoteInterfaceHandler;
import org.contikios.cooja.MoteType;
import org.contikios.cooja.Simulation;
import org.contikios.cooja.Watchpoint;
import org.contikios.cooja.WatchpointMote;
import org.contikios.cooja.interfaces.IPAddress;
import org.contikios.cooja.mote.memory.MemoryInterface;
import org.contikios.cooja.motes.AbstractEmulatedMote;
import org.contikios.cooja.mspmote.interfaces.Msp802154Radio;
import org.contikios.cooja.mspmote.interfaces.MspSerial;
import org.contikios.cooja.mspmote.plugins.CodeVisualizerSkin;
import org.contikios.cooja.mspmote.plugins.MspBreakpoint;
import org.contikios.cooja.plugins.Visualizer;
import se.sics.mspsim.cli.CommandContext;
import se.sics.mspsim.cli.CommandHandler;
import se.sics.mspsim.cli.LineListener;
import se.sics.mspsim.cli.LineOutputStream;
import se.sics.mspsim.core.EmulationException;
import se.sics.mspsim.core.LogListener;
import se.sics.mspsim.core.Loggable;
import se.sics.mspsim.core.MSP430;
import se.sics.mspsim.core.EmulationLogger.WarningType;
import se.sics.mspsim.platform.GenericNode;
import se.sics.mspsim.ui.ManagedWindow;
import se.sics.mspsim.ui.WindowManager;
import se.sics.mspsim.util.ComponentRegistry;
import se.sics.mspsim.util.ConfigManager;
import se.sics.mspsim.util.DebugInfo;
import se.sics.mspsim.util.ELF;
import se.sics.mspsim.util.MapEntry;
import se.sics.mspsim.util.MapTable;
import se.sics.mspsim.profiler.SimpleProfiler;
import org.contikios.cooja.mspmote.interfaces.MspClock;
/**
 * MSP430-based emulated mote, backed by the MSPSim emulator.
 *
 * @author Fredrik Osterlind
 */
public abstract class MspMote extends AbstractEmulatedMote implements Mote, WatchpointMote {
  private static Logger logger = Logger.getLogger(MspMote.class);

  private final static int EXECUTE_DURATION_US = 1; /* We always execute in 1 us steps */

  /* Instance initializer: register the source-code visualizer skin for MSP motes */
  {
    Visualizer.registerVisualizerSkin(CodeVisualizerSkin.class);
  }

  private CommandHandler commandHandler; /* MSPSim CLI dispatcher, created in prepareMote() */
  private MSP430 myCpu = null;           /* emulated CPU, set in prepareMote()/setCPU() */
  private MspMoteType myMoteType = null;
  private MspMoteMemory myMemory = null;
  private MoteInterfaceHandler myMoteInterfaceHandler = null;
  public ComponentRegistry registry = null;

  /* Stack monitoring variables */
  private boolean stopNextInstruction = false;

  public GenericNode mspNode = null;

  public MspMote(MspMoteType moteType, Simulation simulation) {
    this.simulation = simulation;
    myMoteType = moteType;

    /* Schedule us immediately */
    requestImmediateWakeup();
  }

  /**
   * Initializes the emulator from the mote type firmware, creates the mote
   * interface handler, and installs a headless window manager stub.
   */
  protected void initMote() {
    if (myMoteType != null) {
      initEmulator(myMoteType.getContikiFirmwareFile());
      myMoteInterfaceHandler = createMoteInterfaceHandler();

      /* TODO Create COOJA-specific window manager */
      registry.removeComponent("windowManager");
      registry.registerComponent("windowManager", new WindowManager() {
        public ManagedWindow createWindow(String name) {
          /* Headless stub: every window operation is ignored (warn only) */
          return new ManagedWindow() {
            public void setVisible(boolean b) {
              logger.warn("setVisible() ignored");
            }
            public void setTitle(String string) {
              logger.warn("setTitle() ignored");
            }
            public void setSize(int width, int height) {
              logger.warn("setSize() ignored");
            }
            public void setBounds(int x, int y, int width, int height) {
              logger.warn("setBounds() ignored");
            }
            public void removeAll() {
              logger.warn("removeAll() ignored");
            }
            public void pack() {
              logger.warn("pack() ignored");
            }
            public boolean isVisible() {
              logger.warn("isVisible() return false");
              return false;
            }
            public String getTitle() {
              logger.warn("getTitle() return \"\"");
              return "";
            }
            public void add(Component component) {
              logger.warn("add() ignored");
            }
          };
        }
      });

      try {
        debuggingInfo = ((MspMoteType)getType()).getFirmwareDebugInfo();
      } catch (IOException e) {
        throw (RuntimeException) new RuntimeException("Error: " + e.getMessage()).initCause(e);
      }
    }
  }

  /**
   * Abort execution immediately.
   * May for example be called by a breakpoint handler.
   */
  public void stopNextInstruction() {
    stopNextInstruction = true;
    getCPU().stop();
  }

  protected MoteInterfaceHandler createMoteInterfaceHandler() {
    return new MoteInterfaceHandler(this, getType().getMoteInterfaceClasses());
  }

  /**
   * @return MSP430 CPU
   */
  public MSP430 getCPU() {
    return myCpu;
  }

  public void setCPU(MSP430 cpu) {
    myCpu = cpu;
  }

  @Override
  public MemoryInterface getMemory() {
    return myMemory;
  }

  public void setMemory(MspMoteMemory memory) {
    myMemory = memory;
  }

  /**
   * Prepares CPU, memory and ELF module.
   *
   * @param fileELF ELF file
   * @param node MSPSim platform node wrapping the MSP430 CPU
   * @throws IOException Preparing mote failed
   */
  protected void prepareMote(File fileELF, GenericNode node) throws IOException {
    this.commandHandler = new CommandHandler(System.out, System.err);

    this.mspNode = node;

    node.setCommandHandler(commandHandler);

    ConfigManager config = new ConfigManager();
    node.setup(config);

    this.myCpu = node.getCPU();
    this.myCpu.setMonitorExec(true);
    this.myCpu.setTrace(0); /* TODO Enable */

    /* Forward MSPSim warnings to the COOJA log; plain log messages are dropped */
    LogListener ll = new LogListener() {
      private Logger mlogger = Logger.getLogger("MSPSim");
      @Override
      public void log(Loggable source, String message) {
        //mlogger.debug("" + getID() + ": " + source.getID() + ": " + message);
      }

      @Override
      public void logw(Loggable source, WarningType type, String message) throws EmulationException {
        mlogger.warn("" + getID() +": " + "# " + source.getID() + "[" + type + "]: " + message);
      }
    };
    this.myCpu.getLogger().addLogListener(ll);

    logger.info("Loading firmware from: " + fileELF.getAbsolutePath());
    Cooja.setProgressMessage("Loading " + fileELF.getName());
    node.loadFirmware(((MspMoteType)getType()).getELF());

    /* Throw exceptions at bad memory access */
    /*myCpu.setThrowIfWarning(true);*/

    /* Create mote address memory */
    MapTable map = ((MspMoteType)getType()).getELF().getMap();
    MapEntry[] allEntries = map.getAllEntries();
    myMemory = new MspMoteMemory(this, allEntries, myCpu);

    myCpu.reset();
  }

  public CommandHandler getCLICommandHandler() {
    return commandHandler;
  }

  /* called when moteID is updated */
  public void idUpdated(int newID) {
  }

  public MoteType getType() {
    return myMoteType;
  }

  public void setType(MoteType type) {
    myMoteType = (MspMoteType) type;
  }

  public MoteInterfaceHandler getInterfaces() {
    return myMoteInterfaceHandler;
  }

  public void setInterfaces(MoteInterfaceHandler moteInterfaceHandler) {
    myMoteInterfaceHandler = moteInterfaceHandler;
  }

  /**
   * Initializes emulator by creating CPU, memory and node object.
   *
   * @param ELFFile ELF file
   * @return True if successful
   */
  protected abstract boolean initEmulator(File ELFFile);

  private boolean booted = false;

  public void simTimeChanged(long diff) {
    /* Compensates for simulation time changes (without simulation execution) */
    lastExecute -= diff;
    nextExecute -= diff;
    scheduleNextWakeup(nextExecute);
  }

  private long lastExecute = -1; /* Last time mote executed */
  private long nextExecute;

  /* Clock-deviation accounting: executed vs deliberately skipped microseconds */
  private long executed = 0;
  private long skipped = 0;

  public void execute(long time) {
    execute(time, EXECUTE_DURATION_US);
  }

  public void execute(long t, int duration) {
    MspClock clock = ((MspClock) (myMoteInterfaceHandler.getClock()));
    double deviation = clock.getDeviation();
    long drift = clock.getDrift(); /* NOTE(review): unused; kept in case getDrift() has side effects — confirm */

    /* Wait until mote boots */
    if (!booted && clock.getTime() < 0) {
      scheduleNextWakeup(t - clock.getTime());
      return;
    }
    booted = true;

    if (stopNextInstruction) {
      stopNextInstruction = false;
      scheduleNextWakeup(t);
      throw new RuntimeException("MSPSim requested simulation stop");
    }

    if (lastExecute < 0) {
      /* Always execute one microsecond the first time */
      lastExecute = t;
    }
    if (t < lastExecute) {
      /* Fixed message: the condition means lastExecute is GREATER than t */
      throw new RuntimeException("Bad event ordering: " + lastExecute + " > " + t);
    }

    /* Clock deviation: skip this execution slot when we have executed a larger
     * fraction of slots than the configured deviation allows.
     * BUG FIX: this branch previously fell through, executing the CPU anyway
     * and scheduling a second (conflicting) wakeup; it must return here. */
    if (((1-deviation) * executed) > skipped) {
      lastExecute = lastExecute + duration; // (t+duration) - (t-lastExecute);
      nextExecute = t+duration;
      skipped += duration;
      scheduleNextWakeup(nextExecute);
      return;
    }

    /* Execute MSPSim-based mote */
    /* TODO Try-catch overhead */
    try {
      nextExecute = myCpu.stepMicros(Math.max(0, t-lastExecute), duration) + t + duration;
      lastExecute = t;
    } catch (EmulationException e) {
      String trace = e.getMessage() + "\n\n" + getStackTrace();
      throw (ContikiError)
      new ContikiError(trace).initCause(e);
    }

    /* Schedule wakeup */
    if (nextExecute < t) {
      throw new RuntimeException(t + ": MSPSim requested early wakeup: " + nextExecute);
    }
    /*logger.debug(t + ": Schedule next wakeup at " + nextExecute);*/
    executed += duration;
    scheduleNextWakeup(nextExecute);

    if (stopNextInstruction) {
      stopNextInstruction = false;
      throw new RuntimeException("MSPSim requested simulation stop");
    }

    /* XXX TODO Reimplement stack monitoring using MSPSim internals */
    /*if (monitorStackUsage) {
      int newStack = cpu.reg[MSP430.SP];
      if (newStack < stackPointerLow && newStack > 0) {
        stackPointerLow = cpu.reg[MSP430.SP];

        // Check if stack is writing in memory
        if (stackPointerLow < heapStartAddress) {
          stackOverflowObservable.signalStackOverflow();
          stopNextInstruction = true;
          getSimulation().stopSimulation();
        }
      }
    }*/
  }

  public String getStackTrace() {
    return executeCLICommand("stacktrace");
  }

  public int executeCLICommand(String cmd, CommandContext context) {
    return commandHandler.executeCommand(cmd, context);
  }

  /**
   * Executes an MSPSim CLI command and returns its collected output.
   * Appends a warning line if the command reported failure.
   */
  public String executeCLICommand(String cmd) {
    final StringBuilder sb = new StringBuilder();
    LineListener ll = new LineListener() {
      public void lineRead(String line) {
        sb.append(line).append("\n");
      }
    };
    PrintStream po = new PrintStream(new LineOutputStream(ll));
    CommandContext c = new CommandContext(commandHandler, null, "", new String[0], 1, null);
    c.out = po;
    c.err = po;

    if (0 != executeCLICommand(cmd, c)) {
      sb.append("\nWarning: command failed");
    }

    return sb.toString();
  }

  public int getCPUFrequency() {
    return myCpu.getDCOFrequency();
  }

  public int getID() {
    return getInterfaces().getMoteID().getMoteID();
  }

  public boolean setConfigXML(Simulation simulation, Collection<Element> configXML, boolean visAvailable) {
    setSimulation(simulation);
    if (myMoteInterfaceHandler == null) {
      myMoteInterfaceHandler = createMoteInterfaceHandler();
    }

    try {
      debuggingInfo = ((MspMoteType)getType()).getFirmwareDebugInfo();
    } catch (IOException e) {
      throw (RuntimeException) new RuntimeException("Error: " + e.getMessage()).initCause(e);
    }

    for (Element element: configXML) {
      String name = element.getName();

      if (name.equals("motetype_identifier")) {
        /* Ignored: handled by simulation */
      } else if ("breakpoints".equals(element.getName())) {
        setWatchpointConfigXML(element.getChildren(), visAvailable);
      } else if (name.equals("interface_config")) {
        String intfClass = element.getText().trim();

        /* Backwards compatibility: se.sics -> org.contikios */
        if (intfClass.startsWith("se.sics")) {
          intfClass = intfClass.replaceFirst("se\\.sics", "org.contikios");
        }

        /* Backwards compatibility: renamed mote interfaces */
        if (intfClass.equals("org.contikios.cooja.mspmote.interfaces.MspIPAddress")) {
          intfClass = IPAddress.class.getName();
        }
        if (intfClass.equals("org.contikios.cooja.mspmote.interfaces.ESBLog")) {
          intfClass = MspSerial.class.getName();
        }
        if (intfClass.equals("org.contikios.cooja.mspmote.interfaces.SkyByteRadio")) {
          intfClass = Msp802154Radio.class.getName();
        }
        if (intfClass.equals("org.contikios.cooja.mspmote.interfaces.SkySerial")) {
          intfClass = MspSerial.class.getName();
        }

        Class<? extends MoteInterface> moteInterfaceClass = simulation.getCooja().tryLoadClass(
            this, MoteInterface.class, intfClass);
        if (moteInterfaceClass == null) {
          logger.fatal("Could not load mote interface class: " + intfClass);
          return false;
        }

        MoteInterface moteInterface = getInterfaces().getInterfaceOfType(moteInterfaceClass);
        if (moteInterface == null) {
          logger.fatal("Could not find mote interface of class: " + moteInterfaceClass);
          return false;
        }
        moteInterface.setConfigXML(element.getChildren(), visAvailable);
      }
    }

    /* Schedule us immediately */
    requestImmediateWakeup();
    return true;
  }

  public Collection<Element> getConfigXML() {
    ArrayList<Element> config = new ArrayList<Element>();
    Element element;

    /* Breakpoints */
    element = new Element("breakpoints");
    element.addContent(getWatchpointConfigXML());
    config.add(element);

    // Mote interfaces
    for (MoteInterface moteInterface: getInterfaces().getInterfaces()) {
      element = new Element("interface_config");
      element.setText(moteInterface.getClass().getName());

      Collection<Element> interfaceXML = moteInterface.getConfigXML();
      if (interfaceXML != null) {
        element.addContent(interfaceXML);
        config.add(element);
      }
    }

    return config;
  }

  public String getExecutionDetails() {
    return executeCLICommand("stacktrace");
  }

  /**
   * Returns a "file:line:function" description of the current program counter,
   * based on the firmware debug information; may return null on failure.
   */
  public String getPCString() {
    int pc = myCpu.getPC();
    ELF elf = myCpu.getRegistry().getComponent(ELF.class);
    DebugInfo di = elf.getDebugInfo(pc);

    /* Following code examples from MSPsim, DebugCommands.java */
    if (di == null) {
      di = elf.getDebugInfo(pc + 1);
    }
    if (di == null) {
      /* Return PC value */
      SimpleProfiler sp = (SimpleProfiler)myCpu.getProfiler();
      try {
        MapEntry mapEntry = sp.getCallMapEntry(0);
        if (mapEntry != null) {
          String file = mapEntry.getFile();
          if (file != null) {
            if (file.indexOf('/') >= 0) {
              file = file.substring(file.lastIndexOf('/')+1);
            }
          }
          String name = mapEntry.getName();
          return file + ":?:" + name;
        }
        return String.format("*%02x", pc);
      } catch (Exception e) {
        return null;
      }
    }

    int lineNo = di.getLine();
    String file = di.getFile();
    file = file==null?"?":file;
    if (file.contains("/")) {
      /* strip path */
      file = file.substring(file.lastIndexOf('/')+1, file.length());
    }

    String function = di.getFunction();
    function = function==null?"":function;
    if (function.contains(":")) {
      /* strip arguments */
      function = function.substring(0, function.lastIndexOf(':'));
    }
    if (function.equals("* not available")) {
      function = "?";
    }
    return file + ":" + lineNo + ":" + function;

    /*return executeCLICommand("line " + myCpu.getPC());*/
  }

  /* WatchpointMote */
  private ArrayList<WatchpointListener> watchpointListeners = new ArrayList<WatchpointListener>();
  private ArrayList<MspBreakpoint> watchpoints = new ArrayList<MspBreakpoint>();
  private Hashtable<File, Hashtable<Integer, Integer>> debuggingInfo = null;

  public void addWatchpointListener(WatchpointListener listener) {
    watchpointListeners.add(listener);
  }
  public void removeWatchpointListener(WatchpointListener listener) {
    watchpointListeners.remove(listener);
  }
  public WatchpointListener[] getWatchpointListeners() {
    return watchpointListeners.toArray(new WatchpointListener[0]);
  }

  public Watchpoint addBreakpoint(File codeFile, int lineNr, int address) {
    /* Integer.valueOf replaces the deprecated Integer(int) constructor */
    MspBreakpoint bp = new MspBreakpoint(this, address, codeFile, Integer.valueOf(lineNr));
    watchpoints.add(bp);

    for (WatchpointListener listener: watchpointListeners) {
      listener.watchpointsChanged();
    }
    return bp;
  }
  public void removeBreakpoint(Watchpoint watchpoint) {
    ((MspBreakpoint)watchpoint).unregisterBreakpoint();
    watchpoints.remove(watchpoint);

    for (WatchpointListener listener: watchpointListeners) {
      listener.watchpointsChanged();
    }
  }
  public Watchpoint[] getBreakpoints() {
    return watchpoints.toArray(new Watchpoint[0]);
  }

  public boolean breakpointExists(int address) {
    if (address < 0) {
      return false;
    }
    for (Watchpoint watchpoint: watchpoints) {
      if (watchpoint.getExecutableAddress() == address) {
        return true;
      }
    }
    return false;
  }
  public boolean breakpointExists(File file, int lineNr) {
    for (Watchpoint watchpoint: watchpoints) {
      if (watchpoint.getCodeFile() == null) {
        continue;
      }
      if (watchpoint.getCodeFile().compareTo(file) != 0) {
        continue;
      }
      if (watchpoint.getLineNumber() != lineNr) {
        continue;
      }
      return true;
    }
    return false;
  }

  /**
   * Looks up the executable address of a source file/line using the firmware
   * debugging information, falling back to matching on file name only.
   *
   * @return the address, or -1 if unknown
   */
  public int getExecutableAddressOf(File file, int lineNr) {
    if (file == null || lineNr < 0 || debuggingInfo == null) {
      return -1;
    }

    /* Match file */
    Hashtable<Integer, Integer> lineTable = debuggingInfo.get(file);
    if (lineTable == null) {
      for (File f: debuggingInfo.keySet()) {
        if (f != null && f.getName().equals(file.getName())) {
          lineTable = debuggingInfo.get(f);
          break;
        }
      }
    }
    if (lineTable == null) {
      return -1;
    }

    /* Match line number (the previous key-scan loop always returned this value) */
    Integer address = lineTable.get(lineNr);
    if (address != null) {
      return address.intValue();
    }

    return -1;
  }

  private long lastBreakpointCycles = -1;
  public void signalBreakpointTrigger(MspBreakpoint b) {
    /* Debounce: trigger at most once per CPU cycle */
    if (lastBreakpointCycles == myCpu.cycles) {
      return;
    }

    lastBreakpointCycles = myCpu.cycles;
    if (b.stopsSimulation() && getSimulation().isRunning()) {
      /* Stop simulation immediately */
      stopNextInstruction();
    }

    /* Notify listeners */
    WatchpointListener[] listeners = getWatchpointListeners();
    for (WatchpointListener listener: listeners) {
      listener.watchpointTriggered(b);
    }
  }

  public Collection<Element> getWatchpointConfigXML() {
    ArrayList<Element> config = new ArrayList<Element>();
    Element element;

    for (MspBreakpoint breakpoint: watchpoints) {
      element = new Element("breakpoint");
      element.addContent(breakpoint.getConfigXML());
      config.add(element);
    }

    return config;
  }
  public boolean setWatchpointConfigXML(Collection<Element> configXML, boolean visAvailable) {
    for (Element element : configXML) {
      if (element.getName().equals("breakpoint")) {
        MspBreakpoint breakpoint = new MspBreakpoint(this);
        if (!breakpoint.setConfigXML(element.getChildren(), visAvailable)) {
          logger.warn("Could not restore breakpoint: " + breakpoint);
        } else {
          watchpoints.add(breakpoint);
        }
      }
    }
    return true;
  }
}
| |
/**********************************************************************************
*
* Copyright (c) 2015 The Sakai Foundation
*
* Original developers:
*
* New York University
* Payten Giles
* Mark Triggs
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.osedu.org/licenses/ECL-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
**********************************************************************************/
package org.sakaiproject.pasystem.impl.popups;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.sql.Clob;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import java.util.UUID;
import org.sakaiproject.pasystem.api.AcknowledgementType;
import org.sakaiproject.pasystem.api.Acknowledger;
import org.sakaiproject.pasystem.api.MissingUuidException;
import org.sakaiproject.pasystem.api.Popup;
import org.sakaiproject.pasystem.api.Popups;
import org.sakaiproject.pasystem.api.TemplateStream;
import org.sakaiproject.pasystem.impl.acknowledgements.AcknowledgementStorage;
import org.sakaiproject.pasystem.impl.common.DB;
import org.sakaiproject.pasystem.impl.common.DBAction;
import org.sakaiproject.pasystem.impl.common.DBConnection;
import org.sakaiproject.pasystem.impl.common.DBResults;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Persistence layer for popup campaigns: queries and stores {@link Popup}
 * records, their template content, and their per-user assignments.
 */
public class PopupStorage implements Popups, Acknowledger {

    private static final Logger LOG = LoggerFactory.getLogger(PopupStorage.class);

    /**
     * Inserts a new popup campaign, its template content and (optionally) its
     * assigned users, all in one transaction.
     *
     * @return the generated UUID of the new campaign
     */
    @Override
    public String createCampaign(Popup popup,
                                 TemplateStream templateInput,
                                 Optional<List<String>> assignToUsers) {
        return DB.transaction
            ("Popup creation",
             new DBAction<String>() {
                 @Override
                 public String call(DBConnection connection) throws SQLException {
                     // Generate the campaign key up front so the screen row,
                     // content CLOB and assignments all share it.
                     String newUuid = UUID.randomUUID().toString();

                     connection.run("INSERT INTO pasystem_popup_screens (uuid, descriptor, start_time, end_time, open_campaign) VALUES (?, ?, ?, ?, ?)")
                         .param(newUuid)
                         .param(popup.getDescriptor())
                         .param(popup.getStartTime())
                         .param(popup.getEndTime())
                         .param(popup.isOpenCampaign() ? 1 : 0)
                         .executeUpdate();

                     setPopupContent(connection, newUuid, templateInput);
                     setPopupAssignees(connection, newUuid, assignToUsers);

                     connection.commit();

                     return newUuid;
                 }
             }
            );
    }

    /**
     * Updates an existing campaign's metadata, and optionally its template
     * content and assignee list.
     */
    @Override
    public void updateCampaign(Popup popup,
                               Optional<TemplateStream> templateInput,
                               Optional<List<String>> assignToUsers) {
        try {
            final String uuid = popup.getUuid();

            DB.transaction
                ("Update an existing popup campaign",
                 new DBAction<Void>() {
                     @Override
                     public Void call(DBConnection connection) throws SQLException {
                         connection.run("UPDATE pasystem_popup_screens SET descriptor = ?, start_time = ?, end_time = ?, open_campaign = ? WHERE uuid = ?")
                             .param(popup.getDescriptor())
                             .param(popup.getStartTime())
                             .param(popup.getEndTime())
                             .param(popup.isOpenCampaign() ? 1 : 0)
                             .param(uuid)
                             .executeUpdate();

                         setPopupAssignees(connection, uuid, assignToUsers);

                         // Template content is only replaced when a new one was uploaded
                         if (templateInput.isPresent()) {
                             setPopupContent(connection, uuid, templateInput.get());
                         }

                         connection.commit();
                         LOG.info("Update of popup {} completed", uuid);

                         return null;
                     }
                 }
                );
        } catch (MissingUuidException e) {
            throw new RuntimeException("Can't update a popup with no UUID specified", e);
        }
    }

    /**
     * @return every popup campaign currently stored.
     */
    @Override
    public List<Popup> getAll() {
        return DB.transaction
            ("Find all popups",
             new DBAction<List<Popup>>() {
                 @Override
                 public List<Popup> call(DBConnection connection) throws SQLException {
                     List<Popup> found = new ArrayList<>();

                     try (DBResults rows = connection.run("SELECT * from pasystem_popup_screens")
                             .executeQuery()) {
                         for (ResultSet row : rows) {
                             found.add(Popup.create(row.getString("uuid"),
                                     row.getString("descriptor"),
                                     row.getLong("start_time"),
                                     row.getLong("end_time"),
                                     row.getInt("open_campaign") == 1));
                         }

                         return found;
                     }
                 }
             }
            );
    }

    /**
     * @return the template markup for a campaign, or the empty string when no
     *         content row exists.
     */
    @Override
    public String getPopupContent(final String uuid) {
        return DB.transaction
            ("Get the content for a popup",
             new DBAction<String>() {
                 @Override
                 public String call(DBConnection connection) throws SQLException {
                     try (DBResults rows = connection.run("SELECT template_content from pasystem_popup_content where uuid = ?")
                             .param(uuid)
                             .executeQuery()) {
                         for (ResultSet row : rows) {
                             // Materialize the CLOB as a String
                             Clob clob = row.getClob("template_content");
                             return clob.getSubString(1, (int) clob.length());
                         }

                         return "";
                     }
                 }
             }
            );
    }

    /**
     * Looks up a single campaign by its UUID.
     */
    @Override
    public Optional<Popup> getForId(final String uuid) {
        return DB.transaction
            ("Find a popup by uuid",
             new DBAction<Optional<Popup>>() {
                 @Override
                 public Optional<Popup> call(DBConnection connection) throws SQLException {
                     try (DBResults rows = connection.run("SELECT * from pasystem_popup_screens WHERE UUID = ?")
                             .param(uuid)
                             .executeQuery()) {
                         for (ResultSet row : rows) {
                             return Optional.of(Popup.create(row.getString("uuid"),
                                     row.getString("descriptor"),
                                     row.getLong("start_time"),
                                     row.getLong("end_time"),
                                     row.getInt("open_campaign") == 1));
                         }

                         return Optional.empty();
                     }
                 }
             }
            );
    }

    /**
     * @return the EIDs of the users explicitly assigned to a campaign.
     */
    @Override
    public List<String> getAssignees(final String uuid) {
        return DB.transaction
            ("Find a list of assignees by popup uuid",
             new DBAction<List<String>>() {
                 @Override
                 public List<String> call(DBConnection connection) throws SQLException {
                     List<String> assignees = new ArrayList<>();

                     try (DBResults rows = connection.run("SELECT user_eid from pasystem_popup_assign WHERE UUID = ? AND user_eid is not NULL")
                             .param(uuid)
                             .executeQuery()) {
                         for (ResultSet row : rows) {
                             assignees.add(row.getString("user_eid"));
                         }

                         return assignees;
                     }
                 }
             }
            );
    }

    /**
     * Writes the template content CLOB for a campaign, creating the backing
     * row first if it does not exist yet.
     */
    private void setPopupContent(DBConnection connection, String uuid, TemplateStream templateContent) throws SQLException {
        // A little hoop jumping here to avoid having to rewind the InputStream.
        //
        // Insert an empty row if one is missing; the insert failing just means
        // we are updating an existing popup.
        try {
            connection.run("INSERT INTO pasystem_popup_content (uuid) VALUES (?)")
                .param(uuid)
                .executeUpdate();
        } catch (SQLException e) {
            // Expected for updates
        }

        // Stream the template into the content CLOB
        connection.run("UPDATE pasystem_popup_content set template_content = ? WHERE uuid = ?")
            .param(new InputStreamReader(templateContent.getInputStream()),
                   templateContent.getLength())
            .param(uuid)
            .executeUpdate();
    }

    /**
     * Replaces the assignee list for a campaign. A missing Optional leaves
     * the current assignments untouched.
     */
    private void setPopupAssignees(DBConnection connection, String uuid, Optional<List<String>> assignToUsers) throws SQLException {
        if (assignToUsers.isPresent()) {
            connection.run("DELETE FROM pasystem_popup_assign where uuid = ? AND user_eid is not NULL")
                .param(uuid)
                .executeUpdate();

            for (String userEid : assignToUsers.get()) {
                connection.run("INSERT INTO pasystem_popup_assign (uuid, user_eid) VALUES (?, ?)")
                    .param(uuid)
                    .param(userEid)
                    .executeUpdate();
            }
        }
    }

    /**
     * Deletes a campaign and all of its dependent rows in one transaction.
     */
    @Override
    public boolean deleteCampaign(final String uuid) {
        return DB.transaction
            ("Delete an existing popup campaign",
             new DBAction<Boolean>() {
                 @Override
                 public Boolean call(DBConnection connection) throws SQLException {
                     // Children first, then the screen row itself
                     connection.run("DELETE FROM pasystem_popup_assign where uuid = ?")
                         .param(uuid)
                         .executeUpdate();

                     connection.run("DELETE FROM pasystem_popup_dismissed where uuid = ?")
                         .param(uuid)
                         .executeUpdate();

                     connection.run("DELETE FROM pasystem_popup_content where uuid = ?")
                         .param(uuid)
                         .executeUpdate();

                     connection.run("DELETE FROM pasystem_popup_screens WHERE uuid = ?")
                         .param(uuid)
                         .executeUpdate();

                     connection.commit();

                     return true;
                 }
             }
            );
    }

    /**
     * Records a user's acknowledgement of a popup.
     */
    @Override
    public void acknowledge(final String uuid, final String userEid, final AcknowledgementType acknowledgementType) {
        AcknowledgementStorage storage = new AcknowledgementStorage(AcknowledgementStorage.NotificationType.POPUP);
        storage.acknowledge(uuid, userEid, acknowledgementType);
    }

    /**
     * Records a temporary acknowledgement (the default).
     */
    @Override
    public void acknowledge(final String uuid, final String userEid) {
        acknowledge(uuid, userEid, AcknowledgementType.TEMPORARY);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.logging.log4j.core.async;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ThreadFactory;
import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.core.LogEvent;
import org.apache.logging.log4j.core.impl.Log4jLogEvent;
import org.apache.logging.log4j.core.jmx.RingBufferAdmin;
import org.apache.logging.log4j.core.util.Constants;
import org.apache.logging.log4j.message.ReusableMessage;
import org.apache.logging.log4j.status.StatusLogger;
import com.lmax.disruptor.EventFactory;
import com.lmax.disruptor.EventTranslatorTwoArg;
import com.lmax.disruptor.ExceptionHandler;
import com.lmax.disruptor.RingBuffer;
import com.lmax.disruptor.Sequence;
import com.lmax.disruptor.SequenceReportingEventHandler;
import com.lmax.disruptor.WaitStrategy;
import com.lmax.disruptor.dsl.Disruptor;
import com.lmax.disruptor.dsl.ProducerType;
/**
* Helper class decoupling the {@code AsyncLoggerConfig} class from the LMAX Disruptor library.
* <p>
* {@code AsyncLoggerConfig} is a plugin, and will be loaded even if users do not configure any {@code <asyncLogger>} or
* {@code <asyncRoot>} elements in the configuration. If {@code AsyncLoggerConfig} has inner classes that extend or
* implement classes from the Disruptor library, a {@code NoClassDefFoundError} is thrown if the Disruptor jar is not in
* the classpath when the PluginManager loads the {@code AsyncLoggerConfig} plugin from the pre-defined plugins
* definition file.
* <p>
* This class serves to make the dependency on the Disruptor optional, so that these classes are only loaded when the
* {@code AsyncLoggerConfig} is actually used.
*/
public class AsyncLoggerConfigDisruptor implements AsyncLoggerConfigDelegate {
    // Maximum number of drain waits (200 * 50 ms = 10 seconds) before forcing shutdown; see stop()
    private static final int MAX_DRAIN_ATTEMPTS_BEFORE_SHUTDOWN = 200;
    private static final int SLEEP_MILLIS_BETWEEN_DRAIN_ATTEMPTS = 50;
    private static final Logger LOGGER = StatusLogger.getLogger();

    private int ringBufferSize; // computed from configuration in start()
    private AsyncEventRouter asyncEventRouter; // routing policy, created in start()
    /**
     * RingBuffer events contain all information necessary to perform the work in a separate thread.
     */
    public static class Log4jEventWrapper {
        // Populated by TRANSLATOR on the producer side; read and cleared by the handler thread
        private AsyncLoggerConfig loggerConfig;
        private LogEvent event;

        /**
         * Release references held by ring buffer to allow objects to be garbage-collected.
         */
        public void clear() {
            loggerConfig = null;
            event = null;
        }

        @Override
        public String toString() {
            // "null" after clear(); otherwise delegates to the wrapped event
            return String.valueOf(event);
        }
    }
/**
* EventHandler performs the work in a separate thread.
*/
private static class Log4jEventWrapperHandler implements SequenceReportingEventHandler<Log4jEventWrapper> {
private static final int NOTIFY_PROGRESS_THRESHOLD = 50;
private Sequence sequenceCallback;
private int counter;
@Override
public void setSequenceCallback(final Sequence sequenceCallback) {
this.sequenceCallback = sequenceCallback;
}
@Override
public void onEvent(final Log4jEventWrapper event, final long sequence, final boolean endOfBatch)
throws Exception {
event.event.setEndOfBatch(endOfBatch);
event.loggerConfig.asyncCallAppenders(event.event);
event.clear();
notifyIntermediateProgress(sequence);
}
/**
* Notify the BatchEventProcessor that the sequence has progressed. Without this callback the sequence would not
* be progressed until the batch has completely finished.
*/
private void notifyIntermediateProgress(final long sequence) {
if (++counter > NOTIFY_PROGRESS_THRESHOLD) {
sequenceCallback.set(sequence);
counter = 0;
}
}
}
    /**
     * Factory used to populate the RingBuffer with events. These event objects are then re-used
     * during the life of the RingBuffer.
     */
    private static final EventFactory<Log4jEventWrapper> FACTORY = new EventFactory<Log4jEventWrapper>() {
        @Override
        public Log4jEventWrapper newInstance() {
            // Called once per slot when the ring buffer is pre-allocated
            return new Log4jEventWrapper();
        }
    };
    /**
     * Object responsible for passing on data to a specific RingBuffer event.
     */
    private static final EventTranslatorTwoArg<Log4jEventWrapper, LogEvent, AsyncLoggerConfig> TRANSLATOR =
            new EventTranslatorTwoArg<Log4jEventWrapper, LogEvent, AsyncLoggerConfig>() {
        @Override
        public void translateTo(final Log4jEventWrapper ringBufferElement, final long sequence,
                final LogEvent logEvent, final AsyncLoggerConfig loggerConfig) {
            // Only stores references; the wrapper is cleared again after processing
            ringBufferElement.event = logEvent;
            ringBufferElement.loggerConfig = loggerConfig;
        }
    };
    private static final ThreadFactory THREAD_FACTORY = new DaemonThreadFactory("AsyncLoggerConfig-");

    // volatile: written under synchronized start()/stop(), read unsynchronized by producers
    private volatile Disruptor<Log4jEventWrapper> disruptor;
    private ExecutorService executor; // single background thread driving the disruptor

    private long backgroundThreadId; // LOG4J2-471

    public AsyncLoggerConfigDisruptor() {
    }
    /**
     * Creates and starts a new Disruptor and its associated background thread for
     * this configuration, unless one already exists (in which case this is a no-op).
     *
     * @see #stop()
     */
    public synchronized void start() {
        if (disruptor != null) {
            LOGGER.trace("AsyncLoggerConfigDisruptor not starting new disruptor for this configuration, "
                    + "using existing object.");
            return;
        }
        LOGGER.trace("AsyncLoggerConfigDisruptor creating new disruptor for this configuration.");
        ringBufferSize = DisruptorUtil.calculateRingBufferSize("AsyncLoggerConfig.RingBufferSize");
        final WaitStrategy waitStrategy = DisruptorUtil.createWaitStrategy("AsyncLoggerConfig.WaitStrategy");
        // Dedicated daemon thread that will run the event handler
        executor = Executors.newSingleThreadExecutor(THREAD_FACTORY);
        backgroundThreadId = DisruptorUtil.getExecutorThreadId(executor);
        asyncEventRouter = AsyncEventRouterFactory.create();
        disruptor = new Disruptor<>(FACTORY, ringBufferSize, executor, ProducerType.MULTI, waitStrategy);

        final ExceptionHandler<Log4jEventWrapper> errorHandler = DisruptorUtil.getAsyncLoggerConfigExceptionHandler();
        disruptor.handleExceptionsWith(errorHandler);

        final Log4jEventWrapperHandler[] handlers = {new Log4jEventWrapperHandler()};
        disruptor.handleEventsWith(handlers);

        LOGGER.debug("Starting AsyncLoggerConfig disruptor for this configuration with ringbufferSize={}, "
                + "waitStrategy={}, exceptionHandler={}...", disruptor.getRingBuffer().getBufferSize(), waitStrategy
                .getClass().getSimpleName(), errorHandler);
        disruptor.start();
    }
    /**
     * Shuts down the Disruptor and its associated thread (no-op if already shut
     * down), draining the ring buffer for up to 10 seconds first, and releases
     * the references so they can be garbage-collected.
     */
    public synchronized void stop() {
        final Disruptor<Log4jEventWrapper> temp = disruptor;
        if (temp == null) {
            LOGGER.trace("AsyncLoggerConfigDisruptor: disruptor for this configuration already shut down.");
            return; // disruptor was already shut down by another thread
        }
        LOGGER.trace("AsyncLoggerConfigDisruptor: shutting down disruptor for this configuration.");

        // We must guarantee that publishing to the RingBuffer has stopped before we call disruptor.shutdown().
        disruptor = null; // client code fails with NPE if log after stop = OK

        // Calling Disruptor.shutdown() will wait until all enqueued events are fully processed,
        // but this waiting happens in a busy-spin. To avoid (postpone) wasting CPU,
        // we sleep in short chunks, up to 10 seconds, waiting for the ringbuffer to drain.
        for (int i = 0; hasBacklog(temp) && i < MAX_DRAIN_ATTEMPTS_BEFORE_SHUTDOWN; i++) {
            try {
                Thread.sleep(SLEEP_MILLIS_BETWEEN_DRAIN_ATTEMPTS); // give up the CPU for a while
            } catch (final InterruptedException e) { // ignored
            }
        }
        temp.shutdown(); // busy-spins until all events currently in the disruptor have been processed

        LOGGER.trace("AsyncLoggerConfigDisruptor: shutting down disruptor executor for this configuration.");
        executor.shutdown(); // finally, kill the processor thread
        executor = null; // release reference to allow GC

        // Report how many events the discarding router dropped (if any)
        if (DiscardingAsyncEventRouter.getDiscardCount(asyncEventRouter) > 0) {
            LOGGER.trace("AsyncLoggerConfigDisruptor: {} discarded {} events.", asyncEventRouter,
                    DiscardingAsyncEventRouter.getDiscardCount(asyncEventRouter));
        }
    }
/**
 * Returns {@code true} if the specified disruptor still has unprocessed events.
 */
private static boolean hasBacklog(final Disruptor<?> theDisruptor) {
    final RingBuffer<?> ring = theDisruptor.getRingBuffer();
    // The ring buffer is fully drained exactly when it has capacity for bufferSize more slots.
    final boolean completelyEmpty = ring.hasAvailableCapacity(ring.getBufferSize());
    return !completelyEmpty;
}
@Override
public EventRoute getEventRoute(final Level logLevel) {
    // A negative capacity signals that log4j has been shut down; such events are dropped.
    if (remainingDisruptorCapacity() < 0) {
        return EventRoute.DISCARD;
    }
    return asyncEventRouter.getRoute(backgroundThreadId, logLevel);
}
/**
 * Returns the number of free slots in the ring buffer, or -1 if the disruptor
 * reference has already been nulled by shutdown.
 */
private int remainingDisruptorCapacity() {
    final Disruptor<Log4jEventWrapper> current = disruptor; // snapshot: field may be nulled concurrently
    return hasLog4jBeenShutDown(current)
            ? -1
            : (int) current.getRingBuffer().remainingCapacity();
}
/**
 * Returns {@code true} if the specified disruptor is null.
 */
private boolean hasLog4jBeenShutDown(final Disruptor<Log4jEventWrapper> aDisruptor) {
    if (aDisruptor != null) {
        return false;
    }
    // LOG4J2-639: a null disruptor means stop() already ran; the event is dropped with a warning.
    LOGGER.warn("Ignoring log event after log4j was shut down");
    return true;
}
@Override
public void enqueueEvent(final LogEvent event, final AsyncLoggerConfig asyncLoggerConfig) {
    try {
        enqueue(prepareEvent(event), asyncLoggerConfig);
    } catch (final NullPointerException npe) {
        // LOG4J2-639: the disruptor field may be nulled concurrently by stop(); the resulting
        // NPE keeps us from publishing to a shut-down disruptor, where publishEvent could
        // otherwise hang and never return. The event is dropped with a warning instead.
        LOGGER.warn("Ignoring log event after log4j was shut down.");
    }
}
/**
 * Makes the event safe to hand to the background thread: snapshots ring-buffer
 * events and freezes mutable/reusable message state up front when required.
 */
private LogEvent prepareEvent(final LogEvent event) {
    final LogEvent immutable = ensureImmutable(event);
    if (immutable instanceof Log4jLogEvent && immutable.getMessage() instanceof ReusableMessage) {
        ((Log4jLogEvent) immutable).makeMessageImmutable();
    } else if (!Constants.FORMAT_MESSAGES_IN_BACKGROUND) { // LOG4J2-898: user may choose
        immutable.getMessage().getFormattedMessage(); // LOG4J2-763: ask message to freeze parameters
    }
    return immutable;
}
// Publishes the prepared event to the ring buffer. May throw NullPointerException when
// stop() has nulled the disruptor field concurrently; enqueueEvent() relies on that.
private void enqueue(final LogEvent logEvent, final AsyncLoggerConfig asyncLoggerConfig) {
    disruptor.getRingBuffer().publishEvent(TRANSLATOR, logEvent, asyncLoggerConfig);
}
// Non-blocking variant: returns false instead of waiting when the ring buffer is full.
// NOTE(review): unlike enqueueEvent, this path has no NullPointerException guard; if stop()
// nulls the disruptor field concurrently this throws NPE rather than returning false --
// confirm callers tolerate that.
@Override
public boolean tryEnqueue(final LogEvent event, final AsyncLoggerConfig asyncLoggerConfig) {
    final LogEvent logEvent = prepareEvent(event);
    return disruptor.getRingBuffer().tryPublishEvent(TRANSLATOR, logEvent, asyncLoggerConfig);
}
/**
 * Returns an event that is safe to process on the background thread: plain events
 * pass through unchanged, ring-buffer events are snapshotted first.
 */
private LogEvent ensureImmutable(final LogEvent event) {
    if (!(event instanceof RingBufferLogEvent)) {
        return event;
    }
    // Special case where both types of Async Loggers are used together: this
    // RingBufferLogEvent was created by the all-loggers-async type but is also
    // consumed by the some-loggers-async type (this class). The original event
    // will be re-used and modified in an application thread later, so take a
    // snapshot that the background thread can process safely.
    return ((RingBufferLogEvent) event).createMemento();
}
/*
 * Exposes this configuration's ring buffer through a RingBufferAdmin (e.g. for monitoring).
 *
 * (non-Javadoc)
 *
 * @see org.apache.logging.log4j.core.async.AsyncLoggerConfigDelegate#createRingBufferAdmin(java.lang.String,
 * java.lang.String)
 */
@Override
public RingBufferAdmin createRingBufferAdmin(final String contextName, final String loggerConfigName) {
    return RingBufferAdmin.forAsyncLoggerConfig(disruptor.getRingBuffer(), contextName, loggerConfigName);
}
}
| |
package org.arrah.framework.dataquality;
/***************************************************
* Copyright to Amish Choudhary 2015 *
* *
* Any part of code or file can be changed, *
* redistributed, modified with the copyright *
* information intact *
* *
* Author$ : Amish Choudhary *
* Author$ : Vivek Singh *
* *
**************************************************/
/*
* This class is wrapper class on SimMetrics util
* which is used for matching records using fuzziness
* it will use following class for similarity test
* import org.simmetrics.metrics.*;
*
*/
import java.util.*;
import java.util.Map.Entry;
import java.util.concurrent.*;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import org.arrah.framework.util.StringCaseFormatUtil;
public class RecordMatch
{
/*
* Returns the results
*/
public class Result implements Comparable<Object>
{
int index1;
int index2;
List<String> record1;
List<String> record2;
boolean isMatch;
float simMatchValResult;
Result(boolean isMatch, int index1, int index2, List<String> record1, List<String> record2, float simMatchVal)
{
this.isMatch = isMatch;
this.index1 = index1;
this.simMatchValResult = simMatchVal;
if(isMatch)
{
this.index2 = index2;
this.record1 = new ArrayList<String>(record1);
this.record2 = new ArrayList<String>(record2);
}
}
public int getLeftMatchIndex() {
return index1;
}
public int getRightMatchIndex() {
return index2;
}
public List<String> getLeftMatchedRow() {
return record1;
}
public List<String> getRightMatchedRow() {
return record2;
}
public boolean isMatch() {
return isMatch;
}
public float getSimMatchVal() {
return simMatchValResult;
}
public String toString()
{
if(isMatch)
{
return "Index [" + index1 + "] matched [" + index2 + "]";
}
else
{
return "Index [" + index1 + "] no match";
}
}
@Override
public int compareTo(Object o) {
Result newo = (Result)o;
if ( this.index1 > newo.index1)
return 1;
if ( this.index1 < newo.index1)
return -1;
return 0;
}
}
/*
 * Per-column match configuration: the column position in table A and in table B,
 * the similarity threshold (match index) required for those columns to match,
 * and the name of the similarity algorithm to apply.
 */
public class ColData
{
    private int colIndexA;      // column position in the left-side table
    private int colIndexB;      // column position in the right-side table
    private float matchIndex;   // similarity threshold; 1.0 means exact comparison
    private String algoName;    // similarity algorithm key

    public ColData(int colIndexA, int colIndexB, float matchIndex, String algoName)
    {
        this.colIndexA = colIndexA;
        this.colIndexB = colIndexB;
        this.matchIndex = matchIndex;
        this.algoName = algoName;
    }

    public int getM_colIndexA() {
        return colIndexA;
    }

    public void setM_colIndexA(int m_colIndexA) {
        colIndexA = m_colIndexA;
    }

    public int getM_colIndexB() {
        return colIndexB;
    }

    public void setM_colIndexB(int m_colIndexB) {
        colIndexB = m_colIndexB;
    }

    public float getM_matchIndex() {
        return matchIndex;
    }

    public void setM_matchIndex(float m_matchIndex) {
        matchIndex = m_matchIndex;
    }

    public String getM_algoName() {
        return algoName;
    }

    public void setM_algoName(String m_algoName) {
        algoName = m_algoName;
    }
}
/*
 * Aggregates the configuration for a record-matching run: per-column settings for
 * both tables, the default algorithm name, the AND/OR combination mode
 * (exactMatch) and whether to stop at the first matching right-side row.
 */
public class MultiColData
{
    private List<ColData> columnsA;     // column configs for the left-side table
    private List<ColData> columnsB;     // column configs for the right-side table
    private String algorithm;           // default algorithm key
    private boolean allColumnsMustMatch; // true: AND across columns; false: OR
    private boolean stopAtFirstMatch;   // true: stop scanning right rows after first match

    public MultiColData()
    {
        columnsA = new ArrayList<ColData>();
        columnsB = new ArrayList<ColData>();
        algorithm = new String();
        allColumnsMustMatch = true;
        setFirstRecordMatch(false);
    }

    public List<ColData> getA()
    {
        return columnsA;
    }

    public List<ColData> getB()
    {
        return columnsB;
    }

    // Note: appends to the existing list rather than replacing it.
    public void setA(List<ColData> a)
    {
        columnsA.addAll(a);
    }

    // Note: appends to the existing list rather than replacing it.
    public void setB(List<ColData> b)
    {
        columnsB.addAll(b);
    }

    public String getAlgoName() {
        return algorithm;
    }

    public void setAlgoName(String algoName) {
        this.algorithm = algoName;
    }

    public boolean isExactMatch() {
        return allColumnsMustMatch;
    }

    public void setExactMatch(boolean exactMatch) {
        this.allColumnsMustMatch = exactMatch;
    }

    public boolean isFirstRecordMatch() {
        return stopAtFirstMatch;
    }

    public void setFirstRecordMatch(boolean firstRecordMatch) {
        this.stopAtFirstMatch = firstRecordMatch;
    }
}
static ConcurrentMap <String,String> biclassmap;
static ConcurrentMap<String, Entry<Method, Object>> functor;
public class operator
{
    /**
     * Populates the static lookup tables used for fuzzy matching.
     * biclassmap maps a similarity-metric class name to the method name to invoke
     * ("compare" for every supported metric); functor maps an upper-cased simple
     * class name (e.g. "LEVENSHTEIN", "SIMMETRICSUTIL$SOUNDEX") to a
     * (Method, instance) pair that is later invoked reflectively.
     */
    public operator( )
    {
        biclassmap = new ConcurrentHashMap <String,String>();
        functor = new ConcurrentHashMap <String,Entry<Method,Object>>();
        biclassmap.put("org.simmetrics.metrics.BlockDistance","compare");
        biclassmap.put("org.simmetrics.metrics.CosineSimilarity","compare");
        biclassmap.put("org.simmetrics.metrics.DiceSimilarity","compare");
        biclassmap.put("org.simmetrics.metrics.EuclideanDistance","compare");
        biclassmap.put("org.simmetrics.metrics.JaccardSimilarity","compare");
        biclassmap.put("org.simmetrics.metrics.Jaro","compare");
        biclassmap.put("org.simmetrics.metrics.JaroWinkler","compare");
        biclassmap.put("org.simmetrics.metrics.Levenshtein","compare");
        biclassmap.put("org.simmetrics.metrics.MatchingCoefficient","compare");
        biclassmap.put("org.arrah.framework.dataquality.SimmetricsUtil$MongeElkan","compare");
        biclassmap.put("org.arrah.framework.dataquality.SimmetricsUtil$Soundex","compare");
        biclassmap.put("org.arrah.framework.dataquality.SimmetricsUtil$qGramDistance","compare");
        biclassmap.put("org.arrah.framework.dataquality.SimmetricsUtil$DoubleMetaPhone","compare");
        biclassmap.put("org.arrah.framework.dataquality.SimmetricsUtil$CustomNames","compare");
        biclassmap.put("org.simmetrics.metrics.SimonWhite","compare");
        biclassmap.put("org.simmetrics.metrics.NeedlemanWunch","compare");
        biclassmap.put("org.simmetrics.metrics.OverlapCoefficient","compare");
        biclassmap.put("org.simmetrics.metrics.SmithWaterman","compare");
        biclassmap.put("org.simmetrics.metrics.SmithWatermanGotoh","compare");
        String methodName, className;
        for(Entry<String, String> m : biclassmap.entrySet())
        {
            try
            {
                methodName = m.getValue();
                className = m.getKey();
                Class<?> cls = Class.forName(className);
                Object ob = cls.newInstance();
                // Create functor. The parameter types of "compare" differ per metric:
                // set-based metrics take (Set, Set), token-list metrics take (List, List),
                // and every other metric takes (String, String).
                if (className.equals("org.simmetrics.metrics.CosineSimilarity") ||
                        className.equals("org.simmetrics.metrics.DiceSimilarity") ||
                        className.equals("org.simmetrics.metrics.JaccardSimilarity") ||
                        className.equals("org.simmetrics.metrics.OverlapCoefficient")) {
                    functor.put(new String(className.substring(className.lastIndexOf('.') +1, className.length()).toUpperCase()), new AbstractMap.SimpleEntry<Method,Object>(cls.getDeclaredMethod(methodName, new Class[]{Set.class,Set.class}),ob));
                } else if(className.equals("org.simmetrics.metrics.BlockDistance") ||
                        className.equals("org.simmetrics.metrics.EuclideanDistance") ||
                        className.equals("org.simmetrics.metrics.MatchingCoefficient") ||
                        className.equals("org.arrah.framework.dataquality.SimmetricsUtil$MongeElkan") ||
                        className.equals("org.simmetrics.metrics.SimonWhite")) {
                    functor.put(new String(className.substring(className.lastIndexOf('.') +1, className.length()).toUpperCase()), new AbstractMap.SimpleEntry<Method,Object>(cls.getDeclaredMethod(methodName, new Class[]{List.class,List.class}),ob));
                }
                else //String Type
                    functor.put(new String(className.substring(className.lastIndexOf('.') +1, className.length()).toUpperCase()), new AbstractMap.SimpleEntry<Method,Object>(cls.getDeclaredMethod(methodName, new Class[]{String.class,String.class}),ob));
            }
            catch(ClassNotFoundException cfe)
            {
                System.err.println("Class not found "+ cfe.toString());
                System.err.println("Please make sure simmetrics_core-3.0.0.jar in CLASSPATH");
                return;
            }
            catch(NoSuchMethodException nm)
            {
                System.err.println("Method not found "+ nm.toString());
                return;
            }
            catch(Exception exp)
            {
                System.err.println("Exception: "+ exp.toString());
                return;
            }
        }
    }

    /**
     * Compares every row of 'left' against the rows of 'right' using the fuzzy-match
     * configuration in meta1 and returns the matched pairs sorted by left-row index.
     * The work is split across THREADCOUNT threads; each thread owns a contiguous
     * slice of 'left', its own fuzzyCompare instance and its own result bucket
     * (merged after join), so no locking is needed.
     * NOTE(review): meta2 is currently unused -- confirm intended use.
     */
    public List<Result> compare(List<List<String>> left, List <List<String>> right, MultiColData meta1, MultiColData meta2)
    {
        //fuzzyCompare fz = new fuzzyCompare(meta1,false);
        //List<Result> matched = Collections.synchronizedList(new ArrayList<Result>());
        List<Result> matched = new ArrayList<Result>();
        boolean firstRecordMatch = meta1.isFirstRecordMatch();
        // Make it multi threaded for faster output
        final int THREADCOUNT = 10;
        //final int THREADCOUNT = 1; // for testing purpose only
        Thread[] tid = new Thread[THREADCOUNT];
        final int rowthread = left.size() / THREADCOUNT; // rows per thread; last thread takes the remainder
        List<Result>[] matchedThread = new ArrayList[THREADCOUNT]; // for each thread
        for (int i = 0; i < THREADCOUNT; i++) {
            final int tindex = i;
            matchedThread[tindex] = new ArrayList<Result>();
            tid[tindex] = new Thread(new Runnable() {
                public void run() {
                    List<List<String>> leftsub;
                    int lIndex = tindex * rowthread; int rIndex = 0; // leftIndex from where thread starts , right Index zero
                    boolean atleastOneRecordmatch;
                    fuzzyCompare fz = new fuzzyCompare(meta1,false); // to be hold by thread
                    // Slice of 'left' handled by this thread; the last thread also picks up
                    // the remainder rows left over by the integer division above.
                    if (tindex < THREADCOUNT - 1)
                        leftsub = left.subList(tindex * rowthread, tindex * rowthread + rowthread);
                    else
                        leftsub = left.subList(tindex * rowthread, left.size());
                    try {
                        int rowindex=0; // for instrumentation
                        int totalcount=leftsub.size();
                        for(List<String> l : leftsub)
                        {
                            if ((++rowindex) % 500 == 0 ) { // For progress report
                                System.out.println(rowindex + " of " + totalcount + " Rows Processed for Thread:" + tindex
                                        +" at:" + System.currentTimeMillis());
                                System.out.println("Length of Object:" + matchedThread[tindex].size());
                            }
                            atleastOneRecordmatch = false;
                            for(List<String> r : right)
                            {
                                // fuzzyCompare.compare returns 0 when the rows match
                                if(fz.compare(l, r) == 0)
                                {
                                    atleastOneRecordmatch = true;
                                    matchedThread[tindex].add(new Result(true,lIndex,rIndex,l,r,fz.simMatchVal));
                                    // One row matched.
                                    // If we are looking for only the first right row to match
                                    // the first left row, go to the next left row.
                                    if(firstRecordMatch)
                                        break;
                                }
                                rIndex++;
                            }
                            if(!atleastOneRecordmatch)
                            {
                                // not showing no matched value
                                //nomatch.add(new Result(false,lIndex,-1));
                            }
                            lIndex++;
                            rIndex = 0;
                        }
                    } catch (Exception e) {
                        System.out.println(" Thread Comparison Exception:"+e.getMessage());
                    }
                }
            });
            tid[i].start();
        }
        // Wait for all worker threads to finish before merging their buckets.
        for (int i = 0; i < THREADCOUNT; i++) {
            try {
                tid[i].join();
            } catch (Exception e) {
                System.out.println(" Thread Exception:"+e.getMessage());
            }
        }
        for (int i = 0; i < THREADCOUNT; i++) {
            matched.addAll(matchedThread[i]); // Put into big bucket
        }
        matched.sort(null); // natural sort on left index
        return matched;
    }
};
/*
 * Comparator for sorting rows by the concatenation of their configured match
 * columns. leftSide selects which table's column indexes to use.
 * Not used for time being.
 */
class recordSorter implements Comparator<List<String>>
{
    private MultiColData meta;
    boolean leftSide;

    public recordSorter(MultiColData metaA, boolean leftSide)
    {
        this.meta = metaA;
        this.leftSide = leftSide;
    }

    @Override
    public int compare(List<String> o1, List<String> o2) {
        StringBuilder lA = new StringBuilder();
        StringBuilder lB = new StringBuilder();
        for (ColData dd : meta.getA())
        {
            // Fix: the right-side branch previously duplicated the left-side branch
            // (both used getM_colIndexA()); use the B-side column index when sorting
            // the right-side table.
            int col = leftSide ? dd.getM_colIndexA() : dd.getM_colIndexB();
            lA.append(o1.get(col));
            lB.append(o2.get(col));
        }
        return lA.toString().compareTo(lB.toString());
    }
}
/*
 * Comparator for comparing rows.
 * compare() returns 0 when the two rows are considered a match under the
 * MultiColData configuration and -1 otherwise (1 on reflection errors); it is
 * a match predicate, not a total ordering.
 */
class fuzzyCompare implements Comparator<List<String>>
{
    private MultiColData metaA;
    private float simMatchVal = 0f; // this will hold the last matched value between 0.00f - 1.00f

    // NOTE(review): the 'bycell' flag is currently unused -- confirm whether it can be dropped.
    public fuzzyCompare(MultiColData metaA,boolean bycell)
    {
        this.metaA = metaA;
    }

    /** Returns the similarity value computed by the most recent compare() call. */
    public float getsimMatchVal() {
        return simMatchVal;
    }

    /**
     * Compares two rows column-by-column as configured in metaA. For each ColData:
     * a match index of 1.0 means exact case-insensitive string equality; anything
     * else invokes the configured similarity metric reflectively and requires the
     * score to reach the match index. In exactMatch (AND) mode every column must
     * pass; otherwise (OR) a single passing column suffices.
     */
    @Override
    public int compare(List<String> o1, List<String> o2)
    {
        boolean exactMatch = metaA.isExactMatch();
        boolean atLeastOneMatch = false;
        float matchprob = 1; // default exact match
        simMatchVal = 0f;
        try
        {
            Entry<Method,Object> en = null;
            for(ColData dd: metaA.getA() )
            {
                String algoName=dd.getM_algoName();
                // Helper metrics live in SimmetricsUtil; qualify their lookup key.
                if (algoName.compareToIgnoreCase("MongeElkan") == 0 ||
                        algoName.compareToIgnoreCase("Soundex") == 0 ||
                        algoName.compareToIgnoreCase("qGramDistance") == 0 ||
                        algoName.compareToIgnoreCase("DoubleMetaPhone") == 0 ||
                        algoName.compareToIgnoreCase("CustomNames") == 0 )
                    algoName = "SIMMETRICSUTIL$"+algoName;
                // Functor keys are stored upper-cased; normalize the lookup so
                // mixed-case algorithm names resolve instead of yielding a null
                // entry (and an NPE) below.
                en = functor.get(algoName.toUpperCase());
                if((matchprob = dd.getM_matchIndex() )!= 1.0)
                {
                    // Fuzzy comparison: invoke the metric with the argument shape it expects.
                    Object ob;
                    if (algoName.compareToIgnoreCase("CosineSimilarity") == 0 ||
                            algoName.compareToIgnoreCase("DiceSimilarity") == 0 ||
                            algoName.compareToIgnoreCase("JaccardSimilarity") == 0 ||
                            algoName.compareToIgnoreCase("OverlapCoefficient") == 0 ) {
                        ob = en.getKey().invoke(en.getValue(), StringCaseFormatUtil.toSetChar(o1.get(dd.getM_colIndexA())),
                                StringCaseFormatUtil.toSetChar(o2.get(dd.getM_colIndexB())));
                    } else if (algoName.compareToIgnoreCase("BlockDistance") == 0 ||
                            algoName.compareToIgnoreCase("EuclideanDistance") == 0 ||
                            algoName.compareToIgnoreCase("MatchingCoefficient") == 0 ||
                            algoName.compareToIgnoreCase("SimonWhite") == 0 ) {
                        ob = en.getKey().invoke(en.getValue(), StringCaseFormatUtil.toArrayListChar(o1.get(dd.getM_colIndexA())),
                                StringCaseFormatUtil.toArrayListChar(o2.get(dd.getM_colIndexB())));
                    } else if(algoName.compareToIgnoreCase("SimmetricsUtil$MongeElkan") == 0 ) {
                        ob = en.getKey().invoke(en.getValue(), StringCaseFormatUtil.toListString(o1.get(dd.getM_colIndexA())),
                                StringCaseFormatUtil.toListString(o2.get(dd.getM_colIndexB())));
                    } else
                        ob = en.getKey().invoke(en.getValue(), o1.get(dd.getM_colIndexA()),o2.get(dd.getM_colIndexB()));
                    simMatchVal = (Float)ob; // update the matched or unmatched value
                    if(simMatchVal < matchprob)
                    {
                        if(exactMatch) // exact match is AND operation
                        {
                            return -1;
                        }
                    }
                    else
                    {
                        atLeastOneMatch = true;
                        if(!exactMatch) // OR Condition
                            break;
                    }
                } // All character Match
                else
                {
                    if((o1.get(dd.getM_colIndexA()).compareToIgnoreCase(o2.get(dd.getM_colIndexB()))) == 0)
                    {
                        simMatchVal = 1.00f ; // exact match
                        atLeastOneMatch = true;
                        if(!exactMatch) // OR Condition any column matched
                            break;
                    }
                    else
                    {
                        simMatchVal = 0.00f ; // no match
                        if(exactMatch)
                        {
                            return -1;
                        }
                    }
                }
                en = null;
            }
        } catch (InvocationTargetException x) {
            x.printStackTrace();
            return 1;
        } catch (IllegalAccessException x) {
            x.printStackTrace();
            return 1;
        }
        if(exactMatch)
        {
            return 0;
        }
        else
        {
            // For exact Match we returned -1 on the first column mismatch.
            // For partial match we continued even after a mismatch, so check
            // whether at least one column matched.
            if(atLeastOneMatch)
            {
                return 0;
            }
            else
            {
                return -1;
            }
        }
    }
} // End of FuzzyCompare class
/*
 * simple Comparator for comparing two strings with a single configured
 * similarity algorithm; compare() returns the similarity score (0.00f - 1.00f,
 * 0f on reflection errors) and caches it for getsimMatchVal().
 */
public class fuzzyCompareStrings
{
    private float simMatchVal = 0f; // this will hold the last matched value between 0.00f - 1.00f
    private String algo ="";

    public fuzzyCompareStrings(String algo)
    {
        this.algo = algo;
    }

    /** Returns the similarity value computed by the most recent compare() call. */
    public float getsimMatchVal() {
        return simMatchVal;
    }

    public String getAlgo() {
        return algo;
    }

    public void setAlgo(String algo) {
        this.algo = algo;
    }

    /**
     * Compares two strings with the configured algorithm via the shared functor
     * table and returns the similarity score.
     */
    public float compare(String o1, String o2)
    {
        simMatchVal = 0f;
        try
        {
            Entry<Method,Object> en = null;
            String algoName=algo;
            // Helper metrics live in SimmetricsUtil; qualify their lookup key.
            if (algoName.compareToIgnoreCase("MongeElkan") == 0 ||
                    algoName.compareToIgnoreCase("Soundex") == 0 ||
                    algoName.compareToIgnoreCase("qGramDistance") == 0 ||
                    algoName.compareToIgnoreCase("DoubleMetaPhone") == 0 ||
                    algoName.compareToIgnoreCase("CustomNames") == 0 )
                algoName = "SIMMETRICSUTIL$"+algoName;
            // Functor keys are stored upper-cased; normalize the lookup so mixed-case
            // algorithm names resolve instead of yielding a null entry (NPE) below.
            en = functor.get(algoName.toUpperCase());
            // Invoke the metric with the argument shape it expects.
            Object ob;
            if (algoName.compareToIgnoreCase("CosineSimilarity") == 0 ||
                    algoName.compareToIgnoreCase("DiceSimilarity") == 0 ||
                    algoName.compareToIgnoreCase("JaccardSimilarity") == 0 ||
                    algoName.compareToIgnoreCase("OverlapCoefficient") == 0 ) {
                ob = en.getKey().invoke(en.getValue(), StringCaseFormatUtil.toSetChar(o1),
                        StringCaseFormatUtil.toSetChar(o2));
            } else if (algoName.compareToIgnoreCase("BlockDistance") == 0 ||
                    algoName.compareToIgnoreCase("EuclideanDistance") == 0 ||
                    algoName.compareToIgnoreCase("MatchingCoefficient") == 0 ||
                    algoName.compareToIgnoreCase("SimonWhite") == 0 ) {
                ob = en.getKey().invoke(en.getValue(), StringCaseFormatUtil.toArrayListChar(o1),
                        StringCaseFormatUtil.toArrayListChar(o2));
            } else if(algoName.compareToIgnoreCase("SimmetricsUtil$MongeElkan") == 0 ) {
                ob = en.getKey().invoke(en.getValue(), StringCaseFormatUtil.toListString(o1),
                        StringCaseFormatUtil.toListString(o2));
            } else
                ob = en.getKey().invoke(en.getValue(), o1,o2);
            en = null;
            return simMatchVal = (Float)ob; // update the matched or unmatched value
        } catch (InvocationTargetException x) {
            x.printStackTrace();
            return 0f;
        } catch (IllegalAccessException x) {
            x.printStackTrace();
            return 0f;
        }
    }
} // End of FuzzyCompare class
// For Unit testing
/**
 * Stand-alone smoke test: reads two whitespace-separated files (args[0], args[1])
 * and matches them on column 0 with Levenshtein at a 0.8 threshold. The body is
 * kept commented out; uncomment to run manually.
 */
public static void main(String ... args)
{ /*
List<List<String>> lRecordList = new ArrayList<List<String>>();
List<List<String>> rRecordList = new ArrayList<List<String>>();
try(Scanner lFile = new Scanner( new File(args[0])); Scanner rFile = new Scanner(new File(args[1]));)
{
while(lFile.hasNextLine())
{
lRecordList.add(Arrays.asList(lFile.nextLine().split("\\s+")));
}
while(rFile.hasNextLine())
{
rRecordList.add(Arrays.asList(rFile.nextLine().split("\\s+")));
}
RecordMatch diff = new RecordMatch();
RecordMatch.ColData col1 = diff.new ColData(0,1,(float)0.8, "LEVENSHTEIN" );
List<RecordMatch.ColData> diffCols = new ArrayList<RecordMatch.ColData>();
diffCols.add(col1);
MultiColData m1 = diff.new MultiColData();
m1.setA(diffCols);
m1.setAlgoName("LEVENSHTEIN");
RecordMatch.operator doDiff = diff.new operator();
doDiff.compare(lRecordList, rRecordList, m1, m1);
//uk.ac.shef.wit.simmetrics.similaritymetrics.
}
catch (Exception excp)
{
excp.printStackTrace();
} */
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.admin.cluster.snapshots.restore;
import org.elasticsearch.ElasticsearchGenerationException;
import org.elasticsearch.Version;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.action.support.master.MasterNodeOperationRequest;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import static org.elasticsearch.action.ValidateActions.addValidationError;
import static org.elasticsearch.common.Strings.hasLength;
import static org.elasticsearch.common.settings.ImmutableSettings.Builder.EMPTY_SETTINGS;
import static org.elasticsearch.common.settings.ImmutableSettings.readSettingsFromStream;
import static org.elasticsearch.common.settings.ImmutableSettings.writeSettingsToStream;
import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue;
/**
* Restore snapshot request
*/
public class RestoreSnapshotRequest extends MasterNodeOperationRequest<RestoreSnapshotRequest> {
private String snapshot;
private String repository;
private String[] indices = Strings.EMPTY_ARRAY;
private IndicesOptions indicesOptions = IndicesOptions.strictExpandOpen();
private String renamePattern;
private String renameReplacement;
private boolean waitForCompletion;
private boolean includeGlobalState = true;
private boolean partial = false;
private boolean includeAliases = true;
private Settings settings = EMPTY_SETTINGS;
private Settings indexSettings = EMPTY_SETTINGS;
private String[] ignoreIndexSettings = Strings.EMPTY_ARRAY;
// No-arg constructor; presumably required so the transport layer can create an
// instance before populating it from a stream -- TODO confirm against readFrom usage.
RestoreSnapshotRequest() {
}

/**
 * Constructs a new restore snapshot request with the provided repository and snapshot names.
 *
 * @param repository repository name
 * @param snapshot   snapshot name
 */
public RestoreSnapshotRequest(String repository, String snapshot) {
    this.snapshot = snapshot;
    this.repository = repository;
}
/**
 * Verifies that every required field of the request is present, accumulating one
 * validation error per missing field, in declaration order.
 *
 * @return the accumulated errors, or {@code null} when the request is valid
 */
@Override
public ActionRequestValidationException validate() {
    ActionRequestValidationException validationException = null;
    // Each pair is (field value, error message when the value is null).
    final Object[][] requiredFields = {
            { snapshot, "name is missing" },
            { repository, "repository is missing" },
            { indices, "indices are missing" },
            { indicesOptions, "indicesOptions is missing" },
            { settings, "settings are missing" },
            { indexSettings, "indexSettings are missing" },
            { ignoreIndexSettings, "ignoreIndexSettings are missing" },
    };
    for (Object[] check : requiredFields) {
        if (check[0] == null) {
            validationException = addValidationError((String) check[1], validationException);
        }
    }
    return validationException;
}
/**
 * Sets the name of the snapshot (fluent).
 *
 * @param snapshot snapshot name
 * @return this request
 */
public RestoreSnapshotRequest snapshot(String snapshot) {
    this.snapshot = snapshot;
    return this;
}

/**
 * Returns the name of the snapshot.
 *
 * @return snapshot name
 */
public String snapshot() {
    return this.snapshot;
}

/**
 * Sets the repository name (fluent).
 *
 * @param repository repository name
 * @return this request
 */
public RestoreSnapshotRequest repository(String repository) {
    this.repository = repository;
    return this;
}

/**
 * Returns the repository name.
 *
 * @return repository name
 */
public String repository() {
    return this.repository;
}
/**
 * Sets the list of indices that should be restored from snapshot
 * <p/>
 * The list of indices supports multi-index syntax. For example: "+test*" ,"-test42" will index all indices with
 * prefix "test" except index "test42". Aliases are not supported. An empty list or {"_all"} will restore all open
 * indices in the snapshot.
 *
 * @param indices list of indices
 * @return this request
 */
public RestoreSnapshotRequest indices(String... indices) {
    this.indices = indices;
    return this;
}

/**
 * Sets the list of indices that should be restored from snapshot
 * <p/>
 * The list of indices supports multi-index syntax. For example: "+test*" ,"-test42" will index all indices with
 * prefix "test" except index "test42". Aliases are not supported. An empty list or {"_all"} will restore all open
 * indices in the snapshot.
 *
 * @param indices list of indices
 * @return this request
 */
public RestoreSnapshotRequest indices(List<String> indices) {
    this.indices = indices.toArray(new String[indices.size()]);
    return this;
}

/**
 * Returns the list of indices that should be restored from snapshot
 *
 * @return list of indices to restore; an empty list (or {"_all"}) means all open indices in the snapshot
 */
public String[] indices() {
    return indices;
}
/**
 * Returns what type of requested indices to ignore and how to deal with wildcard expressions.
 * For example indices that don't exist.
 *
 * @return the desired behaviour regarding indices to ignore and wildcard indices expressions
 */
public IndicesOptions indicesOptions() {
    return indicesOptions;
}

/**
 * Specifies what type of requested indices to ignore and how to deal with wildcard expressions.
 * For example indices that don't exist.
 *
 * @param indicesOptions the desired behaviour regarding indices to ignore and wildcard indices expressions
 * @return this request
 */
public RestoreSnapshotRequest indicesOptions(IndicesOptions indicesOptions) {
    this.indicesOptions = indicesOptions;
    return this;
}
/**
 * Sets rename pattern that should be applied to restored indices.
 * <p/>
 * Indices that match the rename pattern will be renamed according to {@link #renameReplacement(String)}. The
 * rename pattern is applied according to the {@link java.util.regex.Matcher#appendReplacement(StringBuffer, String)}
 * The request will fail if two or more indices will be renamed into the same name.
 *
 * @param renamePattern rename pattern
 * @return this request
 */
public RestoreSnapshotRequest renamePattern(String renamePattern) {
    this.renamePattern = renamePattern;
    return this;
}

/**
 * Returns rename pattern
 *
 * @return rename pattern
 */
public String renamePattern() {
    return renamePattern;
}

/**
 * Sets rename replacement
 * <p/>
 * See {@link #renamePattern(String)} for more information.
 *
 * @param renameReplacement rename replacement
 * @return this request
 */
public RestoreSnapshotRequest renameReplacement(String renameReplacement) {
    this.renameReplacement = renameReplacement;
    return this;
}

/**
 * Returns rename replacement
 *
 * @return rename replacement
 */
public String renameReplacement() {
    return renameReplacement;
}
/**
 * If this parameter is set to true the operation will wait for completion of restore process before returning.
 *
 * @param waitForCompletion if true the operation will wait for completion
 * @return this request
 */
public RestoreSnapshotRequest waitForCompletion(boolean waitForCompletion) {
    this.waitForCompletion = waitForCompletion;
    return this;
}

/**
 * Returns wait for completion setting
 *
 * @return true if the operation will wait for completion
 */
public boolean waitForCompletion() {
    return waitForCompletion;
}

/**
 * Returns true if indices with shards that failed to snapshot should be partially restored.
 *
 * @return true if indices with shards that failed to snapshot should be partially restored
 */
public boolean partial() {
    return partial;
}

/**
 * Set to true to allow indices with shards that failed to snapshot to be partially restored.
 *
 * @param partial true if indices with shards that failed to snapshot should be partially restored
 * @return this request
 */
public RestoreSnapshotRequest partial(boolean partial) {
    this.partial = partial;
    return this;
}
/**
 * Sets repository-specific restore settings.
 * <p/>
 * See repository documentation for more information.
 *
 * @param settings repository-specific restore settings
 * @return this request
 */
public RestoreSnapshotRequest settings(Settings settings) {
    this.settings = settings;
    return this;
}

/**
 * Sets repository-specific restore settings.
 * <p/>
 * See repository documentation for more information.
 *
 * @param settings repository-specific restore settings builder
 * @return this request
 */
public RestoreSnapshotRequest settings(Settings.Builder settings) {
    this.settings = settings.build();
    return this;
}

/**
 * Sets repository-specific restore settings in JSON, YAML or properties format
 * <p/>
 * See repository documentation for more information.
 *
 * @param source repository-specific restore settings source
 * @return this request
 */
public RestoreSnapshotRequest settings(String source) {
    this.settings = ImmutableSettings.settingsBuilder().loadFromSource(source).build();
    return this;
}

/**
 * Sets repository-specific restore settings from a map, by serializing it to JSON
 * and delegating to {@link #settings(String)}.
 * <p/>
 * See repository documentation for more information.
 *
 * @param source repository-specific restore settings
 * @return this request
 */
public RestoreSnapshotRequest settings(Map<String, Object> source) {
    try {
        // NOTE(review): the builder is not explicitly closed/released here -- confirm
        // whether XContentBuilder requires it in this codebase.
        XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
        builder.map(source);
        settings(builder.string());
    } catch (IOException e) {
        throw new ElasticsearchGenerationException("Failed to generate [" + source + "]", e);
    }
    return this;
}

/**
 * Returns repository-specific restore settings
 *
 * @return restore settings
 */
public Settings settings() {
    return this.settings;
}
/**
 * Specifies the index settings (or setting groups) that must not be restored
 * from the snapshot.
 *
 * @param ignoreIndexSettings setting names or wildcards to skip
 * @return this request, for chaining
 */
public RestoreSnapshotRequest ignoreIndexSettings(String... ignoreIndexSettings) {
    this.ignoreIndexSettings = ignoreIndexSettings;
    return this;
}
/**
 * Specifies the index settings (or setting groups) that must not be restored
 * from the snapshot.
 *
 * @param ignoreIndexSettings setting names or wildcards to skip
 * @return this request, for chaining
 */
public RestoreSnapshotRequest ignoreIndexSettings(List<String> ignoreIndexSettings) {
    // toArray(new String[0]) is the idiomatic (and on modern JVMs faster)
    // form; the JVM allocates a correctly sized array internally.
    this.ignoreIndexSettings = ignoreIndexSettings.toArray(new String[0]);
    return this;
}
/**
 * Returns the index settings (or setting groups) that are excluded from the
 * restore.
 *
 * @return setting names or wildcards to skip
 */
public String[] ignoreIndexSettings() {
    return this.ignoreIndexSettings;
}
/**
 * Controls whether the restore also applies the snapshot's global cluster
 * state (persistent settings and index templates).
 *
 * @param includeGlobalState true to restore global cluster state
 * @return this request, for chaining
 */
public RestoreSnapshotRequest includeGlobalState(boolean includeGlobalState) {
    this.includeGlobalState = includeGlobalState;
    return this;
}
/**
 * Indicates whether the global cluster state will be restored from the
 * snapshot.
 *
 * @return true if global state is restored
 */
public boolean includeGlobalState() {
    return this.includeGlobalState;
}
/**
 * Controls whether index aliases are restored alongside the indices.
 *
 * @param includeAliases true to restore aliases from the snapshot
 * @return this request, for chaining
 */
public RestoreSnapshotRequest includeAliases(boolean includeAliases) {
    this.includeAliases = includeAliases;
    return this;
}
/**
 * Indicates whether index aliases will be restored from the snapshot.
 *
 * @return true if aliases are restored
 */
public boolean includeAliases() {
    return this.includeAliases;
}
/**
 * Specifies settings that are added to or overridden in every restored
 * index.
 *
 * @param settings index settings to apply
 * @return this request, for chaining
 */
public RestoreSnapshotRequest indexSettings(Settings settings) {
    this.indexSettings = settings;
    return this;
}
/**
 * Specifies settings that are added to or overridden in every restored
 * index; the builder is materialized immediately.
 *
 * @param settings builder holding the index settings to apply
 * @return this request, for chaining
 */
public RestoreSnapshotRequest indexSettings(Settings.Builder settings) {
    Settings built = settings.build();
    this.indexSettings = built;
    return this;
}
/**
 * Specifies settings that are added to or overridden in every restored
 * index, parsed from a raw source string (JSON, YAML or properties).
 *
 * @param source index settings as text
 * @return this request, for chaining
 */
public RestoreSnapshotRequest indexSettings(String source) {
    ImmutableSettings.Builder parsed = ImmutableSettings.settingsBuilder().loadFromSource(source);
    this.indexSettings = parsed.build();
    return this;
}
/**
 * Specifies settings that are added to or overridden in every restored
 * index, supplied as a map and serialized to JSON internally.
 *
 * @param source index settings to apply
 * @return this request, for chaining
 * @throws ElasticsearchGenerationException if the map cannot be serialized
 */
public RestoreSnapshotRequest indexSettings(Map<String, Object> source) {
    try {
        XContentBuilder jsonBuilder = XContentFactory.contentBuilder(XContentType.JSON).map(source);
        indexSettings(jsonBuilder.string());
        return this;
    } catch (IOException e) {
        throw new ElasticsearchGenerationException("Failed to generate [" + source + "]", e);
    }
}
/**
 * Returns the settings that will be added to or overridden in every restored
 * index.
 *
 * @return index settings to apply
 */
public Settings indexSettings() {
    return indexSettings;
}
/**
 * Parses a restore definition from an {@link XContentBuilder} by delegating
 * to {@link #source(BytesReference)}.
 *
 * @param source restore definition
 * @return this request, for chaining
 * @throws IllegalArgumentException if the builder content cannot be read
 */
public RestoreSnapshotRequest source(XContentBuilder source) {
    BytesReference bytes;
    try {
        bytes = source.bytes();
    } catch (Exception e) {
        throw new IllegalArgumentException("Failed to build json for repository request", e);
    }
    return source(bytes);
}
/**
 * Parses a restore definition from a map of options (typically the parsed
 * request body). Recognized keys: {@code indices}, {@code partial},
 * {@code settings}, {@code include_global_state}, {@code include_aliases},
 * {@code rename_pattern}, {@code rename_replacement}, {@code index_settings}
 * and {@code ignore_index_settings}; any other key is rejected. Indices
 * options (e.g. {@code ignore_unavailable}) are extracted from the same map
 * at the end.
 *
 * @param source restore definition
 * @return this request
 * @throws IllegalArgumentException on an unknown or malformed entry
 */
public RestoreSnapshotRequest source(Map source) {
    for (Map.Entry<String, Object> entry : ((Map<String, Object>) source).entrySet()) {
        String name = entry.getKey();
        if (name.equals("indices")) {
            if (entry.getValue() instanceof String) {
                indices(Strings.splitStringByCommaToArray((String) entry.getValue()));
            } else if (entry.getValue() instanceof List) {
                // Fixed: was `instanceof ArrayList`, which rejected other List
                // implementations (e.g. immutable lists) with a "malformed"
                // error. Matches the `ignore_index_settings` branch below.
                indices((List<String>) entry.getValue());
            } else {
                throw new IllegalArgumentException("malformed indices section, should be an array of strings");
            }
        } else if (name.equals("partial")) {
            partial(nodeBooleanValue(entry.getValue()));
        } else if (name.equals("settings")) {
            if (!(entry.getValue() instanceof Map)) {
                throw new IllegalArgumentException("malformed settings section");
            }
            settings((Map<String, Object>) entry.getValue());
        } else if (name.equals("include_global_state")) {
            includeGlobalState = nodeBooleanValue(entry.getValue());
        } else if (name.equals("include_aliases")) {
            includeAliases = nodeBooleanValue(entry.getValue());
        } else if (name.equals("rename_pattern")) {
            if (entry.getValue() instanceof String) {
                renamePattern((String) entry.getValue());
            } else {
                throw new IllegalArgumentException("malformed rename_pattern");
            }
        } else if (name.equals("rename_replacement")) {
            if (entry.getValue() instanceof String) {
                renameReplacement((String) entry.getValue());
            } else {
                throw new IllegalArgumentException("malformed rename_replacement");
            }
        } else if (name.equals("index_settings")) {
            if (!(entry.getValue() instanceof Map)) {
                throw new IllegalArgumentException("malformed index_settings section");
            }
            indexSettings((Map<String, Object>) entry.getValue());
        } else if (name.equals("ignore_index_settings")) {
            if (entry.getValue() instanceof String) {
                ignoreIndexSettings(Strings.splitStringByCommaToArray((String) entry.getValue()));
            } else if (entry.getValue() instanceof List) {
                ignoreIndexSettings((List<String>) entry.getValue());
            } else {
                throw new IllegalArgumentException("malformed ignore_index_settings section, should be an array of strings");
            }
        } else {
            throw new IllegalArgumentException("Unknown parameter " + name);
        }
    }
    // Indices options (ignore_unavailable etc.) live in the same map.
    indicesOptions(IndicesOptions.fromMap((Map<String, Object>) source, IndicesOptions.lenientExpandOpen()));
    return this;
}
/**
 * Parses a restore definition from a raw source string.
 * <p/>
 * JSON, YAML and properties formats are supported; a blank source is a
 * no-op.
 *
 * @param source restore definition
 * @return this request, for chaining
 * @throws IllegalArgumentException if the source cannot be parsed
 */
public RestoreSnapshotRequest source(String source) {
    if (!hasLength(source)) {
        return this;
    }
    try {
        return source(XContentFactory.xContent(source).createParser(source).mapOrderedAndClose());
    } catch (Exception e) {
        throw new IllegalArgumentException("failed to parse repository source [" + source + "]", e);
    }
}
/**
 * Parses a restore definition from a byte array, delegating to
 * {@link #source(byte[], int, int)} over the full array.
 * <p/>
 * JSON, YAML and properties formats are supported.
 *
 * @param source restore definition
 * @return this request, for chaining
 */
public RestoreSnapshotRequest source(byte[] source) {
    final int length = source.length;
    return source(source, 0, length);
}
/**
 * Parses a restore definition from a slice of a byte array.
 * <p/>
 * JSON, YAML and properties formats are supported; a zero-length slice is a
 * no-op.
 *
 * @param source restore definition bytes
 * @param offset start offset into {@code source}
 * @param length number of bytes to read
 * @return this request, for chaining
 * @throws IllegalArgumentException if the slice cannot be parsed
 */
public RestoreSnapshotRequest source(byte[] source, int offset, int length) {
    if (length <= 0) {
        return this;
    }
    try {
        return source(XContentFactory.xContent(source, offset, length).createParser(source, offset, length).mapOrderedAndClose());
    } catch (IOException e) {
        throw new IllegalArgumentException("failed to parse repository source", e);
    }
}
/**
 * Parses a restore definition from a {@link BytesReference}.
 * <p/>
 * JSON, YAML and properties formats are supported.
 *
 * @param source restore definition
 * @return this request
 * @throws IllegalArgumentException if the source cannot be parsed
 */
public RestoreSnapshotRequest source(BytesReference source) {
    try {
        return source(XContentFactory.xContent(source).createParser(source).mapOrderedAndClose());
    } catch (IOException e) {
        // Message fixed: this overload parses a restore/repository source,
        // not an index template; now consistent with the sibling overloads.
        throw new IllegalArgumentException("failed to parse repository source", e);
    }
}
@Override
public void readFrom(StreamInput in) throws IOException {
// Deserializes the request from the transport stream. Field order MUST
// mirror writeTo(StreamOutput) exactly — the wire format is positional.
super.readFrom(in);
snapshot = in.readString();
repository = in.readString();
indices = in.readStringArray();
indicesOptions = IndicesOptions.readIndicesOptions(in);
renamePattern = in.readOptionalString();
renameReplacement = in.readOptionalString();
waitForCompletion = in.readBoolean();
includeGlobalState = in.readBoolean();
partial = in.readBoolean();
includeAliases = in.readBoolean();
settings = readSettingsFromStream(in);
// Fields added in 1.5.0 are only present when the sender is at least that
// version; older senders simply omit them.
if (in.getVersion().onOrAfter(Version.V_1_5_0)) {
indexSettings = readSettingsFromStream(in);
ignoreIndexSettings = in.readStringArray();
}
}
@Override
public void writeTo(StreamOutput out) throws IOException {
// Serializes the request to the transport stream. Field order MUST mirror
// readFrom(StreamInput) exactly — the wire format is positional.
super.writeTo(out);
out.writeString(snapshot);
out.writeString(repository);
out.writeStringArray(indices);
indicesOptions.writeIndicesOptions(out);
out.writeOptionalString(renamePattern);
out.writeOptionalString(renameReplacement);
out.writeBoolean(waitForCompletion);
out.writeBoolean(includeGlobalState);
out.writeBoolean(partial);
out.writeBoolean(includeAliases);
writeSettingsToStream(settings, out);
// 1.5.0+ fields are only written to peers that can read them; older
// receivers would otherwise fail to parse the stream.
if (out.getVersion().onOrAfter(Version.V_1_5_0)) {
writeSettingsToStream(indexSettings, out);
out.writeStringArray(ignoreIndexSettings);
}
}
}
| |
/*
* Copyright (C) 2015 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.cloud.dataflow.sdk.runners.worker;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.doCallRealMethod;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions;
import static org.mockito.Mockito.when;
import com.google.api.client.http.LowLevelHttpResponse;
import com.google.api.client.json.Json;
import com.google.api.client.testing.http.MockHttpTransport;
import com.google.api.client.testing.http.MockLowLevelHttpRequest;
import com.google.api.client.testing.http.MockLowLevelHttpResponse;
import com.google.api.client.util.Sleeper;
import com.google.api.services.dataflow.Dataflow;
import com.google.api.services.dataflow.model.LeaseWorkItemRequest;
import com.google.api.services.dataflow.model.LeaseWorkItemResponse;
import com.google.api.services.dataflow.model.MapTask;
import com.google.api.services.dataflow.model.SeqMapTask;
import com.google.api.services.dataflow.model.WorkItem;
import com.google.cloud.dataflow.sdk.options.DataflowWorkerHarnessOptions;
import com.google.cloud.dataflow.sdk.options.PipelineOptionsFactory;
import com.google.cloud.dataflow.sdk.runners.worker.logging.DataflowWorkerLoggingMDC;
import com.google.cloud.dataflow.sdk.testing.FastNanoClockAndSleeper;
import com.google.cloud.dataflow.sdk.testing.RestoreDataflowLoggingMDC;
import com.google.cloud.dataflow.sdk.testing.RestoreSystemProperties;
import com.google.cloud.dataflow.sdk.util.IntervalBoundedExponentialBackOff;
import com.google.cloud.dataflow.sdk.util.TestCredential;
import com.google.cloud.dataflow.sdk.util.Transport;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.rules.TestRule;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import java.io.IOException;
import java.util.concurrent.atomic.AtomicInteger;
/**
 * Unit tests for {@link DataflowWorkerHarness}.
 *
 * <p>The Dataflow service is replaced by a mocked HTTP transport; each test
 * stubs {@code request.execute()} with a canned JSON lease response and then
 * inspects the request body and the logging MDC. Stubbing and verification
 * are order-sensitive, so the mock setup in {@link #setUp()} must run before
 * any per-test stubbing.
 */
@RunWith(JUnit4.class)
public class DataflowWorkerHarnessTest {
@Rule public TestRule restoreSystemProperties = new RestoreSystemProperties();
@Rule public TestRule restoreLogging = new RestoreDataflowLoggingMDC();
@Rule public ExpectedException expectedException = ExpectedException.none();
@Rule public FastNanoClockAndSleeper fastNanoClockAndSleeper = new FastNanoClockAndSleeper();
@Mock private MockHttpTransport transport;
@Mock private MockLowLevelHttpRequest request;
@Mock private DataflowWorker mockDataflowWorker;
private DataflowWorkerHarnessOptions pipelineOptions;
private Dataflow service;
private static final String PROJECT_ID = "TEST_PROJECT_ID";
private static final String JOB_ID = "TEST_JOB_ID";
private static final String WORKER_ID = "TEST_WORKER_ID";
// Wires the mocked transport into a real Dataflow client and builds worker
// options that identify this fake project/job/worker.
@Before
public void setUp() throws Exception {
MockitoAnnotations.initMocks(this);
when(transport.buildRequest(anyString(), anyString())).thenReturn(request);
doCallRealMethod().when(request).getContentAsString();
service = new Dataflow(transport, Transport.getJsonFactory(), null);
pipelineOptions = PipelineOptionsFactory.as(DataflowWorkerHarnessOptions.class);
pipelineOptions.setProject(PROJECT_ID);
pipelineOptions.setJobId(JOB_ID);
pipelineOptions.setWorkerId(WORKER_ID);
pipelineOptions.setGcpCredential(new TestCredential());
}
// Drives processWork() with a worker that always fails and a fake sleeper;
// verifies the backoff keeps retrying (>= 1000 times) and never sleeps past
// the randomized maximum interval.
@Test
public void testThatWeRetryIfTaskExecutionFailAgainAndAgain() throws Exception {
final int numWorkers = Math.max(Runtime.getRuntime().availableProcessors(), 1);
when(mockDataflowWorker.getAndPerformWork()).thenReturn(false);
final AtomicInteger sleepCount = new AtomicInteger(0);
final AtomicInteger illegalIntervalCount = new AtomicInteger(0);
DataflowWorkerHarness.processWork(
pipelineOptions,
mockDataflowWorker,
new Sleeper() {
@Override
public void sleep(long millis) throws InterruptedException {
if ((millis
> DataflowWorkerHarness.BACKOFF_MAX_INTERVAL_MILLIS
* (1 + IntervalBoundedExponentialBackOff.DEFAULT_RANDOMIZATION_FACTOR))) {
// We count the times the sleep interval is greater than the backoff max interval with
// randomization to make sure it does not happen.
illegalIntervalCount.incrementAndGet();
}
if (sleepCount.incrementAndGet() > 1000) {
throw new InterruptedException("Stopping the retry loop.");
}
}
});
// Test that the backoff mechanism will allow at least 1000 failures.
verify(mockDataflowWorker, times(numWorkers + 1000)).getAndPerformWork();
verifyNoMoreInteractions(mockDataflowWorker);
assertEquals(0, illegalIntervalCount.get());
}
// Creating the harness should also seed the logging MDC with job/worker ids.
@Test
public void testCreationOfWorkerHarness() throws Exception {
assertNotNull(DataflowWorkerHarness.create(pipelineOptions));
assertEquals(JOB_ID, DataflowWorkerLoggingMDC.getJobId());
assertEquals(WORKER_ID, DataflowWorkerLoggingMDC.getWorkerId());
}
// Happy path: a lease response with one work item is returned to the caller,
// and the lease request advertises the expected capabilities and work types.
@Test
public void testCloudServiceCall() throws Exception {
WorkItem workItem = createWorkItem(PROJECT_ID, JOB_ID);
when(request.execute()).thenReturn(generateMockResponse(workItem));
DataflowWorker.WorkUnitClient client =
new DataflowWorkerHarness.DataflowWorkUnitClient(service, pipelineOptions);
assertEquals(workItem, client.getWorkItem());
LeaseWorkItemRequest actualRequest = Transport.getJsonFactory().fromString(
request.getContentAsString(), LeaseWorkItemRequest.class);
assertEquals(WORKER_ID, actualRequest.getWorkerId());
assertEquals(ImmutableList.<String>of(WORKER_ID, "remote_source", "custom_source"),
actualRequest.getWorkerCapabilities());
assertEquals(ImmutableList.<String>of("map_task", "seq_map_task", "remote_source_task"),
actualRequest.getWorkItemTypes());
// "1234" is the work id set by createWorkItem(); it must be propagated to
// the logging MDC.
assertEquals("1234", DataflowWorkerLoggingMDC.getWorkId());
}
// A leased MapTask should propagate its stage name into the logging MDC.
@Test
public void testCloudServiceCallMapTaskStagePropagation() throws Exception {
DataflowWorker.WorkUnitClient client =
new DataflowWorkerHarness.DataflowWorkUnitClient(service, pipelineOptions);
// Publish and acquire a map task work item, and verify we're now processing that stage.
final String stageName = "test_stage_name";
MapTask mapTask = new MapTask();
mapTask.setStageName(stageName);
WorkItem workItem = createWorkItem(PROJECT_ID, JOB_ID);
workItem.setMapTask(mapTask);
when(request.execute()).thenReturn(generateMockResponse(workItem));
assertEquals(workItem, client.getWorkItem());
assertEquals(stageName, DataflowWorkerLoggingMDC.getStageName());
}
// A leased SeqMapTask should likewise propagate its stage name into the MDC.
@Test
public void testCloudServiceCallSeqMapTaskStagePropagation() throws Exception {
DataflowWorker.WorkUnitClient client =
new DataflowWorkerHarness.DataflowWorkUnitClient(service, pipelineOptions);
// Publish and acquire a seq map task work item, and verify we're now processing that stage.
final String stageName = "test_stage_name";
SeqMapTask seqMapTask = new SeqMapTask();
seqMapTask.setStageName(stageName);
WorkItem workItem = createWorkItem(PROJECT_ID, JOB_ID);
workItem.setSeqMapTask(seqMapTask);
when(request.execute()).thenReturn(generateMockResponse(workItem));
assertEquals(workItem, client.getWorkItem());
assertEquals(stageName, DataflowWorkerLoggingMDC.getStageName());
}
// A work item without an id means "no work": the client must return null.
@Test
public void testCloudServiceCallNoWorkId() throws Exception {
// If there's no work the service should return an empty work item.
WorkItem workItem = new WorkItem();
when(request.execute()).thenReturn(generateMockResponse(workItem));
DataflowWorker.WorkUnitClient client =
new DataflowWorkerHarness.DataflowWorkUnitClient(service, pipelineOptions);
assertNull(client.getWorkItem());
LeaseWorkItemRequest actualRequest = Transport.getJsonFactory().fromString(
request.getContentAsString(), LeaseWorkItemRequest.class);
assertEquals(WORKER_ID, actualRequest.getWorkerId());
assertEquals(ImmutableList.<String>of(WORKER_ID, "remote_source", "custom_source"),
actualRequest.getWorkerCapabilities());
assertEquals(ImmutableList.<String>of("map_task", "seq_map_task", "remote_source_task"),
actualRequest.getWorkItemTypes());
}
// An empty lease response (no work items at all) must also yield null.
@Test
public void testCloudServiceCallNoWorkItem() throws Exception {
when(request.execute()).thenReturn(generateMockResponse());
DataflowWorker.WorkUnitClient client =
new DataflowWorkerHarness.DataflowWorkUnitClient(service, pipelineOptions);
assertNull(client.getWorkItem());
LeaseWorkItemRequest actualRequest = Transport.getJsonFactory().fromString(
request.getContentAsString(), LeaseWorkItemRequest.class);
assertEquals(WORKER_ID, actualRequest.getWorkerId());
assertEquals(ImmutableList.<String>of(WORKER_ID, "remote_source", "custom_source"),
actualRequest.getWorkerCapabilities());
assertEquals(ImmutableList.<String>of("map_task", "seq_map_task", "remote_source_task"),
actualRequest.getWorkItemTypes());
}
// The client requests a single work item; more than one in the response is a
// protocol violation and must surface as an IOException.
@Test
public void testCloudServiceCallMultipleWorkItems() throws Exception {
expectedException.expect(IOException.class);
expectedException.expectMessage(
"This version of the SDK expects no more than one work item from the service");
WorkItem workItem1 = createWorkItem(PROJECT_ID, JOB_ID);
WorkItem workItem2 = createWorkItem(PROJECT_ID, JOB_ID);
when(request.execute()).thenReturn(generateMockResponse(workItem1, workItem2));
DataflowWorker.WorkUnitClient client =
new DataflowWorkerHarness.DataflowWorkUnitClient(service, pipelineOptions);
client.getWorkItem();
}
// Builds a canned JSON lease response containing the given work items.
private LowLevelHttpResponse generateMockResponse(WorkItem ... workItems) throws Exception {
MockLowLevelHttpResponse response = new MockLowLevelHttpResponse();
response.setContentType(Json.MEDIA_TYPE);
LeaseWorkItemResponse lease = new LeaseWorkItemResponse();
lease.setWorkItems(Lists.newArrayList(workItems));
// N.B. Setting the factory is necessary in order to get valid JSON.
lease.setFactory(Transport.getJsonFactory());
response.setContent(lease.toPrettyString());
return response;
}
// Builds a minimal leasable work item for the given project/job.
private WorkItem createWorkItem(String projectId, String jobId) {
WorkItem workItem = new WorkItem();
workItem.setFactory(Transport.getJsonFactory());
workItem.setProjectId(projectId);
workItem.setJobId(jobId);
// We need to set a work id because otherwise the client will treat the response as
// indicating no work is available.
workItem.setId(1234L);
return workItem;
}
}
| |
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.camunda.bpm.engine.impl.persistence.entity;
import java.io.Serializable;
import java.util.Date;
import java.util.HashMap;
import org.camunda.bpm.engine.impl.context.Context;
import org.camunda.bpm.engine.impl.db.DbEntity;
import org.camunda.bpm.engine.impl.db.HasDbRevision;
import org.camunda.bpm.engine.impl.event.EventHandler;
import org.camunda.bpm.engine.impl.interceptor.CommandContext;
import org.camunda.bpm.engine.impl.jobexecutor.EventSubscriptionJobDeclaration;
import org.camunda.bpm.engine.impl.pvm.process.ActivityImpl;
import org.camunda.bpm.engine.impl.pvm.process.ProcessDefinitionImpl;
import org.camunda.bpm.engine.impl.util.ClockUtil;
import org.camunda.bpm.engine.runtime.EventSubscription;
import static org.camunda.bpm.engine.impl.util.EnsureUtil.ensureNotNull;
/**
 * Persistent subscription of an execution (or, for start events, a process
 * definition) to a named event; concrete subclasses determine the event type
 * (see {@link #eventType}). When the event arrives it is either handled
 * synchronously via the configured {@link EventHandler} or scheduled as an
 * asynchronous job.
 *
 * @author Daniel Meyer
 */
public abstract class EventSubscriptionEntity implements EventSubscription, DbEntity, HasDbRevision, Serializable {
private static final long serialVersionUID = 1L;
// persistent state ///////////////////////////
protected String id;
protected int revision = 1;
protected String eventType;
protected String eventName;
protected String executionId;
protected String processInstanceId;
protected String activityId;
// For start-event subscriptions this holds the process definition id (see
// getProcessDefinition()); other uses are handler-specific.
protected String configuration;
protected Date created;
protected String tenantId;
// runtime state (lazily resolved caches, not persisted) /////////////////////
protected ExecutionEntity execution;
protected ActivityImpl activity;
protected EventSubscriptionJobDeclaration jobDeclaration;
/////////////////////////////////////////////
// Creation timestamp is taken from the engine clock, not the system clock.
public EventSubscriptionEntity() {
this.created = ClockUtil.getCurrentTime();
}
// Binds the subscription to an execution, inheriting its current activity,
// process instance id and tenant id.
public EventSubscriptionEntity(ExecutionEntity executionEntity) {
this();
setExecution(executionEntity);
setActivity(execution.getActivity());
this.processInstanceId = executionEntity.getProcessInstanceId();
this.tenantId = executionEntity.getTenantId();
}
// processing /////////////////////////////
// Dispatches an incoming event either synchronously or via an async job.
public void eventReceived(Object payload, boolean processASync) {
if(processASync) {
scheduleEventAsync(payload);
} else {
processEventSync(payload);
}
}
// Looks up the handler registered for this subscription's event type and
// invokes it in the current command context; fails if none is registered.
protected void processEventSync(Object payload) {
EventHandler eventHandler = Context.getProcessEngineConfiguration().getEventHandler(eventType);
ensureNotNull("Could not find eventhandler for event of type '" + eventType + "'", "eventHandler", eventHandler);
eventHandler.handleEvent(this, payload, Context.getCommandContext());
}
// Creates and submits a job for asynchronous handling; note the payload is
// NOT passed to the job here — only the subscription itself is.
protected void scheduleEventAsync(Object payload) {
EventSubscriptionJobDeclaration asyncDeclaration = getJobDeclaration();
if (asyncDeclaration == null) {
// fallback to sync if we couldn't find a job declaration
processEventSync(payload);
}
else {
MessageEntity message = asyncDeclaration.createJobInstance(this);
CommandContext commandContext = Context.getCommandContext();
commandContext.getJobManager().send(message);
}
}
// persistence behavior /////////////////////
// Deletes the subscription and detaches it from its execution.
public void delete() {
Context.getCommandContext()
.getEventSubscriptionManager()
.deleteEventSubscription(this);
removeFromExecution();
}
// Inserts the subscription and attaches it to its execution.
public void insert() {
Context.getCommandContext()
.getEventSubscriptionManager()
.insert(this);
addToExecution();
}
// referential integrity -> ExecutionEntity ////////////////////////////////////
protected void addToExecution() {
// add reference in execution
ExecutionEntity execution = getExecution();
if(execution != null) {
execution.addEventSubscription(this);
}
}
protected void removeFromExecution() {
// remove reference in execution
ExecutionEntity execution = getExecution();
if(execution != null) {
execution.removeEventSubscription(this);
}
}
// Snapshot of the mutable fields used for optimistic-concurrency dirty
// checking; fields not listed here (e.g. created, tenantId) are treated as
// immutable after insert.
public Object getPersistentState() {
HashMap<String, Object> persistentState = new HashMap<String, Object>();
persistentState.put("executionId", executionId);
persistentState.put("configuration", configuration);
persistentState.put("activityId", activityId);
persistentState.put("eventName", eventName);
return persistentState;
}
// getters & setters ////////////////////////////
// Lazily resolves the execution from the database by id.
public ExecutionEntity getExecution() {
if(execution == null && executionId != null) {
execution = Context.getCommandContext()
.getExecutionManager()
.findExecutionById(executionId);
}
return execution;
}
// Setting a non-null execution registers this subscription on it; setting
// null detaches and clears the reference.
public void setExecution(ExecutionEntity execution) {
if(execution != null) {
this.execution = execution;
this.executionId = execution.getId();
addToExecution();
}
else {
removeFromExecution();
this.executionId = null;
this.execution = null;
}
}
// Lazily resolves the activity from the owning process definition.
public ActivityImpl getActivity() {
if(activity == null && activityId != null) {
ProcessDefinitionImpl processDefinition = getProcessDefinition();
activity = processDefinition.findActivity(activityId);
}
return activity;
}
public ProcessDefinitionEntity getProcessDefinition() {
if (executionId != null) {
ExecutionEntity execution = getExecution();
return (ProcessDefinitionEntity) execution.getProcessDefinition();
}
else {
// this assumes that start event subscriptions have the process definition id
// as their configuration (which holds for message and signal start events)
String processDefinitionId = getConfiguration();
return Context.getProcessEngineConfiguration()
.getDeploymentCache()
.findDeployedProcessDefinitionById(processDefinitionId);
}
}
public void setActivity(ActivityImpl activity) {
this.activity = activity;
if(activity != null) {
this.activityId = activity.getId();
}
}
// Lazily resolves the async job declaration for this subscription; null if
// the subscription is handled synchronously only.
public EventSubscriptionJobDeclaration getJobDeclaration() {
if (jobDeclaration == null) {
jobDeclaration = EventSubscriptionJobDeclaration.findDeclarationForSubscription(this);
}
return jobDeclaration;
}
public String getId() {
return id;
}
public void setId(String id) {
this.id = id;
}
public int getRevision() {
return revision;
}
public void setRevision(int revision) {
this.revision = revision;
}
public int getRevisionNext() {
return revision +1;
}
public String getEventType() {
return eventType;
}
public void setEventType(String eventType) {
this.eventType = eventType;
}
public String getEventName() {
return eventName;
}
public void setEventName(String eventName) {
this.eventName = eventName;
}
public String getExecutionId() {
return executionId;
}
public void setExecutionId(String executionId) {
this.executionId = executionId;
}
public String getProcessInstanceId() {
return processInstanceId;
}
public void setProcessInstanceId(String processInstanceId) {
this.processInstanceId = processInstanceId;
}
public String getConfiguration() {
return configuration;
}
public void setConfiguration(String configuration) {
this.configuration = configuration;
}
public String getActivityId() {
return activityId;
}
// Changing the activity id invalidates the cached activity so it is
// re-resolved on the next getActivity() call.
public void setActivityId(String activityId) {
this.activityId = activityId;
this.activity = null;
}
public Date getCreated() {
return created;
}
public void setCreated(Date created) {
this.created = created;
}
public String getTenantId() {
return tenantId;
}
public void setTenantId(String tenantId) {
this.tenantId = tenantId;
}
// Identity is based solely on the database id.
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((id == null) ? 0 : id.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
EventSubscriptionEntity other = (EventSubscriptionEntity) obj;
if (id == null) {
if (other.id != null)
return false;
} else if (!id.equals(other.id))
return false;
return true;
}
@Override
public String toString() {
return this.getClass().getSimpleName()
+ "[id=" + id
+ ", eventType=" + eventType
+ ", eventName=" + eventName
+ ", executionId=" + executionId
+ ", processInstanceId=" + processInstanceId
+ ", activityId=" + activityId
+ ", tenantId=" + tenantId
+ ", configuration=" + configuration
+ ", revision=" + revision
+ ", created=" + created
+ "]";
}
}
| |
package org.ethereum.db;
import org.ethereum.core.AccountState;
import org.ethereum.core.Block;
import org.ethereum.facade.Repository;
import org.ethereum.vm.DataWord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.spongycastle.util.encoders.Hex;
import java.math.BigInteger;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import static org.ethereum.crypto.SHA3Helper.sha3;
import static org.ethereum.util.ByteUtil.wrap;
/**
* @author Roman Mandeleil
* @since 17.11.2014
*/
public class RepositoryDummy extends RepositoryImpl {
private static final Logger logger = LoggerFactory.getLogger("repository");
private Map<ByteArrayWrapper, AccountState> worldState = new HashMap<>();
private Map<ByteArrayWrapper, ContractDetails> detailsDB = new HashMap<>();
public RepositoryDummy() {
super(false);
}
@Override
public void reset() {
worldState.clear();
detailsDB.clear();
}
@Override
public void close() {
throw new UnsupportedOperationException();
}
@Override
public boolean isClosed() {
throw new UnsupportedOperationException();
}
@Override
public void updateBatch(HashMap<ByteArrayWrapper, AccountState> stateCache, HashMap<ByteArrayWrapper,
ContractDetails> detailsCache) {
for (ByteArrayWrapper hash : stateCache.keySet()) {
AccountState accountState = stateCache.get(hash);
ContractDetails contractDetails = detailsCache.get(hash);
if (accountState.isDeleted()) {
worldState.remove(hash);
detailsDB.remove(hash);
logger.debug("delete: [{}]",
Hex.toHexString(hash.getData()));
} else {
if (accountState.isDirty() || contractDetails.isDirty()) {
detailsDB.put(hash, contractDetails);
accountState.setStateRoot(contractDetails.getStorageHash());
accountState.setCodeHash(sha3(contractDetails.getCode()));
worldState.put(hash, accountState);
if (logger.isDebugEnabled()) {
logger.debug("update: [{}],nonce: [{}] balance: [{}] \n [{}]",
Hex.toHexString(hash.getData()),
accountState.getNonce(),
accountState.getBalance(),
contractDetails.getStorage());
}
}
}
}
stateCache.clear();
detailsCache.clear();
}
@Override
public void flush() {
throw new UnsupportedOperationException();
}
@Override
public void rollback() {
throw new UnsupportedOperationException();
}
@Override
public void commit() {
throw new UnsupportedOperationException();
}
@Override
public void syncToRoot(byte[] root) {
throw new UnsupportedOperationException();
}
@Override
public Repository startTracking() {
return new RepositoryTrack(this);
}
@Override
public void dumpState(Block block, long gasUsed, int txNumber, byte[] txHash) {
}
@Override
public Set<byte[]> getAccountsKeys() {
return null;
}
public Set<ByteArrayWrapper> getFullAddressSet() {
return worldState.keySet();
}
@Override
public BigInteger addBalance(byte[] addr, BigInteger value) {
AccountState account = getAccountState(addr);
if (account == null)
account = createAccount(addr);
BigInteger result = account.addToBalance(value);
worldState.put(wrap(addr), account);
return result;
}
@Override
public BigInteger getBalance(byte[] addr) {
AccountState account = getAccountState(addr);
if (account == null)
return BigInteger.ZERO;
return account.getBalance();
}
@Override
public DataWord getStorageValue(byte[] addr, DataWord key) {
ContractDetails details = getContractDetails(addr);
if (details == null)
return null;
return details.get(key);
}
@Override
public void addStorageRow(byte[] addr, DataWord key, DataWord value) {
ContractDetails details = getContractDetails(addr);
if (details == null) {
createAccount(addr);
details = getContractDetails(addr);
}
details.put(key, value);
detailsDB.put(wrap(addr), details);
}
@Override
public byte[] getCode(byte[] addr) {
ContractDetails details = getContractDetails(addr);
if (details == null)
return null;
return details.getCode();
}
@Override
public void saveCode(byte[] addr, byte[] code) {
ContractDetails details = getContractDetails(addr);
if (details == null) {
createAccount(addr);
details = getContractDetails(addr);
}
details.setCode(code);
detailsDB.put(wrap(addr), details);
}
@Override
public BigInteger getNonce(byte[] addr) {
AccountState account = getAccountState(addr);
if (account == null)
account = createAccount(addr);
return account.getNonce();
}
@Override
public BigInteger increaseNonce(byte[] addr) {
AccountState account = getAccountState(addr);
if (account == null)
account = createAccount(addr);
account.incrementNonce();
worldState.put(wrap(addr), account);
return account.getNonce();
}
/**
 * Sets the nonce of the account at {@code addr} to an explicit value,
 * creating the account first when necessary.
 *
 * @return the nonce value after the assignment
 */
public BigInteger setNonce(byte[] addr, BigInteger nonce) {
    AccountState state = getAccountState(addr);
    if (state == null) {
        state = createAccount(addr);
    }
    state.setNonce(nonce);
    worldState.put(wrap(addr), state);
    return state.getNonce();
}
/**
 * Removes the account at {@code addr} together with its contract details.
 * Deleting a non-existent account is a no-op.
 */
@Override
public void delete(byte[] addr) {
    final ByteArrayWrapper key = wrap(addr);
    detailsDB.remove(key);
    worldState.remove(key);
}
/**
 * Returns the contract details (storage + code) for {@code addr}, or
 * {@code null} when the address has no contract entry.
 */
@Override
public ContractDetails getContractDetails(byte[] addr) {
    return detailsDB.get(wrap(addr));
}
/**
 * Returns the account state for {@code addr}, or {@code null} when the
 * account does not exist.
 */
@Override
public AccountState getAccountState(byte[] addr) {
    return worldState.get(wrap(addr));
}
/**
 * Creates a fresh account at {@code addr} with default state and empty
 * contract details, registering both in the in-memory maps. An existing
 * entry at the same address is overwritten.
 *
 * @return the newly created account state
 */
@Override
public AccountState createAccount(byte[] addr) {
    final ByteArrayWrapper key = wrap(addr);
    final AccountState freshAccount = new AccountState();
    worldState.put(key, freshAccount);
    detailsDB.put(key, new ContractDetailsImpl());
    return freshAccount;
}
/**
 * Returns {@code true} when an account exists at {@code addr}.
 */
@Override
public boolean isExist(byte[] addr) {
    return getAccountState(addr) != null;
}
/**
 * Not supported: this map-backed repository has no trie, so no state root
 * can be computed.
 *
 * @throws UnsupportedOperationException always
 */
@Override
public byte[] getRoot() {
    throw new UnsupportedOperationException();
}
/**
 * Copies the account state and contract details for {@code addr} into the
 * supplied caches. Missing entries are replaced by fresh default instances;
 * existing entries are cloned so the caches never alias live repository state.
 *
 * @param addr          account address to load
 * @param cacheAccounts destination cache for the (cloned) account state
 * @param cacheDetails  destination cache for the (cloned) contract details
 */
@Override
public void loadAccount(byte[] addr, HashMap<ByteArrayWrapper, AccountState> cacheAccounts, HashMap<ByteArrayWrapper, ContractDetails> cacheDetails) {
    final AccountState existingAccount = getAccountState(addr);
    final ContractDetails existingDetails = getContractDetails(addr);
    final AccountState accountCopy =
            (existingAccount == null) ? new AccountState() : existingAccount.clone();
    final ContractDetails detailsCopy =
            (existingDetails == null) ? new ContractDetailsImpl() : existingDetails.clone();
    cacheAccounts.put(wrap(addr), accountCopy);
    cacheDetails.put(wrap(addr), detailsCopy);
}
}
| |
/*
Derby - Class org.apache.derby.client.am.Lob
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.apache.derby.client.am;
import java.io.InputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.sql.SQLException;
import org.apache.derby.client.net.NetConfiguration;
import org.apache.derby.client.net.NetConnection;
import org.apache.derby.shared.common.reference.SQLState;
/**
 * Common base class for the Derby client Blob/Clob implementations. A Lob
 * value is held either materialized in memory, as a server-side locator, or
 * streamed via "layer B" streaming. This class tracks which representation is
 * in use, caches the SQL length once obtained, and participates in the
 * connection's unit-of-work listener mechanism.
 */
public abstract class Lob implements UnitOfWorkListener {
    // The following flags specify the data type(s) a LOB instance currently contains
    public static final int STRING = 2;
    public static final int ASCII_STREAM = 4;
    public static final int UNICODE_STREAM = 8;
    public static final int CHARACTER_STREAM = 16;
    public static final int BINARY_STREAM = 32;
    public static final int BINARY_STRING = 64;
    public static final int LOCATOR = 128;

    public static final int INVALID_LOCATOR = -1;

    //---------------------navigational members-----------------------------------
    protected Agent agent_;

    //-----------------------------state------------------------------------------
    protected int dataType_ = 0;      // data type(s) the LOB instance currently contains
    protected int locator_ = INVALID_LOCATOR; // locator id for this LOB

    private long sqlLength_;          // length of the LOB value, as defined by the server
    private boolean lengthObtained_;  // true once sqlLength_ holds a valid value

    /**
     * This boolean variable indicates whether the Lob object has been
     * invalidated by calling free() on it
     */
    protected boolean isValid_ = true;

    private final boolean willBeLayerBStreamed_;

    // A running counter that keeps track of whether an update has been done
    // on this LOB value. We do not need to bother about the limit imposed by
    // this counter because we just check whether its latest value matches;
    // hence for all practical purposes there is no limit imposed.
    private long updateCount;

    /**
     * This integer identifies which transaction the Lob is associated with
     */
    private int transactionID_;

    //-----------------------------messageId---------------------------------------
    protected static final ClientMessageId LOB_OBJECT_LENGTH_UNKNOWN_YET =
            new ClientMessageId( SQLState.LOB_OBJECT_LENGTH_UNKNOWN_YET );

    //---------------------constructors/finalizer---------------------------------
    protected Lob(Agent agent,
                  boolean willBeLayerBStreamed) {
        agent_ = agent;
        lengthObtained_ = false;
        willBeLayerBStreamed_ = willBeLayerBStreamed;
        // Remember the creating transaction so checkValidity() can detect use
        // of a locator-based Lob after that transaction has ended.
        transactionID_ = agent_.connection_.getTransactionID();
    }

    protected void finalize() throws java.lang.Throwable {
        super.finalize();
    }

    // ---------------------------jdbc 2------------------------------------------

    /**
     * Return the length of the Lob value represented by this Lob
     * object. If length is not already known, and Lob is locator
     * based, length will be retrieved from the server. If not,
     * locator based, Lob will first be materialized. NOTE: The
     * caller needs to deal with synchronization.
     *
     * @throws SqlException on execution errors while materializing the stream,
     *         or if Layer B streaming is used and length not yet obtained.
     * @return length of Lob value
     */
    long sqlLength() throws SqlException
    {
        if (lengthObtained_) return sqlLength_;

        if (isLocator()) {
            sqlLength_ = getLocatorLength();
            lengthObtained_ = true;
        } else if (willBeLayerBStreamed()) {
            // Length is only known after the stream has been consumed.
            throw new SqlException(agent_.logWriter_,
                                   LOB_OBJECT_LENGTH_UNKNOWN_YET);
        } else {
            materializeStream();  // Will set sqlLength_
        }

        return sqlLength_;
    }

    /**
     * Update the registered length of the Lob value. To be called by
     * methods that make changes to the length of the Lob.
     * NOTE: The caller needs to deal with synchronization.
     *
     * @param length the new length of the Lob value
     */
    void setSqlLength(long length)
    {
        sqlLength_ = length;
        lengthObtained_ = true;
    }

    /**
     * Get the length of locator based Lob from the server. This is a
     * dummy implementation that is supposed to be overridden by
     * subclasses. A stored procedure call will be made to get the
     * length from the server.
     *
     * @throws org.apache.derby.client.am.SqlException
     * @return length of Lob
     */
    long getLocatorLength() throws SqlException
    {
        return -1;
    }

    //-----------------------event callback methods-------------------------------

    public void listenToUnitOfWork() {
        agent_.connection_.CommitAndRollbackListeners_.put(this, null);
    }

    public void completeLocalCommit(java.util.Iterator listenerIterator) {
        listenerIterator.remove();
    }

    public void completeLocalRollback(java.util.Iterator listenerIterator) {
        listenerIterator.remove();
    }

    //----------------------------helper methods----------------------------------

    public Agent getAgent() {
        return agent_;
    }

    /**
     * Throws if the owning connection is closed; otherwise surfaces any
     * deferred exceptions recorded on the agent.
     */
    void checkForClosedConnection() throws SqlException {
        if (agent_.connection_.isClosedX()) {
            agent_.checkForDeferredExceptions();
            throw new SqlException(agent_.logWriter_,
                new ClientMessageId(SQLState.LOB_METHOD_ON_CLOSED_CONNECTION));
        } else {
            agent_.checkForDeferredExceptions();
        }
    }

    void completeLocalRollback() {
        ;
    }

    void completeLocalCommit() {
        ;
    }

    /**
     * Method to be implemented by subclasses, so that
     * #materializedStream(InputStream, String) can be called with subclass
     * specific parameters and the result assigned to the right stream.
     *
     * @throws SqlException
     */
    protected abstract void materializeStream() throws SqlException;

    /**
     * Materialize the given stream into memory and update the internal
     * length variable.
     *
     * @param is stream to use for input
     * @param typeDesc description of the data type we are inserting,
     *        for instance <code>java.sql.Clob</code>
     * @return a stream whose source is the materialized data
     * @throws SqlException if the stream exceeds 2 GB, or an error happens
     *         while reading from the stream
     */
    protected InputStream materializeStream(InputStream is, String typeDesc)
            throws SqlException {
        final int GROWBY = 32 * 1024; // 32 KB
        ArrayList<byte[]> byteArrays = new ArrayList<byte[]>();
        byte[] curBytes = new byte[GROWBY];
        int totalLength = 0;
        int partLength = 0;
        // Read all data from the stream, storing it in a number of arrays.
        try {
            do {
                partLength = is.read(curBytes, 0, curBytes.length);
                if (partLength == curBytes.length) {
                    byteArrays.add(curBytes);
                    // Make sure we don't exceed 2 GB by checking for overflow.
                    int newLength = totalLength + GROWBY;
                    if (newLength < 0 || newLength == Integer.MAX_VALUE) {
                        curBytes = new byte[Integer.MAX_VALUE - totalLength];
                    } else {
                        curBytes = new byte[GROWBY];
                    }
                }
                if (partLength > 0) {
                    totalLength += partLength;
                }
            } while (partLength == GROWBY);
            // Make sure stream is exhausted.
            if (is.read() != -1) {
                // We have exceeded 2 GB.
                throw new SqlException(
                            null,
                            new ClientMessageId(
                                SQLState.LANG_OUTSIDE_RANGE_FOR_DATATYPE),
                            typeDesc
                        );
            }
            if (partLength > 0) {
                byteArrays.add(curBytes);
            }

            // Cleanup and set state.
            curBytes = null;
            sqlLength_ = totalLength;
            lengthObtained_ = true;
            // Return a stream whose source is a list of byte arrays.
            // This avoids having to copy all the data into a single big array.
            return new ByteArrayCombinerStream(byteArrays, totalLength);
        } catch (IOException ioe) {
            throw new SqlException(null,
                        new ClientMessageId(
                            SQLState.LANG_STREAMING_COLUMN_I_O_EXCEPTION),
                        typeDesc,
                        ioe
                    );
        }
    }

    public static boolean isLengthObtained(Lob l) {
        return l.lengthObtained_;
    }

    public abstract long length() throws SQLException;

    protected static boolean isLayerBStreamingPossible( Agent agent ) {
        final NetConnection netConn =
            ( NetConnection ) agent.connection_ ;
        final int securityMechanism =
            netConn.getSecurityMechanism();
        // Layer B streaming requires server support and is incompatible with
        // the encrypted userid security mechanisms.
        return
            netConn.serverSupportsLayerBStreaming() &&
            securityMechanism != NetConfiguration.SECMEC_EUSRIDDTA &&
            securityMechanism != NetConfiguration.SECMEC_EUSRPWDDTA;
    }

    public boolean willBeLayerBStreamed() {
        return willBeLayerBStreamed_;
    }

    /**
     * Check whether this Lob is based on a locator
     * @return true if Lob is based on locator, false otherwise
     */
    public boolean isLocator() {
        return ((dataType_ & LOCATOR) == LOCATOR);
    }

    /**
     * Get locator for this Lob
     * @return locator for this Lob, INVALID_LOCATOR if Lob is not
     *         based on locator
     */
    public int getLocator() {
        return locator_;
    }

    /**
     * Checks the <code>pos</code> and <code>length</code>.
     *
     * @param pos a long that contains the position that needs to be checked
     * @param length a long that contains the length that needs to be checked
     * @throws SQLException if
     *         a) pos <= 0
     *         b) pos > (length of LOB)
     *         c) length < 0
     *         d) (pos -1) + length > (length of LOB)
     */
    protected void checkPosAndLength(long pos, long length)
            throws SQLException {
        if (pos <= 0) {
            throw new SqlException(agent_.logWriter_,
                    new ClientMessageId(SQLState.BLOB_BAD_POSITION),
                    Long.valueOf(pos)).getSQLException();
        }
        if (length < 0) {
            throw new SqlException(agent_.logWriter_,
                    new ClientMessageId(SQLState.BLOB_NONPOSITIVE_LENGTH),
                    Integer.valueOf((int) length)).getSQLException();
        }
        // Covers both b) and d) above: the requested range must lie within the Lob.
        if (length > (this.length() - (pos - 1))) {
            throw new SqlException(agent_.logWriter_,
                    new ClientMessageId(SQLState.POS_AND_LENGTH_GREATER_THAN_LOB),
                    Long.valueOf(pos), Long.valueOf(length)).getSQLException();
        }
    }

    /**
     * Increments and returns the new updateCount
     * of this <code>Lob</code>. The method needs to be
     * synchronized since multiple updates can
     * happen on this <code>Lob</code> simultaneously.
     * It will be called from the
     * 1) Locator Writers
     * 2) Locator OutputStreams
     * 3) From the update methods
     *    within the Lobs like setString, truncate.
     * since all of the above accesses are inside
     * the am package, this method will have
     * default access. We do not need to worry
     * about the non-locator streams since
     * non-locator InputStreams would not
     * depend on updateCount for invalidation
     */
    protected synchronized void incrementUpdateCount() {
        updateCount++;
    }

    /**
     * Returns the current updateCount of the Clob.
     */
    long getUpdateCount() {
        return updateCount;
    }

    /**
     * Calls SqlLength() to check if the Locator associated
     * with the underlying Lob is valid. If it is not
     * it throws an exception.
     *
     * @throws SqlException
     */
    void checkForLocatorValidity() throws SqlException {
        // As of now there is no other way of determining that
        // the locator associated with the underlying LOB is not
        // valid
        sqlLength();
    }

    /**
     * Checks if isValid is true and whether the transaction that
     * created the Lob is still active. If any of which is not true throws
     * a SQLException stating that a method has been called on
     * an invalid LOB object.
     *
     * @throws SQLException if isValid is not true or the transaction that
     *         created the Lob is not active
     */
    protected void checkValidity() throws SQLException {
        // If there isn't an open connection, the Lob is invalid.
        try {
            agent_.connection_.checkForClosedConnection();
        } catch (SqlException se) {
            throw se.getSQLException();
        }

        if (!isValid_ || (isLocator() &&
                (transactionID_ != agent_.connection_.getTransactionID())))
            throw new SqlException(null, new ClientMessageId(SQLState.LOB_OBJECT_INVALID))
                    .getSQLException();
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.internal.cache.execute;
import static org.apache.geode.distributed.ConfigurationProperties.LOCATORS;
import static org.apache.geode.distributed.ConfigurationProperties.MCAST_PORT;
import static org.apache.geode.internal.cache.functions.TestFunction.TEST_FUNCTION2;
import static org.apache.geode.internal.cache.functions.TestFunction.TEST_FUNCTION9;
import static org.apache.geode.internal.cache.functions.TestFunction.TEST_FUNCTION_NO_LASTRESULT;
import static org.apache.geode.test.dunit.Host.getHost;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.apache.geode.cache.AttributesFactory;
import org.apache.geode.cache.DataPolicy;
import org.apache.geode.cache.PartitionAttributes;
import org.apache.geode.cache.PartitionAttributesFactory;
import org.apache.geode.cache.PartitionResolver;
import org.apache.geode.cache.Region;
import org.apache.geode.cache.RegionFactory;
import org.apache.geode.cache.RegionShortcut;
import org.apache.geode.cache.execute.Execution;
import org.apache.geode.cache.execute.Function;
import org.apache.geode.cache.execute.FunctionException;
import org.apache.geode.cache.execute.FunctionService;
import org.apache.geode.cache.execute.ResultCollector;
import org.apache.geode.internal.cache.PartitionedRegion;
import org.apache.geode.internal.cache.functions.TestFunction;
import org.apache.geode.test.dunit.VM;
import org.apache.geode.test.dunit.cache.CacheTestCase;
import org.apache.geode.test.junit.categories.FunctionServiceTest;
/**
 * Distributed tests for function execution on partitioned regions where the
 * function uses a ResultSender. Covers remote execution from a pure accessor,
 * multi-key and all-node execution, and the FunctionException raised when a
 * function never sends a last result.
 */
@Category({FunctionServiceTest.class})
@SuppressWarnings("serial")
public class PRFunctionExecutionWithResultSenderDUnitTest extends CacheTestCase {

  private static final String STRING_KEY = "execKey";

  // Unique per test method so regions from different tests never collide.
  private String regionName;

  @Before
  public void setUp() {
    regionName = getUniqueName();
  }

  /**
   * Test remote execution by a pure accessor which doesn't have the function factory present.
   */
  @Test
  public void testRemoteSingleKeyExecution_byName() throws Exception {
    VM accessor = getHost(0).getVM(2);
    VM datastore = getHost(0).getVM(3);

    // localMaxMemory == 0 makes this VM a pure accessor (no buckets hosted).
    accessor.invoke(() -> {
      createPartitionedRegion(regionName, 0, 0);
    });

    datastore.invoke(() -> {
      createPartitionedRegion(regionName, 10, 0);
      FunctionService.registerFunction(new TestFunction(true, TEST_FUNCTION2));
    });

    accessor.invoke(() -> {
      Region<String, Integer> region = getPartitionedRegion(regionName);
      Set<String> stringKeys = new HashSet<>();
      stringKeys.add(STRING_KEY);

      Function function = new TestFunction(true, TEST_FUNCTION2);
      FunctionService.registerFunction(function);
      Execution execution = FunctionService.onRegion(region);

      region.put(STRING_KEY, 1);
      region.put(STRING_KEY + "3", 3);
      region.put(STRING_KEY + "4", 4);
      stringKeys.add(STRING_KEY + "3");
      stringKeys.add(STRING_KEY + "4");

      // Boolean argument: function echoes a single true result.
      ResultCollector<Boolean, List<Boolean>> resultCollector1 =
          execution.withFilter(stringKeys).setArguments(true).execute(function.getId());
      assertThat(resultCollector1.getResult()).hasSize(1).containsExactly(true);

      // Key-set argument: function returns the values for the filtered keys.
      ResultCollector<List<Integer>, List<List<Integer>>> resultCollector2 =
          execution.withFilter(stringKeys).setArguments(stringKeys).execute(function.getId());
      List<List<Integer>> values = resultCollector2.getResult();
      assertThat(values).hasSize(1);
      assertThat(values.get(0)).hasSize(3).containsOnly(1, 3, 4);

      // Map argument: function performs the puts remotely, then we verify locally.
      Map<String, Integer> putData = new HashMap<>();
      putData.put(STRING_KEY + "1", 2);
      putData.put(STRING_KEY + "2", 3);

      ResultCollector<Boolean, List<Boolean>> resultCollector3 =
          execution.withFilter(stringKeys).setArguments(putData).execute(function.getId());
      assertThat(resultCollector3.getResult()).hasSize(1).containsExactly(true);

      assertThat(region.get(STRING_KEY + "1")).isEqualTo(2);
      assertThat(region.get(STRING_KEY + "2")).isEqualTo(3);
    });
  }

  /**
   * Test remote execution by a pure accessor which doesn't have the function factory present And
   * the function doesn't send last result. FunctionException is expected in this case
   */
  @Test
  public void testRemoteExecution_NoLastResult() throws Exception {
    VM accessor = getHost(0).getVM(0);
    VM datastore = getHost(0).getVM(1);

    accessor.invoke(() -> {
      createPartitionedRegion(regionName, 0, 0);
    });

    datastore.invoke(() -> {
      createPartitionedRegion(regionName, 10, 0);
      FunctionService.registerFunction(new TestFunction(true, TEST_FUNCTION_NO_LASTRESULT));
    });

    accessor.invoke(() -> {
      Region<String, Integer> region = getPartitionedRegion(regionName);
      Set<String> stringKeys = new HashSet<>();
      stringKeys.add(STRING_KEY);

      Function<Boolean> function = new TestFunction<>(true, TEST_FUNCTION_NO_LASTRESULT);
      FunctionService.registerFunction(function);
      Execution<Boolean, Boolean, List<Boolean>> execution = FunctionService.onRegion(region);

      region.put(STRING_KEY, 1);
      region.put(STRING_KEY + "3", 3);
      region.put(STRING_KEY + "4", 4);
      stringKeys.add(STRING_KEY + "3");
      stringKeys.add(STRING_KEY + "4");

      ResultCollector<Boolean, List<Boolean>> resultCollector =
          execution.withFilter(stringKeys).setArguments(true).execute(function.getId());
      // The function never calls lastResult(), so collecting must fail.
      assertThatThrownBy(() -> resultCollector.getResult()).isInstanceOf(FunctionException.class)
          .hasMessageContaining("did not send last result");
    });
  }

  /**
   * Test multi-key remote execution by a pure accessor which doesn't have the function factory
   * present.
   */
  @Test
  public void testRemoteMultiKeyExecution_byName() throws Exception {
    VM accessor = getHost(0).getVM(3);
    VM datastore0 = getHost(0).getVM(0);
    VM datastore1 = getHost(0).getVM(1);
    VM datastore2 = getHost(0).getVM(2);

    accessor.invoke(() -> {
      createPartitionedRegion(regionName, 0, 0);
    });

    datastore0.invoke(() -> {
      createPartitionedRegion(regionName, 10, 0);
      FunctionService.registerFunction(new TestFunction(true, TEST_FUNCTION9));
    });
    datastore1.invoke(() -> {
      createPartitionedRegion(regionName, 10, 0);
      FunctionService.registerFunction(new TestFunction(true, TEST_FUNCTION9));
    });
    datastore2.invoke(() -> {
      createPartitionedRegion(regionName, 10, 0);
      FunctionService.registerFunction(new TestFunction(true, TEST_FUNCTION9));
    });

    accessor.invoke(() -> {
      PartitionedRegion pr = getPartitionedRegion(regionName);

      // Twice as many keys as buckets, so every datastore hosts some data.
      Set<String> stringKeys = new HashSet<>();
      for (int i = pr.getTotalNumberOfBuckets() * 2; i > 0; i--) {
        stringKeys.add(STRING_KEY + i);
      }

      int valueIndex = 0;
      for (String key : stringKeys) {
        int value = valueIndex++;
        pr.put(key, value);
      }

      Function function = new TestFunction(true, TEST_FUNCTION9);
      FunctionService.registerFunction(function);

      Execution<Boolean, Boolean, List<Boolean>> booleanExecution = FunctionService.onRegion(pr);
      ResultCollector<Boolean, List<Boolean>> resultCollector1 =
          booleanExecution.withFilter(stringKeys).setArguments(true).execute(function.getId());
      // One result per datastore member.
      List<Boolean> booleanResults = resultCollector1.getResult();
      assertThat(booleanResults).hasSize(3).containsExactly(true, true, true);

      Execution<Set<String>, List<Integer>, List<List<Integer>>> execution =
          FunctionService.onRegion(pr);
      ResultCollector<List<Integer>, List<List<Integer>>> resultCollector2 =
          execution.withFilter(stringKeys).setArguments(stringKeys).execute(function.getId());
      List<List<Integer>> valuesResults = resultCollector2.getResult();
      assertThat(valuesResults).hasSize(pr.getTotalNumberOfBuckets() * 2 * 3);
    });
  }

  /**
   * Test ability to execute a multi-key function by a local data store
   * <p>
   * TODO: extract to IntegrationTest
   */
  @Test
  public void testLocalMultiKeyExecution_byName() throws Exception {
    PartitionedRegion pr = createPartitionedRegion(regionName, 10, 0);

    Set<String> stringKeys = new HashSet<>();
    for (int i = pr.getTotalNumberOfBuckets() * 2; i > 0; i--) {
      stringKeys.add(STRING_KEY + i);
    }

    int valueIndex = 0;
    for (String key : stringKeys) {
      int value = valueIndex++;
      pr.put(key, value);
    }

    Function function = new TestFunction(true, TEST_FUNCTION9);
    FunctionService.registerFunction(function);
    Execution execution = FunctionService.onRegion(pr);

    ResultCollector<Boolean, List<Boolean>> resultCollector1 =
        execution.withFilter(stringKeys).setArguments(true).execute(function.getId());
    List<Boolean> results = resultCollector1.getResult();
    assertThat(results).hasSize(1).containsExactly(true);

    ResultCollector<List<Integer>, List<List<Integer>>> resultCollector2 =
        execution.withFilter(stringKeys).setArguments(stringKeys).execute(function.getId());
    List<List<Integer>> valuesResults = resultCollector2.getResult();
    assertThat(valuesResults).hasSize(pr.getTotalNumberOfBuckets() * 2);
  }

  /**
   * Test local execution on datastore with function that doesn't send last result.
   * FunctionException is expected in this case
   *
   * <p>
   * TODO: extract to IntegrationTest
   */
  @Test
  public void testLocalExecution_NoLastResult() throws Exception {
    PartitionedRegion pr = createPartitionedRegion(regionName, 10, 0);

    Set<String> stringKeys = new HashSet<>();
    for (int i = pr.getTotalNumberOfBuckets() * 2; i > 0; i--) {
      stringKeys.add(STRING_KEY + i);
    }

    int valueIndex = 0;
    for (String key : stringKeys) {
      int value = valueIndex++;
      pr.put(key, value);
    }

    Function<Boolean> function = new TestFunction<>(true, TEST_FUNCTION_NO_LASTRESULT);
    FunctionService.registerFunction(function);
    Execution<Boolean, Boolean, List<Boolean>> execution = FunctionService.onRegion(pr);

    ResultCollector<Boolean, List<Boolean>> resultCollector =
        execution.withFilter(stringKeys).setArguments(true).execute(function.getId());
    assertThatThrownBy(() -> resultCollector.getResult()).isInstanceOf(FunctionException.class)
        .hasMessageContaining("did not send last result");
  }

  /**
   * Test execution on all datastores with function that doesn't send last result. FunctionException
   * is expected in this case
   */
  @Test
  public void testExecutionOnAllNodes_NoLastResult() throws Exception {
    VM datastore0 = getHost(0).getVM(0);
    VM datastore1 = getHost(0).getVM(1);
    VM datastore2 = getHost(0).getVM(2);
    VM datastore3 = getHost(0).getVM(3);

    datastore0.invoke(() -> {
      createPartitionedRegion(regionName, 10, 0, 17);
      FunctionService.registerFunction(new TestFunction(true, TEST_FUNCTION_NO_LASTRESULT));
    });
    datastore1.invoke(() -> {
      createPartitionedRegion(regionName, 10, 0, 17);
      FunctionService.registerFunction(new TestFunction(true, TEST_FUNCTION_NO_LASTRESULT));
    });
    datastore2.invoke(() -> {
      createPartitionedRegion(regionName, 10, 0, 17);
      FunctionService.registerFunction(new TestFunction(true, TEST_FUNCTION_NO_LASTRESULT));
    });
    datastore3.invoke(() -> {
      createPartitionedRegion(regionName, 10, 0, 17);
      FunctionService.registerFunction(new TestFunction(true, TEST_FUNCTION_NO_LASTRESULT));
    });

    datastore3.invoke(() -> {
      PartitionedRegion pr = getPartitionedRegion(regionName);

      // Three keys per bucket so every bucket is populated.
      Collection<String> stringKeys = new HashSet<>();
      for (int i = pr.getTotalNumberOfBuckets() * 3; i > 0; i--) {
        stringKeys.add(STRING_KEY + i);
      }

      int valueIndex = 0;
      for (String key : stringKeys) {
        int value = valueIndex++;
        pr.put(key, value);
      }

      // Assert there is data in each bucket
      for (int bucketId = 0; bucketId < pr.getTotalNumberOfBuckets(); bucketId++) {
        assertThat(pr.getBucketKeys(bucketId).size()).isGreaterThan(0);
      }

      Function<Boolean> function = new TestFunction<>(true, TEST_FUNCTION_NO_LASTRESULT);
      FunctionService.registerFunction(function);
      Execution<Boolean, Boolean, List<Boolean>> execution = FunctionService.onRegion(pr);

      ResultCollector resultCollector = execution.setArguments(true).execute(function.getId());
      assertThatThrownBy(() -> resultCollector.getResult()).isInstanceOf(FunctionException.class)
          .hasMessageContaining("did not send last result");
    });
  }

  /**
   * Executes a function on every datastore node and expects one result per member.
   */
  @Test
  public void testExecutionOnAllNodes_byName() throws Exception {
    VM datastore0 = getHost(0).getVM(0);
    VM datastore1 = getHost(0).getVM(1);
    VM datastore2 = getHost(0).getVM(2);
    VM datastore3 = getHost(0).getVM(3);

    datastore0.invoke(() -> {
      createPartitionedRegion(regionName, 10, 0, 17);
      FunctionService.registerFunction(new TestFunction(true, TEST_FUNCTION9));
    });
    datastore1.invoke(() -> {
      createPartitionedRegion(regionName, 10, 0, 17);
      FunctionService.registerFunction(new TestFunction(true, TEST_FUNCTION9));
    });
    datastore2.invoke(() -> {
      createPartitionedRegion(regionName, 10, 0, 17);
      FunctionService.registerFunction(new TestFunction(true, TEST_FUNCTION9));
    });
    datastore3.invoke(() -> {
      createPartitionedRegion(regionName, 10, 0, 17);
      FunctionService.registerFunction(new TestFunction(true, TEST_FUNCTION9));
    });

    datastore3.invoke(() -> {
      PartitionedRegion pr = getPartitionedRegion(regionName);

      Collection<String> stringKeys = new HashSet<>();
      for (int i = pr.getTotalNumberOfBuckets() * 3; i > 0; i--) {
        stringKeys.add(STRING_KEY + i);
      }

      int valueIndex = 0;
      for (String key : stringKeys) {
        int value = valueIndex++;
        pr.put(key, value);
      }

      // Assert there is data in each bucket
      for (int bucketId = 0; bucketId < pr.getTotalNumberOfBuckets(); bucketId++) {
        assertThat(pr.getBucketKeys(bucketId).size()).isGreaterThan(0);
      }

      Function<Boolean> function = new TestFunction<>(true, TEST_FUNCTION9);
      FunctionService.registerFunction(function);
      Execution<Boolean, Boolean, List<Boolean>> execution = FunctionService.onRegion(pr);

      ResultCollector<Boolean, List<Boolean>> resultCollector =
          execution.setArguments(true).execute(function.getId());
      // One true per datastore member (4 VMs).
      List<Boolean> results = resultCollector.getResult();
      assertThat(results).hasSize(4).containsExactly(true, true, true, true);
    });
  }

  /**
   * TODO: extract to IntegrationTest with RegressionTest suffix
   * <p>
   * TODO: this test has nothing to do with PartitionedRegions. Move it to a FunctionService test
   * <p>
   * TRAC #41832: Function execution should throw Exceptions when any configuration is not valid
   */
  @Test
  public void executeThrowsIfRegionIsNotReplicated() throws Exception {
    Properties config = new Properties();
    config.put(MCAST_PORT, "0");
    config.put(LOCATORS, "");
    getCache(config);

    AttributesFactory factory = new AttributesFactory();
    factory.setDataPolicy(DataPolicy.EMPTY); // ie, a NON-REPLICATED region

    Region region = getCache().createRegion(regionName, factory.create());

    Function function = new TestFunction(true, TEST_FUNCTION2);
    FunctionService.registerFunction(function);
    Execution execution = FunctionService.onRegion(region).setArguments(true);

    assertThatThrownBy(() -> execution.execute(function.getId()))
        .isInstanceOf(FunctionException.class)
        .hasMessageStartingWith("No Replicated Region found for executing function");
  }

  // Creates a PR with the given local-max-memory (0 == pure accessor) and redundancy.
  private PartitionedRegion createPartitionedRegion(final String regionName,
      final int localMaxMemory, final int redundancy) {
    PartitionAttributes pa = createPartitionAttributes(localMaxMemory, redundancy);
    return createPartitionedRegion(regionName, pa);
  }

  // Variant with a custom PartitionResolver.
  private PartitionedRegion createPartitionedRegion(final String regionName,
      final int localMaxMemory, final int redundancy, final PartitionResolver resolver) {
    PartitionAttributes pa = createPartitionAttributes(localMaxMemory, redundancy, resolver);
    return createPartitionedRegion(regionName, pa);
  }

  // Variant with an explicit total bucket count.
  private PartitionedRegion createPartitionedRegion(final String regionName,
      final int localMaxMemory, final int redundancy, final int totalNumBuckets) {
    PartitionAttributes pa = createPartitionAttributes(localMaxMemory, redundancy, totalNumBuckets);
    return createPartitionedRegion(regionName, pa);
  }

  // All region-creation variants funnel through this one.
  private PartitionedRegion createPartitionedRegion(final String regionName,
      final PartitionAttributes partitionAttributes) {
    RegionFactory regionFactory = getCache().createRegionFactory(RegionShortcut.PARTITION);
    regionFactory.setPartitionAttributes(partitionAttributes);
    return (PartitionedRegion) regionFactory.create(regionName);
  }

  private PartitionAttributes createPartitionAttributes(final int localMaxMemory,
      final int redundancy) {
    PartitionAttributesFactory paf = new PartitionAttributesFactory();
    paf.setLocalMaxMemory(localMaxMemory);
    paf.setRedundantCopies(redundancy);
    return paf.create();
  }

  private PartitionAttributes createPartitionAttributes(final int localMaxMemory,
      final int redundancy, final PartitionResolver resolver) {
    PartitionAttributesFactory paf = new PartitionAttributesFactory();
    paf.setLocalMaxMemory(localMaxMemory);
    paf.setPartitionResolver(resolver);
    paf.setRedundantCopies(redundancy);
    return paf.create();
  }

  private PartitionAttributes createPartitionAttributes(final int localMaxMemory,
      final int redundancy, final PartitionResolver resolver, final int totalNumBuckets) {
    PartitionAttributesFactory paf = new PartitionAttributesFactory();
    paf.setLocalMaxMemory(localMaxMemory);
    paf.setPartitionResolver(resolver);
    paf.setRedundantCopies(redundancy);
    paf.setTotalNumBuckets(totalNumBuckets);
    return paf.create();
  }

  private PartitionAttributes createPartitionAttributes(final int localMaxMemory,
      final int redundancy, final long startupRecoveryDelay, final int totalNumBuckets) {
    PartitionAttributesFactory paf = new PartitionAttributesFactory();
    paf.setLocalMaxMemory(localMaxMemory);
    paf.setRedundantCopies(redundancy);
    paf.setStartupRecoveryDelay(startupRecoveryDelay);
    paf.setTotalNumBuckets(totalNumBuckets);
    return paf.create();
  }

  private PartitionAttributes createPartitionAttributes(final int localMaxMemory,
      final int redundancy, final int totalNumBuckets) {
    PartitionAttributesFactory paf = new PartitionAttributesFactory();
    paf.setLocalMaxMemory(localMaxMemory);
    paf.setRedundantCopies(redundancy);
    paf.setTotalNumBuckets(totalNumBuckets);
    return paf.create();
  }

  private PartitionAttributes createPartitionAttributes(final String colocatedWith,
      final int localMaxMemory, final PartitionResolver resolver, final int redundancy,
      final int totalNumBuckets) {
    PartitionAttributesFactory paf = new PartitionAttributesFactory();
    paf.setColocatedWith(colocatedWith);
    paf.setLocalMaxMemory(localMaxMemory);
    paf.setPartitionResolver(resolver);
    paf.setRedundantCopies(redundancy);
    paf.setTotalNumBuckets(totalNumBuckets);
    return paf.create();
  }

  private PartitionedRegion getPartitionedRegion(final String regionName) {
    return (PartitionedRegion) getCache().getRegion(regionName);
  }
}
| |
package com.dolbz.jenumerable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Comparator;
import java.util.Dictionary;
import java.util.Iterator;
import java.util.List;
import sun.reflect.generics.reflectiveObjects.NotImplementedException;
import com.dolbz.jenumerable.altlambda.DualTranslator;
import com.dolbz.jenumerable.altlambda.IndexPredicate;
import com.dolbz.jenumerable.altlambda.IndexTranslator;
import com.dolbz.jenumerable.altlambda.Predicate;
import com.dolbz.jenumerable.altlambda.Translator;
import com.dolbz.jenumerable.altlambda.numeric.DoubleTranslator;
import com.dolbz.jenumerable.altlambda.numeric.FloatTranslator;
import com.dolbz.jenumerable.altlambda.numeric.IntegerTranslator;
import com.dolbz.jenumerable.altlambda.numeric.LongTranslator;
import com.dolbz.jenumerable.exceptions.JEnumerableOverflowException;
import com.dolbz.jenumerable.interfaces.Grouping;
import com.dolbz.jenumerable.interfaces.Lookup;
import com.dolbz.jenumerable.interfaces.OrderedEnumerable;
import com.dolbz.jenumerable.util.DefaultEqualityComparer;
import com.dolbz.jenumerable.util.EqualityComparer;
/**
* Implementation class for JEnumerable. Has/will have equivalent methods to
* .NET's System.Linq.Enumerable class
*
* @see http://msdn.microsoft.com/en-us/library/system.linq.enumerable.aspx
*
* Using a wrapper as Java's @java.lang.Iterable can't have extension
* methods attached to it :(
*
* @author nrandle
*
*/
public class JEnumerable<TSource> implements Iterable<TSource> {
private final Iterable<TSource> wrappedIterable;
/**
 * Wraps an {@link Iterable} so that the JEnumerable operators become
 * available on it.
 *
 * @param source the iterable to wrap; must not be null
 * @throws IllegalArgumentException if {@code source} is null
 */
public JEnumerable(final Iterable<TSource> source) {
    if (source != null) {
        this.wrappedIterable = source;
    } else {
        throw new IllegalArgumentException("source is null");
    }
}
/**
 * Constructor which takes an array of TSource elements.
 *
 * @param source the source array; must not be null
 * @throws IllegalArgumentException if {@code source} is null
 */
public JEnumerable(final TSource[] source) {
    if (source == null) {
        // Consistent with the Iterable constructor. Previously a null array
        // fell through to Arrays.asList and raised a bare NullPointerException.
        throw new IllegalArgumentException("source is null");
    }
    wrappedIterable = Arrays.asList(source);
}
/** {@inheritDoc} Iteration is delegated straight to the wrapped iterable. */
@Override
public Iterator<TSource> iterator() {
    return wrappedIterable.iterator();
}
/** Where **/

/**
 * Filters the sequence, keeping elements for which the predicate holds.
 *
 * @throws IllegalArgumentException if {@code predicate} is null
 */
public JEnumerable<TSource> where(final Predicate<TSource> predicate) {
    if (predicate == null) {
        throw new IllegalArgumentException("predicate is null");
    }
    final WhereIterable<TSource> filtered =
            new WhereIterable<TSource>(predicate, this.wrappedIterable);
    return new JEnumerable<TSource>(filtered);
}

/**
 * Filters the sequence with an index-aware predicate.
 *
 * @throws IllegalArgumentException if {@code predicate} is null
 */
public JEnumerable<TSource> where(final IndexPredicate<TSource> predicate) {
    if (predicate == null) {
        throw new IllegalArgumentException("predicate is null");
    }
    final WhereIterable<TSource> filtered =
            new WhereIterable<TSource>(predicate, this.wrappedIterable);
    return new JEnumerable<TSource>(filtered);
}
/** Select **/

/**
 * Projects each element through {@code selector}.
 *
 * @throws IllegalArgumentException if {@code selector} is null
 */
public <TResult> JEnumerable<TResult> select(
        final Translator<TSource, TResult> selector) {
    if (selector == null) {
        throw new IllegalArgumentException("selector is null");
    }
    final SelectIterable<TSource, TResult> projected =
            new SelectIterable<TSource, TResult>(selector, this.wrappedIterable);
    return new JEnumerable<TResult>(projected);
}

/**
 * Projects each element (and its index) through {@code selector}.
 *
 * @throws IllegalArgumentException if {@code selector} is null
 */
public <TResult> JEnumerable<TResult> select(
        final IndexTranslator<TSource, TResult> selector) {
    if (selector == null) {
        throw new IllegalArgumentException("selector is null");
    }
    final SelectIterable<TSource, TResult> projected =
            new SelectIterable<TSource, TResult>(selector, this.wrappedIterable);
    return new JEnumerable<TResult>(projected);
}
/** Range **/

/**
 * Generates a sequence of {@code count} consecutive integers starting at
 * {@code start}.
 *
 * @throws IllegalArgumentException if {@code count} is negative
 */
public static JEnumerable<Integer> range(final int start, final int count) {
    if (count < 0) {
        throw new IllegalArgumentException("count is negative");
    }
    // No upper-bound validation on purpose: Java int arithmetic wraps
    // silently, so incrementing beyond Integer.MAX_VALUE is allowed here.
    final RangeIterable values = new RangeIterable(start, count);
    return new JEnumerable<Integer>(values);
}
/** Empty **/

/**
 * Returns an empty sequence.
 *
 * <p>Implemented (previously threw the sun-internal NotImplementedException,
 * which is not accessible on modern JDKs). A cached singleton as in .NET's
 * Enumerable.Empty is not attempted; a fresh empty wrapper is functionally
 * equivalent.
 */
public static <TResult> JEnumerable<TResult> empty() {
    return new JEnumerable<TResult>(new ArrayList<TResult>());
}
/** Repeat **/

/**
 * Generates a sequence containing {@code element} repeated {@code count}
 * times. Implemented (previously a stub throwing IllegalStateException).
 *
 * @throws IllegalArgumentException if {@code count} is negative
 */
public static <TResult> JEnumerable<TResult> repeat(final TResult element,
        final int count) {
    if (count < 0) {
        throw new IllegalArgumentException("count is negative");
    }
    final List<TResult> repeated = new ArrayList<TResult>(count);
    for (int i = 0; i < count; i++) {
        repeated.add(element);
    }
    return new JEnumerable<TResult>(repeated);
}
/** Count **/

/**
 * Counts the elements in the sequence. Collections report their size
 * directly; any other iterable is enumerated.
 *
 * @throws JEnumerableOverflowException if the count exceeds Integer.MAX_VALUE
 */
public int count() {
    if (wrappedIterable instanceof Collection) {
        return ((Collection<TSource>) wrappedIterable).size();
    }
    // Fall back to counting every element via the predicate overload.
    return count(new Predicate<TSource>() {
        @Override
        public boolean check(final TSource source) {
            return true;
        }
    });
}

/**
 * Counts the elements matching {@code predicate}.
 *
 * @throws IllegalArgumentException if {@code predicate} is null
 * @throws JEnumerableOverflowException if the count exceeds Integer.MAX_VALUE
 */
public int count(final Predicate<TSource> predicate) {
    if (predicate == null) {
        throw new IllegalArgumentException("predicate is null");
    }
    int matches = 0;
    for (final TSource item : wrappedIterable) {
        if (!predicate.check(item)) {
            continue;
        }
        // Guard against wrapping to MIN_VALUE, which would corrupt the count.
        if (matches == Integer.MAX_VALUE) {
            throw new JEnumerableOverflowException(
                    "Count is larger than the maximum value. Try longCount instead");
        }
        matches++;
    }
    return matches;
}
/** LongCount **/

/**
 * Counts the elements in the sequence as a long.
 *
 * <p>Bug fix: this previously delegated to {@link #count(Predicate)}, whose
 * int accumulator throws an overflow exception past Integer.MAX_VALUE —
 * defeating the whole point of a long count. It now delegates to the long
 * overload.
 */
public long longCount() {
    return longCount(new Predicate<TSource>() {
        @Override
        public boolean check(final TSource source) {
            return true;
        }
    });
}

/**
 * Counts the elements matching {@code predicate} as a long.
 *
 * @throws IllegalArgumentException if {@code predicate} is null
 * @throws JEnumerableOverflowException if the count exceeds Long.MAX_VALUE
 */
public long longCount(final Predicate<TSource> predicate) {
    if (predicate == null) {
        throw new IllegalArgumentException("predicate is null");
    }
    long count = 0;
    for (TSource item : wrappedIterable) {
        if (predicate.check(item)) {
            if (count == Long.MAX_VALUE) {
                // Don't overflow back to MIN_VALUE as this could give an
                // incorrect count!
                // Bug fix: the message no longer suggests longCount — this IS
                // longCount (it was copy-pasted from count()).
                throw new JEnumerableOverflowException(
                        "Count is larger than the maximum value");
            } else {
                count++;
            }
        }
    }
    return count;
}
/** SelectMany **/
// public <TResult> JEnumerable<TResult> selectMany() {
// // TODO
// }
/** Any **/

/** Returns true if the sequence contains at least one element. */
public boolean any() {
    return wrappedIterable.iterator().hasNext();
}

/**
 * Returns true if any element matches {@code predicate}.
 *
 * @throws IllegalArgumentException if {@code predicate} is null
 */
public boolean any(final Predicate<TSource> predicate) {
    if (predicate == null) {
        throw new IllegalArgumentException("predicate is null");
    }
    for (final TSource candidate : wrappedIterable) {
        if (predicate.check(candidate)) {
            return true;
        }
    }
    return false;
}

/** All **/

/**
 * Returns true if every element matches {@code predicate} (vacuously true
 * for an empty sequence).
 *
 * @throws IllegalArgumentException if {@code predicate} is null
 */
public boolean all(final Predicate<TSource> predicate) {
    if (predicate == null) {
        throw new IllegalArgumentException("predicate is null");
    }
    for (final TSource candidate : wrappedIterable) {
        if (!predicate.check(candidate)) {
            return false;
        }
    }
    return true;
}
/** First **/

/**
 * Returns the first element of the sequence.
 *
 * @throws IllegalStateException if the sequence is empty
 */
public TSource first() {
    final Iterator<TSource> iterator = wrappedIterable.iterator();
    if (!iterator.hasNext()) {
        throw new IllegalStateException("Sequence was empty");
    }
    return iterator.next();
}

/**
 * Returns the first element matching {@code predicate}.
 *
 * @throws IllegalArgumentException if {@code predicate} is null
 * @throws IllegalStateException if no element matches
 */
public TSource first(final Predicate<TSource> predicate) {
    if (predicate == null) {
        throw new IllegalArgumentException("predicate is null");
    }
    for (final TSource candidate : wrappedIterable) {
        if (predicate.check(candidate)) {
            return candidate;
        }
    }
    throw new IllegalStateException("No elements matched the predicate");
}
/** Last **/

/**
 * Returns the last element of the sequence.
 *
 * <p>Fix: enumerates with a single iterator instead of calling
 * {@code iterator()} twice (once for hasNext, once for the for-each), so
 * one-shot iterables are handled correctly.
 *
 * @throws IllegalStateException if the sequence is empty
 */
public TSource last() {
    final Iterator<TSource> iterator = wrappedIterable.iterator();
    if (!iterator.hasNext()) {
        throw new IllegalStateException("Sequence was empty");
    }
    TSource latest = iterator.next();
    while (iterator.hasNext()) {
        latest = iterator.next();
    }
    return latest;
}

/**
 * Returns the last element matching {@code predicate}.
 *
 * <p>Fix: tracks an explicit "found" flag instead of using null as a
 * sentinel, so a sequence whose last matching element is null no longer
 * reports a false "no match".
 *
 * @throws IllegalArgumentException if {@code predicate} is null
 * @throws IllegalStateException if no element matches
 */
public TSource last(final Predicate<TSource> predicate) {
    if (predicate == null) {
        throw new IllegalArgumentException("predicate is null");
    }
    boolean found = false;
    TSource latest = null;
    for (final TSource candidate : wrappedIterable) {
        if (predicate.check(candidate)) {
            found = true;
            latest = candidate;
        }
    }
    if (!found) {
        throw new IllegalStateException("No elements matched the predicate");
    }
    return latest;
}
/** Single **/

/**
 * Returns the only element of the sequence.
 *
 * @throws IllegalStateException if the sequence is empty or has more than
 *         one element
 */
public TSource single() {
    final Iterator<TSource> iterator = wrappedIterable.iterator();
    if (!iterator.hasNext()) {
        throw new IllegalStateException("Sequence was empty");
    }
    final TSource returnVal = iterator.next();
    if (iterator.hasNext()) {
        throw new IllegalStateException(
                "More than a single element in the sequence");
    }
    return returnVal;
}

/**
 * Returns the only element matching {@code predicate}.
 *
 * <p>Fix: tracks an explicit "found" flag instead of a null sentinel. The
 * original silently accepted multiple matching null elements and then
 * reported "no match"; null matches are now counted like any other element.
 *
 * @throws IllegalArgumentException if {@code predicate} is null
 * @throws IllegalStateException if zero or more than one element matches
 */
public TSource single(final Predicate<TSource> predicate) {
    if (predicate == null) {
        throw new IllegalArgumentException("predicate is null");
    }
    boolean found = false;
    TSource returnCandidate = null;
    for (final TSource candidate : wrappedIterable) {
        if (predicate.check(candidate)) {
            if (found) {
                throw new IllegalStateException(
                        "More than a single element in the sequence");
            }
            found = true;
            returnCandidate = candidate;
        }
    }
    if (!found) {
        throw new IllegalStateException("No elements matched the predicate");
    }
    return returnCandidate;
}
/** Aggregate **/

/**
 * Reduces the sequence with {@code func}, seeding with the first element.
 *
 * @throws IllegalArgumentException if {@code func} is null
 * @throws IllegalStateException if the sequence is empty
 */
public TSource aggregate(
        final DualTranslator<TSource, TSource, TSource> func) {
    if (func == null) {
        throw new IllegalArgumentException("func is null");
    }
    final Iterator<TSource> iterator = wrappedIterable.iterator();
    if (!iterator.hasNext()) {
        throw new IllegalStateException("source JEnumerable was empty");
    }
    TSource accumulator = iterator.next();
    while (iterator.hasNext()) {
        accumulator = func.translate(accumulator, iterator.next());
    }
    return accumulator;
}

/**
 * Reduces the sequence with {@code func}, starting from {@code seed}.
 * Delegates to the three-argument overload with an identity result selector.
 */
public <TAccumulate> TAccumulate aggregate(final TAccumulate seed,
        final DualTranslator<TAccumulate, TSource, TAccumulate> func) {
    final Translator<TAccumulate, TAccumulate> identity =
            new Translator<TAccumulate, TAccumulate>() {
                @Override
                public TAccumulate translate(final TAccumulate source) {
                    return source;
                }
            };
    return this.aggregate(seed, func, identity);
}

/**
 * Reduces the sequence from {@code seed} and maps the final accumulator
 * through {@code resultSelector}.
 *
 * @throws IllegalArgumentException if {@code func} or
 *         {@code resultSelector} is null
 */
public <TAccumulate, TResult> TResult aggregate(final TAccumulate seed,
        final DualTranslator<TAccumulate, TSource, TAccumulate> func,
        final Translator<TAccumulate, TResult> resultSelector) {
    if (func == null) {
        throw new IllegalArgumentException("func is null");
    }
    if (resultSelector == null) {
        throw new IllegalArgumentException("resultSelector is null");
    }
    TAccumulate accumulator = seed;
    for (final TSource item : wrappedIterable) {
        accumulator = func.translate(accumulator, item);
    }
    return resultSelector.translate(accumulator);
}
/** Distinct **/

/** Removes duplicate elements using the default equality comparer. */
public JEnumerable<TSource> distinct() {
    return distinct(new DefaultEqualityComparer<TSource>());
}

/** Removes duplicate elements using the supplied comparer. */
public JEnumerable<TSource> distinct(
        final EqualityComparer<TSource> comparer) {
    return new JEnumerable<TSource>(new DistinctIterable<TSource>(
            wrappedIterable, comparer));
}

/** Union **/

/**
 * Set union of this sequence and {@code second}, using the default comparer.
 *
 * @throws IllegalArgumentException if {@code second} is null
 */
public JEnumerable<TSource> union(final JEnumerable<TSource> second) {
    return union(second, new DefaultEqualityComparer<TSource>());
}

/**
 * Set union of this sequence and {@code second}.
 *
 * @throws IllegalArgumentException if {@code second} is null
 */
public JEnumerable<TSource> union(final JEnumerable<TSource> second,
        final EqualityComparer<TSource> comparer) {
    if (second == null) {
        throw new IllegalArgumentException("second sequence is null");
    }
    return new JEnumerable<TSource>(new UnionIterable<TSource>(
            wrappedIterable, second, comparer));
}

/** Intersect **/

/**
 * Set intersection of this sequence and {@code second}, default comparer.
 *
 * @throws IllegalArgumentException if {@code second} is null
 */
public JEnumerable<TSource> intersect(final JEnumerable<TSource> second) {
    return intersect(second, new DefaultEqualityComparer<TSource>());
}

/**
 * Set intersection of this sequence and {@code second}.
 *
 * @throws IllegalArgumentException if {@code second} is null
 */
public JEnumerable<TSource> intersect(final JEnumerable<TSource> second,
        final EqualityComparer<TSource> comparer) {
    if (second == null) {
        throw new IllegalArgumentException("second sequence is null");
    }
    return new JEnumerable<TSource>(new IntersectIterable<TSource>(
            wrappedIterable, second, comparer));
}

/** Except **/

/**
 * Set difference: elements of this sequence not in {@code second},
 * default comparer.
 *
 * @throws IllegalArgumentException if {@code second} is null
 */
public JEnumerable<TSource> except(final JEnumerable<TSource> second) {
    return except(second, new DefaultEqualityComparer<TSource>());
}

/**
 * Set difference: elements of this sequence not in {@code second}.
 *
 * @throws IllegalArgumentException if {@code second} is null
 */
public JEnumerable<TSource> except(final JEnumerable<TSource> second,
        final EqualityComparer<TSource> comparer) {
    if (second == null) {
        throw new IllegalArgumentException("second sequence is null");
    }
    return new JEnumerable<TSource>(new ExceptIterable<TSource>(
            wrappedIterable, second, comparer));
}
/** ToLookup **/
// These stubs now throw the standard UnsupportedOperationException instead of
// sun.reflect.generics.reflectiveObjects.NotImplementedException, a
// JDK-internal class that is inaccessible on JDK 9+.

/** Not implemented yet. */
public <TKey> Lookup<TKey, TSource> toLookup(
        final Translator<TSource, TKey> keySelector) {
    throw new UnsupportedOperationException("toLookup is not implemented yet"); // TODO
}

/** Not implemented yet. */
public <TKey> Lookup<TKey, TSource> toLookup(
        final Translator<TSource, TKey> keySelector,
        final EqualityComparer<TKey> comparer) {
    throw new UnsupportedOperationException("toLookup is not implemented yet"); // TODO
}

/** Not implemented yet. */
public <TKey, TElement> Lookup<TKey, TElement> toLookup(
        final Translator<TSource, TKey> keySelector,
        final Translator<TSource, TElement> elementSelector) {
    throw new UnsupportedOperationException("toLookup is not implemented yet"); // TODO
}

/** Not implemented yet. */
public <TKey, TElement> Lookup<TKey, TElement> toLookup(
        final Translator<TSource, TKey> keySelector,
        final Translator<TSource, TElement> elementSelector,
        final EqualityComparer<TKey> comparer) {
    throw new UnsupportedOperationException("toLookup is not implemented yet"); // TODO
}

/** Join **/
// TOuter is the same as TSource. In java we can't choose a different name
// so sticking with TSource
// NOTE(review): innerKeySelector is typed Translator<TSource, TKey>; it
// probably should be Translator<TInner, TKey> — confirm before implementing
// (left unchanged here to preserve the public signature).

/** Not implemented yet. */
public <TInner, TKey, TResult> JEnumerable<TResult> join(
        final JEnumerable<TInner> inner,
        final Translator<TSource, TKey> outerKeySelector,
        final Translator<TSource, TKey> innerKeySelector,
        final DualTranslator<TSource, TInner, TResult> resultSelector) {
    throw new UnsupportedOperationException("join is not implemented yet"); // TODO
}

/** Not implemented yet. */
public <TInner, TKey, TResult> JEnumerable<TResult> join(
        final JEnumerable<TInner> inner,
        final Translator<TSource, TKey> outerKeySelector,
        final Translator<TSource, TKey> innerKeySelector,
        final DualTranslator<TSource, TInner, TResult> resultSelector,
        final EqualityComparer<TKey> comparer) {
    throw new UnsupportedOperationException("join is not implemented yet"); // TODO
}
/** ToList **/

/** Materializes the sequence into a new mutable list. */
public List<TSource> toList() {
    final List<TSource> result = new ArrayList<TSource>();
    final Iterator<TSource> iterator = wrappedIterable.iterator();
    while (iterator.hasNext()) {
        result.add(iterator.next());
    }
    return result;
}
/** GroupBy **/
// These stubs now throw the standard UnsupportedOperationException instead of
// the JDK-internal NotImplementedException (inaccessible on JDK 9+).
// NOTE(review): the elementSelector overloads return
// JEnumerable<Grouping<TKey, TSource>> rather than ...TElement>; that looks
// like a declaration slip — confirm before implementing (signatures are left
// unchanged here to stay source-compatible).

/** Not implemented yet. */
public <TKey> JEnumerable<Grouping<TKey, TSource>> groupBy(
        final Translator<TSource, TKey> keySelector) {
    throw new UnsupportedOperationException("groupBy is not implemented yet"); // TODO
}

/** Not implemented yet. */
public <TKey> JEnumerable<Grouping<TKey, TSource>> groupBy(
        final Translator<TSource, TKey> keySelector,
        final EqualityComparer<TKey> comparer) {
    throw new UnsupportedOperationException("groupBy is not implemented yet"); // TODO
}

/** Not implemented yet. */
public <TKey, TElement> JEnumerable<Grouping<TKey, TSource>> groupBy(
        final Translator<TSource, TKey> keySelector,
        final Translator<TSource, TElement> elementSelector) {
    throw new UnsupportedOperationException("groupBy is not implemented yet"); // TODO
}

/** Not implemented yet. */
public <TKey, TElement> JEnumerable<Grouping<TKey, TSource>> groupBy(
        final Translator<TSource, TKey> keySelector,
        final Translator<TSource, TElement> elementSelector,
        final EqualityComparer<TKey> comparer) {
    throw new UnsupportedOperationException("groupBy is not implemented yet"); // TODO
}

/** Not implemented yet. */
public <TKey, TResult> JEnumerable<TResult> groupBy(
        final Translator<TSource, TKey> keySelector,
        final DualTranslator<TKey, JEnumerable<TSource>, TResult> resultSelector) {
    throw new UnsupportedOperationException("groupBy is not implemented yet"); // TODO
}

/** Not implemented yet. */
public <TKey, TResult> JEnumerable<TResult> groupBy(
        final Translator<TSource, TKey> keySelector,
        final DualTranslator<TKey, JEnumerable<TSource>, TResult> resultSelector,
        final EqualityComparer<TKey> comparer) {
    throw new UnsupportedOperationException("groupBy is not implemented yet"); // TODO
}

/** Not implemented yet. */
public <TKey, TElement, TResult> JEnumerable<TResult> groupBy(
        final Translator<TSource, TKey> keySelector,
        final Translator<TSource, TElement> elementSelector,
        final DualTranslator<TKey, JEnumerable<TSource>, TResult> resultSelector) {
    throw new UnsupportedOperationException("groupBy is not implemented yet"); // TODO
}

/** Not implemented yet. */
public <TKey, TElement, TResult> JEnumerable<TResult> groupBy(
        final Translator<TSource, TKey> keySelector,
        final Translator<TSource, TElement> elementSelector,
        final DualTranslator<TKey, JEnumerable<TSource>, TResult> resultSelector,
        final EqualityComparer<TKey> comparer) {
    throw new UnsupportedOperationException("groupBy is not implemented yet"); // TODO
}

/** GroupJoin **/

/** Not implemented yet. */
public <TInner, TKey, TResult> JEnumerable<TResult> groupJoin(
        final JEnumerable<TInner> inner,
        final Translator<TSource, TKey> outerKeySelector,
        final Translator<TInner, TKey> innerKeySelector,
        final DualTranslator<TSource, JEnumerable<TInner>, TResult> resultSelector) {
    throw new UnsupportedOperationException("groupJoin is not implemented yet"); // TODO
}

/** Not implemented yet. */
public <TInner, TKey, TResult> JEnumerable<TResult> groupJoin(
        final JEnumerable<TInner> inner,
        final Translator<TSource, TKey> outerKeySelector,
        final Translator<TInner, TKey> innerKeySelector,
        final DualTranslator<TSource, JEnumerable<TInner>, TResult> resultSelector,
        final EqualityComparer<TKey> comparer) {
    throw new UnsupportedOperationException("groupJoin is not implemented yet"); // TODO
}
/** Take **/

/** Yields at most the first {@code count} elements of the sequence. */
public JEnumerable<TSource> take(final int count) {
    return new JEnumerable<TSource>(
            new TakeIterable<TSource>(count, wrappedIterable));
}

/** TakeWhile **/

/**
 * Yields elements while {@code predicate} holds; stops at the first failure.
 *
 * @throws IllegalArgumentException if {@code predicate} is null
 */
public JEnumerable<TSource> takeWhile(final Predicate<TSource> predicate) {
    if (predicate == null) {
        throw new IllegalArgumentException("predicate is null");
    }
    // Adapt to the index-aware overload, ignoring the element index.
    return takeWhile(new IndexPredicate<TSource>() {
        @Override
        public boolean check(final TSource source, final Integer index) {
            return predicate.check(source);
        }
    });
}

/**
 * Index-aware takeWhile.
 *
 * @throws IllegalArgumentException if {@code predicate} is null
 */
public JEnumerable<TSource> takeWhile(
        final IndexPredicate<TSource> predicate) {
    if (predicate == null) {
        throw new IllegalArgumentException("predicate is null");
    }
    return new JEnumerable<TSource>(
            new TakeWhileIterable<TSource>(wrappedIterable, predicate));
}

/** Skip **/

/** Bypasses the first {@code count} elements and yields the remainder. */
public JEnumerable<TSource> skip(final int count) {
    return new JEnumerable<TSource>(
            new SkipIterable<TSource>(count, wrappedIterable));
}

/** SkipWhile **/

/**
 * Bypasses elements while {@code predicate} holds, then yields the rest.
 *
 * @throws IllegalArgumentException if {@code predicate} is null
 */
public JEnumerable<TSource> skipWhile(final Predicate<TSource> predicate) {
    if (predicate == null) {
        throw new IllegalArgumentException("predicate is null");
    }
    // Adapt to the index-aware overload, ignoring the element index.
    return skipWhile(new IndexPredicate<TSource>() {
        @Override
        public boolean check(final TSource source, final Integer index) {
            return predicate.check(source);
        }
    });
}

/**
 * Index-aware skipWhile.
 *
 * @throws IllegalArgumentException if {@code predicate} is null
 */
public JEnumerable<TSource> skipWhile(
        final IndexPredicate<TSource> predicate) {
    if (predicate == null) {
        throw new IllegalArgumentException("predicate is null");
    }
    return new JEnumerable<TSource>(
            new SkipWhileIterable<TSource>(wrappedIterable, predicate));
}
/** ToArray **/

/**
 * Copies the sequence into an {@code Object[]}.
 *
 * <p>Type erasure prevents returning a {@code TSource[]} from a no-argument
 * method; a correctly-typed array would require a {@code Class<TSource>}
 * parameter and {@code java.lang.reflect.Array.newInstance(clazz, size)}.
 */
public Object[] toArray() {
    return this.toList().toArray();
}
/** ToDictionary **/
// These stubs now throw the standard UnsupportedOperationException instead of
// the JDK-internal NotImplementedException (inaccessible on JDK 9+).
// NOTE(review): java.util.Dictionary is obsolete; consider java.util.Map for
// the eventual implementation — would require a signature change.

/** Not implemented yet. */
public <TKey> Dictionary<TKey, TSource> toDictionary(
        final Translator<TSource, TKey> keySelector) {
    throw new UnsupportedOperationException("toDictionary is not implemented yet"); // TODO
}

/** Not implemented yet. */
public <TKey, TElement> Dictionary<TKey, TElement> toDictionary(
        final Translator<TSource, TKey> keySelector,
        final Translator<TSource, TElement> elementSelector) {
    throw new UnsupportedOperationException("toDictionary is not implemented yet"); // TODO
}

/** Not implemented yet. */
public <TKey> Dictionary<TKey, TSource> toDictionary(
        final Translator<TSource, TKey> keySelector,
        final EqualityComparer<TKey> comparer) {
    throw new UnsupportedOperationException("toDictionary is not implemented yet"); // TODO
}

/** Not implemented yet. */
public <TKey, TElement> Dictionary<TKey, TElement> toDictionary(
        final Translator<TSource, TKey> keySelector,
        final Translator<TSource, TElement> elementSelector,
        final EqualityComparer<TKey> comparer) {
    throw new UnsupportedOperationException("toDictionary is not implemented yet"); // TODO
}

/** OrderBy **/

/** Not implemented yet. */
public <TKey> OrderedEnumerable<TSource> orderBy(
        final Translator<TSource, TKey> keySelector) {
    throw new UnsupportedOperationException("orderBy is not implemented yet"); // TODO
}

/** Not implemented yet. */
public <TKey> OrderedEnumerable<TSource> orderBy(
        final Translator<TSource, TKey> keySelector,
        final Comparator<TKey> comparer) {
    throw new UnsupportedOperationException("orderBy is not implemented yet"); // TODO
}

/** Not implemented yet. */
public <TKey> OrderedEnumerable<TSource> orderByDescending(
        final Translator<TSource, TKey> keySelector) {
    throw new UnsupportedOperationException("orderByDescending is not implemented yet"); // TODO
}

/** Not implemented yet. */
public <TKey> OrderedEnumerable<TSource> orderByDescending(
        final Translator<TSource, TKey> keySelector,
        final Comparator<TKey> comparer) {
    throw new UnsupportedOperationException("orderByDescending is not implemented yet"); // TODO
}

/** ThenBy can be found on the OrderedEnumerable interface **/

/** Reverse **/

/** Not implemented yet. */
public JEnumerable<TSource> reverse() {
    throw new UnsupportedOperationException("reverse is not implemented yet"); // TODO
}
/** Sum **/
// All of the numeric methods need special translators to prevent type
// erasure from giving the methods identical signatures...annoying...
// NOTE(review): selector-less sum()/average() cannot be expressed until there
// is a way to constrain TSource to a numeric type.
// These stubs now throw the standard UnsupportedOperationException instead of
// the JDK-internal NotImplementedException (inaccessible on JDK 9+).

/** Not implemented yet. */
public Integer sum(final IntegerTranslator<TSource> selector) {
    throw new UnsupportedOperationException("sum is not implemented yet"); // TODO
}

/** Not implemented yet. */
public Long sum(final LongTranslator<TSource> selector) {
    throw new UnsupportedOperationException("sum is not implemented yet"); // TODO
}

/** Not implemented yet. */
public Float sum(final FloatTranslator<TSource> selector) {
    throw new UnsupportedOperationException("sum is not implemented yet"); // TODO
}

/** Not implemented yet. */
public Double sum(final DoubleTranslator<TSource> selector) {
    throw new UnsupportedOperationException("sum is not implemented yet"); // TODO
}

/** Min **/

/** Not implemented yet. */
public Integer min() {
    throw new UnsupportedOperationException("min is not implemented yet"); // TODO
}

/** Not implemented yet. */
public Integer min(final IntegerTranslator<TSource> selector) {
    throw new UnsupportedOperationException("min is not implemented yet"); // TODO
}

/** Not implemented yet. */
public Long min(final LongTranslator<TSource> selector) {
    throw new UnsupportedOperationException("min is not implemented yet"); // TODO
}

/** Not implemented yet. */
public Float min(final FloatTranslator<TSource> selector) {
    throw new UnsupportedOperationException("min is not implemented yet"); // TODO
}

/** Not implemented yet. */
public Double min(final DoubleTranslator<TSource> selector) {
    throw new UnsupportedOperationException("min is not implemented yet"); // TODO
}

/** Max **/

/** Not implemented yet. */
public Integer max() {
    throw new UnsupportedOperationException("max is not implemented yet"); // TODO
}

/** Not implemented yet. */
public Integer max(final IntegerTranslator<TSource> selector) {
    throw new UnsupportedOperationException("max is not implemented yet"); // TODO
}

/** Not implemented yet. */
public Long max(final LongTranslator<TSource> selector) {
    throw new UnsupportedOperationException("max is not implemented yet"); // TODO
}

/** Not implemented yet. */
public Float max(final FloatTranslator<TSource> selector) {
    throw new UnsupportedOperationException("max is not implemented yet"); // TODO
}

/** Not implemented yet. */
public Double max(final DoubleTranslator<TSource> selector) {
    throw new UnsupportedOperationException("max is not implemented yet"); // TODO
}

/** Average **/

/** Not implemented yet. */
public Double average(final IntegerTranslator<TSource> selector) {
    throw new UnsupportedOperationException("average is not implemented yet"); // TODO
}

/** Not implemented yet. */
public Double average(final LongTranslator<TSource> selector) {
    throw new UnsupportedOperationException("average is not implemented yet"); // TODO
}

/** Not implemented yet. */
public Float average(final FloatTranslator<TSource> selector) {
    throw new UnsupportedOperationException("average is not implemented yet"); // TODO
}

/** Not implemented yet. */
public Double average(final DoubleTranslator<TSource> selector) {
    throw new UnsupportedOperationException("average is not implemented yet"); // TODO
}

/** Contains **/

/** Not implemented yet. */
public boolean contains(final TSource value) {
    throw new UnsupportedOperationException("contains is not implemented yet"); // TODO
}

/** Not implemented yet. */
public boolean contains(final TSource value,
        final EqualityComparer<TSource> comparer) {
    throw new UnsupportedOperationException("contains is not implemented yet"); // TODO
}

/** SequenceEqual **/

/** Not implemented yet. */
public boolean sequenceEqual(final JEnumerable<TSource> second) {
    throw new UnsupportedOperationException("sequenceEqual is not implemented yet"); // TODO
}

/** Not implemented yet. */
public boolean sequenceEqual(final JEnumerable<TSource> second,
        final EqualityComparer<TSource> comparer) {
    throw new UnsupportedOperationException("sequenceEqual is not implemented yet"); // TODO
}

/** Zip **/

/** Not implemented yet. */
public <TSecond, TResult> JEnumerable<TResult> zip(
        final JEnumerable<TSecond> second,
        final DualTranslator<TSource, TSecond, TResult> resultSelector) {
    throw new UnsupportedOperationException("zip is not implemented yet"); // TODO
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* @author Vladimir N. Molotkov
* @version $Revision$
*/
package org.apache.harmony.security.tests.java.security.cert;
import java.security.InvalidKeyException;
import java.security.NoSuchAlgorithmException;
import java.security.NoSuchProviderException;
import java.security.SignatureException;
import java.security.cert.Certificate;
import java.security.cert.CertificateEncodingException;
import java.security.cert.CertificateException;
import org.apache.harmony.security.tests.support.cert.MyCertificate;
import junit.framework.TestCase;
/**
* Tests for <code>Certificate</code> fields and methods
*
*/
/**
 * Tests for <code>Certificate</code> fields and methods
 *
 */
public class CertificateTest extends TestCase {

    /**
     * Meaningless cert encoding just for testing purposes
     */
    private static final byte[] testEncoding = new byte[] {
            (byte) 1, (byte) 2, (byte) 3, (byte) 4, (byte) 5
    };

    /**
     * Constructor for CertificateTest.
     * @param name the test name, forwarded to JUnit
     */
    public CertificateTest(String name) {
        super(name);
    }

    /** Builds a fresh stub certificate with the shared test type and encoding. */
    private static Certificate createTestCertificate() {
        return new MyCertificate("TEST_TYPE", testEncoding);
    }

    //
    // Tests
    //

    /**
     * Test for <code>hashCode()</code> method<br>
     * Assertion: returns hash of the <code>Certificate</code> instance
     */
    public final void testHashCode() {
        Certificate first = createTestCertificate();
        Certificate second = createTestCertificate();
        assertEquals(first.hashCode(), second.hashCode());
    }

    /**
     * Test for <code>hashCode()</code> method<br>
     * Assertion: hash code of equal objects should be the same
     */
    public final void testHashCodeEqualsObject() {
        Certificate first = createTestCertificate();
        Certificate second = createTestCertificate();
        assertTrue((first.hashCode() == second.hashCode()) && first.equals(second));
    }

    /**
     * Test for <code>getType()</code> method<br>
     * Assertion: returns this certificate type
     */
    public final void testGetType() {
        assertEquals("TEST_TYPE", createTestCertificate().getType());
    }

    /**
     * Test #1 for <code>equals(Object)</code> method<br>
     * Assertion: object equals to itself
     */
    public final void testEqualsObject01() {
        Certificate cert = createTestCertificate();
        assertTrue(cert.equals(cert));
    }

    /**
     * Test for <code>equals(Object)</code> method<br>
     * Assertion: object equals to other <code>Certificate</code>
     * instance with the same state
     */
    public final void testEqualsObject02() {
        Certificate first = createTestCertificate();
        Certificate second = createTestCertificate();
        assertTrue(first.equals(second) && second.equals(first));
    }

    /**
     * Test for <code>equals(Object)</code> method<br>
     * Assertion: object not equals to <code>null</code>
     */
    public final void testEqualsObject03() {
        assertFalse(createTestCertificate().equals(null));
    }

    /**
     * Test for <code>equals(Object)</code> method<br>
     * Assertion: object not equals to other which is not
     * instance of <code>Certificate</code>
     */
    public final void testEqualsObject04() {
        assertFalse(createTestCertificate().equals("TEST_TYPE"));
    }

    //
    // the following tests just call methods
    // that are abstract in <code>Certificate</code>
    // (So they just like signature tests)
    //

    /**
     * This test just calls <code>getEncoded()</code> method<br>
     * @throws CertificateEncodingException
     */
    public final void testGetEncoded() throws CertificateEncodingException {
        createTestCertificate().getEncoded();
    }

    /**
     * This test just calls <code>verify(PublicKey)</code> method<br>
     *
     * @throws InvalidKeyException
     * @throws CertificateException
     * @throws NoSuchAlgorithmException
     * @throws NoSuchProviderException
     * @throws SignatureException
     */
    public final void testVerifyPublicKey()
            throws InvalidKeyException,
            CertificateException,
            NoSuchAlgorithmException,
            NoSuchProviderException,
            SignatureException {
        createTestCertificate().verify(null);
    }

    /**
     * This test just calls <code>verify(PublicKey,String)</code> method<br>
     *
     * @throws InvalidKeyException
     * @throws CertificateException
     * @throws NoSuchAlgorithmException
     * @throws NoSuchProviderException
     * @throws SignatureException
     */
    public final void testVerifyPublicKeyString()
            throws InvalidKeyException,
            CertificateException,
            NoSuchAlgorithmException,
            NoSuchProviderException,
            SignatureException {
        createTestCertificate().verify(null, null);
    }

    /**
     * This test just calls <code>toString()</code> method<br>
     */
    public final void testToString() {
        createTestCertificate().toString();
    }

    /**
     * This test just calls <code>testGetPublicKey()</code> method<br>
     */
    public final void testGetPublicKey() {
        createTestCertificate().getPublicKey();
    }
}
| |
package org.knowm.xchange.cexio;
import java.math.BigDecimal;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import org.knowm.xchange.cexio.dto.account.CexIOBalance;
import org.knowm.xchange.cexio.dto.account.CexIOBalanceInfo;
import org.knowm.xchange.cexio.dto.marketdata.CexIODepth;
import org.knowm.xchange.cexio.dto.marketdata.CexIOTicker;
import org.knowm.xchange.cexio.dto.marketdata.CexIOTrade;
import org.knowm.xchange.cexio.dto.trade.CexIOOrder;
import org.knowm.xchange.currency.Currency;
import org.knowm.xchange.currency.CurrencyPair;
import org.knowm.xchange.dto.Order;
import org.knowm.xchange.dto.account.Balance;
import org.knowm.xchange.dto.account.Wallet;
import org.knowm.xchange.dto.marketdata.OrderBook;
import org.knowm.xchange.dto.marketdata.Ticker;
import org.knowm.xchange.dto.marketdata.Trade;
import org.knowm.xchange.dto.marketdata.Trades;
import org.knowm.xchange.dto.marketdata.Trades.TradeSortType;
import org.knowm.xchange.dto.trade.LimitOrder;
import org.knowm.xchange.dto.trade.OpenOrders;
import org.knowm.xchange.utils.DateUtils;
/**
* Author: brox Since: 2/6/14
*/
public class CexIOAdapters {
/**
 * Adapts a CexIOTrade to a Trade Object
 *
 * @param trade CexIO trade object
 * @param currencyPair trade currencies
 * @return The XChange Trade
 */
public static Trade adaptTrade(CexIOTrade trade, CurrencyPair currencyPair) {
    // Cex.IO reports the trade time in epoch seconds; convert to millis.
    Date timestamp = DateUtils.fromMillisUtc(trade.getDate() * 1000L);
    // Cex.IO API does not return trade type
    return new Trade(null, trade.getAmount(), currencyPair, trade.getPrice(), timestamp,
        String.valueOf(trade.getTid()));
}
/**
 * Adapts a CexIOTrade[] to a Trades Object
 *
 * @param cexioTrades The CexIO trade data returned by API
 * @param currencyPair trade currencies
 * @return The trades
 */
public static Trades adaptTrades(CexIOTrade[] cexioTrades, CurrencyPair currencyPair) {
    List<Trade> tradesList = new ArrayList<Trade>();
    long lastTradeId = 0;
    for (CexIOTrade cexioTrade : cexioTrades) {
        // Track the highest trade id seen so far.
        lastTradeId = Math.max(lastTradeId, cexioTrade.getTid());
        // Date is reversed order. Insert at index 0 instead of appending
        tradesList.add(0, adaptTrade(cexioTrade, currencyPair));
    }
    return new Trades(tradesList, lastTradeId, TradeSortType.SortByID);
}
/**
 * Adapts a CexIOTicker to a Ticker Object
 *
 * @param ticker The exchange specific ticker
 * @param currencyPair The currency pair (e.g. BTC/USD)
 * @return The ticker
 */
public static Ticker adaptTicker(CexIOTicker ticker, CurrencyPair currencyPair) {
    // Cex.IO reports the ticker time in epoch seconds; convert to millis.
    Date timestamp = new Date(ticker.getTimestamp() * 1000L);
    return new Ticker.Builder()
        .currencyPair(currencyPair)
        .last(ticker.getLast())
        .bid(ticker.getBid())
        .ask(ticker.getAsk())
        .high(ticker.getHigh())
        .low(ticker.getLow())
        .volume(ticker.getVolume())
        .timestamp(timestamp)
        .build();
}
/**
 * Adapts Cex.IO Depth to OrderBook Object
 *
 * @param depth Cex.IO order book
 * @param currencyPair The currency pair (e.g. BTC/USD)
 * @return The XChange OrderBook
 */
public static OrderBook adaptOrderBook(CexIODepth depth, CurrencyPair currencyPair) {
    List<LimitOrder> asks = createOrders(currencyPair, Order.OrderType.ASK, depth.getAsks());
    List<LimitOrder> bids = createOrders(currencyPair, Order.OrderType.BID, depth.getBids());
    // Use a long multiplier (1000L) like the other adapters in this class: if
    // getTimestamp() yields an int, 'timestamp * 1000' would overflow 32-bit
    // arithmetic before widening to long.
    Date date = new Date(depth.getTimestamp() * 1000L);
    return new OrderBook(date, asks, bids);
}
/**
 * Adapts CexIOBalanceInfo to a Wallet.
 *
 * <p>Only currencies actually present in the CexIO response are adapted; the
 * DTO getters return {@code null} for currencies the account does not hold.
 *
 * @param balance CexIOBalanceInfo balance
 * @return the account wallet
 */
public static Wallet adaptWallet(CexIOBalanceInfo balance) {
  List<Balance> balances = new ArrayList<Balance>();
  addBalanceIfPresent(balances, Currency.BTC, balance.getBalanceBTC());
  addBalanceIfPresent(balances, Currency.LTC, balance.getBalanceLTC());
  addBalanceIfPresent(balances, Currency.NMC, balance.getBalanceNMC());
  addBalanceIfPresent(balances, Currency.IXC, balance.getBalanceIXC());
  addBalanceIfPresent(balances, Currency.DVC, balance.getBalanceDVC());
  addBalanceIfPresent(balances, Currency.GHs, balance.getBalanceGHS());
  addBalanceIfPresent(balances, Currency.USD, balance.getBalanceUSD());
  addBalanceIfPresent(balances, Currency.DRK, balance.getBalanceDRK());
  addBalanceIfPresent(balances, Currency.EUR, balance.getBalanceEUR());
  addBalanceIfPresent(balances, Currency.DOGE, balance.getBalanceDOGE());
  addBalanceIfPresent(balances, Currency.FTC, balance.getBalanceFTC());
  addBalanceIfPresent(balances, Currency.MEC, balance.getBalanceMEC());
  addBalanceIfPresent(balances, Currency.WDC, balance.getBalanceWDC());
  addBalanceIfPresent(balances, Currency.MYR, balance.getBalanceMYR());
  addBalanceIfPresent(balances, Currency.AUR, balance.getBalanceAUR());
  addBalanceIfPresent(balances, Currency.POT, balance.getBalancePOT());
  addBalanceIfPresent(balances, Currency.ANC, balance.getBalanceANC());
  addBalanceIfPresent(balances, Currency.DGB, balance.getBalanceDGB());
  addBalanceIfPresent(balances, Currency.USDE, balance.getBalanceUSDE());
  return new Wallet(balances);
}

/** Adds the adapted balance to the list when the raw balance is present (non-null). */
private static void addBalanceIfPresent(List<Balance> balances, Currency currency, CexIOBalance balance) {
  if (balance != null) {
    balances.add(adaptBalance(currency, balance));
  }
}
/**
 * Adapts a raw CexIO balance for a single currency to a generic Balance.
 * The total is passed as null; only the available and the in-orders (frozen)
 * amounts are taken from the raw balance.
 */
public static Balance adaptBalance(Currency currency, CexIOBalance balance) {
  BigDecimal available = balance.getAvailable();
  BigDecimal inOrders = balance.getOrders();
  return new Balance(currency, null, available, inOrders);
}
/**
 * Converts a raw list of (price, amount) pairs into limit orders of the given type.
 *
 * @throws IllegalArgumentException if any entry does not have exactly two elements
 */
public static List<LimitOrder> createOrders(CurrencyPair currencyPair, Order.OrderType orderType, List<List<BigDecimal>> orders) {
  List<LimitOrder> limitOrders = new ArrayList<LimitOrder>(orders.size());
  for (List<BigDecimal> priceAndAmount : orders) {
    checkArgument(priceAndAmount.size() == 2, "Expected a pair (price, amount) but got {0} elements.", priceAndAmount.size());
    limitOrders.add(createOrder(currencyPair, priceAndAmount, orderType));
  }
  return limitOrders;
}
/** Builds a LimitOrder from a (price, amount) pair: index 0 is the price, index 1 the amount. */
public static LimitOrder createOrder(CurrencyPair currencyPair, List<BigDecimal> priceAndAmount, Order.OrderType orderType) {
  BigDecimal price = priceAndAmount.get(0);
  BigDecimal amount = priceAndAmount.get(1);
  return new LimitOrder(orderType, amount, currencyPair, "", null, price);
}
/**
 * Validates a precondition, throwing {@link IllegalArgumentException} with a
 * {@link MessageFormat}-formatted message when the condition does not hold.
 *
 * @param argument the condition that must be true
 * @param msgPattern a MessageFormat pattern for the failure message
 * @param msgArgs arguments substituted into the pattern
 */
public static void checkArgument(boolean argument, String msgPattern, Object... msgArgs) {
  if (argument) {
    return;
  }
  throw new IllegalArgumentException(MessageFormat.format(msgPattern, msgArgs));
}
/** Adapts the raw CexIO open orders to the generic OpenOrders container. */
public static OpenOrders adaptOpenOrders(List<CexIOOrder> cexIOOrderList) {
  List<LimitOrder> limitOrders = new ArrayList<LimitOrder>();
  for (CexIOOrder cexIOOrder : cexIOOrderList) {
    // CexIO "buy" maps to a BID; anything else (sell) maps to an ASK.
    Order.OrderType orderType = (cexIOOrder.getType() == CexIOOrder.Type.buy) ? Order.OrderType.BID : Order.OrderType.ASK;
    CurrencyPair pair = new CurrencyPair(cexIOOrder.getTradableIdentifier(), cexIOOrder.getTransactionCurrency());
    Date timestamp = DateUtils.fromMillisUtc(cexIOOrder.getTime());
    String orderId = Long.toString(cexIOOrder.getId());
    limitOrders.add(new LimitOrder(orderType, cexIOOrder.getPending(), pair, orderId, timestamp, cexIOOrder.getPrice()));
  }
  return new OpenOrders(limitOrders);
}
}
| |
/*
* Copyright 2017 StreamSets Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.streamsets.datacollector.definition;
import com.google.common.collect.ImmutableList;
import com.streamsets.datacollector.config.ConfigDefinition;
import com.streamsets.datacollector.config.ConfigGroupDefinition;
import com.streamsets.datacollector.config.RawSourceDefinition;
import com.streamsets.datacollector.config.StageDefinition;
import com.streamsets.datacollector.config.StageLibraryDefinition;
import com.streamsets.datacollector.config.StageType;
import com.streamsets.datacollector.creation.PipelineBeanCreator;
import com.streamsets.datacollector.creation.PipelineConfigBean;
import com.streamsets.datacollector.creation.RuleDefinitionsConfigBean;
import com.streamsets.datacollector.creation.StageConfigBean;
import com.streamsets.pipeline.api.OffsetCommitTrigger;
import com.streamsets.pipeline.api.OffsetCommitter;
import com.streamsets.pipeline.api.PipelineLifecycleStage;
import com.streamsets.pipeline.api.ProtoSource;
import com.streamsets.pipeline.api.Source;
import com.streamsets.pipeline.api.StatsAggregatorStage;
import com.streamsets.pipeline.api.ConfigGroups;
import com.streamsets.pipeline.api.ErrorStage;
import com.streamsets.pipeline.api.ExecutionMode;
import com.streamsets.pipeline.api.Executor;
import com.streamsets.pipeline.api.HideConfigs;
import com.streamsets.pipeline.api.Processor;
import com.streamsets.pipeline.api.Stage;
import com.streamsets.pipeline.api.StageDef;
import com.streamsets.pipeline.api.StageUpgrader;
import com.streamsets.pipeline.api.Target;
import com.streamsets.pipeline.api.impl.ErrorMessage;
import com.streamsets.pipeline.api.impl.Utils;
import org.apache.commons.lang3.ClassUtils;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
/**
 * Extracts and validates {@link StageDefinition}s from stage classes annotated
 * with {@link StageDef}. Stateless; use the shared instance from {@link #get()}.
 */
public abstract class StageDefinitionExtractor {

  private static final StageDefinitionExtractor EXTRACTOR = new StageDefinitionExtractor() {};

  /** Returns the shared, stateless extractor instance. */
  public static StageDefinitionExtractor get() {
    return EXTRACTOR;
  }

  /** Derives the stage name from the class name ('.' and '$' are not valid in stage names). */
  static String getStageName(Class klass) {
    return klass.getName().replace(".", "_").replace("$", "_");
  }

  /**
   * Collects the config group names declared via {@link ConfigGroups} on the stage class and
   * on all of its non-interface superclasses, preserving declaration order and de-duplicating.
   * Returns a single empty group name when no groups are declared.
   */
  public static List<String> getGroups(Class<? extends Stage> klass) {
    Set<String> set = new LinkedHashSet<>();
    addGroupsToList(klass, set);
    List<Class<?>> allSuperclasses = ClassUtils.getAllSuperclasses(klass);
    for(Class<?> superClass : allSuperclasses) {
      if(!superClass.isInterface() && superClass.isAnnotationPresent(ConfigGroups.class)) {
        addGroupsToList(superClass, set);
      }
    }
    if(set.isEmpty()) {
      set.add(""); // the default empty group
    }
    return new ArrayList<>(set);
  }

  /** Adds the enum constant names of the class's {@link ConfigGroups} annotation (if any) to the set. */
  @SuppressWarnings("unchecked")
  private static void addGroupsToList(Class<?> klass, Set<String> set) {
    ConfigGroups groups = klass.getAnnotation(ConfigGroups.class);
    if (groups != null) {
      Class<? extends Enum> groupKlass = (Class<? extends Enum>) groups.value();
      for (Enum e : groupKlass.getEnumConstants()) {
        set.add(e.name());
      }
    }
  }

  /**
   * Validates the stage class against the {@link StageDef} contract.
   *
   * @return the list of validation errors, empty when the class is a well-formed stage definition
   */
  public List<ErrorMessage> validate(StageLibraryDefinition libraryDef, Class<? extends Stage> klass, Object contextMsg) {
    List<ErrorMessage> errors = new ArrayList<>();
    contextMsg = Utils.formatL("{} Stage='{}'", contextMsg, klass.getSimpleName());
    StageDef sDef = klass.getAnnotation(StageDef.class);
    if (sDef == null) {
      errors.add(new ErrorMessage(DefinitionError.DEF_300, contextMsg));
    } else {
      if (!sDef.icon().isEmpty()) {
        if (klass.getClassLoader().getResource(sDef.icon()) == null) {
          errors.add(new ErrorMessage(DefinitionError.DEF_311, contextMsg, sDef.icon()));
        }
      }
      StageType type = extractStageType(klass);
      if (type == null) {
        errors.add(new ErrorMessage(DefinitionError.DEF_302, contextMsg));
      }
      boolean errorStage = klass.getAnnotation(ErrorStage.class) != null;
      if (type != null && errorStage && type == StageType.SOURCE) {
        errors.add(new ErrorMessage(DefinitionError.DEF_303, contextMsg));
      }
      if (OffsetCommitter.class.isAssignableFrom(klass) && !Source.class.isAssignableFrom(klass)) {
        errors.add(new ErrorMessage(DefinitionError.DEF_314, contextMsg));
      }
      if (OffsetCommitTrigger.class.isAssignableFrom(klass) && type != StageType.TARGET) {
        errors.add(new ErrorMessage(DefinitionError.DEF_312, contextMsg));
      }
      HideConfigs hideConfigs = klass.getAnnotation(HideConfigs.class);
      List<String> stageGroups = getGroups(klass);
      // BUGFIX: the config-group validation used to run twice and its errors
      // were added twice, duplicating every config-group error message.
      // It is now run exactly once.
      List<ErrorMessage> configGroupErrors = ConfigGroupExtractor.get().validate(klass, contextMsg);
      errors.addAll(configGroupErrors);
      List<ErrorMessage> configErrors = ConfigDefinitionExtractor.get().validate(klass, stageGroups, contextMsg);
      errors.addAll(configErrors);
      List<ErrorMessage> rawSourceErrors = RawSourceDefinitionExtractor.get().validate(klass, contextMsg);
      errors.addAll(rawSourceErrors);
      // A raw-source preview definition only makes sense on origins.
      if (type != null && rawSourceErrors.isEmpty() && type != StageType.SOURCE) {
        if (RawSourceDefinitionExtractor.get().extract(klass, contextMsg) != null) {
          errors.add(new ErrorMessage(DefinitionError.DEF_304, contextMsg));
        }
      }
      if (!sDef.outputStreams().isEnum()) {
        errors.add(new ErrorMessage(DefinitionError.DEF_305, contextMsg, sDef.outputStreams().getSimpleName()));
      }
      // Targets and executors have no output streams, so a custom enum is invalid for them.
      if (type != null && sDef.outputStreams() != StageDef.DefaultOutputStreams.class && type.isOneOf(StageType.TARGET, StageType.EXECUTOR)) {
        errors.add(new ErrorMessage(DefinitionError.DEF_306, contextMsg));
      }
      boolean variableOutputStreams = StageDef.VariableOutputStreams.class.isAssignableFrom(sDef.outputStreams());
      List<ExecutionMode> executionModes = ImmutableList.copyOf(sDef.execution());
      if (executionModes.isEmpty()) {
        errors.add(new ErrorMessage(DefinitionError.DEF_307, contextMsg));
      }
      String outputStreamsDrivenByConfig = sDef.outputStreamsDrivenByConfig();
      // Variable output streams require a config that drives the stream count.
      if (variableOutputStreams && outputStreamsDrivenByConfig.isEmpty()) {
        errors.add(new ErrorMessage(DefinitionError.DEF_308, contextMsg));
      }
      // Cross-checks below only make sense if the configs and groups are individually valid.
      if (configErrors.isEmpty() && configGroupErrors.isEmpty()) {
        List<ConfigDefinition> configDefs = extractConfigDefinitions(libraryDef, klass, hideConfigs, errors, contextMsg);
        ConfigGroupDefinition configGroupDef = ConfigGroupExtractor.get().extract(klass, contextMsg);
        errors.addAll(validateConfigGroups(configDefs, configGroupDef, contextMsg));
        if (variableOutputStreams) {
          boolean found = false;
          for (ConfigDefinition configDef : configDefs) {
            if (configDef.getName().equals(outputStreamsDrivenByConfig)) {
              found = true;
              break;
            }
          }
          if (!found) {
            errors.add(new ErrorMessage(DefinitionError.DEF_309, contextMsg, outputStreamsDrivenByConfig));
          }
        }
      }
    }
    return errors;
  }

  /**
   * Builds the {@link StageDefinition} for the stage class.
   *
   * @throws IllegalArgumentException if {@link #validate} reports errors
   * @throws IllegalStateException if extraction itself fails unexpectedly
   */
  public StageDefinition extract(StageLibraryDefinition libraryDef, Class<? extends Stage> klass, Object contextMsg) {
    List<ErrorMessage> errors = validate(libraryDef, klass, contextMsg);
    if (errors.isEmpty()) {
      try {
        contextMsg = Utils.formatL("{} Stage='{}'", contextMsg, klass.getSimpleName());
        StageDef sDef = klass.getAnnotation(StageDef.class);
        String name = getStageName(klass);
        int version = sDef.version();
        String label = sDef.label();
        String description = sDef.description();
        String icon = sDef.icon();
        StageType type = extractStageType(klass);
        boolean errorStage = klass.getAnnotation(ErrorStage.class) != null;
        boolean statsAggregatorStage = klass.getAnnotation(StatsAggregatorStage.class) != null;
        boolean pipelineLifecycleStage = klass.getAnnotation(PipelineLifecycleStage.class) != null;
        HideConfigs hideConfigs = klass.getAnnotation(HideConfigs.class);
        // Preconditions/on-record-error configs are implicit system configs unless hidden
        // or inapplicable (error stages, origins).
        boolean preconditions = !errorStage && type != StageType.SOURCE &&
            ((hideConfigs == null) || !hideConfigs.preconditions());
        boolean onRecordError = !errorStage && ((hideConfigs == null) || !hideConfigs.onErrorRecord());
        List<ConfigDefinition> configDefinitions = extractConfigDefinitions(libraryDef, klass, hideConfigs, new ArrayList<ErrorMessage>(), contextMsg);
        RawSourceDefinition rawSourceDefinition = RawSourceDefinitionExtractor.get().extract(klass, contextMsg);
        ConfigGroupDefinition configGroupDefinition = ConfigGroupExtractor.get().extract(klass, contextMsg);
        String outputStreamLabelProviderClass = (!type.isOneOf(StageType.TARGET, StageType.EXECUTOR)) ? sDef.outputStreams().getName() : null;
        boolean variableOutputStreams = StageDef.VariableOutputStreams.class.isAssignableFrom(sDef.outputStreams());
        int outputStreams = (variableOutputStreams || type.isOneOf(StageType.TARGET, StageType.EXECUTOR) )
            ? 0 : sDef.outputStreams().getEnumConstants().length;
        List<ExecutionMode> executionModes = ImmutableList.copyOf(sDef.execution());
        // The stage library may restrict/override the execution modes declared by the stage.
        List<ExecutionMode> executionModesLibraryOverride = libraryDef.getStageExecutionModesOverride(klass);
        if (executionModesLibraryOverride != null) {
          executionModes = executionModesLibraryOverride;
        }
        List<String> libJarsRegex = ImmutableList.copyOf(sDef.libJarsRegex());
        boolean recordsByRef = sDef.recordsByRef();
        // If not a stage library, then dont add stage system configs
        if (!PipelineBeanCreator.PIPELINE_LIB_DEFINITION.equals(libraryDef.getName())) {
          List<ConfigDefinition> systemConfigs =
              ConfigDefinitionExtractor.get().extract(StageConfigBean.class, Collections.<String> emptyList(),
                  "systemConfigs");
          for (ConfigDefinition def : systemConfigs) {
            switch (def.getName()) {
              case StageConfigBean.STAGE_PRECONDITIONS_CONFIG:
              case StageConfigBean.STAGE_REQUIRED_FIELDS_CONFIG:
                if (preconditions) {
                  configDefinitions.add(def);
                }
                break;
              case StageConfigBean.STAGE_ON_RECORD_ERROR_CONFIG:
                if (onRecordError) {
                  configDefinitions.add(def);
                }
                break;
              default:
                configDefinitions.add(def);
            }
          }
        }
        for (ConfigDefinition cDef : configDefinitions) {
          cDef.addAutoELDefinitions(libraryDef);
        }
        // This is on purpose and made difficult. The property will be:
        // -Dcom.streamsets.pipeline.stage.destination.hdfs.HdfsDTarget.no.private.classloader
        boolean privateClassLoader =
            sDef.privateClassLoader() &&
            System.getProperty(klass.getCanonicalName() + ".no.private.classloader") == null;
        StageUpgrader upgrader;
        try {
          upgrader = sDef.upgrader().newInstance();
        } catch (Exception ex) {
          throw new IllegalArgumentException(Utils.format(
              "Could not instantiate StageUpgrader for StageDefinition '{}': {}", name, ex.toString(), ex));
        }
        boolean resetOffset = sDef.resetOffset();
        String onlineHelpRefUrl = sDef.onlineHelpRefUrl();
        boolean offsetCommitController = (type == StageType.TARGET) &&
            OffsetCommitTrigger.class.isAssignableFrom(klass);
        boolean producesEvents = sDef.producesEvents();
        return new StageDefinition(
            libraryDef,
            privateClassLoader,
            klass,
            name,
            version,
            label,
            description,
            type,
            errorStage,
            preconditions,
            onRecordError,
            configDefinitions,
            rawSourceDefinition,
            icon,
            configGroupDefinition,
            variableOutputStreams,
            outputStreams,
            outputStreamLabelProviderClass,
            executionModes,
            recordsByRef,
            upgrader,
            libJarsRegex,
            resetOffset,
            onlineHelpRefUrl,
            statsAggregatorStage,
            pipelineLifecycleStage,
            offsetCommitController,
            producesEvents
        );
      } catch (Exception e) {
        throw new IllegalStateException("Exception while extracting stage definition for " + getStageName(klass), e);
      }
    } else {
      throw new IllegalArgumentException(Utils.format("Invalid StageDefinition: {}", errors));
    }
  }

  /**
   * Extracts the stage's config definitions, dropping any listed in {@link HideConfigs}.
   * Hidden names that do not match any config are reported as DEF_313 errors.
   */
  private List<ConfigDefinition> extractConfigDefinitions(StageLibraryDefinition libraryDef,
      Class<? extends Stage> klass, HideConfigs hideConfigs, List<ErrorMessage> errors, Object contextMsg) {
    List<String> stageGroups = getGroups(klass);
    List<ConfigDefinition> cDefs = ConfigDefinitionExtractor.get().extract(klass, stageGroups, contextMsg);
    Set<String> hideConfigSet = (hideConfigs != null) ?
        new HashSet<>(Arrays.asList(hideConfigs.value())) :
        Collections.<String>emptySet();
    if (!hideConfigSet.isEmpty()) {
      Iterator<ConfigDefinition> iterator = cDefs.iterator();
      while (iterator.hasNext()) {
        ConfigDefinition current = iterator.next();
        if(hideConfigSet.contains(current.getName())) {
          iterator.remove();
          hideConfigSet.remove(current.getName());
        }
      }
      // Anything left in the set named a config that does not exist.
      if(!hideConfigSet.isEmpty()) {
        for(String toHide : hideConfigSet) {
          errors.add(new ErrorMessage(DefinitionError.DEF_313, contextMsg, toHide));
        }
      }
    }
    return cDefs;
  }

  /** Determines the stage type from the interfaces the class implements, or null if unknown. */
  private StageType extractStageType(Class<? extends Stage> klass) {
    StageType type;
    if (ProtoSource.class.isAssignableFrom(klass)) {
      type = StageType.SOURCE;
    } else if (Processor.class.isAssignableFrom(klass)) {
      type = StageType.PROCESSOR;
    } else if (Executor.class.isAssignableFrom(klass)) {
      type = StageType.EXECUTOR;
    } else if (Target.class.isAssignableFrom(klass)) {
      type = StageType.TARGET;
    } else if (PipelineConfigBean.class.isAssignableFrom(klass) ||
               RuleDefinitionsConfigBean.class.isAssignableFrom(klass)) {
      type = StageType.PIPELINE;
    } else {
      type = null;
    }
    return type;
  }

  /** Verifies that every config's declared group is one of the stage's defined group names. */
  private List<ErrorMessage> validateConfigGroups(List<ConfigDefinition> configs, ConfigGroupDefinition
      groups, Object contextMsg) {
    List<ErrorMessage> errors = new ArrayList<>();
    for (ConfigDefinition config : configs) {
      if (!config.getGroup().isEmpty()) {
        if (!groups.getGroupNames().contains(config.getGroup())) {
          errors.add(new ErrorMessage(DefinitionError.DEF_310, contextMsg, config.getName(), config.getGroup()));
        }
      }
    }
    return errors;
  }
}
| |
package net.cortexmodders.atomtech.client.model.tileentity;
import java.util.HashMap;
import net.cortexmodders.atomtech.lib.ATLogger;
import net.cortexmodders.atomtech.tileentity.TileEntityCable;
import net.minecraft.client.model.ModelBase;
import net.minecraft.client.model.ModelRenderer;
import net.minecraft.tileentity.TileEntity;
import net.minecraftforge.common.ForgeDirection;
import org.lwjgl.opengl.GL11;
/**
 * Model for the cable tile entity. Builds a set of named pieces once in the
 * constructor and renders the subset that matches the cable's connections.
 */
public class ModelCable extends ModelBase
{
    /** All named cable pieces, looked up by name in renderPart(). */
    private final HashMap<String, ModelRenderer> parts;

    public ModelCable()
    {
        textureWidth = 32;
        textureHeight = 32;
        parts = new HashMap<String, ModelRenderer>();

        // Straight wire strands: each direction has a left and a right strand.
        // South/north strands are rotated 90 degrees (1.570796 rad) around Y.
        addPart("south-left", 0, 2, -8F, -1F, -1.5F, 7, 1, 1, 64, 32, 1.570796F);
        addPart("south-right", 0, 0, -8F, -1F, 0.5F, 7, 1, 1, 64, 32, 1.570796F);
        addPart("north-left", 0, 2, 1F, -1F, -1.5F, 7, 1, 1, 64, 32, 1.570796F);
        addPart("north-right", 0, 0, 1F, -1F, 0.5F, 7, 1, 1, 64, 32, 1.570796F);
        addPart("east-left", 0, 0, -8F, -1F, -1.5F, 7, 1, 1, 64, 32, 0F);
        addPart("east-right", 0, 2, -8F, -1F, 0.5F, 7, 1, 1, 64, 32, 0F);
        addPart("west-left", 0, 0, 1F, -1F, -1.5F, 7, 1, 1, 64, 32, 0F);
        addPart("west-right", 0, 2, 1F, -1F, 0.5F, 7, 1, 1, 64, 32, 0F);

        // Corner cubes joining two perpendicular wires.
        addPart("corner-south-west", 0, 0, 0.5F, -1F, 0.5F, 1, 1, 1, 32, 32, 0F);
        addPart("corner-south-east", 0, 2, -1.5F, -1F, 0.5F, 1, 1, 1, 32, 32, 0F);
        addPart("corner-north-west", 0, 0, 0.5F, -1F, -1.5F, 1, 1, 1, 32, 32, 0F);
        addPart("corner-north-east", 0, 0, -1.5F, -1F, -1.5F, 1, 1, 1, 32, 32, 0F);

        // Short connector bars that cap a wire end.
        addPart("connector-south", 0, 2, -1.5F, -1F, 0.5F, 3, 1, 1, 32, 32, 0F);
        addPart("connector-north", 0, 0, -1.5F, -1F, -1.5F, 3, 1, 1, 32, 32, 0F);
        addPart("connector-west", 0, 0, -1.5F, -1F, 0.5F, 3, 1, 1, 32, 32, 1.570796F);
        addPart("connector-east", 0, 2, -1.5F, -1F, -1.5F, 3, 1, 1, 32, 32, 1.570796F);

        // Raised crossover bars used when two wires pass over each other.
        addPart("crossover-south", 0, 0, -2.5F, -2F, -1.5F, 3, 1, 1, 32, 32, 0F);
        addPart("crossover-north", 0, 2, -0.5F, -2F, 0.5F, 3, 1, 1, 32, 32, 0F);

        // Central junction box used when three or more sides are connected.
        addPart("box", 0, 4, -2.5F, -2F, -2.5F, 5, 2, 5, 32, 32, 0F);
    }

    /**
     * Builds one model piece with the settings shared by every cable part
     * (rotation point at the origin, mirror enabled, rotation only around Y)
     * and registers it under the given name.
     */
    private void addPart(String name, int textureX, int textureY, float offsetX, float offsetY, float offsetZ,
            int width, int height, int depth, int texWidth, int texHeight, float rotationY)
    {
        ModelRenderer part = new ModelRenderer(this, textureX, textureY);
        part.addBox(offsetX, offsetY, offsetZ, width, height, depth);
        part.setRotationPoint(0F, 0F, 0F);
        part.setTextureSize(texWidth, texHeight);
        part.mirror = true;
        setRotation(part, 0F, rotationY, 0F);
        parts.put(name, part);
    }

    /**
     * Renders the cable for the given tile entity, choosing the geometry from
     * the number of connected neighbours.
     */
    public void render(TileEntityCable tile, final float scale)
    {
        TileEntity[] connections = tile.getConnections();
        int length = tile.getNumConnections();
        //GL11.glRotatef(180, -1F, 0F, 1F);
        GL11.glTranslatef(0.5F, 0F, 0.5F);
        GL11.glScalef(-1F, -1F, 1F);
        //GL11.glRotatef(180, 1F, 0F, 0F);
        if(length == 0)
        {
            // No neighbours: draw a default straight north/south segment.
            renderStraight(ForgeDirection.NORTH, scale);
        }
        else if(length == 1)
        {
            renderSingleConnection(connections, scale);
        }
        else if(length == 2)
        {
            renderAngle(connections, scale);
        }
        else if(length >= 3)
        {
            renderConjunction(connections, scale);
        }
    }

    /** Renders the named piece, logging an error if no piece with that name exists. */
    protected void renderPart(String name, float scale)
    {
        ModelRenderer part = parts.get(name);
        if(part != null)
            part.render(scale);
        else
            ATLogger.severe("Cable part " + name + " was not found!");
        // NOTE: removed a dead "part = null;" assignment — the local goes out of
        // scope immediately, so nulling it had no effect.
    }

    /** Renders both strands of the wire running toward the given direction. */
    protected void renderWire(ForgeDirection direction, float scale)
    {
        renderPart(direction.name().toLowerCase() + "-left", scale);
        renderPart(direction.name().toLowerCase() + "-right", scale);
    }

    /** Renders the connector bar capping the wire end on the given side. */
    protected void renderConnector(ForgeDirection direction, float scale)
    {
        renderPart("connector-" + direction.name().toLowerCase(), scale);
    }

    /** Renders the corner cube joining the two given perpendicular sides. */
    protected void renderCorner(ForgeDirection direction1, ForgeDirection direction2, float scale)
    {
        renderPart("corner-" + direction1.name().toLowerCase() + "-" + direction2.name().toLowerCase(), scale);
    }

    /** Renders the raised crossover bar plus its connector and corner pieces. */
    protected void renderCrossover(ForgeDirection to, ForgeDirection from, float scale)
    {
        renderPart("crossover-" + to.getOpposite().name().toLowerCase(), scale);
        renderConnector(to, scale);
        renderCorner(to, from.getOpposite(), scale);
    }

    /** Renders a straight run through the block along the given axis, with side connectors. */
    protected void renderStraight(ForgeDirection direction, float scale)
    {
        renderWire(direction, scale);
        renderWire(direction.getOpposite(), scale);
        renderConnector(direction.getRotation(ForgeDirection.UP), scale);
        renderConnector(direction.getRotation(ForgeDirection.UP).getOpposite(), scale);
    }

    /** Renders a cable with exactly one connected neighbour: a single wire stub. */
    protected void renderSingleConnection(TileEntity[] connections, float scale)
    {
        ForgeDirection connection = ForgeDirection.UNKNOWN;
        // Find the first (and only) non-null connection; the index encodes the side.
        for(int i = 0; i < connections.length; i++)
        {
            if(connections[i] == null)
                continue;
            ForgeDirection direction = ForgeDirection.getOrientation(i);
            connection = direction;
            break;
        }
        if(connection != ForgeDirection.UNKNOWN)
        {
            renderWire(connection, scale);
        }
    }

    /** Renders a cable with two connections: straight, crossover, or corner geometry. */
    protected void renderAngle(TileEntity[] connections, float scale)
    {
        ForgeDirection to = ForgeDirection.UNKNOWN;
        ForgeDirection from = ForgeDirection.UNKNOWN;
        for(int i = 0; i < connections.length; i++)
        {
            if(connections[i] == null)
                continue;
            ForgeDirection direction = ForgeDirection.getOrientation(i);
            if(to == ForgeDirection.UNKNOWN)
                to = direction;
            else
                from = direction;
        }
        if(to != ForgeDirection.UNKNOWN && from != ForgeDirection.UNKNOWN)
        {
            if(to.getOpposite() == from)
            {
                // Opposite sides form a straight run.
                renderStraight(to, scale);
            }
            else
            {
                renderWire(to, scale);
                renderWire(from, scale);
                if((to == ForgeDirection.SOUTH || to == ForgeDirection.NORTH) && from == to.getRotation(ForgeDirection.UP))
                {
                    renderCrossover(to, from, scale);
                    renderConnector(from, scale);
                }
                else
                {
                    renderConnector(to.getOpposite(), scale);
                    renderConnector(from.getOpposite(), scale);
                    renderCorner(to, from, scale);
                }
            }
        }
    }

    /** Renders a junction (3+ connections): center box plus a wire per connected side. */
    protected void renderConjunction(TileEntity[] connections, float scale)
    {
        renderPart("box", scale);
        for(int i = 0; i < connections.length; i++)
        {
            if(connections[i] == null)
                continue;
            ForgeDirection direction = ForgeDirection.getOrientation(i);
            renderWire(direction, scale);
        }
    }

    /** Sets the rotation angles (radians) of a model piece. */
    private static void setRotation(ModelRenderer model, float x, float y, float z)
    {
        model.rotateAngleX = x;
        model.rotateAngleY = y;
        model.rotateAngleZ = z;
    }
}
| |
/*
* Copyright (c) 2008-2017, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.map;
import com.hazelcast.config.Config;
import com.hazelcast.config.EntryListenerConfig;
import com.hazelcast.config.InMemoryFormat;
import com.hazelcast.config.MapConfig;
import com.hazelcast.config.MapPartitionLostListenerConfig;
import com.hazelcast.core.EntryAdapter;
import com.hazelcast.core.EntryEvent;
import com.hazelcast.core.EntryListener;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.core.HazelcastInstanceAware;
import com.hazelcast.core.IMap;
import com.hazelcast.core.MapEvent;
import com.hazelcast.map.impl.MapService;
import com.hazelcast.map.impl.event.MapPartitionEventData;
import com.hazelcast.map.listener.EntryAddedListener;
import com.hazelcast.map.listener.EntryUpdatedListener;
import com.hazelcast.map.listener.MapPartitionLostListener;
import com.hazelcast.nio.ObjectDataInput;
import com.hazelcast.nio.ObjectDataOutput;
import com.hazelcast.nio.serialization.DataSerializable;
import com.hazelcast.query.Predicate;
import com.hazelcast.spi.EventRegistration;
import com.hazelcast.spi.EventService;
import com.hazelcast.test.AssertTask;
import com.hazelcast.test.HazelcastParallelClassRunner;
import com.hazelcast.test.HazelcastTestSupport;
import com.hazelcast.test.TestHazelcastInstanceFactory;
import com.hazelcast.test.annotation.ParallelTest;
import com.hazelcast.test.annotation.QuickTest;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import java.io.IOException;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.mock;
@SuppressWarnings("deprecation")
@RunWith(HazelcastParallelClassRunner.class)
@Category({QuickTest.class, ParallelTest.class})
public class ListenerTest extends HazelcastTestSupport {
private final AtomicInteger globalCount = new AtomicInteger();
private final AtomicInteger localCount = new AtomicInteger();
private final AtomicInteger valueCount = new AtomicInteger();
    @Before
    public void before() {
        // Reset the shared event counters so every test starts from zero.
        globalCount.set(0);
        localCount.set(0);
        valueCount.set(0);
    }
@Test
public void testConfigListenerRegistration() throws InterruptedException {
final CountDownLatch latch = new CountDownLatch(1);
String name = randomString();
Config config = getConfig();
MapConfig mapConfig = config.getMapConfig(name);
EntryListenerConfig entryListenerConfig = new EntryListenerConfig();
entryListenerConfig.setImplementation(new EntryAdapter() {
public void entryAdded(EntryEvent event) {
latch.countDown();
}
});
mapConfig.addEntryListenerConfig(entryListenerConfig);
HazelcastInstance instance = createHazelcastInstance(config);
IMap<Object, Object> map = instance.getMap(name);
map.put(1, 1);
assertTrue(latch.await(10, TimeUnit.SECONDS));
}
@Test
public void globalListenerTest() throws InterruptedException {
Config config = getConfig();
String name = randomString();
TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(2);
HazelcastInstance instance1 = nodeFactory.newHazelcastInstance(config);
HazelcastInstance instance2 = nodeFactory.newHazelcastInstance(config);
IMap<String, String> map1 = instance1.getMap(name);
IMap<String, String> map2 = instance2.getMap(name);
map1.addEntryListener(createEntryListener(false), false);
map1.addEntryListener(createEntryListener(false), true);
map2.addEntryListener(createEntryListener(false), true);
int k = 3;
putDummyData(map1, k);
checkCountWithExpected(k * 3, 0, k * 2);
}
@Test
public void testEntryEventGetMemberNotNull() throws Exception {
    // Verifies that EntryEvent.getMember() is populated even when the member
    // that originated the event shuts down right after the operation.
    Config config = getConfig();
    String name = randomString();
    TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(2);
    HazelcastInstance instance1 = nodeFactory.newHazelcastInstance(config);
    HazelcastInstance instance2 = nodeFactory.newHazelcastInstance(config);
    IMap<String, String> map1 = instance1.getMap(name);
    IMap<String, String> map2 = instance2.getMap(name);
    final CountDownLatch latch = new CountDownLatch(1);
    map1.addEntryListener(new EntryAdapter<Object, Object>() {
        @Override
        public void entryAdded(EntryEvent<Object, Object> event) {
            assertNotNull(event.getMember());
            latch.countDown();
        }
    }, false);
    // Ensure the put is executed (owned) by instance2, which is then shut down.
    String key = generateKeyOwnedBy(instance2);
    String value = randomString();
    map2.put(key, value);
    instance2.getLifecycleService().shutdown();
    assertOpenEventually(latch);
}
@Test
public void globalListenerRemoveTest() throws InterruptedException {
    // Registers three global listeners, removes all of them, then puts data:
    // no listener must fire, so all counters stay at zero.
    Config config = getConfig();
    String name = randomString();
    TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(2);
    HazelcastInstance instance1 = nodeFactory.newHazelcastInstance(config);
    HazelcastInstance instance2 = nodeFactory.newHazelcastInstance(config);
    IMap<String, String> map1 = instance1.getMap(name);
    IMap<String, String> map2 = instance2.getMap(name);
    String id1 = map1.addEntryListener(createEntryListener(false), false);
    String id2 = map1.addEntryListener(createEntryListener(false), true);
    String id3 = map2.addEntryListener(createEntryListener(false), true);
    int k = 3;
    // NOTE(review): id3 was registered through map2 but is removed through
    // map1 — this appears to rely on listener registration ids being valid
    // across proxies of the same distributed map; confirm this is intended.
    map1.removeEntryListener(id1);
    map1.removeEntryListener(id2);
    map1.removeEntryListener(id3);
    putDummyData(map2, k);
    checkCountWithExpected(0, 0, 0);
}
@Test
public void localListenerTest() throws InterruptedException {
    // Local listeners only observe events for entries owned by their member.
    // With a local listener on each of the two members, every one of the k
    // entries is observed by exactly one listener: localCount == k.
    Config config = getConfig();
    String name = randomString();
    TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(2);
    HazelcastInstance instance1 = nodeFactory.newHazelcastInstance(config);
    HazelcastInstance instance2 = nodeFactory.newHazelcastInstance(config);
    IMap<String, String> map1 = instance1.getMap(name);
    IMap<String, String> map2 = instance2.getMap(name);
    map1.addLocalEntryListener(createEntryListener(true));
    map2.addLocalEntryListener(createEntryListener(true));
    int k = 4;
    putDummyData(map1, k);
    checkCountWithExpected(0, k, k);
}
/**
 * Test for issue 584 and 756.
 *
 * Mixes local and global listeners: the three global listeners each see all
 * k adds (k * 3), the two local listeners together see each entry once (k),
 * and the two includeValue registrations account for k * 2 value events.
 */
@Test
public void globalAndLocalListenerTest() throws InterruptedException {
    Config config = getConfig();
    String name = randomString();
    TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(2);
    HazelcastInstance instance1 = nodeFactory.newHazelcastInstance(config);
    HazelcastInstance instance2 = nodeFactory.newHazelcastInstance(config);
    IMap<String, String> map1 = instance1.getMap(name);
    IMap<String, String> map2 = instance2.getMap(name);
    map1.addLocalEntryListener(createEntryListener(true));
    map2.addLocalEntryListener(createEntryListener(true));
    map1.addEntryListener(createEntryListener(false), false);
    map2.addEntryListener(createEntryListener(false), false);
    map2.addEntryListener(createEntryListener(false), true);
    int k = 1;
    putDummyData(map2, k);
    checkCountWithExpected(k * 3, k, k * 2);
}
/**
 * Test for issue 584 and 756.
 *
 * Same expectations as {@code globalAndLocalListenerTest} but with the
 * listener registration order interleaved, to ensure the order of local vs
 * global registrations does not affect event delivery.
 */
@Test
public void globalAndLocalListenerTest2() throws InterruptedException {
    Config config = getConfig();
    String name = randomString();
    TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(2);
    HazelcastInstance instance1 = nodeFactory.newHazelcastInstance(config);
    HazelcastInstance instance2 = nodeFactory.newHazelcastInstance(config);
    IMap<String, String> map1 = instance1.getMap(name);
    IMap<String, String> map2 = instance2.getMap(name);
    // changed listener order
    map1.addEntryListener(createEntryListener(false), false);
    map1.addLocalEntryListener(createEntryListener(true));
    map2.addEntryListener(createEntryListener(false), true);
    map2.addLocalEntryListener(createEntryListener(true));
    map2.addEntryListener(createEntryListener(false), false);
    int k = 3;
    putDummyData(map1, k);
    checkCountWithExpected(k * 3, k, k * 2);
}
/** Inserts {@code size} distinct keys ("foo0".."fooN") with a constant value. */
private static void putDummyData(IMap<String, String> map, int size) {
    int index = 0;
    while (index < size) {
        map.put("foo" + index, "bar");
        index++;
    }
}
/**
 * Asserts (eventually, since event delivery is asynchronous) that the three
 * shared counters reach exactly the expected values.
 *
 * @param expectedGlobal expected number of global-listener events
 * @param expectedLocal  expected number of local-listener events
 * @param expectedValue  expected number of events that carried a value
 */
private void checkCountWithExpected(final int expectedGlobal, final int expectedLocal, final int expectedValue) {
    assertTrueEventually(new AssertTask() {
        @Override
        public void run() {
            assertEquals(expectedLocal, localCount.get());
            assertEquals(expectedGlobal, globalCount.get());
            assertEquals(expectedValue, valueCount.get());
        }
    });
}
/**
 * Test that replace(key, oldValue, newValue) generates entryUpdated events, not entryAdded.
 */
@Test
public void replaceFiresUpdatedEvent() throws InterruptedException {
    final AtomicInteger entryUpdatedEventCount = new AtomicInteger(0);
    HazelcastInstance node = createHazelcastInstance(getConfig());
    IMap<Integer, Integer> map = node.getMap(randomMapName());
    // Entry exists before the listener is added, so a successful replace must
    // surface as exactly one update event.
    map.put(1, 1);
    map.addEntryListener(new EntryAdapter<Integer, Integer>() {
        @Override
        public void entryUpdated(EntryEvent<Integer, Integer> event) {
            entryUpdatedEventCount.incrementAndGet();
        }
    }, true);
    map.replace(1, 1, 2);
    assertTrueEventually(new AssertTask() {
        @Override
        public void run() {
            assertEquals(1, entryUpdatedEventCount.get());
        }
    });
}
/**
 * test for issue 589
 *
 * NOTE(review): despite the method name, the assertions check that the first
 * set() fires entryAdded and the second set() on the same key fires
 * entryUpdated — i.e. set() does NOT always fire an add event.
 */
@Test
public void setFiresAlwaysAddEvent() throws InterruptedException {
    HazelcastInstance instance = createHazelcastInstance(getConfig());
    IMap<Object, Object> map = instance.getMap(randomString());
    final CountDownLatch updateLatch = new CountDownLatch(1);
    final CountDownLatch addLatch = new CountDownLatch(1);
    map.addEntryListener(new EntryAdapter<Object, Object>() {
        @Override
        public void entryAdded(EntryEvent<Object, Object> event) {
            addLatch.countDown();
        }
        @Override
        public void entryUpdated(EntryEvent<Object, Object> event) {
            updateLatch.countDown();
        }
    }, false);
    map.set(1, 1);
    map.set(1, 2);
    assertTrue(addLatch.await(5, TimeUnit.SECONDS));
    assertTrue(updateLatch.await(5, TimeUnit.SECONDS));
}
@Test
public void testLocalEntryListener_singleInstance_with_MatchingPredicate() throws Exception {
    // With a single member every entry is local; an always-true predicate
    // means the listener sees all `count` adds. includeValue is false, so no
    // value events are expected.
    HazelcastInstance instance = createHazelcastInstance(getConfig());
    IMap<String, String> map = instance.getMap(randomString());
    boolean includeValue = false;
    map.addLocalEntryListener(createEntryListener(false), matchingPredicate(), includeValue);
    int count = 1000;
    for (int i = 0; i < count; i++) {
        map.put("key" + i, "value" + i);
    }
    checkCountWithExpected(count, 0, 0);
}
@Test
public void testLocalEntryListener_singleInstance_with_NonMatchingPredicate() throws Exception {
    // An always-false predicate must filter out every event.
    HazelcastInstance instance = createHazelcastInstance(getConfig());
    IMap<String, String> map = instance.getMap(randomString());
    boolean includeValue = false;
    map.addLocalEntryListener(createEntryListener(false), nonMatchingPredicate(), includeValue);
    int count = 1000;
    for (int i = 0; i < count; i++) {
        map.put("key" + i, "value" + i);
    }
    checkCountWithExpected(0, 0, 0);
}
@Test
public void testLocalEntryListener_multipleInstance_with_MatchingPredicate() throws Exception {
    // With 3 members and a local listener on only one of them, the listener
    // should see roughly 1/3 of the events. The assertion allows a +-10%
    // (of the total count) tolerance around that share.
    int instanceCount = 3;
    TestHazelcastInstanceFactory instanceFactory = createHazelcastInstanceFactory(instanceCount);
    HazelcastInstance instance = instanceFactory.newInstances(getConfig())[0];
    IMap<String, String> map = instance.getMap(randomString());
    boolean includeValue = false;
    map.addLocalEntryListener(createEntryListener(false), matchingPredicate(), includeValue);
    int count = 1000;
    for (int i = 0; i < count; i++) {
        map.put("key" + i, "value" + i);
    }
    final int eventPerPartitionMin = count / instanceCount - count / 10;
    final int eventPerPartitionMax = count / instanceCount + count / 10;
    assertTrueEventually(new AssertTask() {
        @Override
        public void run() {
            assertTrue(globalCount.get() > eventPerPartitionMin && globalCount.get() < eventPerPartitionMax);
        }
    });
}
@Test
public void testLocalEntryListener_multipleInstance_with_MatchingPredicate_and_Key() throws Exception {
    // A key-scoped local listener must only fire for the registered key
    // ("key500"), regardless of how many other entries are written.
    // NOTE(review): despite "multipleInstance" in the name, only a single
    // instance is created here — confirm whether a multi-member variant was
    // intended.
    HazelcastInstance instance = createHazelcastInstance(getConfig());
    IMap<String, String> map = instance.getMap(randomString());
    boolean includeValue = false;
    map.addLocalEntryListener(createEntryListener(false), matchingPredicate(), "key500", includeValue);
    int count = 1000;
    for (int i = 0; i < count; i++) {
        map.put("key" + i, "value" + i);
    }
    assertTrueEventually(new AssertTask() {
        @Override
        public void run() {
            // assertEquals instead of assertTrue(x == 1): on failure it reports
            // the actual counter value instead of a bare AssertionError.
            assertEquals(1, globalCount.get());
        }
    });
}
@Test
public void testEntryListenerEvent_withMapReplaceFail() throws Exception {
    // replace(key, expectedOld, new) with a non-matching expected value must
    // neither change the entry nor fire an entryUpdated event.
    HazelcastInstance instance = createHazelcastInstance(getConfig());
    final IMap<Integer, Object> map = instance.getMap(randomString());
    final CounterEntryListener listener = new CounterEntryListener();
    map.addEntryListener(listener, true);
    final int putTotal = 1000;
    final int oldVal = 1;
    for (int i = 0; i < putTotal; i++) {
        map.put(i, oldVal);
    }
    final int replaceTotal = 1000;
    final int newVal = 2;
    for (int i = 0; i < replaceTotal; i++) {
        // "WrongValue" never matches the stored Integer, so every replace fails.
        map.replace(i, "WrongValue", newVal);
    }
    assertTrueEventually(new AssertTask() {
        @Override
        public void run() {
            for (int i = 0; i < replaceTotal; i++) {
                assertEquals(oldVal, map.get(i));
            }
            assertEquals(putTotal, listener.addCount.get());
            assertEquals(0, listener.updateCount.get());
        }
    });
}
/**
 * test for issue 3198
 *
 * For a remove event the removed value is delivered as getOldValue();
 * getValue() is expected to be null.
 */
@Test
public void testEntryListenerEvent_getValueWhenEntryRemoved() {
    HazelcastInstance instance = createHazelcastInstance(getConfig());
    IMap<String, String> map = instance.getMap(randomString());
    final AtomicReference<String> valueRef = new AtomicReference<String>();
    final AtomicReference<String> oldValueRef = new AtomicReference<String>();
    final CountDownLatch latch = new CountDownLatch(1);
    map.addEntryListener(new EntryAdapter<String, String>() {
        public void entryRemoved(EntryEvent<String, String> event) {
            valueRef.set(event.getValue());
            oldValueRef.set(event.getOldValue());
            latch.countDown();
        }
    }, true);
    map.put("key", "value");
    map.remove("key");
    assertOpenEventually(latch);
    assertNull(valueRef.get());
    assertEquals("value", oldValueRef.get());
}
@Test
public void testEntryListenerEvent_getValueWhenEntryEvicted() {
    // Same contract as the remove case, but for TTL-based eviction: the
    // evicted value arrives as getOldValue(), getValue() is null.
    HazelcastInstance instance = createHazelcastInstance(getConfig());
    IMap<String, String> map = instance.getMap(randomString());
    final Object[] value = new Object[1];
    final Object[] oldValue = new Object[1];
    final CountDownLatch latch = new CountDownLatch(1);
    map.addEntryListener(new EntryAdapter<String, String>() {
        public void entryEvicted(EntryEvent<String, String> event) {
            value[0] = event.getValue();
            oldValue[0] = event.getOldValue();
            latch.countDown();
        }
    }, true);
    // 1-second TTL triggers the eviction the listener waits for.
    map.put("key", "value", 1, TimeUnit.SECONDS);
    assertOpenEventually(latch);
    assertNull(value[0]);
    assertEquals("value", oldValue[0]);
}
@Test
public void testEntryListenerEvent_withMapReplaceSuccess() throws Exception {
    // Counterpart of testEntryListenerEvent_withMapReplaceFail: a matching
    // replace must update every entry and fire one entryUpdated per replace.
    HazelcastInstance instance = createHazelcastInstance(getConfig());
    final IMap<Integer, Object> map = instance.getMap(randomString());
    final CounterEntryListener listener = new CounterEntryListener();
    map.addEntryListener(listener, true);
    final int putTotal = 1000;
    final int oldVal = 1;
    for (int i = 0; i < putTotal; i++) {
        map.put(i, oldVal);
    }
    final int replaceTotal = 1000;
    final int newVal = 2;
    for (int i = 0; i < replaceTotal; i++) {
        map.replace(i, oldVal, newVal);
    }
    assertTrueEventually(new AssertTask() {
        @Override
        public void run() {
            for (int i = 0; i < replaceTotal; i++) {
                assertEquals(newVal, map.get(i));
            }
            assertEquals(putTotal, listener.addCount.get());
            assertEquals(replaceTotal, listener.updateCount.get());
        }
    });
}
/**
 * test for issue 4037
 *
 * remove(key, value) — the conditional remove — must still deliver the
 * removed value through getOldValue() of the remove event.
 */
@Test
public void testEntryEvent_includesOldValue_afterRemoveIfSameOperation() {
    HazelcastInstance instance = createHazelcastInstance(getConfig());
    IMap<String, String> map = instance.getMap(randomString());
    final CountDownLatch latch = new CountDownLatch(1);
    final String key = "key";
    final String value = "value";
    final ConcurrentMap<String, String> resultHolder = new ConcurrentHashMap<String, String>(1);
    map.addEntryListener(new EntryAdapter<String, String>() {
        public void entryRemoved(EntryEvent<String, String> event) {
            final String oldValue = event.getOldValue();
            resultHolder.put(key, oldValue);
            latch.countDown();
        }
    }, true);
    map.put(key, value);
    map.remove(key, value);
    assertOpenEventually(latch);
    final String oldValueFromEntryEvent = resultHolder.get(key);
    assertEquals(value, oldValueFromEntryEvent);
}
@Test
public void testMapPartitionLostListener_registeredViaImplementationInConfigObject() {
    // Verifies that a MapPartitionLostListener supplied via MapConfig results
    // in an event-service registration for the map once the map is touched.
    final String name = randomString();
    Config config = getConfig();
    MapConfig mapConfig = config.getMapConfig(name);
    MapPartitionLostListener listener = mock(MapPartitionLostListener.class);
    mapConfig.addMapPartitionLostListenerConfig(new MapPartitionLostListenerConfig(listener));
    // backupCount 0 so partition loss is possible at all for this map.
    mapConfig.setBackupCount(0);
    HazelcastInstance instance = createHazelcastInstance(config);
    instance.getMap(name);
    final EventService eventService = getNode(instance).getNodeEngine().getEventService();
    assertTrueEventually(new AssertTask() {
        @Override
        public void run()
                throws Exception {
            final Collection<EventRegistration> registrations = eventService.getRegistrations(MapService.SERVICE_NAME, name);
            assertFalse(registrations.isEmpty());
        }
    });
}
@Test
public void testPutAll_whenExistsEntryListenerWithIncludeValueSetToTrue_thenFireEventWithValue() throws InterruptedException {
    // putAll over an existing key must fire an update event that carries the
    // previous value when the listener was registered with includeValue=true.
    int key = 1;
    String initialValue = "foo";
    String newValue = "bar";
    HazelcastInstance instance = createHazelcastInstance(getConfig());
    IMap<Integer, String> map = instance.getMap(randomMapName());
    map.put(key, initialValue);
    UpdateListenerRecordingOldValue<Integer, String> listener = new UpdateListenerRecordingOldValue<Integer, String>();
    map.addEntryListener(listener, true);
    Map<Integer, String> newMap = createMapWithEntry(key, newValue);
    map.putAll(newMap);
    String oldValue = listener.waitForOldValue();
    assertEquals(initialValue, oldValue);
}
@Test
public void hazelcastAwareEntryListener_whenConfiguredViaClassName_thenInjectHazelcastInstance() throws InterruptedException {
    // Listener configured by class name: the member must instantiate it and
    // inject the HazelcastInstance (PingPongListener is HazelcastInstanceAware).
    EntryListenerConfig listenerConfig = new EntryListenerConfig("com.hazelcast.map.ListenerTest$PingPongListener", false, true);
    hazelcastAwareEntryListener_injectHazelcastInstance(listenerConfig);
}
@Test
public void hazelcastAwareEntryListener_whenConfiguredByProvidingInstance_thenInjectHazelcastInstance() throws InterruptedException {
    // Same as the class-name variant, but the listener instance is provided
    // directly; injection must still happen.
    EntryListenerConfig listenerConfig = new EntryListenerConfig(new PingPongListener(), false, true);
    hazelcastAwareEntryListener_injectHazelcastInstance(listenerConfig);
}
@Test
public void test_ListenerShouldNotCauseDeserialization_withIncludeValueFalse() throws InterruptedException {
    // With includeValue == false and OBJECT in-memory format, delivering the
    // event must not force (de)serialization of the stored value; the
    // SerializeCheckerObject flags verify neither direction happened.
    String name = randomString();
    String key = randomString();
    Config config = getConfig();
    config.getMapConfig(name).setInMemoryFormat(InMemoryFormat.OBJECT);
    HazelcastInstance instance = createHazelcastInstance(config);
    IMap<Object, Object> map = instance.getMap(name);
    EntryAddedLatch latch = new EntryAddedLatch(1);
    map.addEntryListener(latch, false);
    // The value is created inside an entry processor so it is stored as an
    // object on the member, never passing through serialization on the way in.
    map.executeOnKey(key, new AbstractEntryProcessor<Object, Object>() {
        @Override
        public Object process(Map.Entry<Object, Object> entry) {
            entry.setValue(new SerializeCheckerObject());
            return null;
        }
    });
    assertOpenEventually(latch, 10);
    SerializeCheckerObject.assertNotSerialized();
}
/**
 * CountDownLatch that counts down once per entryAdded event; used to wait
 * for listener delivery without inspecting the event payload.
 */
private static class EntryAddedLatch extends CountDownLatch implements EntryAddedListener<Object, Object> {
    EntryAddedLatch(int count) {
        super(count);
    }

    @Override
    public void entryAdded(EntryEvent<Object, Object> event) {
        // Parameterized (was a raw EntryAddedListener) to avoid unchecked
        // warnings; the event itself is ignored.
        countDown();
    }
}
/**
 * DataSerializable probe that records whether it was ever serialized or
 * deserialized, via static flags.
 *
 * NOTE(review): the flags are static and never reset, so this helper is only
 * reliable for a single use per JVM/test run — confirm if reuse is ever needed.
 */
private static class SerializeCheckerObject implements DataSerializable {
    static volatile boolean serialized = false;
    static volatile boolean deserialized = false;

    @Override
    public void writeData(ObjectDataOutput out) throws IOException {
        serialized = true;
    }

    @Override
    public void readData(ObjectDataInput in) throws IOException {
        deserialized = true;
    }

    // Asserts that neither serialization direction was exercised.
    static void assertNotSerialized() {
        assertFalse(serialized);
        assertFalse(deserialized);
    }
}
@Test
public void test_mapPartitionEventData_toString() {
    // Smoke test: toString() of an empty MapPartitionEventData must not
    // throw or return null.
    assertNotNull(new MapPartitionEventData().toString());
}
@Test
public void updates_with_putTransient_triggers_entryUpdatedListener() throws Exception {
    // A second putTransient on an existing key must be reported as an update,
    // just like a regular put.
    HazelcastInstance hz = createHazelcastInstance(getConfig());
    IMap<String, String> map = hz.getMap("updates_with_putTransient_triggers_entryUpdatedListener");
    final CountDownLatch updateEventCounterLatch = new CountDownLatch(1);
    map.addEntryListener(new EntryUpdatedListener<String, String>() {
        @Override
        public void entryUpdated(EntryEvent<String, String> event) {
            updateEventCounterLatch.countDown();
        }
    }, true);
    // TTL 0 == no expiry; the second call overwrites and should fire the update.
    map.putTransient("hello", "world", 0, TimeUnit.SECONDS);
    map.putTransient("hello", "another world", 0, TimeUnit.SECONDS);
    assertOpenEventually(updateEventCounterLatch);
}
/** Builds a mutable map holding exactly the given single entry. */
private <K, V> Map<K, V> createMapWithEntry(K key, V newValue) {
    final Map<K, V> singleEntryMap = new HashMap<K, V>();
    singleEntryMap.put(key, newValue);
    return singleEntryMap;
}
/** Predicate that accepts every entry (used to exercise the "all events pass" path). */
private Predicate<String, String> matchingPredicate() {
    return new Predicate<String, String>() {
        @Override
        public boolean apply(Map.Entry<String, String> mapEntry) {
            return true;
        }
    };
}
/** Predicate that rejects every entry (used to verify events are filtered out). */
private Predicate<String, String> nonMatchingPredicate() {
    return new Predicate<String, String>() {
        @Override
        public boolean apply(Map.Entry<String, String> mapEntry) {
            return false;
        }
    };
}
/**
 * Listener that captures the old value of the first update event and lets
 * the test block until it arrives.
 */
private class UpdateListenerRecordingOldValue<K, V> implements EntryUpdatedListener<K, V> {
    // Written by the event thread, read by the test thread.
    private volatile V oldValue;
    private final CountDownLatch latch = new CountDownLatch(1);

    /** Blocks until an update event was seen, then returns its old value. */
    V waitForOldValue() throws InterruptedException {
        latch.await();
        return oldValue;
    }

    @Override
    public void entryUpdated(EntryEvent<K, V> event) {
        oldValue = event.getOldValue();
        latch.countDown();
    }
}
/**
 * Builds an entryAdded counter: local listeners feed {@code localCount},
 * global ones feed {@code globalCount}; {@code valueCount} tracks events
 * that carried a non-null value.
 */
private EntryListener<String, String> createEntryListener(final boolean isLocal) {
    return new EntryAdapter<String, String>() {
        public void entryAdded(EntryEvent<String, String> event) {
            // The captured flag selects the counter; no need to copy it into a field.
            (isLocal ? localCount : globalCount).incrementAndGet();
            if (event.getValue() != null) {
                valueCount.incrementAndGet();
            }
        }
    };
}
/**
 * Shared body for the HazelcastInstanceAware injection tests: registers the
 * given listener config on a "ping" map, puts the name of a "pong" map as the
 * value, and asserts that the listener (PingPongListener) used its injected
 * instance to write into the pong map.
 */
private void hazelcastAwareEntryListener_injectHazelcastInstance(EntryListenerConfig listenerConfig) {
    String pingMapName = randomMapName();
    Config config = getConfig();
    config.getMapConfig(pingMapName).getEntryListenerConfigs().add(listenerConfig);
    HazelcastInstance instance = createHazelcastInstance(config);
    IMap<Integer, String> pingMap = instance.getMap(pingMapName);
    String pongMapName = randomMapName();
    pingMap.put(0, pongMapName);
    IMap<Integer, String> outputMap = instance.getMap(pongMapName);
    // Exactly one "pong" entry proves the listener ran with a working instance.
    assertSizeEventually(1, outputMap);
}
/**
 * Listener used by the injection tests: on entryAdded it interprets the
 * event value as a map name and writes "pong" into that map via the
 * injected HazelcastInstance. All other callbacks are no-ops.
 */
public static class PingPongListener implements EntryListener<Integer, String>, HazelcastInstanceAware {
    private HazelcastInstance instance;

    @Override
    public void setHazelcastInstance(HazelcastInstance instance) {
        this.instance = instance;
    }

    @Override
    public void entryAdded(EntryEvent<Integer, String> event) {
        String outputMapName = event.getValue();
        IMap<Integer, String> outputMap = instance.getMap(outputMapName);
        // Async put: the test side waits with assertSizeEventually.
        outputMap.putAsync(0, "pong");
    }

    @Override
    public void entryEvicted(EntryEvent<Integer, String> event) {
    }

    @Override
    public void entryRemoved(EntryEvent<Integer, String> event) {
    }

    @Override
    public void entryUpdated(EntryEvent<Integer, String> event) {
    }

    @Override
    public void mapCleared(MapEvent event) {
    }

    @Override
    public void mapEvicted(MapEvent event) {
    }
}
/**
 * EntryListener that counts each event type in a dedicated AtomicLong so
 * tests can assert exact add/remove/update/evict totals.
 */
public class CounterEntryListener implements EntryListener<Object, Object> {
    final AtomicLong addCount = new AtomicLong();
    final AtomicLong removeCount = new AtomicLong();
    final AtomicLong updateCount = new AtomicLong();
    final AtomicLong evictCount = new AtomicLong();

    public CounterEntryListener() {
    }

    @Override
    public void entryAdded(EntryEvent<Object, Object> objectObjectEntryEvent) {
        addCount.incrementAndGet();
    }

    @Override
    public void entryRemoved(EntryEvent<Object, Object> objectObjectEntryEvent) {
        removeCount.incrementAndGet();
    }

    @Override
    public void entryUpdated(EntryEvent<Object, Object> objectObjectEntryEvent) {
        updateCount.incrementAndGet();
    }

    @Override
    public void entryEvicted(EntryEvent<Object, Object> objectObjectEntryEvent) {
        evictCount.incrementAndGet();
    }

    @Override
    public void mapEvicted(MapEvent event) {
    }

    @Override
    public void mapCleared(MapEvent event) {
    }

    @Override
    public String toString() {
        // Label matches the class name (was the stale/misleading "EntryCounter").
        return "CounterEntryListener{" +
                "addCount=" + addCount +
                ", removeCount=" + removeCount +
                ", updateCount=" + updateCount +
                ", evictCount=" + evictCount +
                '}';
    }
}
}
| |
/**
* ****************************************************************************
* Copyright (c) 2010-2015 by Min Cai (min.cai.china@gmail.com).
* <p>
* This file is part of the Archimulator multicore architectural simulator.
* <p>
* Archimulator is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
* <p>
* Archimulator is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
* <p>
* You should have received a copy of the GNU General Public License
* along with Archimulator. If not, see <http://www.gnu.org/licenses/>.
* ****************************************************************************
*/
package archimulator.core;
import archimulator.core.functionalUnit.FunctionalUnitOperationType;
import archimulator.isa.StaticInstructionType;
import archimulator.os.ContextState;
import archimulator.util.Reference;
import archimulator.util.RoundRobinScheduler;
import java.util.Iterator;
import java.util.List;
/**
 * Basic core.
 *
 * Drives the out-of-order pipeline stages (fetch, rename, dispatch, wake-up,
 * issue, write-back, commit) for all hardware threads of one core.
 *
 * @author Min Cai
 */
public class BasicCore extends AbstractBasicCore {
    // Round-robin schedulers that share the per-cycle pipeline bandwidth
    // fairly among this core's hardware threads.
    private RoundRobinScheduler<Thread> registerRenameScheduler;
    private RoundRobinScheduler<Thread> dispatchScheduler;

    /**
     * Create a basic core.
     *
     * @param processor the parent processor
     * @param num the number of the core
     */
    public BasicCore(Processor processor, int num) {
        super(processor, num);

        // A thread is eligible for rename only when it has a context, its decode
        // buffer is non-empty and the reorder buffer has room; the stall counters
        // record why an ineligible thread was skipped this cycle.
        this.registerRenameScheduler = new RoundRobinScheduler<>(
                this.threads,
                thread -> {
                    if (thread.getContext() == null) {
                        return false;
                    } else if (thread.getDecodeBuffer().isEmpty()) {
                        thread.incrementNumRegisterRenameStallsOnDecodeBufferIsEmpty();
                        return false;
                    } else if (thread.getReorderBuffer().isFull()) {
                        thread.incrementNumRegisterRenameStallsOnReorderBufferIsFull();
                        return false;
                    } else {
                        return true;
                    }
                },
                Thread::registerRenameOne, getExperiment().getDecodeWidth()
        );

        // NOTE(review): the dispatch scheduler reuses the decode width as its
        // per-cycle quantum — confirm there is no separate dispatch-width setting.
        this.dispatchScheduler = new RoundRobinScheduler<>(
                this.threads,
                thread -> thread.getContext() != null,
                Thread::dispatchOne,
                getExperiment().getDecodeWidth()
        );
    }

    /** Fetch stage: every thread with a RUNNING context fetches this cycle. */
    @Override
    protected void fetch() {
        this.threads.stream().filter(
                thread -> thread.getContext() != null && thread.getContext().getState() == ContextState.RUNNING
        ).forEach(Thread::fetch);
    }

    /** Register-rename stage: consume up to the scheduler's quantum this cycle. */
    @Override
    protected void registerRename() {
        this.registerRenameScheduler.consumeNext();
    }

    /** Dispatch stage: consume up to the scheduler's quantum this cycle. */
    @Override
    protected void dispatch() {
        this.dispatchScheduler.consumeNext();
    }

    /** Wake-up stage: move entries whose operands are all ready into the ready queues. */
    @Override
    protected void wakeUp() {
        this.wakeUp(this.waitingInstructionQueue, this.readyInstructionQueue);
        this.wakeUp(this.waitingStoreQueue, this.readyStoreQueue);
    }

    /**
     * Wake up.
     *
     * Moves every entry whose operands are all ready from the waiting queue to
     * the ready queue (add first, then remove the same entries).
     *
     * @param waitingQueue the waiting queue
     * @param readyQueue the ready queue
     */
    private void wakeUp(List<AbstractReorderBufferEntry> waitingQueue, List<AbstractReorderBufferEntry> readyQueue) {
        waitingQueue.stream().filter(AbstractReorderBufferEntry::isAllOperandReady).forEach(readyQueue::add);
        waitingQueue.removeIf(AbstractReorderBufferEntry::isAllOperandReady);
    }

    /**
     * Issue stage: a single issue-width budget is shared, in order, by the
     * instruction queue, then the load queue, then the store queue.
     */
    @Override
    protected void issue() {
        Reference<Integer> quant = new Reference<>(getExperiment().getIssueWidth());
        this.issueInstructionQueue(quant);
        this.issueLoadQueue(quant);
        this.issueStoreQueue(quant);
    }

    /**
     * Issue the instruction queue.
     *
     * @param quant the quant
     */
    private void issueInstructionQueue(Reference<Integer> quant) {
        for (Iterator<AbstractReorderBufferEntry> it = this.readyInstructionQueue.iterator(); quant.get() > 0 && it.hasNext(); ) {
            final ReorderBufferEntry reorderBufferEntry = (ReorderBufferEntry) it.next();
            if (reorderBufferEntry.getDynamicInstruction().getStaticInstruction().getMnemonic().getFunctionalUnitOperationType() != FunctionalUnitOperationType.NONE) {
                // Needs a functional unit: issue only if one can be acquired;
                // otherwise skip this entry (it stays in the ready queue) and
                // try the next one without consuming issue bandwidth.
                if (this.functionalUnitPool.acquire(reorderBufferEntry, reorderBufferEntry::signalCompleted)) {
                    reorderBufferEntry.setIssued();
                } else {
                    reorderBufferEntry.getThread().incrementNumSelectionStallsOnNoFreeFunctionalUnit();
                    continue;
                }
            } else {
                // No functional unit required: completes immediately.
                reorderBufferEntry.setIssued();
                reorderBufferEntry.setCompleted();
                reorderBufferEntry.writeBack();
            }
            it.remove();
            quant.set(quant.get() - 1);
        }
    }

    /**
     * Issue the load queue.
     *
     * @param quant the quant
     */
    private void issueLoadQueue(Reference<Integer> quant) {
        for (Iterator<AbstractReorderBufferEntry> it = this.readyLoadQueue.iterator(); quant.get() > 0 && it.hasNext(); ) {
            final LoadStoreQueueEntry loadStoreQueueEntry = (LoadStoreQueueEntry) it.next();
            // Store-to-load forwarding: if an earlier store to the same effective
            // address sits in this thread's load/store queue, the load completes
            // without accessing memory.
            boolean hitInLoadStoreQueue = loadStoreQueueEntry.getThread().getLoadStoreQueue().getEntries().stream().anyMatch(
                    loadStoreQueueEntryFound
                            -> loadStoreQueueEntryFound.getDynamicInstruction().getStaticInstruction().getMnemonic().getType() == StaticInstructionType.STORE
                            && loadStoreQueueEntryFound.getEffectiveAddress() == loadStoreQueueEntry.getEffectiveAddress()
            );
            if (hitInLoadStoreQueue) {
                loadStoreQueueEntry.setIssued();
                loadStoreQueueEntry.signalCompleted();
            } else {
                // Stop issuing loads entirely for this cycle if the memory
                // hierarchy cannot accept this load (note: break, not continue).
                if (!this.canLoad(loadStoreQueueEntry.getThread(), loadStoreQueueEntry.getEffectiveAddress())) {
                    loadStoreQueueEntry.getThread().incrementNumSelectionStallsOnCanNotLoad();
                    break;
                }
                this.load(
                        loadStoreQueueEntry.getDynamicInstruction(),
                        loadStoreQueueEntry.getEffectiveAddress(),
                        loadStoreQueueEntry.getDynamicInstruction().getPc(),
                        loadStoreQueueEntry::signalCompleted
                );
                loadStoreQueueEntry.setIssued();
            }
            it.remove();
            quant.set(quant.get() - 1);
        }
    }

    /**
     * Issue the store queue.
     *
     * @param quant the quant
     */
    private void issueStoreQueue(Reference<Integer> quant) {
        for (Iterator<AbstractReorderBufferEntry> it = this.readyStoreQueue.iterator(); quant.get() > 0 && it.hasNext(); ) {
            final LoadStoreQueueEntry loadStoreQueueEntry = (LoadStoreQueueEntry) it.next();
            // As with loads: stop issuing stores for this cycle when the memory
            // hierarchy cannot accept this store.
            if (!this.canStore(loadStoreQueueEntry.getThread(), loadStoreQueueEntry.getEffectiveAddress())) {
                loadStoreQueueEntry.getThread().incrementNumSelectionStallsOnCanNotStore();
                break;
            }
            this.store(loadStoreQueueEntry.getDynamicInstruction(), loadStoreQueueEntry.getEffectiveAddress(), loadStoreQueueEntry.getDynamicInstruction().getPc(), () -> {
                // loadStoreQueueEntry.signalCompleted(); //TODO: should we need to wait for store to complete?
            });
            loadStoreQueueEntry.setIssued();
            // Completion is signalled immediately rather than waiting for the
            // store to reach memory — see the TODO above.
            loadStoreQueueEntry.signalCompleted(); //TODO: should we need to wait for store to complete?
            it.remove();
            quant.set(quant.get() - 1);
        }
    }

    /** Write-back stage: complete and write back everything queued by the OoO event queue. */
    @Override
    protected void writeBack() {
        for (AbstractReorderBufferEntry reorderBufferEntry : this.oooEventQueue) {
            reorderBufferEntry.setCompleted();
            reorderBufferEntry.writeBack();
        }
        this.oooEventQueue.clear();
    }

    /** Refresh the load/store queue of every thread that has a context. */
    @Override
    protected void refreshLoadStoreQueue() {
        this.threads.stream().filter(thread -> thread.getContext() != null).forEach(Thread::refreshLoadStoreQueue);
    }

    /** Commit stage: every thread with a context commits this cycle. */
    @Override
    protected void commit() {
        this.threads.stream().filter(thread -> thread.getContext() != null).forEach(Thread::commit);
    }
}
| |
/*
* Copyright (C) 2011-2016 Markus Junginger, greenrobot (http://greenrobot.org)
*
* This file is part of greenDAO Generator.
*
* greenDAO Generator is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
* greenDAO Generator is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with greenDAO Generator. If not, see <http://www.gnu.org/licenses/>.
*/
package org.greenrobot.greendao.generator;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.Writer;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import freemarker.template.Configuration;
import freemarker.template.Template;
import freemarker.template.TemplateNotFoundException;
/**
* Once you have your model created, use this class to generate entities and DAOs.
*
* @author Markus
*/
public class DaoGenerator {
private Pattern patternKeepIncludes;
private Pattern patternKeepFields;
private Pattern patternKeepMethods;
private Template templateDao;
private Template templateDaoMaster;
private Template templateDaoSession;
private Template templateEntity;
private Template templateDaoUnitTest;
private Template templateContentProvider;
/**
 * Creates the generator: compiles the "KEEP section" patterns and loads all
 * FreeMarker templates (DAO, master, session, entity, unit test, content
 * provider) from the template directory.
 *
 * @throws IOException if a template cannot be located or parsed
 */
public DaoGenerator() throws IOException {
    System.out.println("greenDAO Generator");
    System.out.println("Copyright 2011-2016 Markus Junginger, greenrobot.de. Licensed under GPL V3.");
    System.out.println("This program comes with ABSOLUTELY NO WARRANTY");

    patternKeepIncludes = compilePattern("INCLUDES");
    patternKeepFields = compilePattern("FIELDS");
    patternKeepMethods = compilePattern("METHODS");

    // "dao.ftl" acts as the probe template that locates the template directory.
    Configuration config = getConfiguration("dao.ftl");
    templateDao = config.getTemplate("dao.ftl");
    templateDaoMaster = config.getTemplate("dao-master.ftl");
    templateDaoSession = config.getTemplate("dao-session.ftl");
    templateEntity = config.getTemplate("entity.ftl");
    templateDaoUnitTest = config.getTemplate("dao-unit-test.ftl");
    templateContentProvider = config.getTemplate("content-provider.ftl");
}
/**
 * Builds the FreeMarker configuration and resolves the template directory,
 * probing with the given template name and falling back to the module's
 * resource directories when the primary location fails (typical when run
 * from an IDE instead of Gradle).
 *
 * @param probingTemplate template file name used only to test each candidate directory
 * @return a configuration whose template loader can resolve the templates
 * @throws IOException if no candidate directory contains the probing template
 */
private Configuration getConfiguration(String probingTemplate) throws IOException {
    Configuration config = new Configuration(Configuration.VERSION_2_3_23);
    //config.setClassForTemplateLoading(getClass(), "/");
    //Bug: ftl-templates not found
    // NOTE(review): classpath loading is disabled (see the two lines above) and
    // replaced by a hard-coded relative path — this only works when the working
    // directory is the repository root; confirm whether classpath loading can
    // be restored.
    config.setDirectoryForTemplateLoading(new File("./DaoGenerator/src-template/"));
    try {
        config.getTemplate(probingTemplate);
    } catch (TemplateNotFoundException e) {
        // When running from an IDE like IntelliJ, class loading resources may fail for some reason (Gradle is OK)
        // Working dir is module dir
        File dir = new File("src/main/resources/");
        if (!dir.exists()) {
            // Working dir is base module dir
            dir = new File("DaoGenerator/src/main/resources/");
        }
        if (dir.exists() && new File(dir, probingTemplate).exists()) {
            config.setDirectoryForTemplateLoading(dir);
            config.getTemplate(probingTemplate);
        } else {
            // No fallback worked: surface the original lookup failure.
            throw e;
        }
    }
    return config;
}
/**
 * Builds the pattern that captures the body of a
 * "// KEEP &lt;section&gt; ... // KEEP &lt;section&gt; END" region in generated files.
 */
private Pattern compilePattern(String sectionName) {
    // DOTALL lets ".*" span newlines; MULTILINE makes "^" match at each line start.
    return Pattern.compile(
            ".*^\\s*?//\\s*?KEEP " + sectionName + ".*?\n(.*?)^\\s*// KEEP " + sectionName + " END.*?\n",
            Pattern.DOTALL | Pattern.MULTILINE);
}
/**
 * Generates all entities and DAOs for the given schema.
 * Convenience overload: delegates with default entity/test output directories.
 */
public void generateAll(Schema schema, String outDir) throws Exception {
    generateAll(schema, outDir, null, null);
}
/**
 * Generates all entities and DAOs for the given schema.
 *
 * @param outDir       required output directory for DAOs, content providers, DaoMaster/DaoSession
 * @param outDirEntity optional output directory for entity classes; defaults to {@code outDir}
 * @param outDirTest   optional output directory for generated unit tests; null disables tests
 * @throws Exception if a directory is missing or template processing fails
 */
public void generateAll(Schema schema, String outDir, String outDirEntity, String outDirTest) throws Exception {
    long start = System.currentTimeMillis();
    // All output directories must already exist (guards against generating into a typo'd path).
    File outDirFile = toFileForceExists(outDir);
    File outDirEntityFile = outDirEntity != null ? toFileForceExists(outDirEntity) : outDirFile;
    File outDirTestFile = outDirTest != null ? toFileForceExists(outDirTest) : null;
    // Complete schema initialization passes before any code is rendered.
    schema.init2ndPass();
    schema.init3rdPass();
    System.out.println("Processing schema version " + schema.getVersion() + "...");
    List<Entity> entities = schema.getEntities();
    for (Entity entity : entities) {
        generate(templateDao, outDirFile, entity.getJavaPackageDao(), entity.getClassNameDao(), schema, entity);
        if (!entity.isProtobuf() && !entity.isSkipGeneration()) {
            generate(templateEntity, outDirEntityFile, entity.getJavaPackage(), entity.getClassName(), schema, entity);
        }
        if (outDirTestFile != null && !entity.isSkipGenerationTest()) {
            String javaPackageTest = entity.getJavaPackageTest();
            String classNameTest = entity.getClassNameTest();
            File javaFilename = toJavaFilename(outDirTestFile, javaPackageTest, classNameTest);
            // Never overwrite an existing unit test: it may contain hand-written test code.
            if (!javaFilename.exists()) {
                generate(templateDaoUnitTest, outDirTestFile, javaPackageTest, classNameTest, schema, entity);
            } else {
                System.out.println("Skipped " + javaFilename.getCanonicalPath());
            }
        }
        for (ContentProvider contentProvider : entity.getContentProviders()) {
            // The content-provider template additionally needs the provider model object.
            Map<String, Object> additionalObjectsForTemplate = new HashMap<>();
            additionalObjectsForTemplate.put("contentProvider", contentProvider);
            generate(templateContentProvider, outDirFile, entity.getJavaPackage(), entity.getClassName()
                    + "ContentProvider", schema, entity, additionalObjectsForTemplate);
        }
    }
    // Schema-wide artifacts: a null entity signals a schema-level template.
    generate(templateDaoMaster, outDirFile, schema.getDefaultJavaPackageDao(),
            schema.getPrefix() + "DaoMaster", schema, null);
    generate(templateDaoSession, outDirFile, schema.getDefaultJavaPackageDao(),
            schema.getPrefix() + "DaoSession", schema, null);
    long time = System.currentTimeMillis() - start;
    System.out.println("Processed " + entities.size() + " entities in " + time + "ms");
}
/**
 * Wraps the given path in a {@link File}, refusing paths that do not exist so that generated
 * sources cannot silently land in an unintended location.
 *
 * @throws IOException if {@code filename} does not point to an existing file or directory
 */
protected File toFileForceExists(String filename) throws IOException {
    File candidate = new File(filename);
    if (candidate.exists()) {
        return candidate;
    }
    throw new IOException(filename
            + " does not exist. This check is to prevent accidental file generation into a wrong path.");
}
/** Convenience overload without additional template objects; see the 7-argument variant. */
private void generate(Template template, File outDirFile, String javaPackage, String javaClassName, Schema schema,
        Entity entity) throws Exception {
    generate(template, outDirFile, javaPackage, javaClassName, schema, entity, null);
}
/**
 * Renders a single template into {@code <outDirFile>/<javaPackage as path>/<javaClassName>.java}.
 * The template data model exposes "schema", "entity" (may be null for schema-level templates) and
 * any entries of {@code additionalObjectsForTemplate}. Existing KEEP sections of the target file
 * are preserved by injecting them into the model first.
 *
 * @throws Exception rethrown after logging the data model and target location for diagnosis
 */
private void generate(Template template, File outDirFile, String javaPackage, String javaClassName, Schema schema,
        Entity entity, Map<String, Object> additionalObjectsForTemplate) throws Exception {
    Map<String, Object> root = new HashMap<>();
    root.put("schema", schema);
    root.put("entity", entity);
    if (additionalObjectsForTemplate != null) {
        root.putAll(additionalObjectsForTemplate);
    }
    try {
        File file = toJavaFilename(outDirFile, javaPackage, javaClassName);
        //noinspection ResultOfMethodCallIgnored
        file.getParentFile().mkdirs();
        if (entity != null && entity.getHasKeepSections()) {
            checkKeepSections(file, root);
        }
        // try-with-resources guarantees the writer is closed even if template processing fails.
        try (Writer writer = new FileWriter(file)) {
            template.process(root, writer);
            writer.flush();
            System.out.println("Written " + file.getCanonicalPath());
        }
    } catch (Exception ex) {
        // Dump the model and destination so broken templates are easy to diagnose, then rethrow.
        System.err.println("Data map for template: " + root);
        System.err.println("Error while generating " + javaPackage + "." + javaClassName + " ("
                + outDirFile.getCanonicalPath() + ")");
        throw ex;
    }
}
/**
 * If {@code file} already exists, extracts the user-maintained KEEP sections from it and exposes
 * them to the template via {@code root} ("keepIncludes", "keepFields", "keepMethods") so that
 * regeneration preserves hand-written code.
 */
private void checkKeepSections(File file, Map<String, Object> root) {
    if (!file.exists()) {
        return;
    }
    try {
        String contents = new String(DaoUtil.readAllBytes(file));
        // Each pattern captures the text between "// KEEP X" and "// KEEP X END" markers.
        putKeepSection(root, "keepIncludes", patternKeepIncludes, contents);
        putKeepSection(root, "keepFields", patternKeepFields, contents);
        putKeepSection(root, "keepMethods", patternKeepMethods, contents);
    } catch (IOException e) {
        // Best effort: failing to read an existing file must not abort generation.
        e.printStackTrace();
    }
}

/** Stores the first captured group of {@code pattern} under {@code key} when it matches {@code contents}. */
private static void putKeepSection(Map<String, Object> root, String key, Pattern pattern, String contents) {
    Matcher matcher = pattern.matcher(contents);
    if (matcher.matches()) {
        root.put(key, matcher.group(1));
    }
}
/** Maps a (package, class name) pair to its .java file location below {@code outDirFile}. */
protected File toJavaFilename(File outDirFile, String javaPackage, String javaClassName) {
    File packageDir = new File(outDirFile, javaPackage.replace('.', '/'));
    return new File(packageDir, javaClassName + ".java");
}
}
| |
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.jetbrains.python.psi.impl;
import com.google.common.collect.Maps;
import com.intellij.extapi.psi.PsiFileBase;
import com.intellij.icons.AllIcons;
import com.intellij.lang.Language;
import com.intellij.navigation.ItemPresentation;
import com.intellij.openapi.fileTypes.FileType;
import com.intellij.openapi.roots.ProjectFileIndex;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.util.RecursionManager;
import com.intellij.openapi.util.io.FileUtilRt;
import com.intellij.openapi.vfs.VfsUtilCore;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.*;
import com.intellij.psi.scope.DelegatingScopeProcessor;
import com.intellij.psi.scope.PsiScopeProcessor;
import com.intellij.psi.stubs.StubElement;
import com.intellij.psi.templateLanguages.TemplateLanguageFileViewProvider;
import com.intellij.psi.util.PsiModificationTracker;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.util.QualifiedName;
import com.intellij.reference.SoftReference;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.Processor;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.indexing.IndexingDataKeys;
import com.jetbrains.python.PyElementTypes;
import com.jetbrains.python.PyNames;
import com.jetbrains.python.PythonFileType;
import com.jetbrains.python.PythonLanguage;
import com.jetbrains.python.codeInsight.controlflow.ControlFlowCache;
import com.jetbrains.python.codeInsight.imports.AddImportHelper;
import com.jetbrains.python.documentation.docstrings.DocStringUtil;
import com.jetbrains.python.psi.*;
import com.jetbrains.python.psi.impl.references.PyReferenceImpl;
import com.jetbrains.python.psi.resolve.*;
import com.jetbrains.python.psi.stubs.PyFileStub;
import com.jetbrains.python.psi.types.PyModuleType;
import com.jetbrains.python.psi.types.PyType;
import com.jetbrains.python.psi.types.TypeEvalContext;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.io.File;
import java.util.*;
public class PyFileImpl extends PsiFileBase implements PyFile, PyExpression {
@Nullable protected volatile PyType myType;
//private volatile Boolean myAbsoluteImportEnabled;
private final Map<FutureFeature, Boolean> myFutureFeatures;
@Nullable private volatile List<String> myDunderAll;
private volatile boolean myDunderAllCalculated;
@NotNull private volatile SoftReference<ExportedNameCache> myExportedNameCache = new SoftReference<>(null);
@NotNull private final PsiModificationTracker myModificationTracker;
/**
 * Snapshot of this file's exported names, rebuilt whenever the file's modification stamp changes.
 * Maps names to their top-level declarations and keeps import definers for resolve fallbacks;
 * both are reversed after collection so the later of two same-named declarations is tried first.
 * Also keeps a negative cache of names known not to resolve, invalidated by the out-of-code-block
 * modification count.
 */
private final class ExportedNameCache {
  // Names that failed to resolve; cleared whenever the modification count changes.
  private final List<String> myNameDefinerNegativeCache = new ArrayList<>();
  private long myNameDefinerOOCBModCount = -1;
  // File modification stamp at build time; used by getExportedNameCache() to detect staleness.
  private final long myModificationStamp;
  private final Map<String, List<PsiNamedElement>> myNamedElements = Maps.newHashMap();
  private final List<PyImportedNameDefiner> myImportedNameDefiners = new ArrayList<>();
  private ExportedNameCache(long modificationStamp) {
    myModificationStamp = modificationStamp;
    final StubElement stub = getStub();
    processDeclarations(PyPsiUtils.collectAllStubChildren(PyFileImpl.this, stub), element -> {
      // Collect named top-level declarations; keyword arguments are named but not exports, and
      // when working from the AST (stub == null), names under import elements are handled below.
      if (element instanceof PsiNamedElement &&
          !(element instanceof PyKeywordArgument) &&
          !(stub == null && element.getParent() instanceof PyImportElement)) {
        final PsiNamedElement namedElement = (PsiNamedElement)element;
        myNamedElements.computeIfAbsent(namedElement.getName(), __ -> new ArrayList<>()).add(namedElement);
      }
      if (element instanceof PyImportedNameDefiner) {
        myImportedNameDefiners.add((PyImportedNameDefiner)element);
      }
      if (element instanceof PyFromImportStatement) {
        final PyFromImportStatement fromImportStatement = (PyFromImportStatement)element;
        final PyStarImportElement starImportElement = fromImportStatement.getStarImportElement();
        // "from m import *" contributes a single star definer; otherwise each import element does.
        if (starImportElement != null) {
          myImportedNameDefiners.add(starImportElement);
        }
        else {
          Collections.addAll(myImportedNameDefiners, fromImportStatement.getImportElements());
        }
      }
      else if (element instanceof PyImportStatement) {
        final PyImportStatement importStatement = (PyImportStatement)element;
        Collections.addAll(myImportedNameDefiners, importStatement.getImportElements());
      }
      return true;
    });
    // Reverse so later declarations shadow earlier ones during resolve.
    for (List<PsiNamedElement> elements : myNamedElements.values()) {
      Collections.reverse(elements);
    }
    Collections.reverse(myImportedNameDefiners);
  }
  // Feeds every element (recursing into except-parts) to the processor; stops on false.
  private boolean processDeclarations(@NotNull List<PsiElement> elements, @NotNull Processor<? super PsiElement> processor) {
    for (PsiElement child : elements) {
      if (!processor.process(child)) {
        return false;
      }
      if (child instanceof PyExceptPart) {
        final PyExceptPart part = (PyExceptPart)child;
        if (!processDeclarations(PyPsiUtils.collectAllStubChildren(part, part.getStub()), processor)) {
          return false;
        }
      }
    }
    return true;
  }
  /**
   * Resolves {@code name} against the cached declarations and import definers. Named elements are
   * offered first; import definers only if the processor did not stop. Names that resolve to
   * nothing are remembered in the negative cache.
   */
  @NotNull
  private List<RatedResolveResult> multiResolve(@NotNull String name) {
    synchronized (myNameDefinerNegativeCache) {
      final long modCount = myModificationTracker.getModificationCount();
      if (modCount != myNameDefinerOOCBModCount) {
        myNameDefinerNegativeCache.clear();
        myNameDefinerOOCBModCount = modCount;
      }
      else {
        if (myNameDefinerNegativeCache.contains(name)) {
          return Collections.emptyList();
        }
      }
    }
    final PyResolveProcessor processor = new PyResolveProcessor(name);
    boolean stopped = false;
    if (myNamedElements.containsKey(name)) {
      for (PsiNamedElement element : myNamedElements.get(name)) {
        if (!processor.execute(element, ResolveState.initial())) {
          stopped = true;
          break;
        }
      }
    }
    if (!stopped) {
      for (PyImportedNameDefiner definer : myImportedNameDefiners) {
        if (!processor.execute(definer, ResolveState.initial())) {
          break;
        }
      }
    }
    final Map<PsiElement, PyImportedNameDefiner> results = processor.getResults();
    if (!results.isEmpty()) {
      final ResolveResultList resultList = new ResolveResultList();
      final TypeEvalContext typeEvalContext = TypeEvalContext.codeInsightFallback(getProject());
      for (Map.Entry<PsiElement, PyImportedNameDefiner> entry : results.entrySet()) {
        final PsiElement element = entry.getKey();
        final PyImportedNameDefiner definer = entry.getValue();
        if (element != null) {
          final int elementRate = PyReferenceImpl.getRate(element, typeEvalContext);
          // Results that came through an import keep a reference to their definer.
          if (definer != null) {
            resultList.add(new ImportedResolveResult(element, elementRate, definer));
          }
          else {
            resultList.poke(element, elementRate);
          }
        }
      }
      return resultList;
    }
    synchronized (myNameDefinerNegativeCache) {
      myNameDefinerNegativeCache.add(name);
    }
    return Collections.emptyList();
  }
  public long getModificationStamp() {
    return myModificationStamp;
  }
}
/** Creates a Python file using the default Python language instance. */
public PyFileImpl(FileViewProvider viewProvider) {
  this(viewProvider, PythonLanguage.getInstance());
}
/** Creates a Python file for the given language. */
public PyFileImpl(FileViewProvider viewProvider, Language language) {
  super(viewProvider, language);
  myFutureFeatures = new HashMap<>();
  myModificationTracker = PsiModificationTracker.SERVICE.getInstance(getProject());
}
/** Always the Python file type, regardless of the concrete language passed to the constructor. */
@Override
@NotNull
public FileType getFileType() {
  return PythonFileType.INSTANCE;
}
/** Debug representation: the file name prefixed with "PyFile:". */
@Override
public String toString() {
  return String.format("PyFile:%s", getName());
}
/** Returns the top-level function with the given name, or null if none matches. */
@Override
public PyFunction findTopLevelFunction(@NotNull String name) {
  return findByName(name, getTopLevelFunctions());
}
/** Returns the top-level class with the given name, or null if none matches. */
@Override
public PyClass findTopLevelClass(@NotNull String name) {
  return findByName(name, getTopLevelClasses());
}
/** Returns the top-level attribute (target expression) with the given name, or null. */
@Override
public PyTargetExpression findTopLevelAttribute(@NotNull String name) {
  return findByName(name, getTopLevelAttributes());
}
/** Returns the first element of {@code namedElements} whose name equals {@code name}, or null. */
@Nullable
private static <T extends PsiNamedElement> T findByName(@NotNull String name, @NotNull List<T> namedElements) {
  for (Iterator<T> it = namedElements.iterator(); it.hasNext(); ) {
    final T candidate = it.next();
    if (name.equals(candidate.getName())) {
      return candidate;
    }
  }
  return null;
}
/**
 * Determines the Python language level of this file. A copy delegates to the file it was copied
 * from; otherwise the level is looked up for the underlying virtual file, falling back to the
 * indexing-time virtual file and finally the view provider's virtual file.
 */
@Override
public LanguageLevel getLanguageLevel() {
  if (myOriginalFile != null) {
    PsiFile originalPythonFile = myOriginalFile;
    // myOriginalFile could be an instance of base language
    // see PostfixLiveTemplate#copyFile
    if (myOriginalFile.getViewProvider() instanceof TemplateLanguageFileViewProvider) {
      originalPythonFile = myOriginalFile.getViewProvider().getPsi(PythonLanguage.getInstance());
    }
    if (originalPythonFile instanceof PyFile) {
      return ((PyFile)originalPythonFile).getLanguageLevel();
    }
  }
  VirtualFile virtualFile = getVirtualFile();
  if (virtualFile == null) {
    // During indexing the PSI may have no virtual file of its own yet.
    virtualFile = getUserData(IndexingDataKeys.VIRTUAL_FILE);
  }
  if (virtualFile == null) {
    virtualFile = getViewProvider().getVirtualFile();
  }
  return PythonLanguageLevelPusher.getLanguageLevelForVirtualFile(getProject(), virtualFile);
}
/** File icon is always the Python file-type icon (package icon is handled by getPresentation). */
@Override
public Icon getIcon(int flags) {
  return PythonFileType.INSTANCE.getIcon();
}
/**
 * Dispatches the visitor to {@link PyElementVisitor#visitPyFile} when applicable; visitors
 * filtered out by {@link #isAcceptedFor} are ignored entirely.
 */
@Override
public void accept(@NotNull PsiElementVisitor visitor) {
  if (!isAcceptedFor(visitor.getClass())) {
    return;
  }
  if (visitor instanceof PyElementVisitor) {
    ((PyElementVisitor)visitor).visitPyFile(this);
  }
  else {
    super.accept(visitor);
  }
}
//
//@Override
//public PsiElement getNavigationElement() {
// final PsiElement element = PyiUtil.getOriginalElement(this);
// return element != null ? element : super.getNavigationElement();
//}
/**
 * Checks every {@link PythonVisitorFilter} registered for the languages of this file's view
 * provider; the visitor class is accepted only if no filter rejects it.
 */
public boolean isAcceptedFor(@NotNull Class visitorClass) {
  for (Language language : getViewProvider().getLanguages()) {
    for (PythonVisitorFilter filter : PythonVisitorFilter.INSTANCE.allForLanguage(language)) {
      if (!filter.isSupported(visitorClass, this)) {
        return false;
      }
    }
  }
  return true;
}
private final Key<Set<PyFile>> PROCESSED_FILES = Key.create("PyFileImpl.processDeclarations.processedFiles");
/**
 * Feeds this file's top-level declarations (classes, functions, attributes, imports) to
 * {@code processor}. When the module defines {@code __all__}, any listed name not produced by an
 * actual declaration is offered at the end as a light synthetic element. Files already visited in
 * this resolve session are skipped via the {@code PROCESSED_FILES} key to avoid import cycles.
 */
@Override
public boolean processDeclarations(@NotNull final PsiScopeProcessor processor,
                                   @NotNull ResolveState resolveState,
                                   PsiElement lastParent,
                                   @NotNull PsiElement place) {
  final List<String> dunderAll = getDunderAll();
  final List<String> remainingDunderAll = dunderAll == null ? null : new ArrayList<>(dunderAll);
  // Wrapper crosses off every __all__ entry that an actual declaration accounts for.
  PsiScopeProcessor wrapper = new DelegatingScopeProcessor(processor) {
    @Override
    public boolean execute(@NotNull PsiElement element, @NotNull ResolveState state) {
      if (!super.execute(element, state)) return false;
      if (remainingDunderAll != null && element instanceof PyElement) {
        remainingDunderAll.remove(((PyElement)element).getName());
      }
      return true;
    }
  };
  Set<PyFile> pyFiles = resolveState.get(PROCESSED_FILES);
  if (pyFiles == null) {
    pyFiles = new HashSet<>();
    resolveState = resolveState.put(PROCESSED_FILES, pyFiles);
  }
  // Already processed in this session (e.g. mutually importing modules): nothing more to offer.
  if (pyFiles.contains(this)) return true;
  pyFiles.add(this);
  for (PyClass c : getTopLevelClasses()) {
    if (c == lastParent) continue;
    if (!wrapper.execute(c, resolveState)) return false;
  }
  for (PyFunction f : getTopLevelFunctions()) {
    if (f == lastParent) continue;
    if (!wrapper.execute(f, resolveState)) return false;
  }
  for (PyTargetExpression e : getTopLevelAttributes()) {
    if (e == lastParent) continue;
    if (!wrapper.execute(e, resolveState)) return false;
  }
  for (PyImportElement e : getImportTargets()) {
    if (e == lastParent) continue;
    if (!wrapper.execute(e, resolveState)) return false;
  }
  for (PyFromImportStatement e : getFromImports()) {
    if (e == lastParent) continue;
    // from-imports delegate to their own processDeclarations (star imports etc.).
    if (!e.processDeclarations(wrapper, resolveState, null, this)) return false;
  }
  if (remainingDunderAll != null) {
    for (String s : remainingDunderAll) {
      // Skip __all__ entries that are not valid identifiers.
      if (!PyNames.isIdentifier(s)) {
        continue;
      }
      if (!processor.execute(new LightNamedElement(myManager, PythonLanguage.getInstance(), s), resolveState)) return false;
    }
  }
  return true;
}
/** Returns the top-level statements of this file, in source order. */
@Override
public List<PyStatement> getStatements() {
  final List<PyStatement> result = new ArrayList<>();
  for (PsiElement child : getChildren()) {
    if (child instanceof PyStatement) {
      result.add((PyStatement)child);
    }
  }
  return result;
}
/** All top-level class declarations, taken from the stub when one is available. */
@Override
@NotNull
public List<PyClass> getTopLevelClasses() {
  return PyPsiUtils.collectStubChildren(this, getStub(), PyElementTypes.CLASS_DECLARATION);
}
/** All top-level function declarations, taken from the stub when one is available. */
@NotNull
@Override
public List<PyFunction> getTopLevelFunctions() {
  return PyPsiUtils.collectStubChildren(this, getStub(), PyElementTypes.FUNCTION_DECLARATION);
}
/** All top-level target expressions (module attributes), from the stub when available. */
@Override
public List<PyTargetExpression> getTopLevelAttributes() {
  return PyPsiUtils.collectStubChildren(this, getStub(), PyElementTypes.TARGET_EXPRESSION);
}
/**
 * Finds the element exported under {@code name}: for imported names the import definer is
 * preferred, otherwise only elements declared in this file are kept; of several candidates the
 * last one wins.
 *
 * @throws PsiInvalidElementAccessException if the winning element is no longer valid
 */
@Override
@Nullable
public PsiElement findExportedName(final String name) {
  final List<RatedResolveResult> results = multiResolveName(name);
  final List<PsiElement> elements = new ArrayList<>();
  for (RatedResolveResult result : results) {
    final PsiElement element = result.getElement();
    final ImportedResolveResult importedResult = PyUtil.as(result, ImportedResolveResult.class);
    if (importedResult != null) {
      final PyImportedNameDefiner definer = importedResult.getDefiner();
      if (definer != null) {
        elements.add(definer);
      }
    }
    else if (element != null && element.getContainingFile() == this) {
      elements.add(element);
    }
  }
  final PsiElement element = elements.isEmpty() ? null : elements.get(elements.size() - 1);
  if (element != null && !element.isValid()) {
    throw new PsiInvalidElementAccessException(element);
  }
  return element;
}
/** Resolves {@code name} in this module; equivalent to {@code multiResolveName(name, true)}. */
@NotNull
@Override
public List<RatedResolveResult> multiResolveName(@NotNull final String name) {
  return multiResolveName(name, true);
}
/**
 * Resolves {@code name} in this module via the exported-name cache (guarded against recursive
 * resolve). If that yields nothing but the name is listed in {@code __all__}, falls back to
 * resolving it as a submodule plus the {@code __all__} declaration itself, both at low rate.
 * NOTE(review): the {@code exported} flag is not consulted in this implementation — presumably
 * honored by overrides; confirm before relying on it here.
 */
@NotNull
@Override
public List<RatedResolveResult> multiResolveName(@NotNull String name, boolean exported) {
  final List<RatedResolveResult> results = RecursionManager.doPreventingRecursion(this, false,
                                                                                  () -> getExportedNameCache().multiResolve(name));
  if (results != null && !results.isEmpty()) {
    return results;
  }
  final List<String> allNames = getDunderAll();
  if (allNames != null && !name.contains(".") && allNames.contains(name)) {
    final ResolveResultList allFallbackResults = new ResolveResultList();
    PyResolveImportUtil
      .resolveModuleAt(QualifiedName.fromComponents(name), getContainingDirectory(), PyResolveImportUtil.fromFoothold(this))
      .forEach(module -> allFallbackResults.poke(module, RatedResolveResult.RATE_LOW));
    final PsiElement allElement = findExportedName(PyNames.ALL);
    allFallbackResults.poke(allElement, RatedResolveResult.RATE_LOW);
    return allFallbackResults;
  }
  return Collections.emptyList();
}
/**
 * Returns the cached exported-name snapshot, rebuilding it when the file's modification stamp has
 * changed since the cache was created. Held via a {@link SoftReference} so it can be reclaimed
 * under memory pressure.
 * NOTE(review): not synchronized — concurrent callers may each build a cache and the last write
 * wins; presumably a benign race since the result is deterministic.
 */
private ExportedNameCache getExportedNameCache() {
  ExportedNameCache cache = myExportedNameCache.get();
  final long modificationStamp = getModificationStamp();
  if (cache != null && modificationStamp != cache.getModificationStamp()) {
    myExportedNameCache.clear();
    cache = null;
  }
  if (cache == null) {
    cache = new ExportedNameCache(modificationStamp);
    myExportedNameCache = new SoftReference<>(cache);
  }
  return cache;
}
/**
 * Returns the single best element exported under {@code name}: resolves the name, filters to the
 * top-priority results, and takes the last of them.
 *
 * @throws PsiInvalidElementAccessException if that element is no longer valid
 */
@Override
@Nullable
public PsiElement getElementNamed(final String name) {
  final List<RatedResolveResult> results = multiResolveName(name);
  final List<PsiElement> elements = PyUtil.filterTopPriorityResults(results.toArray(ResolveResult.EMPTY_ARRAY));
  final PsiElement element = elements.isEmpty() ? null : elements.get(elements.size() - 1);
  if (element != null) {
    if (!element.isValid()) {
      throw new PsiInvalidElementAccessException(element);
    }
    return element;
  }
  return null;
}
/**
 * Iterates all names visible in this module (restricted to {@code __all__} when it is defined),
 * unwrapping package directories into their corresponding module element.
 */
@Override
@NotNull
public Iterable<PyElement> iterateNames() {
  final List<PyElement> result = new ArrayList<>();
  final VariantsProcessor processor = new VariantsProcessor(this) {
    @Override
    protected void addElement(@NotNull String name, @NotNull PsiElement element) {
      // Present a directory-backed package by its module element.
      element = PyUtil.turnDirIntoInit(element);
      if (element instanceof PyElement) {
        result.add((PyElement)element);
      }
    }
  };
  processor.setAllowedNames(getDunderAll());
  processDeclarations(processor, ResolveState.initial(), null, this);
  return result;
}
/** Returns every import element of the top-level "import ..." statements, one per imported name. */
@Override
@NotNull
public List<PyImportElement> getImportTargets() {
  final List<PyImportElement> result = new ArrayList<>();
  final List<PyImportStatement> statements = PyPsiUtils.collectStubChildren(this, getStub(), PyElementTypes.IMPORT_STATEMENT);
  for (PyImportStatement statement : statements) {
    Collections.addAll(result, statement.getImportElements());
  }
  return result;
}
/** All top-level "from ... import ..." statements, from the stub when available. */
@Override
@NotNull
public List<PyFromImportStatement> getFromImports() {
  return PyPsiUtils.collectStubChildren(this, getStub(), PyElementTypes.FROM_IMPORT_STATEMENT);
}
/**
 * Returns the statically evaluated contents of this module's {@code __all__}, or null when it is
 * absent or too dynamic. Served from the stub when available, otherwise computed once and
 * memoized (as an unmodifiable list).
 * NOTE(review): memoization uses two unsynchronized fields — concurrent first calls may compute
 * twice; presumably benign since the result is deterministic.
 */
@Nullable
@Override
public List<String> getDunderAll() {
  final StubElement stubElement = getStub();
  if (stubElement instanceof PyFileStub) {
    return ((PyFileStub)stubElement).getDunderAll();
  }
  if (!myDunderAllCalculated) {
    final List<String> dunderAll = calculateDunderAll();
    myDunderAll = dunderAll == null ? null : Collections.unmodifiableList(dunderAll);
    myDunderAllCalculated = true;
  }
  return myDunderAll;
}
/** Statically evaluates {@code __all__} from the AST; null when absent or not evaluable. */
@Nullable
public List<String> calculateDunderAll() {
  final DunderAllBuilder builder = new DunderAllBuilder();
  accept(builder);
  return builder.result();
}
/**
 * Visitor that statically evaluates a module's {@code __all__}. Handles direct list assignments,
 * concatenation with {@code +}, {@code __all__.append(...)}, {@code __all__.extend(...)},
 * augmented assignment, and references to other module-level string lists (the hashlib pattern).
 * Any construct that cannot be evaluated statically sets {@code myDynamic}, making the result
 * null ("unknown").
 */
private static class DunderAllBuilder extends PyRecursiveElementVisitor {
  @NotNull
  private final List<String> myResult = new ArrayList<>();
  private boolean myDynamic = false;
  private boolean myFoundDunderAll = false;
  // hashlib builds __all__ by concatenating multiple lists of strings, and we want to understand this
  @NotNull
  private final Map<String, List<String>> myDunderLike = new HashMap<>();
  @Override
  public void visitPyFile(@NotNull PyFile node) {
    // Cheap text check: skip walking files that cannot mention __all__ at all.
    if (node.getText().contains(PyNames.ALL)) {
      super.visitPyFile(node);
    }
  }
  @Override
  public void visitPyTargetExpression(@NotNull PyTargetExpression node) {
    if (myDynamic) return;
    if (PyNames.ALL.equals(node.getName())) {
      myFoundDunderAll = true;
      final PyExpression value = node.findAssignedValue();
      if (value instanceof PyBinaryExpression) {
        final PyBinaryExpression binaryExpression = (PyBinaryExpression)value;
        if (binaryExpression.isOperator("+")) {
          processSubList(getStringListFromValue(binaryExpression.getLeftExpression()));
          processSubList(getStringListFromValue(binaryExpression.getRightExpression()));
        }
        else {
          // Only list concatenation is understood; any other operator is dynamic.
          myDynamic = true;
        }
      }
      else {
        processSubList(getStringListFromValue(value));
      }
    }
    if (!myFoundDunderAll) {
      // Remember other module-level string lists: __all__ may later be built from them.
      final List<String> names = getStringListFromValue(node.findAssignedValue());
      if (names != null) {
        myDunderLike.put(node.getName(), names);
      }
    }
  }
  @Override
  public void visitPyAugAssignmentStatement(@NotNull PyAugAssignmentStatement node) {
    if (myDynamic || !myFoundDunderAll) return;
    if (PyNames.ALL.equals(node.getTarget().getName())) {
      processSubList(getStringListFromValue(node.getValue()));
    }
  }
  @Override
  public void visitPyCallExpression(@NotNull PyCallExpression node) {
    if (myDynamic || !myFoundDunderAll) return;
    final PyExpression callee = node.getCallee();
    if (callee instanceof PyQualifiedExpression) {
      final PyExpression qualifier = ((PyQualifiedExpression)callee).getQualifier();
      if (qualifier != null && PyNames.ALL.equals(qualifier.getText())) {
        final String calleeName = callee.getName();
        if (PyNames.APPEND.equals(calleeName)) {
          final PyStringLiteralExpression argument = node.getArgument(0, PyStringLiteralExpression.class);
          if (argument != null) {
            myResult.add(argument.getStringValue());
            return;
          }
        }
        else if (PyNames.EXTEND.equals(calleeName)) {
          final PyExpression argument = node.getArgument(0, PyExpression.class);
          processSubList(getStringListFromValue(argument));
          return;
        }
        // Any other method call on __all__ makes the result dynamic.
        myDynamic = true;
      }
    }
  }
  @Override
  public void visitPyClass(@NotNull PyClass node) {
    // Deliberately empty: do not descend into classes; only module-level statements matter.
  }
  @Nullable
  List<String> result() {
    return myDynamic || !myFoundDunderAll ? null : myResult;
  }
  // A null sub-list means it could not be evaluated statically -> the whole __all__ is dynamic.
  private void processSubList(@Nullable List<String> list) {
    if (list == null) {
      myDynamic = true;
    }
    else {
      myResult.addAll(list);
    }
  }
  @Nullable
  private List<String> getStringListFromValue(@Nullable PyExpression expression) {
    // An unqualified reference may point to a previously recorded module-level string list.
    if (expression instanceof PyReferenceExpression && !((PyReferenceExpression)expression).isQualified()) {
      return myDunderLike.get(((PyReferenceExpression)expression).getReferencedName());
    }
    return PyUtil.strListValue(expression);
  }
}
/**
 * Whether this module declares {@code from __future__ import <feature>}. Answered from the stub's
 * feature bit set when available; otherwise computed and memoized per feature.
 */
@Override
public boolean hasImportFromFuture(FutureFeature feature) {
  final StubElement stub = getStub();
  if (stub instanceof PyFileStub) {
    return ((PyFileStub)stub).getFutureFeatures().get(feature.ordinal());
  }
  Boolean enabled = myFutureFeatures.get(feature);
  if (enabled == null) {
    enabled = calculateImportFromFuture(feature);
    myFutureFeatures.put(feature, enabled);
    // NOTE: ^^^ not synchronized. if two threads will try to modify this, both can only be expected to set the same value.
  }
  return enabled;
}
/** Module deprecation message from the stub if available, otherwise extracted from the AST. */
@Override
public String getDeprecationMessage() {
  final StubElement stub = getStub();
  if (stub instanceof PyFileStub) {
    return ((PyFileStub)stub).getDeprecationMessage();
  }
  return extractDeprecationMessage();
}
/**
 * Returns the leading run of import statements of this file. {@code __future__} imports are
 * included only when no module-level dunder assignment separates them from the remaining imports
 * (see inline comments); collection stops at the first non-import statement.
 */
@Override
public List<PyImportStatementBase> getImportBlock() {
  final List<PyImportStatementBase> result = new ArrayList<>();
  final PsiElement firstChild = getFirstChild();
  PsiElement currentStatement;
  if (firstChild instanceof PyImportStatementBase) {
    currentStatement = firstChild;
  }
  else {
    currentStatement = PsiTreeUtil.getNextSiblingOfType(firstChild, PyImportStatementBase.class);
  }
  if (currentStatement != null) {
    // skip imports from future before module level dunders
    final List<PyImportStatementBase> fromFuture = new ArrayList<>();
    while (currentStatement instanceof PyFromImportStatement && ((PyFromImportStatement)currentStatement).isFromFuture()) {
      fromFuture.add((PyImportStatementBase)currentStatement);
      currentStatement = PyPsiUtils.getNextNonCommentSibling(currentStatement, true);
    }
    // skip module level dunders
    boolean hasModuleLevelDunders = false;
    while (AddImportHelper.isAssignmentToModuleLevelDunderName(currentStatement)) {
      hasModuleLevelDunders = true;
      currentStatement = PyPsiUtils.getNextNonCommentSibling(currentStatement, true);
    }
    // if there is an import from future and a module level-dunder between it and other imports,
    // this import is not considered a part of the import block to avoid problems with "Optimize imports" and foldings
    if (!hasModuleLevelDunders) {
      result.addAll(fromFuture);
    }
    // collect imports
    while (currentStatement instanceof PyImportStatementBase) {
      result.add((PyImportStatementBase)currentStatement);
      currentStatement = PyPsiUtils.getNextNonCommentSibling(currentStatement, true);
    }
  }
  return result;
}
/**
 * Extracts the module-level deprecation message from the AST, or null. A cheap text scan for the
 * warning class names avoids walking statements of files that cannot be deprecated.
 */
public String extractDeprecationMessage() {
  return canHaveDeprecationMessage(getText())
         ? PyFunctionImpl.extractDeprecationMessage(getStatements())
         : null;
}
// Cheap textual pre-filter: a deprecation can only exist if one of the warning class names occurs.
private static boolean canHaveDeprecationMessage(String text) {
  return text.contains(PyNames.DEPRECATION_WARNING) || text.contains(PyNames.PENDING_DEPRECATION_WARNING);
}
/**
 * Computes whether this file declares {@code from __future__ import <feature>}. A cheap text
 * scan for the feature name is done first to avoid walking the imports of unrelated files.
 */
public boolean calculateImportFromFuture(FutureFeature feature) {
  final String featureName = feature.toString();
  if (!getText().contains(featureName)) {
    return false;
  }
  for (PyFromImportStatement statement : getFromImports()) {
    if (!statement.isFromFuture()) {
      continue;
    }
    for (PyImportElement importElement : statement.getImportElements()) {
      final QualifiedName qName = importElement.getImportedQName();
      if (qName != null && qName.matches(featureName)) {
        return true;
      }
    }
  }
  return false;
}
/**
 * The type of this file as an expression is its module type.
 * NOTE(review): lazy init of the volatile {@code myType} is unsynchronized — two threads may each
 * create a PyModuleType and the last write wins; looks intentional/benign.
 */
@Nullable
@Override
public PyType getType(@NotNull TypeEvalContext context, @NotNull TypeEvalContext.Key key) {
  if (myType == null) myType = new PyModuleType(this);
  return myType;
}
/** The module docstring's string value, or null. */
@Nullable
@Override
public String getDocStringValue() {
  return DocStringUtil.getDocStringValue(this);
}
/** The parsed (structured) module docstring, or null. */
@Nullable
@Override
public StructuredDocString getStructuredDocString() {
  return DocStringUtil.getStructuredDocString(this);
}
/** The PSI expression holding the module docstring, or null. */
@Nullable
@Override
public PyStringLiteralExpression getDocStringExpression() {
  return DocStringUtil.findDocStringExpression(this);
}
/** Drops all memoized data: control flow, __all__, future-import flags, exported-name cache. */
@Override
public void clearCaches() {
  super.clearCaches();
  ControlFlowCache.clear(this);
  myDunderAllCalculated = false;
  myFutureFeatures.clear(); // probably no need to synchronize
  myExportedNameCache.clear();
}
/** Deletes the file, then cleans up compiled counterparts for the old path via PyUtil.deletePycFiles. */
@Override
public void delete() throws IncorrectOperationException {
  // Capture the path before deletion; it is needed for the cleanup afterwards.
  String path = getVirtualFile().getPath();
  super.delete();
  PyUtil.deletePycFiles(path);
}
/** Renames the file, then cleans up compiled counterparts for the old path via PyUtil.deletePycFiles. */
@Override
public PsiElement setName(@NotNull String name) throws IncorrectOperationException {
  // Capture the old path before the rename; it is needed for the cleanup afterwards.
  String path = getVirtualFile().getPath();
  final PsiElement newElement = super.setName(name);
  PyUtil.deletePycFiles(path);
  return newElement;
}
/** Navigation UI presentation: module name, a bracketed location string, and package/file icon. */
@Override
public ItemPresentation getPresentation() {
  return new ItemPresentation() {
    @Override
    public String getPresentableText() {
      return getModuleName(PyFileImpl.this);
    }
    @Override
    public String getLocationString() {
      final String name = getLocationName();
      return name != null ? "(" + name + ")" : null;
    }
    @Override
    public Icon getIcon(final boolean open) {
      if (PyUtil.isPackage(PyFileImpl.this)) {
        return AllIcons.Nodes.Package;
      }
      return PyFileImpl.this.getIcon(0);
    }
    // Packages are presented by their directory name, modules by file name without extension.
    @NotNull
    private String getModuleName(@NotNull PyFile file) {
      if (PyUtil.isPackage(file)) {
        final PsiDirectory dir = file.getContainingDirectory();
        if (dir != null) {
          return dir.getName();
        }
      }
      return FileUtilRt.getNameWithoutExtension(file.getName());
    }
    // Location preference: importable qualified-name prefix, then content-root-relative path,
    // then the parent directory's presentable URL.
    @Nullable
    private String getLocationName() {
      final QualifiedName name = QualifiedNameFinder.findShortestImportableQName(PyFileImpl.this);
      if (name != null) {
        final QualifiedName prefix = name.removeTail(1);
        if (prefix.getComponentCount() > 0) {
          return prefix.toString();
        }
      }
      final String relativePath = getRelativeContainerPath();
      if (relativePath != null) {
        return relativePath;
      }
      final PsiDirectory psiDirectory = getParent();
      if (psiDirectory != null) {
        return psiDirectory.getVirtualFile().getPresentableUrl();
      }
      return null;
    }
    // Path of the containing directory relative to the parent of the content root, or null.
    @Nullable
    private String getRelativeContainerPath() {
      final PsiDirectory psiDirectory = getParent();
      if (psiDirectory != null) {
        final VirtualFile virtualFile = getVirtualFile();
        if (virtualFile != null) {
          final VirtualFile root = ProjectFileIndex.SERVICE.getInstance(getProject()).getContentRootForFile(virtualFile);
          if (root != null) {
            final VirtualFile parent = virtualFile.getParent();
            final VirtualFile rootParent = root.getParent();
            if (rootParent != null && parent != null) {
              return VfsUtilCore.getRelativePath(parent, rootParent, File.separatorChar);
            }
          }
        }
      }
      return null;
    }
  };
}
}
| |
package net.smartrover.skyrover.services;
import java.util.ArrayList;
import android.app.Application;
import android.content.Intent;
import android.os.Handler;
import android.os.Message;
import android.support.v4.content.LocalBroadcastManager;
import net.smartrover.skyrover.services.ConnectStateManager;
import net.smartrover.skyrover.util.DebugHandler;
/**
* @author OROCA SmartRover Team
* @since 0.1
*/
public class IpcProxy {
private final static String TAG = "IpcProxy";
private Application appContext = null;
private String[] navData = null;
private VIConfig viConfig = null;
private LocalBroadcastManager mLocalBroadcastManager = null;
private Intent connectStateIntent = null;
private ArrayList<OnRecordCompleteListener> mOnRecordCompleteListeners = new ArrayList<OnRecordCompleteListener>();
private Handler mHandler = null;
private int mDecMode = 0;
public final static String ACTION_DECODEMODE_CHANGED = "action_decodeMode_changed";
public final static String ACTION_CONNECT_QUALITY_CHANGED = "action_connect_quality_changed";
public final static String EXTRA_DECODE_MODE = "decode_mode";
public final static String EXTRA_CONNECT_QUALITY = "connect_quality";
public final static int MESSAGE_DECODEMODE_CHANGED = 5656;
public final static int MESSAGE_RECORD_COMPLETED = 1001;
public final static int MESSAGE_CONNECT_QUALITY_CHANGEBAD = 1002;
public final static int MESSAGE_CONNECT_QUALITY_CHANGEGOOD = 1003;
public final static int DEFAULT_DECODE_MODE = 1; //default mode is soft
public final static int DEFAULT_PREVIEW_RESOLUTION_WIDTH = 1280;
public final static int DEFAULT_PREVIEW_RESOLUTION_HEIGHT = 720;
public final static int DEFAULT_PREVIEW_BITRATECONTROL = 1; //1 fps 2 quality
/**
 * Creates the proxy and wires it to the local broadcast infrastructure.
 *
 * @param app application used to obtain the LocalBroadcastManager instance
 */
public IpcProxy(Application app) {
    // navData = new NavData();
    viConfig = new VIConfig();
    mLocalBroadcastManager = LocalBroadcastManager.getInstance(app);
    connectStateIntent = new Intent(
            ConnectStateManager.ACTION_CONNECT_STATE_CHANGED);
    mHandler = new CallbackHandler();
}
private void setAppContext(Application app) {
appContext = app;
}
public void doConnect() {
connect(null);
}
public void doPause() {
pause();
}
public void doResume() {
resume();
}
public void doDisconnect() {
disconnect();
}
public void doTriggerTakeOff() {
triggerTakeOff();
}
public void doStartPreview() {
startPreview();
}
public void doStopPreview() {
stopPreview();
}
public void doOnSizeChanged(int width, int height) {
onSizeChange(width, height);
}
public void doTakePhoto(String destDir, String name, boolean containGPS) {
takePhoto(destDir, name, containGPS);
}
public void doStartRecord(String destDir, String cacheDir, String name,
boolean containGPS) {
startRecord(destDir, cacheDir, name, containGPS);
}
public void doStopRecord() {
stopRecord();
}
public String[] getNavData() {
return navData;
}
public void doUpdateNavData() {
navData = takeNavDataSnapshot(null);
}
public void doSendMessage2Server(String[] key, String[] value) {
sendMessage2Server(key, value);
}
public void doUpdateVIConfig() {
viConfig = takeConfigSnapshot(viConfig);
}
public VIConfig getVIConfig() {
return viConfig;
}
public String doGetConfigItem(String configName) {
return getConfigItem(configName);
}
public void doSetConfigItem(String configName, String value) {
setConfigItem(configName, value);
}
public void doResetConfig() {
resetConfig();
}
public void setIpcDecMode(int decmode) {
setDecodeStrategy(decmode);
}
public void onRecordComplete(boolean isSuccess) {
if (mOnRecordCompleteListeners == null
|| mOnRecordCompleteListeners.size() == 0)
return;
for (int i = 0; i < mOnRecordCompleteListeners.size(); i++) {
mOnRecordCompleteListeners.get(i).onRecordComplete(isSuccess);
}
}
public static interface OnRecordCompleteListener {
public void onRecordComplete(boolean isSuccess);
}
public void addOnRecordCompleteListener(OnRecordCompleteListener lis) {
mOnRecordCompleteListeners.add(lis);
}
public void removeOnRecordCompleteListener(OnRecordCompleteListener lis) {
mOnRecordCompleteListeners.remove(lis);
}
/**
* ------------------------------------------------------------------------
* -- native api function
*/
// api for connect to device
public native void connect(String address);
public native void setDecodeStrategy(int strategy);
public native void pause();
public native void resume();
public native void disconnect();
public native void triggerTakeOff();
public void ipcConnectStateChanged(int state, String info) {
connectStateIntent.putExtra(ConnectStateManager.EXTRA_STATE, state);
connectStateIntent.putExtra(ConnectStateManager.EXTRA_INFO, info);
mLocalBroadcastManager.sendBroadcast(connectStateIntent);
}
// api for media
public native void startPreview();
public native void stopPreview();
public native void setPreviewResolution(int width,int height);
public native void setBitrateControlType(int type);
public void ipcPreviewStateChanged(int state, String info) {
}
public native void onSizeChange(int width, int height);
public native void takePhoto(String destDir, String name, boolean containGPS);
public native void startRecord(String destDir, String cacheDir,
String name, boolean containGPS);
public native void stopRecord();
public void ipcRecordVideoStateChanged(int state, String info) {
}
// api for getting IPCDevice function state
public native String[] takeNavDataSnapshot(NavData navData);
public native void sendMessage2Server(String[] keys, String[] value);
public void ipcFunctionStateChanged(NavData newNavData) {
}
// api for getting/setting IPCDevice configuration
public native VIConfig takeConfigSnapshot(VIConfig config);
public native String getConfigItem(String configName);
public native void setConfigItem(String configName, String value);
public native void resetConfig();
public void ipcConfigChanged(String configName, String value, String info) {
}
public void sendMsgByJni(int cmd, String msg) {
DebugHandler.logd(TAG, "sendMsgByJni: " + cmd);
Message msg2 = new Message();
msg2.what = cmd;
msg2.obj = msg;
mHandler.sendMessage(msg2);
}
private class CallbackHandler extends Handler {
@Override
public void handleMessage(Message msg) {
switch (msg.what) {
case MESSAGE_DECODEMODE_CHANGED: {
Intent intent = new Intent(ACTION_DECODEMODE_CHANGED);
intent.putExtra(EXTRA_DECODE_MODE, (String) msg.obj);
mLocalBroadcastManager.sendBroadcast(intent);
break;
}
case MESSAGE_RECORD_COMPLETED: {
int success = (Integer) msg.obj;
onRecordComplete(success == 0 ? false : true);
break;
}
case MESSAGE_CONNECT_QUALITY_CHANGEBAD:{
Intent intent = new Intent(ACTION_CONNECT_QUALITY_CHANGED);
intent.putExtra(EXTRA_CONNECT_QUALITY, -1);
mLocalBroadcastManager.sendBroadcast(intent);
break;
}
case MESSAGE_CONNECT_QUALITY_CHANGEGOOD:{
Intent intent = new Intent(ACTION_CONNECT_QUALITY_CHANGED);
intent.putExtra(EXTRA_CONNECT_QUALITY, 1);
mLocalBroadcastManager.sendBroadcast(intent);
break;
}
}
}
};
}
| |
import java.awt.Dimension;
import java.net.URL;
import java.util.Optional;
import java.util.ResourceBundle;
import javax.imageio.ImageIO;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NodeList;
import org.w3c.dom.events.EventListener;
import org.w3c.dom.events.EventTarget;
import javafx.beans.value.ChangeListener;
import javafx.beans.value.ObservableValue;
import javafx.concurrent.Worker;
import javafx.concurrent.Worker.State;
import javafx.event.ActionEvent;
import javafx.event.EventHandler;
import javafx.scene.Node;
import javafx.scene.Scene;
import javafx.scene.control.Alert;
import javafx.scene.control.Alert.AlertType;
import javafx.scene.control.Button;
import javafx.scene.control.ComboBox;
import javafx.scene.control.Label;
import javafx.scene.control.TextField;
import javafx.scene.control.TextInputDialog;
import javafx.scene.image.Image;
import javafx.scene.image.ImageView;
import javafx.scene.layout.BorderPane;
import javafx.scene.layout.HBox;
import javafx.scene.layout.VBox;
import javafx.scene.web.WebView;
/**
* A class used to display the viewer for a simple HTML browser.
*
* See this tutorial for help on how to use the variety of components:
* http://download.oracle.com/otndocs/products/javafx/2/samples/Ensemble/
*
* @author Owen Astrachan
* @author Marcin Dobosz
* @author Yuzhang Han
* @author Edwin Ward
* @author Robert C. Duvall
*/
public class BrowserView {
    // constants
    public static final Dimension DEFAULT_SIZE = new Dimension(800, 600);
    public static final String DEFAULT_RESOURCE_PACKAGE = "resources/";
    public static final String STYLESHEET = "default.css";
    // must be non-empty or the status label will not be displayed in the GUI
    public static final String BLANK = " ";
    // scene, needed to report back to Application
    private Scene myScene;
    // web page
    private WebView myPage;
    // information area
    private Label myStatus;
    // navigation
    private TextField myURLDisplay;
    private Button myBackButton;
    private Button myNextButton;
    private Button myHomeButton;
    private Button myFavoriteButton;
    // favorites
    private ComboBox<String> myFavorites;
    // get strings from resource file
    private ResourceBundle myResources;
    // the data
    private BrowserModel myModel;

    /**
     * Create a view of the given model of a web browser.
     *
     * @param model the browser model holding history and favorites
     * @param language suffix of the resource bundle to load labels from
     */
    public BrowserView (BrowserModel model, String language) {
        myModel = model;
        // use resources for labels
        myResources = ResourceBundle.getBundle(DEFAULT_RESOURCE_PACKAGE + language);
        BorderPane root = new BorderPane();
        // must be first since other panels may refer to page
        root.setCenter(makePageDisplay());
        root.setTop(makeInputPanel());
        root.setBottom(makeInformationPanel());
        // control the navigation
        enableButtons();
        // create scene to hold UI
        myScene = new Scene(root, DEFAULT_SIZE.width, DEFAULT_SIZE.height);
        myScene.getStylesheets().add(DEFAULT_RESOURCE_PACKAGE + STYLESHEET);
    }

    /**
     * Display given URL, or an error dialog if the model cannot resolve it.
     */
    public void showPage (String url) {
        URL valid = myModel.go(url);
        // BUG FIX: previously tested "url != null", so a failed lookup (valid == null)
        // fell into update() and threw a NullPointerException; test the model's result.
        if (valid != null) {
            update(valid);
        }
        else {
            showError("Could not load " + url);
        }
    }

    /**
     * Returns scene for this view so it can be added to stage.
     */
    public Scene getScene () {
        return myScene;
    }

    /**
     * Display given message as information in the GUI.
     */
    public void showStatus (String message) {
        myStatus.setText(message);
    }

    /**
     * Display given message as an error in the GUI.
     */
    public void showError (String message) {
        Alert alert = new Alert(AlertType.ERROR);
        alert.setTitle(myResources.getString("ErrorTitle"));
        alert.setContentText(message);
        alert.showAndWait();
    }

    // move to the next URL in the history
    private void next () {
        update(myModel.next());
    }

    // move to the previous URL in the history
    private void back () {
        update(myModel.back());
    }

    // change current URL to the home page, if set
    private void home () {
        showPage(myModel.getHome().toString());
    }

    // change page to favorite choice
    // NOTE(review): assumes the model returns a non-null URL for every combo-box entry
    private void showFavorite (String favorite) {
        showPage(myModel.getFavorite(favorite).toString());
    }

    // update just the view to display given URL
    private void update (URL url) {
        myPage.getEngine().load(url.toString());
        myURLDisplay.setText(url.toString());
        enableButtons();
    }

    // prompt user for name of favorite to add to collection
    private void addFavorite () {
        TextInputDialog input = new TextInputDialog("");
        input.setTitle(myResources.getString("FavoritePromptTitle"));
        input.setContentText(myResources.getString("FavoritePrompt"));
        Optional<String> response = input.showAndWait();
        // did user make a choice?
        if (response.isPresent()) {
            myModel.addFavorite(response.get());
            myFavorites.getItems().add(response.get());
        }
    }

    // only enable buttons when useful to user
    private void enableButtons () {
        myBackButton.setDisable(! myModel.hasPrevious());
        myNextButton.setDisable(! myModel.hasNext());
        myHomeButton.setDisable(myModel.getHome() == null);
    }

    // convenience method to create HTML page display
    private Node makePageDisplay () {
        myPage = new WebView();
        // catch "browsing" events within web page
        myPage.getEngine().getLoadWorker().stateProperty().addListener(new LinkListener());
        return myPage;
    }

    // organize user's options for controlling/giving input to model
    private Node makeInputPanel () {
        VBox result = new VBox();
        result.getChildren().addAll(makeNavigationPanel(), makePreferencesPanel());
        return result;
    }

    // make the panel where "would-be" clicked URL is displayed
    private Node makeInformationPanel () {
        // BLANK must be non-empty or status label will not be displayed in GUI
        myStatus = new Label(BLANK);
        return myStatus;
    }

    // make user-entered URL/text field and back/next buttons
    private Node makeNavigationPanel () {
        HBox result = new HBox();
        // create buttons, with their associated actions
        // old style way to do set up callback (anonymous class)
        myBackButton = makeButton("BackCommand", new EventHandler<ActionEvent>() {
            @Override
            public void handle (ActionEvent event) {
                back();
            }
        });
        result.getChildren().add(myBackButton);
        // new style way to do set up callback (lambdas)
        myNextButton = makeButton("NextCommand", event -> next());
        result.getChildren().add(myNextButton);
        // if user presses button or enter in text field, load/show the URL
        EventHandler<ActionEvent> showHandler = new ShowPage();
        result.getChildren().add(makeButton("GoCommand", showHandler));
        myURLDisplay = makeInputField(40, showHandler);
        result.getChildren().add(myURLDisplay);
        myHomeButton = makeButton("HomeCommand", event -> home());
        result.getChildren().add(myHomeButton);
        myFavoriteButton = makeButton("AddFavoriteCommand", event -> addFavorite());
        result.getChildren().add(myFavoriteButton);
        myFavorites = new ComboBox<String>();
        // selecting a favorite loads it immediately
        myFavorites.valueProperty().addListener(new ChangeListener<String>() {
            @Override public void changed(ObservableValue<? extends String> observable,
                                          String oldValue,
                                          String newValue){
                showFavorite(newValue);
            }
        });
        result.getChildren().add(myFavorites);
        return result;
    }

    // make buttons for setting favorites/home URLs
    private Node makePreferencesPanel () {
        HBox result = new HBox();
        result.getChildren().add(makeButton("SetHomeCommand", event -> {
            myModel.setHome();
            enableButtons();
        }));
        return result;
    }

    // makes a button using either an image or a label
    private Button makeButton (String property, EventHandler<ActionEvent> handler) {
        // represent all supported image suffixes
        final String IMAGEFILE_SUFFIXES =
                String.format(".*\\.(%s)", String.join("|", ImageIO.getReaderFileSuffixes()));
        Button result = new Button();
        String label = myResources.getString(property);
        if (label.matches(IMAGEFILE_SUFFIXES)) {
            result.setGraphic(new ImageView(
                    new Image(getClass().getResourceAsStream(DEFAULT_RESOURCE_PACKAGE + label))));
        } else {
            result.setText(label);
        }
        result.setOnAction(handler);
        return result;
    }

    // make text field for input
    private TextField makeInputField (int width, EventHandler<ActionEvent> handler) {
        TextField result = new TextField();
        result.setPrefColumnCount(width);
        result.setOnAction(handler);
        return result;
    }

    // display page
    // very old style way create a callback (inner class)
    private class ShowPage implements EventHandler<ActionEvent> {
        @Override
        public void handle (ActionEvent event) {
            showPage(myURLDisplay.getText());
        }
    }

    // Inner class to deal with link-clicks and mouse-overs Mostly taken from
    // http://blogs.kiyut.com/tonny/2013/07/30/javafx-webview-addhyperlinklistener/
    private class LinkListener implements ChangeListener<State> {
        public static final String EVENT_CLICK = "click";
        public static final String EVENT_MOUSEOVER = "mouseover";
        public static final String EVENT_MOUSEOUT = "mouseout";

        @Override
        public void changed (ObservableValue<? extends State> ov, State oldState, State newState) {
            if (newState == Worker.State.SUCCEEDED) {
                // attach a DOM listener to every anchor once the page finishes loading
                EventListener listener = event -> {
                    final String href = ((Element)event.getTarget()).getAttribute("href");
                    if (href != null) {
                        String domEventType = event.getType();
                        if (domEventType.equals(EVENT_CLICK)) {
                            showPage(href);
                        } else if (domEventType.equals(EVENT_MOUSEOVER)) {
                            showStatus(href);
                        } else if (domEventType.equals(EVENT_MOUSEOUT)) {
                            showStatus(BLANK);
                        }
                    }
                };
                Document doc = myPage.getEngine().getDocument();
                NodeList nodes = doc.getElementsByTagName("a");
                for (int i = 0; i < nodes.getLength(); i++) {
                    EventTarget node = (EventTarget)nodes.item(i);
                    node.addEventListener(EVENT_CLICK, listener, false);
                    node.addEventListener(EVENT_MOUSEOVER, listener, false);
                    node.addEventListener(EVENT_MOUSEOUT, listener, false);
                }
            }
        }
    }
}
| |
/*
* Copyright 2016, Leanplum, Inc. All rights reserved.
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.leanplum.internal;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
/**
* Helper class to easily create new list, map or set objects containing provided parameters.
*
* @author Ben Marten
*/
public class CollectionUtil {
  /**
   * Creates a new ArrayList and adds the passed arguments to it.
   *
   * @param items The items to add to the list; a null array yields an empty list.
   * @param <T> The type of the list to be created.
   * @return A typed list that contains the passed arguments.
   */
  @SafeVarargs
  public static <T> ArrayList<T> newArrayList(T... items) {
    ArrayList<T> result = new ArrayList<>((items != null) ? items.length : 0);
    if (items != null) {
      Collections.addAll(result, items);
    }
    return result;
  }

  /**
   * Creates a new HashSet and adds the passed arguments to it.
   *
   * @param items The items to add to the set; a null array yields an empty set.
   * @param <T> The type of the set to be created.
   * @return A typed set that contains the passed arguments.
   */
  @SafeVarargs
  static <T> HashSet<T> newHashSet(T... items) {
    HashSet<T> result = new HashSet<>((items != null) ? items.length : 0);
    if (items != null) {
      Collections.addAll(result, items);
    }
    return result;
  }

  /**
   * Creates a new HashMap and adds the passed arguments to it in pairs.
   *
   * @param items The keys and values, to add to the map in pairs.
   * @param <T> The key type of the map to be created.
   * @param <U> The value type of the map to be created.
   * @return A typed map that contains the passed arguments.
   * @throws IllegalArgumentException Throws an exception when an uneven number of arguments are
   * passed.
   */
  public static <T, U> HashMap<T, U> newHashMap(Object... items) {
    // typed constructor (instead of a raw one) keeps the call checked
    return newMap(new HashMap<T, U>((items != null) ? items.length : 0), items);
  }

  /**
   * Creates a new LinkedHashMap (insertion-ordered) and adds the passed arguments to it in pairs.
   *
   * @param items The keys and values, to add to the map in pairs.
   * @param <T> The key type of the map to be created.
   * @param <U> The value type of the map to be created.
   * @return A typed map that contains the passed arguments.
   * @throws IllegalArgumentException Throws an exception when an uneven number of arguments are
   * passed.
   */
  static <T, U> LinkedHashMap<T, U> newLinkedHashMap(Object... items) {
    return newMap(new LinkedHashMap<T, U>((items != null) ? items.length : 0), items);
  }

  /**
   * Fills the supplied map with the passed arguments interpreted as key/value pairs.
   *
   * @param map The (empty) map instance to populate and return.
   * @param items Alternating keys and values; null or empty leaves the map untouched.
   * @return The same map instance, populated.
   * @throws IllegalArgumentException when an uneven number of items is passed.
   */
  @SuppressWarnings("unchecked") // caller supplies untyped pairs by contract
  private static <T, U, M extends Map<T, U>> M newMap(M map, Object[] items) {
    if (items == null || items.length == 0) {
      return map;
    }
    if (items.length % 2 != 0) {
      throw new IllegalArgumentException("newMap requires an even number of items.");
    }
    for (int i = 0; i < items.length; i += 2) {
      map.put((T) items[i], (U) items[i + 1]);
    }
    return map;
  }

  /**
   * Returns the components of an array as concatenated String by calling toString() on each item.
   *
   * @param array The array to be concatenated; null yields null.
   * @param separator The separator between elements.
   * @return A concatenated string of the items in list.
   */
  static <T> String concatenateArray(T[] array, String separator) {
    if (array == null) {
      return null;
    }
    return concatenateList(Arrays.asList(array), separator);
  }

  /**
   * Returns the components of a list as concatenated String by calling toString() on each item.
   * Null elements are skipped; a null separator is treated as "".
   *
   * @param list The list to be concatenated; null yields null.
   * @param separator The separator between elements.
   * @return A concatenated string of the items in list.
   */
  static String concatenateList(List<?> list, String separator) {
    if (list == null) {
      return null;
    }
    if (separator == null) {
      separator = "";
    }
    StringBuilder stringBuilder = new StringBuilder();
    for (Object item : list) {
      if (item != null) {
        stringBuilder.append(item.toString());
        stringBuilder.append(separator);
      }
    }
    String result = stringBuilder.toString();
    // drop the trailing separator appended after the last item
    if (result.length() > 0) {
      return result.substring(0, result.length() - separator.length());
    } else {
      return result;
    }
  }

  /** Casts the given object to the caller's expected type without a compiler warning. */
  @SuppressWarnings({"unchecked"})
  public static <T> T uncheckedCast(Object obj) {
    return (T) obj;
  }

  /**
   * Gets value from map or default if key isn't found.
   *
   * @param map Map to get value from; null yields the default.
   * @param key Key we are looking for.
   * @param defaultValue Default value if key isn't found.
   * @return Value or default if not found (a present null value is returned as null).
   */
  public static <K, V> V getOrDefault(Map<K, V> map, K key, V defaultValue) {
    if (map == null) {
      return defaultValue;
    }
    return map.containsKey(key) ? map.get(key) : defaultValue;
  }

  /**
   * Converts an array of object Longs to primitives.
   *
   * @param array Array to convert; null yields null. Elements must be non-null.
   * @return Array of long primitives.
   */
  public static long[] toPrimitive(final Long[] array) {
    if (array == null) {
      return null;
    } else if (array.length == 0) {
      return new long[0];
    }
    final long[] result = new long[array.length];
    for (int i = 0; i < array.length; i++) {
      result[i] = array[i].longValue();
    }
    return result;
  }
}
| |
/**
* Copyright 2005-2014 The Kuali Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl2.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// begin Kuali Foundation modification
package org.kuali.rice.kns.web.struts.form.pojo;
import org.apache.commons.beanutils.DynaBean;
import org.apache.commons.beanutils.DynaProperty;
import org.apache.commons.beanutils.MappedPropertyDescriptor;
import org.apache.commons.beanutils.MethodUtils;
import org.apache.commons.beanutils.NestedNullException;
import org.apache.commons.beanutils.PropertyUtils;
import org.apache.commons.beanutils.PropertyUtilsBean;
import org.apache.commons.beanutils.WrapDynaBean;
import org.apache.commons.collections.FastHashMap;
import org.apache.log4j.Logger;
import org.apache.ojb.broker.metadata.ClassDescriptor;
import org.apache.ojb.broker.metadata.ClassNotPersistenceCapableException;
import org.apache.ojb.broker.metadata.CollectionDescriptor;
import org.apache.ojb.broker.metadata.DescriptorRepository;
import org.apache.ojb.broker.metadata.MetadataManager;
import org.kuali.rice.core.web.format.Formatter;
import org.kuali.rice.krad.bo.PersistableBusinessObject;
import org.kuali.rice.krad.service.KRADServiceLocator;
import org.kuali.rice.krad.service.PersistenceStructureService;
import org.kuali.rice.krad.util.ObjectUtils;
import java.beans.IndexedPropertyDescriptor;
import java.beans.IntrospectionException;
import java.beans.PropertyDescriptor;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* begin Kuali Foundation modification
* This class is used to access the properties of a Pojo bean.
* deleted author tag
* end Kuali Foundation modification
*/
// Kuali Foundation modification: class originally SLPropertyUtilsBean
public class PojoPropertyUtilsBean extends PropertyUtilsBean {

    /** Shared logger for this BeanUtils extension. */
    public static final Logger LOG = Logger.getLogger(PojoPropertyUtilsBean.class.getName());

    /**
     * Thin interface for determining the appropriate item class for a collection property
     */
    public static interface CollectionItemClassProvider {
        /** Returns the element class for the named collection property of the given bean. */
        public Class getCollectionItemClass(Object bean, String property);
    }

    /**
     * CollectionItemClassProvider backed by OJB metadata
     */
    public static class PersistenceStructureServiceProvider implements CollectionItemClassProvider {

        // Lazily-resolved service handle.
        // NOTE(review): lazy init is unsynchronized; benign only if the locator is
        // idempotent and cheap to call twice — confirm.
        protected static PersistenceStructureService persistenceStructureService = null;

        protected static PersistenceStructureService getPersistenceStructureService() {
            if (persistenceStructureService == null) {
                persistenceStructureService = KRADServiceLocator.getPersistenceStructureService();
            }
            return persistenceStructureService;
        }

        @Override
        public Class getCollectionItemClass(Object bean, String property) {
            // Element type comes from the persistence (OJB) metadata for the bean's class.
            Map<String, Class> collectionObjectTypes = getPersistenceStructureService().listCollectionObjectTypes(bean.getClass());
            return collectionObjectTypes.get(property);
        }
    }

    // default is to consult OJB
    protected static CollectionItemClassProvider collectionItemClassProvider = new PersistenceStructureServiceProvider();

    // begin Kuali Foundation modification
    /** Default constructor; delegates straight to PropertyUtilsBean. */
    public PojoPropertyUtilsBean() {
        super();
    }
    // end Kuali Foundation modification
public Object getProperty(Object bean, String key) throws IllegalAccessException, InvocationTargetException, NoSuchMethodException {
// begin Kuali Foundation modification
if (!(bean instanceof PojoForm))
return super.getProperty(bean, key);
PojoForm form = (PojoForm) bean;
Map unconvertedValues = form.getUnconvertedValues();
if (unconvertedValues.containsKey(key))
return unconvertedValues.get(key);
Object val = getNestedProperty(bean, key);
Class type = (val!=null)?val.getClass():null;
if ( type == null ) {
try {
type = getPropertyType(bean, key);
} catch ( Exception ex ) {
type = String.class;
LOG.warn( "Unable to get property type for Class: " + bean.getClass().getName() + "/Property: " + key );
}
}
return (Formatter.isSupportedType(type) ? form.formatValue(val, key, type) : val);
// end Kuali Foundation modification
}
    // begin Kuali Foundation modification
    // Per-path cache of the full getter chain: key is propertyName + root class name,
    // value is the ordered list of read methods to invoke.
    // NOTE(review): reads of this HashMap are unsynchronized while writes synchronize
    // on it — concurrent first-use can race on the map's internals; confirm intended.
    private Map<String,List<Method>> cache = new HashMap<String, List<Method>>();
    // Cache of individually-resolved read methods, keyed by class name + property name.
    // A null value records a prior resolution failure so reflection is not repeated.
    private static Map<String,Method> readMethodCache = new HashMap<String, Method>();
    // Shared, pre-built exception rethrown for cached failures (avoids per-call stack fill-in,
    // at the cost of a stack trace that does not point at the actual call site).
    private IntrospectionException introspectionException = new IntrospectionException( "" );

    /**
     * Resolves a (possibly nested, dot-separated) property by walking cached getter
     * methods, bypassing the slower generic BeanUtils resolution.
     *
     * @param obj the root bean
     * @param propertyName dot-separated property path; each segment needs a get/is accessor
     * @return the value at the end of the accessor chain
     * @throws IntrospectionException if any segment's accessor cannot be resolved
     *         (cached failures rethrow the shared instance above)
     */
    public Object fastGetNestedProperty(Object obj, String propertyName) throws IntrospectionException, IllegalArgumentException, IllegalAccessException, InvocationTargetException {
        //logger.debug("entering fastGetNestedProperty");
        List<Method> methods = (List<Method>) cache.get(propertyName + obj.getClass().getName());
        if (methods == null) {
            methods = new ArrayList<Method>();
            Object currentObj = obj;
            Class<?> currentObjClass = currentObj.getClass();
            // Resolve a read method per path segment, consulting the static cache first.
            for (String currentPropertyName : propertyName.split("\\.") ) {
                String cacheKey = currentObjClass.getName() + currentPropertyName;
                Method readMethod = readMethodCache.get( cacheKey );
                if ( readMethod == null ) {
                    synchronized (readMethodCache) {
                        // if the read method was resolved to an error, repeat the exception
                        // rather than performing the reflection calls below
                        if ( readMethodCache.containsKey(cacheKey) ) {
                            throw introspectionException;
                        }
                        try {
                            try {
                                // JavaBeans convention: try getX() first, then isX() (boolean accessors).
                                readMethod = currentObjClass.getMethod("get" + currentPropertyName.substring(0, 1).toUpperCase() + currentPropertyName.substring(1), (Class[])null);
                            } catch (NoSuchMethodException e) {
                                readMethod = currentObjClass.getMethod("is" + currentPropertyName.substring(0, 1).toUpperCase() + currentPropertyName.substring(1), (Class[])null);
                            }
                        } catch ( NoSuchMethodException ex ) {
                            // cache failures to prevent re-checking of the parameter
                            readMethodCache.put( cacheKey, null );
                            throw introspectionException;
                        }
                        readMethodCache.put(cacheKey, readMethod );
                    }
                }
                methods.add(readMethod);
                // Continue resolution against the getter's declared (static) return type,
                // not the runtime type of the intermediate value.
                currentObjClass = readMethod.getReturnType();
            }
            synchronized (cache) {
                cache.put(propertyName + obj.getClass().getName(), methods);
            }
        }
        // Invoke the chain: each result becomes the receiver of the next getter.
        for ( Method method : methods ) {
            obj = method.invoke(obj, (Object[])null);
        }
        //logger.debug("exiting fastGetNestedProperty");
        return obj;
    }
    // end Kuali Foundation modification
    /*
     * Kuali modification to make isWriteable work like it did in beanUtils 1.7.
     * Checking for nested nulls caused exceptions in rice 2.0.
     */
    /**
     * Returns whether the named (possibly nested/indexed/mapped) property of the
     * bean has an accessible write method. Deliberately mirrors BeanUtils 1.7
     * semantics — do not "modernize" the lookup order.
     *
     * @param bean the bean to inspect (never null)
     * @param name the property name (never null); any final subscript is stripped
     * @return true when a write method (simple, indexed, or mapped) is accessible
     */
    @Override
    public boolean isWriteable(Object bean, String name) {
        // Validate method parameters
        if (bean == null) {
            throw new IllegalArgumentException("No bean specified");
        }
        if (name == null) {
            throw new IllegalArgumentException("No name specified for bean class '" +
                    bean.getClass() + "'");
        }

        // Remove any subscript from the final name value
        name = getResolver().getProperty(name);

        // Treat WrapDynaBean as special case - may be a read-only property
        // (see Jira issue# BEANUTILS-61)
        if (bean instanceof WrapDynaBean) {
            bean = ((WrapDynaBean)bean).getInstance();
        }

        // Return the requested result
        if (bean instanceof DynaBean) {
            // All DynaBean properties are writeable
            return (((DynaBean) bean).getDynaClass().getDynaProperty(name) != null);
        } else {
            try {
                PropertyDescriptor desc =
                        getPropertyDescriptor(bean, name);
                if (desc != null) {
                    Method writeMethod = desc.getWriteMethod();
                    if (writeMethod == null) {
                        // No simple setter: fall back to indexed/mapped setters,
                        // then check the candidate is actually accessible.
                        if (desc instanceof IndexedPropertyDescriptor) {
                            writeMethod = ((IndexedPropertyDescriptor) desc).getIndexedWriteMethod();
                        } else if (desc instanceof MappedPropertyDescriptor) {
                            writeMethod = ((MappedPropertyDescriptor) desc).getMappedWriteMethod();
                        }
                        writeMethod = MethodUtils.getAccessibleMethod(bean.getClass(), writeMethod);
                    }
                    return (writeMethod != null);
                } else {
                    return (false);
                }
            } catch (IllegalAccessException e) {
                // Introspection failures simply mean "not writeable" here.
                return (false);
            } catch (InvocationTargetException e) {
                return (false);
            } catch (NoSuchMethodException e) {
                return (false);
            }
        }
    }
/**
* begin Kuali Foundation modification
* removed comments and @<no space>since javadoc attribute
* end Kuali Foundation modification
* @see org.apache.commons.beanutils.PropertyUtilsBean#getNestedProperty(java.lang.Object, java.lang.String)
*/
public Object getNestedProperty(Object arg0, String arg1) throws IllegalAccessException, InvocationTargetException, NoSuchMethodException {
// begin Kuali Foundation modification
try {
try {
return fastGetNestedProperty(arg0, arg1);
}
catch (Exception e) {
return super.getNestedProperty(arg0, arg1);
}
}
catch (NestedNullException e) {
return getUnreachableNestedProperty(arg0, arg1);
}
catch (InvocationTargetException e1) {
return getUnreachableNestedProperty(arg0, arg1);
}
// removed commented code
// end Kuali Foundation modification
}
/**
* Customization of superclass getNestedProperty which transparently creates indexed property items
* {@inheritDoc}
*/
public Object getIndexedProperty(Object bean, String name, int index) throws IllegalAccessException, InvocationTargetException, NoSuchMethodException {
try {
return super.getIndexedProperty(bean, name, index);
} catch (IndexOutOfBoundsException ioobe) {
return generateIndexedProperty(bean, name, index, ioobe);
}
}
protected Object generateIndexedProperty(Object nestedBean, String property, int index, IndexOutOfBoundsException ioobe) throws IllegalAccessException, InvocationTargetException,
NoSuchMethodException {
if (!(nestedBean instanceof PersistableBusinessObject)) throw ioobe;
// we can only grow lists
if (!List.class.isAssignableFrom(getPropertyType(nestedBean, property))) throw ioobe;
List list= (List) getProperty(nestedBean, property);
Class c = collectionItemClassProvider.getCollectionItemClass(nestedBean, property);
if (c == null) {
throw new RuntimeException("Unable to determined item class for collection '" + property + "' on bean of type '" + nestedBean.getClass() + "'");
}
Object value;
try {
value = c.newInstance();
} catch (InstantiationException ie) {
throw new RuntimeException("Error instantiating item class: " + c);
}
// fill any missing indices
while (list.size() <= index) {
list.add(null);
}
list.set(index, value);
return super.getIndexedProperty(nestedBean, property, index);
}
// begin Kuali Foundation modification
/**
* helper method makes sure we don't return "" for collections
*/
private Object getUnreachableNestedProperty(Object arg0, String arg1) {
try {
PropertyDescriptor propertyDescriptor = getPropertyDescriptor(arg0, arg1);
if (propertyDescriptor == null || Collection.class.isAssignableFrom(propertyDescriptor.getPropertyType())) {
return null;
}
} catch (IllegalAccessException e) {
// ignore
} catch (InvocationTargetException e) {
// ignore
} catch (NoSuchMethodException e) {
// ignore
}
return "";
}
// end Kuali Foundation modification
    // begin Kuali Foundation modification
    /**
     * begin Kuali Foundation modification
     * Set the value of the (possibly nested) property of the specified name, for the specified bean, with no type conversions.
     *
     * @param bean Bean whose property is to be modified
     * @param name Possibly nested name of the property to be modified
     * @param value Value to which the property is to be set
     *
     * @exception IllegalAccessException if the caller does not have access to the property accessor method
     * @exception IllegalArgumentException if <code>bean</code> or <code>name</code> is null
     * @exception IllegalArgumentException if a nested reference to a property returns null
     * @exception InvocationTargetException if the property accessor method throws an exception
     * @exception NoSuchMethodException if an accessor method for this property cannot be found
     * end Kuali Foundation modification
     */
    public void setNestedProperty(Object bean, String name, Object value) throws IllegalAccessException, InvocationTargetException, NoSuchMethodException {
        // Kuali modification: a null bean is tolerated (logged and skipped)
        // instead of throwing IllegalArgumentException like stock beanutils.
        if (bean == null) {
            if (LOG.isDebugEnabled()) LOG.debug("No bean specified, name = " + name + ", value = " + value);
            return;
        }
        if (name == null) {
            throw new IllegalArgumentException("No name specified");
        }
        Object propBean = null;
        int indexOfINDEXED_DELIM = -1;
        int indexOfMAPPED_DELIM = -1;
        // Walk the nested path one '.'-separated segment at a time, resolving
        // each intermediate bean until only the leaf segment remains in 'name'.
        while (true) {
            int delim = name.indexOf(PropertyUtils.NESTED_DELIM);
            if (delim < 0) {
                break;
            }
            String next = name.substring(0, delim);
            indexOfINDEXED_DELIM = next.indexOf(PropertyUtils.INDEXED_DELIM);
            indexOfMAPPED_DELIM = next.indexOf(PropertyUtils.MAPPED_DELIM);
            // NOTE(review): unlike getPropertyDescriptor below, this dispatch
            // checks the mapped delimiter before the indexed one without
            // comparing their positions — presumably intentional; confirm
            // before unifying the two.
            if (bean instanceof Map) {
                propBean = ((Map) bean).get(next);
            }
            else if (indexOfMAPPED_DELIM >= 0) {
                propBean = getMappedProperty(bean, next);
            }
            else if (indexOfINDEXED_DELIM >= 0) {
                propBean = getIndexedProperty(bean, next);
            }
            else {
                propBean = getSimpleProperty(bean, next);
            }
            // Kuali modification: auto-instantiate a missing intermediate bean
            // so setting a deep property does not fail on a null link.
            if (ObjectUtils.isNull(propBean)) {
                Class propertyType = getPropertyType(bean, next);
                if (propertyType != null) {
                    Object newInstance = ObjectUtils.createNewObjectFromClass(propertyType);
                    setSimpleProperty(bean, next, newInstance);
                    propBean = getSimpleProperty(bean, next);
                }
            }
            bean = propBean;
            name = name.substring(delim + 1);
        }
        // Only the leaf segment remains; dispatch on its subscript form.
        indexOfINDEXED_DELIM = name.indexOf(PropertyUtils.INDEXED_DELIM);
        indexOfMAPPED_DELIM = name.indexOf(PropertyUtils.MAPPED_DELIM);
        if (bean instanceof Map) {
            // check to see if the class has a standard property
            PropertyDescriptor descriptor = getPropertyDescriptor(bean, name);
            if (descriptor == null) {
                // no - then put the value into the map
                ((Map) bean).put(name, value);
            }
            else {
                // yes - use that instead
                setSimpleProperty(bean, name, value);
            }
        }
        else if (indexOfMAPPED_DELIM >= 0) {
            setMappedProperty(bean, name, value);
        }
        else if (indexOfINDEXED_DELIM >= 0) {
            setIndexedProperty(bean, name, value);
        }
        else {
            setSimpleProperty(bean, name, value);
        }
    }
    // end Kuali Foundation modification
// begin Kuali Foundation modification
/**
* <p>
* Retrieve the property descriptor for the specified property of the specified bean, or return <code>null</code> if there is
* no such descriptor. This method resolves indexed and nested property references in the same manner as other methods in this
* class, except that if the last (or only) name element is indexed, the descriptor for the last resolved property itself is
* returned.
* </p>
*
* <p>
* <strong>FIXME </strong>- Does not work with DynaBeans.
* </p>
*
* @param bean Bean for which a property descriptor is requested
* @param name Possibly indexed and/or nested name of the property for which a property descriptor is requested
*
* @exception IllegalAccessException if the caller does not have access to the property accessor method
* @exception IllegalArgumentException if <code>bean</code> or <code>name</code> is null
* @exception IllegalArgumentException if a nested reference to a property returns null
* @exception InvocationTargetException if the property accessor method throws an exception
* @exception NoSuchMethodException if an accessor method for this propety cannot be found
*/
public PropertyDescriptor getPropertyDescriptor(Object bean, String name) throws IllegalAccessException, InvocationTargetException, NoSuchMethodException {
if (bean == null) {
if (LOG.isDebugEnabled()) LOG.debug("No bean specified, name = " + name);
return null;
}
if (name == null) {
throw new IllegalArgumentException("No name specified");
}
try {
// Resolve nested references
Object propBean = null;
while (true) {
int delim = findNextNestedIndex(name);
//int delim = name.indexOf(PropertyUtils.NESTED_DELIM);
if (delim < 0) {
break;
}
String next = name.substring(0, delim);
int indexOfINDEXED_DELIM = next.indexOf(PropertyUtils.INDEXED_DELIM);
int indexOfMAPPED_DELIM = next.indexOf(PropertyUtils.MAPPED_DELIM);
if (indexOfMAPPED_DELIM >= 0 && (indexOfINDEXED_DELIM < 0 || indexOfMAPPED_DELIM < indexOfINDEXED_DELIM)) {
propBean = getMappedProperty(bean, next);
}
else {
if (indexOfINDEXED_DELIM >= 0) {
propBean = getIndexedProperty(bean, next);
}
else {
propBean = getSimpleProperty(bean, next);
}
}
if (ObjectUtils.isNull(propBean)) {
Class propertyType = getPropertyType(bean, next);
if (propertyType != null) {
Object newInstance = ObjectUtils.createNewObjectFromClass(propertyType);
setSimpleProperty(bean, next, newInstance);
propBean = getSimpleProperty(bean, next);
}
}
bean = propBean;
name = name.substring(delim + 1);
}
// Remove any subscript from the final name value
int left = name.indexOf(PropertyUtils.INDEXED_DELIM);
if (left >= 0) {
name = name.substring(0, left);
}
left = name.indexOf(PropertyUtils.MAPPED_DELIM);
if (left >= 0) {
name = name.substring(0, left);
}
// Look up and return this property from our cache
// creating and adding it to the cache if not found.
if ((bean == null) || (name == null)) {
return (null);
}
PropertyDescriptor descriptors[] = getPropertyDescriptors(bean);
if (descriptors != null) {
for (int i = 0; i < descriptors.length; i++) {
if (name.equals(descriptors[i].getName()))
return (descriptors[i]);
}
}
PropertyDescriptor result = null;
FastHashMap mappedDescriptors = getMappedPropertyDescriptors(bean);
if (mappedDescriptors == null) {
mappedDescriptors = new FastHashMap();
mappedDescriptors.setFast(true);
}
result = (PropertyDescriptor) mappedDescriptors.get(name);
if (result == null) {
// not found, try to create it
try {
result = new MappedPropertyDescriptor(name, bean.getClass());
}
catch (IntrospectionException ie) {
}
if (result != null) {
mappedDescriptors.put(name, result);
}
}
return result;
} catch ( RuntimeException ex ) {
LOG.error( "Unable to get property descriptor for " + bean.getClass().getName() + " . " + name
+ "\n" + ex.getClass().getName() + ": " + ex.getMessage() );
throw ex;
}
}
// end Kuali Foundation modification
private int findNextNestedIndex(String expression)
{
// walk back from the end to the start
// and find the first index that
int bracketCount = 0;
for (int i=0, size=expression.length(); i<size ; i++) {
char at = expression.charAt(i);
switch (at) {
case PropertyUtils.NESTED_DELIM:
if (bracketCount < 1) {
return i;
}
break;
case PropertyUtils.MAPPED_DELIM:
case PropertyUtils.INDEXED_DELIM:
// not bothered which
++bracketCount;
break;
case PropertyUtils.MAPPED_DELIM2:
case PropertyUtils.INDEXED_DELIM2:
// not bothered which
--bracketCount;
break;
}
}
// can't find any
return -1;
}
/**
* Set the value of the specified simple property of the specified bean,
* with no type conversions.
*
* @param bean Bean whose property is to be modified
* @param name Name of the property to be modified
* @param value Value to which the property should be set
*
* @exception IllegalAccessException if the caller does not have
* access to the property accessor method
* @exception IllegalArgumentException if <code>bean</code> or
* <code>name</code> is null
* @exception IllegalArgumentException if the property name is
* nested or indexed
* @exception InvocationTargetException if the property accessor method
* throws an exception
* @exception NoSuchMethodException if an accessor method for this
* propety cannot be found
*/
public void setSimpleProperty(Object bean,
String name, Object value)
throws IllegalAccessException, InvocationTargetException,
NoSuchMethodException {
if (bean == null) {
if (LOG.isDebugEnabled()) LOG.debug("No bean specified, name = " + name + ", value = " + value);
return;
}
if (name == null) {
throw new IllegalArgumentException("No name specified");
}
// Validate the syntax of the property name
if (name.indexOf(PropertyUtils.NESTED_DELIM) >= 0) {
throw new IllegalArgumentException
("Nested property names are not allowed");
} else if (name.indexOf(PropertyUtils.INDEXED_DELIM) >= 0) {
throw new IllegalArgumentException
("Indexed property names are not allowed");
} else if (name.indexOf(PropertyUtils.MAPPED_DELIM) >= 0) {
throw new IllegalArgumentException
("Mapped property names are not allowed");
}
// Retrieve the property setter method for the specified property
PropertyDescriptor descriptor =
getPropertyDescriptor(bean, name);
if (descriptor == null) {
throw new NoSuchMethodException("Unknown property '" +
name + "'");
}
Method writeMethod = getWriteMethod(descriptor);
if (writeMethod == null) {
//throw new NoSuchMethodException("Property '" + name + "' has no setter method");
LOG.warn("Bean: " + bean.getClass().getName() + ", Property '" + name + "' has no setter method");
return;
}
// Call the property setter method
Object values[] = new Object[1];
values[0] = value;
if (LOG.isDebugEnabled()) {
String valueClassName =
value == null ? "<null>" : value.getClass().getName();
LOG.debug("setSimpleProperty: Invoking method " + writeMethod
+ " with value " + value + " (class " + valueClassName + ")");
}
invokeMethod(writeMethod, bean, values);
}
/** This just catches and wraps IllegalArgumentException. */
private Object invokeMethod(
Method method,
Object bean,
Object[] values)
throws
IllegalAccessException,
InvocationTargetException {
try {
return method.invoke(bean, values);
} catch (IllegalArgumentException e) {
LOG.error("Method invocation failed.", e);
throw new IllegalArgumentException(
"Cannot invoke " + method.getDeclaringClass().getName() + "."
+ method.getName() + " - " + e.getMessage());
}
}
public Class getPropertyType(Object bean, String name)
throws IllegalAccessException, InvocationTargetException,
NoSuchMethodException {
if (bean == null) {
throw new IllegalArgumentException("No bean specified");
}
if (name == null) {
throw new IllegalArgumentException("No name specified for bean class '" +
bean.getClass() + "'");
}
// Resolve nested references
while (getResolver().hasNested(name)) {
String next = getResolver().next(name);
Object nestedBean = getProperty(bean, next);
if (nestedBean == null) {
Class<?>[] paramTypes = {};
Method method = null;
try {
method = bean.getClass().getMethod("get" + next.substring(0, 1).toUpperCase() + next.substring(1), (Class[])null);
} catch (NoSuchMethodException e) {
method = bean.getClass().getMethod("is" + next.substring(0, 1).toUpperCase() + next.substring(1), (Class[])null);
}
try {
nestedBean = ObjectUtils.createNewObjectFromClass(method.getReturnType());
} catch (RuntimeException e) {
NestedNullException nne = new NestedNullException
("Null property value for '" + next +
"' on bean class '" + bean.getClass() + "'");
nne.initCause(e);
throw nne;
}
}
bean = nestedBean;
name = getResolver().remove(name);
}
// Remove any subscript from the final name value
name = getResolver().getProperty(name);
// Special handling for DynaBeans
if (bean instanceof DynaBean) {
DynaProperty descriptor =
((DynaBean) bean).getDynaClass().getDynaProperty(name);
if (descriptor == null) {
return (null);
}
Class type = descriptor.getType();
if (type == null) {
return (null);
} else if (type.isArray()) {
return (type.getComponentType());
} else {
return (type);
}
}
PropertyDescriptor descriptor =
getPropertyDescriptor(bean, name);
if (descriptor == null) {
return (null);
} else if (descriptor instanceof IndexedPropertyDescriptor) {
return (((IndexedPropertyDescriptor) descriptor).
getIndexedPropertyType());
} else if (descriptor instanceof MappedPropertyDescriptor) {
return (((MappedPropertyDescriptor) descriptor).
getMappedPropertyType());
} else {
return (descriptor.getPropertyType());
}
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.