gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
/*-
*
* * Copyright 2015 Skymind,Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
*
*/
package org.deeplearning4j.nn.layers;
import org.deeplearning4j.berkeley.Pair;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.gradient.DefaultGradient;
import org.deeplearning4j.nn.gradient.Gradient;
import org.deeplearning4j.nn.params.PretrainParamInitializer;
import org.deeplearning4j.optimize.api.IterationListener;
import org.deeplearning4j.optimize.api.TrainingListener;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.linalg.lossfunctions.ILossFunction;
import java.util.*;
/**
* Baseline class for any Neural Network used
* as a layer in a deep network *
* @author Adam Gibson
*
*/
/**
 * Base class for layers that support unsupervised pretraining (e.g. RBMs, autoencoders).
 * Manages the pretrain parameter set (weights, hidden bias, visible bias), listener
 * bookkeeping, and the gradient-view mechanics that exclude the visible bias during backprop.
 */
public abstract class BasePretrainNetwork<LayerConfT extends org.deeplearning4j.nn.conf.layers.BasePretrainNetwork>
        extends BaseLayer<LayerConfT> {

    /** Subset of the iteration listeners that also implement {@link TrainingListener}. */
    protected Collection<TrainingListener> trainingListeners = null;

    public BasePretrainNetwork(NeuralNetConfiguration conf) {
        super(conf);
    }

    public BasePretrainNetwork(NeuralNetConfiguration conf, INDArray input) {
        super(conf, input);
    }

    /**
     * Replaces all registered listeners with the supplied collection. Listeners that also
     * implement {@link TrainingListener} are additionally tracked in {@link #trainingListeners}.
     *
     * @param listeners listeners to register; null or empty simply clears the existing ones
     */
    @Override
    public void setListeners(Collection<IterationListener> listeners) {
        if (iterationListeners == null)
            iterationListeners = new ArrayList<>();
        else
            iterationListeners.clear();

        if (trainingListeners == null)
            trainingListeners = new ArrayList<>();
        else
            trainingListeners.clear();

        if (listeners != null && !listeners.isEmpty()) {
            iterationListeners.addAll(listeners);
            for (IterationListener il : listeners) {
                if (il instanceof TrainingListener) {
                    trainingListeners.add((TrainingListener) il);
                }
            }
        }
    }

    @Override
    public void setListeners(IterationListener... listeners) {
        setListeners(Arrays.asList(listeners));
    }

    /**
     * Corrupts the given input by doing a binomial sampling
     * given the corruption level
     *
     * @param x the input to corrupt
     * @param corruptionLevel the corruption value (probability of zeroing each element)
     * @return the binomial sampled corrupted input
     */
    public INDArray getCorruptedInput(INDArray x, double corruptionLevel) {
        INDArray corrupted = Nd4j.getDistributions().createBinomial(1, 1 - corruptionLevel).sample(x.shape());
        corrupted.muli(x);
        return corrupted;
    }

    /**
     * Builds a {@link Gradient} backed by the flattened gradient view, copying the supplied
     * per-parameter gradients into their respective views.
     *
     * @param wGradient     weight gradient
     * @param vBiasGradient visible bias gradient
     * @param hBiasGradient hidden bias gradient
     * @return gradient keyed by parameter name, with all entries being views of the flattened array
     */
    protected Gradient createGradient(INDArray wGradient, INDArray vBiasGradient, INDArray hBiasGradient) {
        Gradient ret = new DefaultGradient(gradientsFlattened);
        // The order of the following statements matter! The gradient is being flattened and applied to
        // flattened params in this order.
        // The arrays need to be views, with the current Updater implementation
        //TODO: optimize this, to do it without the assigns
        INDArray wg = gradientViews.get(PretrainParamInitializer.WEIGHT_KEY);
        wg.assign(wGradient);

        INDArray hbg = gradientViews.get(PretrainParamInitializer.BIAS_KEY);
        hbg.assign(hBiasGradient);

        INDArray vbg = gradientViews.get(PretrainParamInitializer.VISIBLE_BIAS_KEY);
        vbg.assign(vBiasGradient);

        ret.gradientForVariable().put(PretrainParamInitializer.WEIGHT_KEY, wg);
        ret.gradientForVariable().put(PretrainParamInitializer.BIAS_KEY, hbg);
        ret.gradientForVariable().put(PretrainParamInitializer.VISIBLE_BIAS_KEY, vbg);
        return ret;
    }

    // Pass-through retained for binary compatibility; behavior is identical to the superclass.
    @Override
    public int numParams(boolean backwards) {
        return super.numParams(backwards);
    }

    /**
     * Sample the hidden distribution given the visible
     * @param v the visible to sample from
     * @return the hidden mean and sample
     */
    public abstract Pair<INDArray, INDArray> sampleHiddenGivenVisible(INDArray v);

    /**
     * Sample the visible distribution given the hidden
     * @param h the hidden to sample from
     * @return the mean and sample
     */
    public abstract Pair<INDArray, INDArray> sampleVisibleGivenHidden(INDArray h);

    /**
     * Computes and stores the (regularized, per-example) score from the layer activation z,
     * using the configured loss function against the layer input.
     *
     * @throws IllegalStateException if input or z is null
     */
    @Override
    protected void setScoreWithZ(INDArray z) {
        if (input == null || z == null)
            throw new IllegalStateException("Cannot calculate score without input and labels " + layerId());
        ILossFunction lossFunction = layerConf().getLossFunction().getILossFunction();

        double score = lossFunction.computeScore(input, z, layerConf().getActivationFn(), maskArray, false);
        score += calcL1(false) + calcL2(false);
        score /= getInputMiniBatchSize();

        this.score = score;
    }

    /**
     * @param backpropParamsOnly if true, return only the backprop parameters (weights + hidden
     *                           bias); if false, return the full internal parameter table
     */
    @Override
    public Map<String, INDArray> paramTable(boolean backpropParamsOnly) {
        if (!backpropParamsOnly)
            return params;
        Map<String, INDArray> map = new LinkedHashMap<>();
        map.put(PretrainParamInitializer.WEIGHT_KEY, params.get(PretrainParamInitializer.WEIGHT_KEY));
        map.put(PretrainParamInitializer.BIAS_KEY, params.get(PretrainParamInitializer.BIAS_KEY));
        return map;
    }

    /**
     * @return all parameters flattened (column-major) in the param table's iteration order
     */
    public INDArray params() {
        List<INDArray> list = new ArrayList<>(2);
        for (Map.Entry<String, INDArray> entry : params.entrySet()) {
            list.add(entry.getValue());
        }
        return Nd4j.toFlattened('f', list);
    }

    /**
     * The total number of parameters for the model.
     * Note: this sums the lengths of ALL arrays currently in the parameter table — including
     * the visible bias when it is present (i.e. during pretraining).
     *
     * @return the total number of parameters across the current parameter table
     */
    public int numParams() {
        int ret = 0;
        for (Map.Entry<String, INDArray> entry : params.entrySet()) {
            ret += entry.getValue().length();
        }
        return ret;
    }

    /**
     * Assigns the given flattened parameter vector into the layer's flattened parameter view.
     *
     * @param params flattened parameters; length must match the sum of the current variables
     * @throws IllegalArgumentException if the length does not match
     */
    @Override
    public void setParams(INDArray params) {
        if (params == paramsFlattened)
            return; //No op

        //SetParams has two different uses: during pretrain vs. backprop.
        //pretrain = 3 sets of params (inc. visible bias); backprop = 2
        List<String> parameterList = conf.variables();
        int paramLength = 0;
        for (String s : parameterList) {
            int len = getParam(s).length();
            paramLength += len;
        }
        if (params.length() != paramLength) {
            throw new IllegalArgumentException("Unable to set parameters: must be of length " + paramLength
                    + ", got params of length " + params.length() + " " + layerId());
        }

        // Set for backprop and only W & hb
        paramsFlattened.assign(params);
    }

    @Override
    public Pair<Gradient, INDArray> backpropGradient(INDArray epsilon) {
        Pair<Gradient, INDArray> result = super.backpropGradient(epsilon);
        ((DefaultGradient) result.getFirst()).setFlattenedGradient(gradientsFlattened);

        //During backprop, visible bias gradients are set to 0 - this is necessary due to the gradient view mechanics
        // that DL4J uses
        INDArray vBiasGradient = gradientViews.get(PretrainParamInitializer.VISIBLE_BIAS_KEY);
        result.getFirst().gradientForVariable().put(PretrainParamInitializer.VISIBLE_BIAS_KEY, vBiasGradient);
        vBiasGradient.assign(0);

        return result;
    }

    @Override
    public double calcL2(boolean backpropParamsOnly) {
        if (!conf.isUseRegularization())
            return 0.0;

        double l2Sum = super.calcL2(true);
        if (backpropParamsOnly)
            return l2Sum;
        if (conf.getL2ByParam(PretrainParamInitializer.VISIBLE_BIAS_KEY) > 0) {
            double l2Norm = getParam(PretrainParamInitializer.VISIBLE_BIAS_KEY).norm2Number().doubleValue();
            l2Sum += 0.5 * conf.getL2ByParam(PretrainParamInitializer.VISIBLE_BIAS_KEY) * l2Norm * l2Norm;
        }
        return l2Sum;
    }

    @Override
    public double calcL1(boolean backpropParamsOnly) {
        if (!conf.isUseRegularization())
            return 0.0;

        double l1Sum = super.calcL1(true);
        // Mirror calcL2: the visible bias is not a backprop parameter, so exclude its
        // contribution when only backprop params are requested. (Previously missing, which
        // wrongly added visible-bias L1 during backprop.)
        if (backpropParamsOnly)
            return l1Sum;
        if (conf.getL1ByParam(PretrainParamInitializer.VISIBLE_BIAS_KEY) > 0) {
            l1Sum += conf.getL1ByParam(PretrainParamInitializer.VISIBLE_BIAS_KEY)
                    * getParam(PretrainParamInitializer.VISIBLE_BIAS_KEY).norm1Number().doubleValue();
        }
        return l1Sum;
    }
}
| |
/*
* 2012-4 Red Hat Inc. and/or its affiliates and other contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.overlord.rtgov.analytics.situation.store.elasticsearch;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.transport.TransportClient;
import org.elasticsearch.common.transport.InetSocketTransportAddress;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.overlord.commons.services.ServiceRegistryUtil;
import org.overlord.rtgov.analytics.situation.Situation;
import org.overlord.rtgov.analytics.situation.Situation.Severity;
import org.overlord.rtgov.analytics.situation.store.ResolutionState;
import org.overlord.rtgov.analytics.situation.store.SituationStore;
import org.overlord.rtgov.analytics.situation.store.SituationsQuery;
import org.overlord.rtgov.common.elasticsearch.ElasticsearchNode;
import org.overlord.rtgov.common.util.RTGovProperties;
import org.overlord.rtgov.common.util.RTGovPropertiesProvider;
import org.overlord.rtgov.internal.common.elasticsearch.ElasticsearchNodeImpl;
import com.google.common.base.Strings;
import java.util.Collections;
import java.util.List;
import java.util.Properties;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.overlord.rtgov.analytics.situation.store.ResolutionState.IN_PROGRESS;
public class ElasticsearchSituationStoreTest {
// Host value stored as a situation property in the host-query tests.
private static final String TEST_HOST = "theworld";
// Fixed situation ids reused across tests (cleaned up at the end of each test).
private static final String SITUATION_ID_1 = "Situation_id_1";
private static final String SITUATION_ID_2 = "Situation_id_2";
private static final String SITUATION_ID_3 = "Situation_id_3";
// Store under test; created once in initialiseStore().
private static ElasticsearchSituationStore _elasticsearchSituationStore;
// Embedded Elasticsearch node; only non-null when HOST is "embedded".
private static ElasticsearchNodeImpl _node=null;
/**
 * Elasticsearch index to test against.
 */
private static String INDEX = "rtgovtest";
/**
 * Elasticsearch host; "embedded" selects an in-process node instead of a transport client.
 */
private static String HOST = "embedded";
/**
 * Elasticsearch transport port (used only when HOST is not "embedded").
 */
private static int PORT = 9300;
/**
 * Elasticsearch document type to test.
 */
private static String TYPE = "situation";
/**
 * Supplies the RTGov properties (Elasticsearch hosts/index/type) the store under test reads.
 */
public static class TestPropertiesProvider implements RTGovPropertiesProvider {

    private java.util.Properties _properties;

    public TestPropertiesProvider() {
        System.setProperty("elasticsearch.config", "ElasticsearchSituationStoreTest-es.properties");
        _properties = new Properties();
        // Use the PORT constant rather than a duplicated literal, so the provider stays
        // consistent with the transport-client configuration used elsewhere in this test.
        _properties.setProperty("Elasticsearch.hosts", HOST + ":" + PORT);
        _properties.setProperty("Elasticsearch.schedule", "3000");
        _properties.setProperty("SituationStore.Elasticsearch.type", TYPE);
        _properties.setProperty("SituationStore.Elasticsearch.index", INDEX);
        _properties.setProperty("elasticsearch.config", "ElasticsearchSituationStoreTest-es.properties");
    }

    public String getProperty(String name) {
        return _properties.getProperty(name);
    }

    public Properties getProperties() {
        return _properties;
    }
}
/**
 * Tears down the test index and closes the node/store after all tests have run.
 * @throws Exception on failure
 */
@AfterClass
public static void tearDown() throws Exception {
    // Don't pre-create a TransportClient that would just be discarded (it was never closed).
    Client c;
    if (HOST.equals("embedded")) {
        c = _node.getClient();
    } else {
        c = new TransportClient().addTransportAddress(new InetSocketTransportAddress(HOST, PORT));
    }
    c.admin().indices().prepareDelete(INDEX).execute().actionGet();
    if (_node != null) {
        _node.close();
    }
    if (_elasticsearchSituationStore != null) {
        _elasticsearchSituationStore.close();
    }
}
/**
 * Initialises the store before any test runs: starts the embedded node (or connects to a
 * remote host), removes any pre-existing test index, and creates the situation store.
 * @throws Exception on failure
 */
@BeforeClass
public static void initialiseStore() throws Exception {
    TestPropertiesProvider provider = new TestPropertiesProvider();

    // Don't pre-create a TransportClient that would just be discarded (it was never closed).
    Client c;
    if (HOST.equals("embedded")) {
        _node = (ElasticsearchNodeImpl)ServiceRegistryUtil.getSingleService(ElasticsearchNode.class);
        _node.init();
        c = _node.getClient();
    } else {
        c = new TransportClient().addTransportAddress(new InetSocketTransportAddress(HOST, PORT));
    }

    // remove index.
    if (c.admin().indices().prepareExists(INDEX).execute().actionGet().isExists()) {
        c.admin().indices().prepareDelete(INDEX).execute().actionGet();
    }

    RTGovProperties.setPropertiesProvider(provider);

    _elasticsearchSituationStore = new ElasticsearchSituationStore();
    _elasticsearchSituationStore.init();
}
// Clears all situations before each test so tests don't see each other's data.
@org.junit.Before
public void removeSituations() {
    _elasticsearchSituationStore.delete(new SituationsQuery());
}
/** Stores a situation and verifies it can be retrieved by id, then removes it. */
@Test
public void testStoreAndGetSituation() {
    try {
        Situation s1=new Situation();
        s1.setId(SITUATION_ID_1);
        s1.setTimestamp(System.currentTimeMillis());
        _elasticsearchSituationStore.store(s1);
    } catch (Exception e) {
        fail("Could not store situation " + e);
    }
    try {
        Situation s1 = _elasticsearchSituationStore.getSituation(SITUATION_ID_1);
        if (s1 != null) {
            if (!s1.getId().equals(SITUATION_ID_1))
                fail("Situation id mismatch");
        } else
            fail("Situation is null");
    } catch (Exception e) {
        fail("Failed to get situation: " + e);
    }
    try {
        _elasticsearchSituationStore.getClient().remove(SITUATION_ID_1);
    } catch (Exception e) {
        fail("Could not remove situation " + e);
    }
}
/** Queries with a null query and expects all stored situations back, in timestamp order. */
@Test
public void testQueryAllSituations() {
    try {
        Situation s1=new Situation();
        s1.setId(SITUATION_ID_1);
        s1.setTimestamp(System.currentTimeMillis());
        _elasticsearchSituationStore.store(s1);

        Situation s2=new Situation();
        s2.setId(SITUATION_ID_2);
        s2.setTimestamp(System.currentTimeMillis()+100);
        _elasticsearchSituationStore.store(s2);

        // Need to delay to allow situations to be indexed, and therefore become searchable.
        // Thread.sleep is used (rather than wait) as it is not affected by spurious wakeups.
        Thread.sleep(2000);
    } catch (Exception e) {
        fail("Could not store situation " + e);
    }
    try {
        java.util.List<Situation> sits = _elasticsearchSituationStore.getSituations(null);
        if (sits != null) {
            if (sits.size() != 2) {
                fail("Expecting 2 situations: "+sits.size());
            }
            if (!sits.get(0).getId().equals(SITUATION_ID_1)) {
                fail("Expecting entry 1 to have id '"+SITUATION_ID_1+"', but got: "+sits.get(0).getId());
            }
            if (!sits.get(1).getId().equals(SITUATION_ID_2)) {
                fail("Expecting entry 2 to have id '"+SITUATION_ID_2+"', but got: "+sits.get(1).getId());
            }
        } else {
            fail("Situations list is null");
        }
    } catch (Exception e) {
        fail("Failed to get situation: " + e);
    }
    try {
        _elasticsearchSituationStore.getClient().remove(SITUATION_ID_1);
        _elasticsearchSituationStore.getClient().remove(SITUATION_ID_2);
    } catch (Exception e) {
        fail("Could not remove situation " + e);
    }
}
/** Verifies the store's response-size limit caps the number of returned situations. */
@Test
public void testQueryAllSituationsWithSizeLimit() {
    try {
        Situation s1=new Situation();
        s1.setId(SITUATION_ID_1);
        s1.setTimestamp(System.currentTimeMillis());
        _elasticsearchSituationStore.store(s1);

        Situation s2=new Situation();
        s2.setId(SITUATION_ID_2);
        s2.setTimestamp(System.currentTimeMillis()+100);
        _elasticsearchSituationStore.store(s2);

        // Need to delay to allow situations to be indexed, and therefore become searchable.
        // Thread.sleep is used (rather than wait) as it is not affected by spurious wakeups.
        Thread.sleep(2000);
    } catch (Exception e) {
        fail("Could not store situation " + e);
    }

    int size=_elasticsearchSituationStore.getResponseSize();
    _elasticsearchSituationStore.setResponseSize(1);

    try {
        java.util.List<Situation> sits = _elasticsearchSituationStore.getSituations(null);
        if (sits != null) {
            if (sits.size() != 1) {
                fail("Expecting 1 situation: "+sits.size());
            }
        } else {
            fail("Situations list is null");
        }
    } catch (Exception e) {
        fail("Failed to get situation: " + e);
    } finally {
        // Always restore the original response size so other tests are unaffected.
        _elasticsearchSituationStore.setResponseSize(size);
    }
    try {
        _elasticsearchSituationStore.getClient().remove(SITUATION_ID_1);
        _elasticsearchSituationStore.getClient().remove(SITUATION_ID_2);
    } catch (Exception e) {
        fail("Could not remove situation " + e);
    }
}
/** Queries by resolution state RESOLVED and expects only the resolved situation. */
@Test
public void testQuerySituationsResolutionStateResolved() {
    try {
        Situation s1=new Situation();
        s1.setId(SITUATION_ID_1);
        s1.setTimestamp(System.currentTimeMillis());
        _elasticsearchSituationStore.store(s1);

        Situation s2=new Situation();
        s2.setId(SITUATION_ID_2);
        s2.setTimestamp(System.currentTimeMillis()+100);
        s2.getSituationProperties().put(SituationStore.RESOLUTION_STATE_PROPERTY, ResolutionState.RESOLVED.name());
        _elasticsearchSituationStore.store(s2);

        // Need to delay to allow situations to be indexed, and therefore become searchable.
        // Thread.sleep is used (rather than wait) as it is not affected by spurious wakeups.
        Thread.sleep(2000);
    } catch (Exception e) {
        fail("Could not store situation " + e);
    }
    try {
        SituationsQuery query=new SituationsQuery();
        query.setResolutionState(ResolutionState.RESOLVED.name());

        java.util.List<Situation> sits = _elasticsearchSituationStore.getSituations(query);
        if (sits != null) {
            if (sits.size() != 1) {
                fail("Expecting 1 situations: "+sits.size());
            }
            if (!sits.get(0).getId().equals(SITUATION_ID_2)) {
                fail("Expecting entry 1 to have id '"+SITUATION_ID_2+"', but got: "+sits.get(0).getId());
            }
        } else {
            fail("Situations list is null");
        }
    } catch (Exception e) {
        fail("Failed to get situation: " + e);
    }
    try {
        _elasticsearchSituationStore.getClient().remove(SITUATION_ID_1);
        _elasticsearchSituationStore.getClient().remove(SITUATION_ID_2);
    } catch (Exception e) {
        fail("Could not remove situation " + e);
    }
}
/** Queries by resolution state UNRESOLVED and expects only the situation with no state set. */
@Test
public void testQuerySituationsResolutionStateUnresolved() {
    try {
        Situation s1=new Situation();
        s1.setId(SITUATION_ID_1);
        s1.setTimestamp(System.currentTimeMillis());
        _elasticsearchSituationStore.store(s1);

        Situation s2=new Situation();
        s2.setId(SITUATION_ID_2);
        s2.setTimestamp(System.currentTimeMillis()+100);
        s2.getSituationProperties().put(SituationStore.RESOLUTION_STATE_PROPERTY, ResolutionState.RESOLVED.name());
        _elasticsearchSituationStore.store(s2);

        // Need to delay to allow situations to be indexed, and therefore become searchable.
        // Thread.sleep is used (rather than wait) as it is not affected by spurious wakeups.
        Thread.sleep(2000);
    } catch (Exception e) {
        fail("Could not store situation " + e);
    }
    try {
        SituationsQuery query=new SituationsQuery();
        query.setResolutionState(ResolutionState.UNRESOLVED.name());

        java.util.List<Situation> sits = _elasticsearchSituationStore.getSituations(query);
        if (sits != null) {
            if (sits.size() != 1) {
                fail("Expecting 1 situations: "+sits.size());
            }
            if (!sits.get(0).getId().equals(SITUATION_ID_1)) {
                fail("Expecting entry 1 to have id '"+SITUATION_ID_1+"', but got: "+sits.get(0).getId());
            }
        } else {
            fail("Situations list is null");
        }
    } catch (Exception e) {
        fail("Failed to get situation: " + e);
    }
    try {
        _elasticsearchSituationStore.getClient().remove(SITUATION_ID_1);
        _elasticsearchSituationStore.getClient().remove(SITUATION_ID_2);
    } catch (Exception e) {
        fail("Could not remove situation " + e);
    }
}
/** Queries by resolution state OPEN and expects all situations that are not RESOLVED. */
@Test
public void testQuerySituationsResolutionStateOpen() {
    try {
        Situation s1=new Situation();
        s1.setId(SITUATION_ID_1);
        s1.setTimestamp(System.currentTimeMillis());
        _elasticsearchSituationStore.store(s1);

        Situation s2=new Situation();
        s2.setId(SITUATION_ID_2);
        s2.setTimestamp(System.currentTimeMillis()+100);
        s2.getSituationProperties().put(SituationStore.RESOLUTION_STATE_PROPERTY, ResolutionState.RESOLVED.name());
        _elasticsearchSituationStore.store(s2);

        Situation s3=new Situation();
        s3.setId(SITUATION_ID_3);
        s3.setTimestamp(System.currentTimeMillis()+200);
        s3.getSituationProperties().put(SituationStore.RESOLUTION_STATE_PROPERTY, ResolutionState.IN_PROGRESS.name());
        _elasticsearchSituationStore.store(s3);

        // Need to delay to allow situations to be indexed, and therefore become searchable.
        // Thread.sleep is used (rather than wait) as it is not affected by spurious wakeups.
        Thread.sleep(2000);
    } catch (Exception e) {
        fail("Could not store situation " + e);
    }
    try {
        SituationsQuery query=new SituationsQuery();
        query.setResolutionState(ResolutionState.OPEN.name());

        java.util.List<Situation> sits = _elasticsearchSituationStore.getSituations(query);
        if (sits != null) {
            if (sits.size() != 2) {
                fail("Expecting 2 situations: "+sits.size());
            }
            // Both returned situations must either have no resolution state or one != RESOLVED.
            assertTrue(!sits.get(0).getSituationProperties().containsKey(SituationStore.RESOLUTION_STATE_PROPERTY)
                    || !sits.get(0).getSituationProperties().get(
                            SituationStore.RESOLUTION_STATE_PROPERTY).equalsIgnoreCase(ResolutionState.RESOLVED.name()));
            assertTrue(!sits.get(1).getSituationProperties().containsKey(SituationStore.RESOLUTION_STATE_PROPERTY)
                    || !sits.get(1).getSituationProperties().get(
                            SituationStore.RESOLUTION_STATE_PROPERTY).equalsIgnoreCase(ResolutionState.RESOLVED.name()));
        } else {
            fail("Situations list is null");
        }
    } catch (Exception e) {
        fail("Failed to get situation: " + e);
    }
    try {
        _elasticsearchSituationStore.getClient().remove(SITUATION_ID_1);
        _elasticsearchSituationStore.getClient().remove(SITUATION_ID_2);
        _elasticsearchSituationStore.getClient().remove(SITUATION_ID_3);
    } catch (Exception e) {
        fail("Could not remove situation " + e);
    }
}
/** Queries by a host property and expects only the situation tagged with that host. */
@Test
public void testQuerySituationsHost() {
    try {
        Situation s1=new Situation();
        s1.setId(SITUATION_ID_1);
        s1.setTimestamp(System.currentTimeMillis());
        _elasticsearchSituationStore.store(s1);

        Situation s2=new Situation();
        s2.setId(SITUATION_ID_2);
        s2.setTimestamp(System.currentTimeMillis()+100);
        s2.getSituationProperties().put(SituationStore.HOST_PROPERTY, TEST_HOST);
        _elasticsearchSituationStore.store(s2);

        // Need to delay to allow situations to be indexed, and therefore become searchable.
        // Thread.sleep is used (rather than wait) as it is not affected by spurious wakeups.
        Thread.sleep(2000);
    } catch (Exception e) {
        fail("Could not store situation " + e);
    }
    try {
        SituationsQuery query=new SituationsQuery();
        query.setProperties("host="+TEST_HOST);

        java.util.List<Situation> sits = _elasticsearchSituationStore.getSituations(query);
        if (sits != null) {
            if (sits.size() != 1) {
                fail("Expecting 1 situations: "+sits.size());
            }
            if (!sits.get(0).getId().equals(SITUATION_ID_2)) {
                fail("Expecting entry 1 to have id '"+SITUATION_ID_2+"', but got: "+sits.get(0).getId());
            }
        } else {
            fail("Situations list is null");
        }
    } catch (Exception e) {
        fail("Failed to get situation: " + e);
    }
    try {
        _elasticsearchSituationStore.getClient().remove(SITUATION_ID_1);
        _elasticsearchSituationStore.getClient().remove(SITUATION_ID_2);
    } catch (Exception e) {
        fail("Could not remove situation " + e);
    }
}
/** Combines a host property filter AND a RESOLVED resolution-state filter in one query. */
@Test
public void testQuerySituationsResolvedANDHost() {
    try {
        Situation s1=new Situation();
        s1.setId(SITUATION_ID_1);
        s1.setTimestamp(System.currentTimeMillis());
        s1.getSituationProperties().put(SituationStore.RESOLUTION_STATE_PROPERTY, ResolutionState.RESOLVED.name());
        _elasticsearchSituationStore.store(s1);

        Situation s2=new Situation();
        s2.setId(SITUATION_ID_2);
        s2.setTimestamp(System.currentTimeMillis()+100);
        s2.getSituationProperties().put(SituationStore.HOST_PROPERTY, TEST_HOST);
        s2.getSituationProperties().put(SituationStore.RESOLUTION_STATE_PROPERTY, ResolutionState.RESOLVED.name());
        _elasticsearchSituationStore.store(s2);

        Situation s3=new Situation();
        s3.setId(SITUATION_ID_3);
        s3.setTimestamp(System.currentTimeMillis()+200);
        s3.getSituationProperties().put(SituationStore.HOST_PROPERTY, TEST_HOST);
        _elasticsearchSituationStore.store(s3);

        // Need to delay to allow situations to be indexed, and therefore become searchable.
        // Thread.sleep is used (rather than wait) as it is not affected by spurious wakeups.
        Thread.sleep(2000);
    } catch (Exception e) {
        fail("Could not store situation " + e);
    }
    try {
        SituationsQuery query=new SituationsQuery();
        query.setProperties("host="+TEST_HOST);
        query.setResolutionState(ResolutionState.RESOLVED.name());

        java.util.List<Situation> sits = _elasticsearchSituationStore.getSituations(query);
        if (sits != null) {
            if (sits.size() != 1) {
                fail("Expecting 1 situations: "+sits.size());
            }
            if (!sits.get(0).getId().equals(SITUATION_ID_2)) {
                fail("Expecting entry 1 to have id '"+SITUATION_ID_2+"', but got: "+sits.get(0).getId());
            }
        } else {
            fail("Situations list is null");
        }
    } catch (Exception e) {
        fail("Failed to get situation: " + e);
    }
    try {
        _elasticsearchSituationStore.getClient().remove(SITUATION_ID_1);
        _elasticsearchSituationStore.getClient().remove(SITUATION_ID_2);
        _elasticsearchSituationStore.getClient().remove(SITUATION_ID_3);
    } catch (Exception e) {
        fail("Could not remove situation " + e);
    }
}
/** Queries by a word contained in the description and expects only the matching situation. */
@Test
public void testQuerySituationsDescription() {
    try {
        Situation s1=new Situation();
        s1.setId(SITUATION_ID_1);
        s1.setTimestamp(System.currentTimeMillis());
        _elasticsearchSituationStore.store(s1);

        Situation s2=new Situation();
        s2.setId(SITUATION_ID_2);
        s2.setTimestamp(System.currentTimeMillis()+100);
        s2.setDescription("An error occurred");
        _elasticsearchSituationStore.store(s2);

        Situation s3=new Situation();
        s3.setId(SITUATION_ID_3);
        s3.setTimestamp(System.currentTimeMillis()+200);
        s3.setDescription("Have a nice day");
        _elasticsearchSituationStore.store(s3);

        // Need to delay to allow situations to be indexed, and therefore become searchable.
        // Thread.sleep is used (rather than wait) as it is not affected by spurious wakeups.
        Thread.sleep(2000);
    } catch (Exception e) {
        fail("Could not store situation " + e);
    }
    try {
        SituationsQuery query=new SituationsQuery();
        query.setDescription("error");

        java.util.List<Situation> sits = _elasticsearchSituationStore.getSituations(query);
        if (sits != null) {
            if (sits.size() != 1) {
                fail("Expecting 1 situations: "+sits.size());
            }
            if (!sits.get(0).getId().equals(SITUATION_ID_2)) {
                fail("Expecting entry 1 to have id '"+SITUATION_ID_2+"', but got: "+sits.get(0).getId());
            }
        } else {
            fail("Situations list is null");
        }
    } catch (Exception e) {
        fail("Failed to get situation: " + e);
    }
    try {
        _elasticsearchSituationStore.getClient().remove(SITUATION_ID_1);
        _elasticsearchSituationStore.getClient().remove(SITUATION_ID_2);
        _elasticsearchSituationStore.getClient().remove(SITUATION_ID_3);
    } catch (Exception e) {
        fail("Could not remove situation " + e);
    }
}
/** Queries by a partial (whole-word) subject match and expects only the matching situation. */
@Test
public void testQuerySituationsSubjectLike() {
    try {
        Situation s1=new Situation();
        s1.setId(SITUATION_ID_1);
        s1.setTimestamp(System.currentTimeMillis());
        _elasticsearchSituationStore.store(s1);

        Situation s2=new Situation();
        s2.setId(SITUATION_ID_2);
        s2.setTimestamp(System.currentTimeMillis()+100);

        // NOTE: 'Like' only appears to work on whole words, so if OrderService is the subject
        // then a search on Order will not find it.
        s2.setSubject("Order Service");
        _elasticsearchSituationStore.store(s2);

        Situation s3=new Situation();
        s3.setId(SITUATION_ID_3);
        s3.setTimestamp(System.currentTimeMillis()+200);
        s3.setSubject("InventoryService");
        _elasticsearchSituationStore.store(s3);

        // Need to delay to allow situations to be indexed, and therefore become searchable.
        // Thread.sleep is used (rather than wait) as it is not affected by spurious wakeups.
        Thread.sleep(2000);
    } catch (Exception e) {
        fail("Could not store situation " + e);
    }
    try {
        SituationsQuery query=new SituationsQuery();
        query.setSubject("Order");

        java.util.List<Situation> sits = _elasticsearchSituationStore.getSituations(query);
        if (sits != null) {
            if (sits.size() != 1) {
                fail("Expecting 1 situations: "+sits.size());
            }
            if (!sits.get(0).getId().equals(SITUATION_ID_2)) {
                fail("Expecting entry 1 to have id '"+SITUATION_ID_2+"', but got: "+sits.get(0).getId());
            }
        } else {
            fail("Situations list is null");
        }
    } catch (Exception e) {
        fail("Failed to get situation: " + e);
    }
    try {
        _elasticsearchSituationStore.getClient().remove(SITUATION_ID_1);
        _elasticsearchSituationStore.getClient().remove(SITUATION_ID_2);
        _elasticsearchSituationStore.getClient().remove(SITUATION_ID_3);
    } catch (Exception e) {
        fail("Could not remove situation " + e);
    }
}
/** Queries by an exact subject match and expects only the matching situation. */
@Test
public void testQuerySituationsSubjectExact() {
    try {
        Situation s1=new Situation();
        s1.setId(SITUATION_ID_1);
        s1.setTimestamp(System.currentTimeMillis());
        _elasticsearchSituationStore.store(s1);

        Situation s2=new Situation();
        s2.setId(SITUATION_ID_2);
        s2.setTimestamp(System.currentTimeMillis()+100);
        s2.setSubject("OrderService");
        _elasticsearchSituationStore.store(s2);

        Situation s3=new Situation();
        s3.setId(SITUATION_ID_3);
        s3.setTimestamp(System.currentTimeMillis()+200);
        s3.setSubject("InventoryService");
        _elasticsearchSituationStore.store(s3);

        // Need to delay to allow situations to be indexed, and therefore become searchable.
        // Thread.sleep is used (rather than wait) as it is not affected by spurious wakeups.
        Thread.sleep(2000);
    } catch (Exception e) {
        fail("Could not store situation " + e);
    }
    try {
        SituationsQuery query=new SituationsQuery();
        query.setSubject("OrderService");

        java.util.List<Situation> sits = _elasticsearchSituationStore.getSituations(query);
        if (sits != null) {
            if (sits.size() != 1) {
                fail("Expecting 1 situations: "+sits.size());
            }
            if (!sits.get(0).getId().equals(SITUATION_ID_2)) {
                fail("Expecting entry 1 to have id '"+SITUATION_ID_2+"', but got: "+sits.get(0).getId());
            }
        } else {
            fail("Situations list is null");
        }
    } catch (Exception e) {
        fail("Failed to get situation: " + e);
    }
    try {
        _elasticsearchSituationStore.getClient().remove(SITUATION_ID_1);
        _elasticsearchSituationStore.getClient().remove(SITUATION_ID_2);
        _elasticsearchSituationStore.getClient().remove(SITUATION_ID_3);
    } catch (Exception e) {
        fail("Could not remove situation " + e);
    }
}
/** Queries by a word contained in the type field and expects only the matching situation. */
@Test
public void testQuerySituationsType() {
    try {
        Situation s1=new Situation();
        s1.setId(SITUATION_ID_1);
        s1.setTimestamp(System.currentTimeMillis());
        _elasticsearchSituationStore.store(s1);

        Situation s2=new Situation();
        s2.setId(SITUATION_ID_2);
        s2.setTimestamp(System.currentTimeMillis()+100);
        s2.setType("SLA Violation");
        _elasticsearchSituationStore.store(s2);

        Situation s3=new Situation();
        s3.setId(SITUATION_ID_3);
        s3.setTimestamp(System.currentTimeMillis()+200);
        s3.setType("Exception");
        _elasticsearchSituationStore.store(s3);

        // Need to delay to allow situations to be indexed, and therefore become searchable.
        // Thread.sleep is used (rather than wait) as it is not affected by spurious wakeups.
        Thread.sleep(2000);
    } catch (Exception e) {
        fail("Could not store situation " + e);
    }
    try {
        SituationsQuery query=new SituationsQuery();
        query.setType("SLA");

        java.util.List<Situation> sits = _elasticsearchSituationStore.getSituations(query);
        if (sits != null) {
            if (sits.size() != 1) {
                fail("Expecting 1 situations: "+sits.size());
            }
            if (!sits.get(0).getId().equals(SITUATION_ID_2)) {
                fail("Expecting entry 1 to have id '"+SITUATION_ID_2+"', but got: "+sits.get(0).getId());
            }
        } else {
            fail("Situations list is null");
        }
    } catch (Exception e) {
        fail("Failed to get situation: " + e);
    }
    try {
        _elasticsearchSituationStore.getClient().remove(SITUATION_ID_1);
        _elasticsearchSituationStore.getClient().remove(SITUATION_ID_2);
        _elasticsearchSituationStore.getClient().remove(SITUATION_ID_3);
    } catch (Exception e) {
        fail("Could not remove situation " + e);
    }
}
/** Queries by severity High and expects only the high-severity situation. */
@Test
public void testQuerySituationsSeverityHigh() {
    try {
        Situation s1=new Situation();
        s1.setId(SITUATION_ID_1);
        s1.setTimestamp(System.currentTimeMillis());
        _elasticsearchSituationStore.store(s1);

        Situation s2=new Situation();
        s2.setId(SITUATION_ID_2);
        s2.setTimestamp(System.currentTimeMillis()+100);
        s2.setSeverity(Severity.High);
        _elasticsearchSituationStore.store(s2);

        // Need to delay to allow situations to be indexed, and therefore become searchable.
        // Thread.sleep is used (rather than wait) as it is not affected by spurious wakeups.
        Thread.sleep(2000);
    } catch (Exception e) {
        fail("Could not store situation " + e);
    }
    try {
        SituationsQuery query=new SituationsQuery();
        query.setSeverity(Severity.High);

        java.util.List<Situation> sits = _elasticsearchSituationStore.getSituations(query);
        if (sits != null) {
            if (sits.size() != 1) {
                fail("Expecting 1 situations: "+sits.size());
            }
            if (!sits.get(0).getId().equals(SITUATION_ID_2)) {
                fail("Expecting entry 1 to have id '"+SITUATION_ID_2+"', but got: "+sits.get(0).getId());
            }
        } else {
            fail("Situations list is null");
        }
    } catch (Exception e) {
        fail("Failed to get situation: " + e);
    }
    try {
        _elasticsearchSituationStore.getClient().remove(SITUATION_ID_1);
        _elasticsearchSituationStore.getClient().remove(SITUATION_ID_2);
    } catch (Exception e) {
        fail("Could not remove situation " + e);
    }
}
/** Queries with a from-timestamp and expects only situations at or after that timestamp. */
@Test
public void testQuerySituationsTimestampFrom() {
    long from=0;

    try {
        Situation s1=new Situation();
        s1.setId(SITUATION_ID_1);
        s1.setTimestamp(System.currentTimeMillis()-10000);
        _elasticsearchSituationStore.store(s1);

        Situation s2=new Situation();
        s2.setId(SITUATION_ID_2);
        s2.setTimestamp(System.currentTimeMillis()-5000);
        _elasticsearchSituationStore.store(s2);

        from = s2.getTimestamp();

        // Need to delay to allow situations to be indexed, and therefore become searchable.
        // Thread.sleep is used (rather than wait) as it is not affected by spurious wakeups.
        Thread.sleep(2000);
    } catch (Exception e) {
        fail("Could not store situation " + e);
    }
    try {
        SituationsQuery query=new SituationsQuery();
        query.setFromTimestamp(from);

        java.util.List<Situation> sits = _elasticsearchSituationStore.getSituations(query);
        if (sits != null) {
            if (sits.size() != 1) {
                fail("Expecting 1 situations: "+sits.size());
            }
            if (!sits.get(0).getId().equals(SITUATION_ID_2)) {
                fail("Expecting entry 1 to have id '"+SITUATION_ID_2+"', but got: "+sits.get(0).getId());
            }
        } else {
            fail("Situations list is null");
        }
    } catch (Exception e) {
        fail("Failed to get situation: " + e);
    }
    try {
        _elasticsearchSituationStore.getClient().remove(SITUATION_ID_1);
        _elasticsearchSituationStore.getClient().remove(SITUATION_ID_2);
    } catch (Exception e) {
        fail("Could not remove situation " + e);
    }
}
@Test
public void testQuerySituationsTimestampTo() {
    // Stores two situations 5 seconds apart and verifies that a to-timestamp
    // query (upper bound set to s1's timestamp) returns only the earlier one (s1).
    long to=0;
    try {
        Situation s1=new Situation();
        s1.setId(SITUATION_ID_1);
        s1.setTimestamp(System.currentTimeMillis()-10000);
        _elasticsearchSituationStore.store(s1);
        Situation s2=new Situation();
        s2.setId(SITUATION_ID_2);
        s2.setTimestamp(System.currentTimeMillis()-5000);
        _elasticsearchSituationStore.store(s2);
        to = s1.getTimestamp();
        // Need to delay to allow situations to be indexed, and therefore become searchable
        synchronized (this) {
            wait(2000);
        }
    } catch (Exception e) {
        fail("Could not store situation " + e);
    }
    try {
        SituationsQuery query=new SituationsQuery();
        query.setToTimestamp(to);
        java.util.List<Situation> sits = _elasticsearchSituationStore.getSituations(query);
        if (sits != null) {
            if (sits.size() != 1) {
                fail("Expecting 1 situations: "+sits.size());
            }
            if (!sits.get(0).getId().equals(SITUATION_ID_1)) {
                fail("Expecting entry 1 to have id '"+SITUATION_ID_1+"', but got: "+sits.get(0).getId());
            }
        } else {
            fail("Situations list is null");
        }
    } catch (Exception e) {
        fail("Failed to get situation: " + e);
    }
    // Clean up the stored documents so later tests start from an empty index.
    try {
        _elasticsearchSituationStore.getClient().remove(SITUATION_ID_1);
        _elasticsearchSituationStore.getClient().remove(SITUATION_ID_2);
    } catch (Exception e) {
        fail("Could not remove situation" + e);
    }
}
@Test
public void testQuerySituationsTimestampFromTo() {
    // Stores three situations and queries with a [from, to] window chosen so
    // only the middle situation (s2) falls inside the range.
    long from=0;
    long to=0;
    try {
        Situation s1=new Situation();
        s1.setId(SITUATION_ID_1);
        s1.setTimestamp(System.currentTimeMillis()-10000);
        _elasticsearchSituationStore.store(s1);
        Situation s2=new Situation();
        s2.setId(SITUATION_ID_2);
        s2.setTimestamp(System.currentTimeMillis()-5000);
        _elasticsearchSituationStore.store(s2);
        Situation s3=new Situation();
        s3.setId(SITUATION_ID_3);
        s3.setTimestamp(System.currentTimeMillis()-1000);
        _elasticsearchSituationStore.store(s3);
        // Offset the bounds by 100ms so s1 and s3 fall just outside the window.
        from = s1.getTimestamp()+100;
        to = s3.getTimestamp()-100;
        // Need to delay to allow situations to be indexed, and therefore become searchable
        synchronized (this) {
            wait(2000);
        }
    } catch (Exception e) {
        fail("Could not store situation " + e);
    }
    try {
        SituationsQuery query=new SituationsQuery();
        query.setFromTimestamp(from);
        query.setToTimestamp(to);
        java.util.List<Situation> sits = _elasticsearchSituationStore.getSituations(query);
        if (sits != null) {
            if (sits.size() != 1) {
                fail("Expecting 1 situations: "+sits.size());
            }
            if (!sits.get(0).getId().equals(SITUATION_ID_2)) {
                fail("Expecting entry 1 to have id '"+SITUATION_ID_2+"', but got: "+sits.get(0).getId());
            }
        } else {
            fail("Situations list is null");
        }
    } catch (Exception e) {
        fail("Failed to get situation: " + e);
    }
    // Clean up the stored documents so later tests start from an empty index.
    try {
        _elasticsearchSituationStore.getClient().remove(SITUATION_ID_1);
        _elasticsearchSituationStore.getClient().remove(SITUATION_ID_2);
        _elasticsearchSituationStore.getClient().remove(SITUATION_ID_3);
    } catch (Exception e) {
        fail("Could not remove situation" + e);
    }
}
@Test
public void assignSituation() throws Exception {
    // Verifies that assignSituation() sets the assigned-to property on a
    // freshly stored situation that previously had no assignment/resolution state.
    Situation situation = new Situation();
    situation.setId("assignSituation");
    situation.setTimestamp(System.currentTimeMillis());
    _elasticsearchSituationStore.store(situation);
    Situation reload = _elasticsearchSituationStore.getSituation(situation.getId());
    assertEquals(situation.getId(), reload.getId());
    // A new situation must not carry assignment or resolution-state properties.
    assertFalse(reload.getSituationProperties().containsKey(SituationStore.ASSIGNED_TO_PROPERTY));
    assertFalse(reload.getSituationProperties().containsKey(SituationStore.RESOLUTION_STATE_PROPERTY));
    _elasticsearchSituationStore.assignSituation(situation.getId(), "junit");
    reload = _elasticsearchSituationStore.getSituation(situation.getId());
    assertEquals("junit",reload.getSituationProperties().get(SituationStore.ASSIGNED_TO_PROPERTY));
    // Clean up the stored document.
    _elasticsearchSituationStore.getClient().remove(situation.getId());
}
@Test
public void closeSituationAndRemoveAssignment() throws Exception {
    // Verifies that unassignSituation() removes the assigned-to property that
    // a previous assignSituation() call added.
    Situation situation = new Situation();
    situation.setId("closeSituationAndRemoveAssignment");
    situation.setTimestamp(System.currentTimeMillis());
    _elasticsearchSituationStore.store(situation);
    _elasticsearchSituationStore.assignSituation(situation.getId(), "junit");
    Situation reload = _elasticsearchSituationStore.getSituation(situation.getId());
    // Use the shared property-name constant instead of the raw "assignedTo"
    // literal, consistent with the other tests in this class.
    assertEquals("junit", reload.getSituationProperties().get(SituationStore.ASSIGNED_TO_PROPERTY));
    _elasticsearchSituationStore.unassignSituation(situation.getId());
    reload = _elasticsearchSituationStore.getSituation(situation.getId());
    assertFalse(reload.getSituationProperties().containsKey(SituationStore.ASSIGNED_TO_PROPERTY));
    // Clean up the stored document, matching the sibling tests' hygiene.
    _elasticsearchSituationStore.getClient().remove(situation.getId());
}
@Test
public void deleteSituation() throws Exception {
    // Stores a situation, deletes it via a description query, and verifies that
    // a subsequent search with the same query finds nothing.
    Situation situation = new Situation();
    situation.setId("deleteSituation");
    situation.setDescription("deleteSituation");
    situation.setTimestamp(System.currentTimeMillis());
    situation.setSituationProperties(Collections.singletonMap("1", "1"));
    _elasticsearchSituationStore.store(situation);
    // Changes are not atomic, so need to delay to ensure the search index is updated
    try {
        synchronized(this) {
            wait(2000);
        }
    } catch (Exception e) {
        fail("Failed to wait");
    }
    SituationsQuery situationQuery = new SituationsQuery();
    situationQuery.setDescription(situation.getDescription());
    _elasticsearchSituationStore.delete(situationQuery);
    // Changes are not atomic, so need to delay to ensure the search index is updated
    try {
        synchronized(this) {
            wait(2000);
        }
    } catch (Exception e) {
        fail("Failed to wait");
    }
    List<Situation> situations = _elasticsearchSituationStore.getSituations(situationQuery);
    assertTrue(situations.isEmpty());
}
@Test
public void closeSituationResetOpenResolution() throws Exception {
    // Verifies that unassigning a situation also resets an open (IN_PROGRESS)
    // resolution state: both properties must be gone afterwards.
    Situation situation = new Situation();
    situation.setId("closeSituationResetOpenResolution");
    situation.setTimestamp(System.currentTimeMillis());
    _elasticsearchSituationStore.store(situation);
    _elasticsearchSituationStore.assignSituation(situation.getId(), "junit");
    _elasticsearchSituationStore.updateResolutionState(situation.getId(),IN_PROGRESS);
    Situation reload = _elasticsearchSituationStore.getSituation(situation.getId());
    assertEquals("junit",reload.getSituationProperties().get(SituationStore.ASSIGNED_TO_PROPERTY));
    _elasticsearchSituationStore.unassignSituation(situation.getId());
    reload = _elasticsearchSituationStore.getSituation(situation.getId());
    assertFalse(reload.getSituationProperties().containsKey(SituationStore.RESOLUTION_STATE_PROPERTY));
    assertFalse(reload.getSituationProperties().containsKey(SituationStore.ASSIGNED_TO_PROPERTY));
}
@Test
public void updateResolutionState() throws Exception {
    // Verifies that updateResolutionState() records the resolution-state
    // property (stored as the enum constant's name).
    Situation situation = new Situation();
    situation.setId("updateResolutionState");
    situation.setTimestamp(System.currentTimeMillis());
    _elasticsearchSituationStore.store(situation);
    Situation reload = _elasticsearchSituationStore.getSituation(situation.getId());
    assertFalse(reload.getSituationProperties().containsKey(SituationStore.RESOLUTION_STATE_PROPERTY));
    _elasticsearchSituationStore.updateResolutionState(situation.getId(),ResolutionState.IN_PROGRESS);
    reload = _elasticsearchSituationStore.getSituation(situation.getId());
    assertEquals(ResolutionState.IN_PROGRESS.name(), reload.getSituationProperties().get(SituationStore.RESOLUTION_STATE_PROPERTY));
}
@Test
public void recordResubmit() throws Exception {
    // Verifies that a successful resubmit records who resubmitted, a SUCCESS
    // result, a timestamp — and no error message.
    Situation situation = new Situation();
    situation.setId("recordResubmit");
    situation.setTimestamp(System.currentTimeMillis());
    _elasticsearchSituationStore.store(situation);
    _elasticsearchSituationStore.recordSuccessfulResubmit(situation.getId(), "recordResubmit");
    Situation reload = _elasticsearchSituationStore.getSituation(situation.getId());
    assertEquals("recordResubmit", reload.getSituationProperties().get(SituationStore.RESUBMIT_BY_PROPERTY));
    assertEquals(SituationStore.RESUBMIT_RESULT_SUCCESS, reload.getSituationProperties().get(SituationStore.RESUBMIT_RESULT_PROPERTY));
    assertTrue(reload.getSituationProperties().containsKey(SituationStore.RESUBMIT_AT_PROPERTY));
    assertFalse(reload.getSituationProperties().containsKey(SituationStore.RESUBMIT_ERROR_MESSAGE));
}
@Test
public void recordResubmitFailure() throws Exception {
    // Verifies that a failed resubmit records the submitter, the error message,
    // a timestamp and an ERROR result.
    String name="recordResubmitFailure";
    Situation situation = new Situation();
    situation.setId(name);
    situation.setTimestamp(System.currentTimeMillis());
    _elasticsearchSituationStore.store(situation);
    _elasticsearchSituationStore.recordResubmitFailure(situation.getId(), name, name);
    Situation reload = _elasticsearchSituationStore.getSituation(situation.getId());
    assertEquals(name, reload.getSituationProperties().get(SituationStore.RESUBMIT_BY_PROPERTY));
    assertEquals(name, reload.getSituationProperties().get(SituationStore.RESUBMIT_ERROR_MESSAGE));
    assertTrue(reload.getSituationProperties().containsKey(SituationStore.RESUBMIT_AT_PROPERTY));
    assertEquals(SituationStore.RESUBMIT_RESULT_ERROR,
            reload.getSituationProperties().get(SituationStore.RESUBMIT_RESULT_PROPERTY));
}
@Test
public void recordResubmitErrorMessageMaxLength() throws Exception {
    // Verifies that an over-long resubmit error message (10000 chars here) is
    // truncated to 250 characters when stored.
    String name="recordResubmitErrorMessageMaxLength";
    Situation situation = new Situation();
    situation.setId(name);
    situation.setTimestamp(System.currentTimeMillis());
    _elasticsearchSituationStore.store(situation);
    _elasticsearchSituationStore.recordResubmitFailure(situation.getId(),
            Strings.padEnd(name, 10000, '*'), name);
    Situation reload = _elasticsearchSituationStore.getSituation(situation.getId());
    assertEquals(name, reload.getSituationProperties().get(SituationStore.RESUBMIT_BY_PROPERTY));
    String errorMessage = reload.getSituationProperties().get(SituationStore.RESUBMIT_ERROR_MESSAGE);
    // Only the first 250 characters of the padded message should survive.
    assertEquals(Strings.padEnd(name, 250, '*'), errorMessage);
    assertTrue(reload.getSituationProperties().containsKey(SituationStore.RESUBMIT_AT_PROPERTY));
    assertEquals(SituationStore.RESUBMIT_RESULT_ERROR,
            reload.getSituationProperties().get(SituationStore.RESUBMIT_RESULT_PROPERTY));
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.segment.realtime.plumber;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Supplier;
import com.google.common.base.Suppliers;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import org.apache.druid.client.cache.CachePopulatorStats;
import org.apache.druid.client.cache.MapCache;
import org.apache.druid.data.input.Committer;
import org.apache.druid.data.input.InputRow;
import org.apache.druid.data.input.Row;
import org.apache.druid.data.input.impl.DimensionsSpec;
import org.apache.druid.data.input.impl.JSONParseSpec;
import org.apache.druid.data.input.impl.StringInputRowParser;
import org.apache.druid.data.input.impl.TimestampSpec;
import org.apache.druid.jackson.DefaultObjectMapper;
import org.apache.druid.java.util.common.DateTimes;
import org.apache.druid.java.util.common.FileUtils;
import org.apache.druid.java.util.common.Intervals;
import org.apache.druid.java.util.common.concurrent.Execs;
import org.apache.druid.java.util.common.granularity.Granularities;
import org.apache.druid.java.util.emitter.service.ServiceEmitter;
import org.apache.druid.query.DefaultQueryRunnerFactoryConglomerate;
import org.apache.druid.query.aggregation.AggregatorFactory;
import org.apache.druid.query.aggregation.CountAggregatorFactory;
import org.apache.druid.segment.QueryableIndex;
import org.apache.druid.segment.ReferenceCountingSegment;
import org.apache.druid.segment.TestHelper;
import org.apache.druid.segment.indexing.DataSchema;
import org.apache.druid.segment.indexing.RealtimeTuningConfig;
import org.apache.druid.segment.indexing.granularity.UniformGranularitySpec;
import org.apache.druid.segment.join.NoopJoinableFactory;
import org.apache.druid.segment.loading.DataSegmentPusher;
import org.apache.druid.segment.realtime.FireDepartmentMetrics;
import org.apache.druid.segment.realtime.FireDepartmentTest;
import org.apache.druid.segment.realtime.FireHydrant;
import org.apache.druid.segment.realtime.SegmentPublisher;
import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory;
import org.apache.druid.segment.writeout.SegmentWriteOutMediumFactory;
import org.apache.druid.segment.writeout.TmpFileSegmentWriteOutMediumFactory;
import org.apache.druid.server.coordination.DataSegmentAnnouncer;
import org.apache.druid.testing.InitializedNullHandlingTest;
import org.easymock.EasyMock;
import org.joda.time.DateTime;
import org.joda.time.Interval;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import java.io.File;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
/**
*
*/
@RunWith(Parameterized.class)
public class RealtimePlumberSchoolTest extends InitializedNullHandlingTest
{
@Parameterized.Parameters(name = "rejectionPolicy = {0}, segmentWriteOutMediumFactory = {1}")
public static Collection<?> constructorFeeder()
{
  // Build the cross product of every rejection policy with both segment
  // write-out medium factories (off-heap first, then tmp-file).
  final List<Object[]> params = new ArrayList<>();
  for (RejectionPolicyFactory policy : new RejectionPolicyFactory[]{
      new NoopRejectionPolicyFactory(),
      new MessageTimeRejectionPolicyFactory()
  }) {
    for (SegmentWriteOutMediumFactory mediumFactory : new SegmentWriteOutMediumFactory[]{
        OffHeapMemorySegmentWriteOutMediumFactory.instance(),
        TmpFileSegmentWriteOutMediumFactory.instance()
    }) {
      params.add(new Object[]{policy, mediumFactory});
    }
  }
  return params;
}
private final RejectionPolicyFactory rejectionPolicy;
private final SegmentWriteOutMediumFactory segmentWriteOutMediumFactory;
private RealtimePlumber plumber;
private RealtimePlumberSchool realtimePlumberSchool;
private DataSegmentAnnouncer announcer;
private SegmentPublisher segmentPublisher;
private DataSegmentPusher dataSegmentPusher;
private SegmentHandoffNotifier handoffNotifier;
private SegmentHandoffNotifierFactory handoffNotifierFactory;
private ServiceEmitter emitter;
private RealtimeTuningConfig tuningConfig;
private DataSchema schema;
private DataSchema schema2;
private FireDepartmentMetrics metrics;
private File tmpDir;
// Parameterized constructor: one instance per (rejection policy, write-out
// medium factory) pair produced by constructorFeeder().
public RealtimePlumberSchoolTest(
    RejectionPolicyFactory rejectionPolicy,
    SegmentWriteOutMediumFactory segmentWriteOutMediumFactory
)
{
  this.rejectionPolicy = rejectionPolicy;
  this.segmentWriteOutMediumFactory = segmentWriteOutMediumFactory;
}
@Before
public void setUp() throws Exception
{
  // Builds two identical schemas (differing only in segment granularity:
  // HOUR vs YEAR), mocks all collaborators, and constructs the plumber
  // under test from the parameterized rejection policy / medium factory.
  tmpDir = FileUtils.createTempDir();

  ObjectMapper jsonMapper = new DefaultObjectMapper();

  schema = new DataSchema(
      "test",
      jsonMapper.convertValue(
          new StringInputRowParser(
              new JSONParseSpec(
                  new TimestampSpec("timestamp", "auto", null),
                  new DimensionsSpec(null, null, null),
                  null,
                  null,
                  null
              ),
              null
          ),
          Map.class
      ),
      new AggregatorFactory[]{new CountAggregatorFactory("rows")},
      new UniformGranularitySpec(Granularities.HOUR, Granularities.NONE, null),
      null,
      jsonMapper
  );

  // Same parse spec as `schema` but YEAR segment granularity — used by the
  // hydrant-gap and dim-order tests so all rows land in a single sink.
  schema2 = new DataSchema(
      "test",
      jsonMapper.convertValue(
          new StringInputRowParser(
              new JSONParseSpec(
                  new TimestampSpec("timestamp", "auto", null),
                  new DimensionsSpec(null, null, null),
                  null,
                  null,
                  null
              ),
              null
          ),
          Map.class
      ),
      new AggregatorFactory[]{new CountAggregatorFactory("rows")},
      new UniformGranularitySpec(Granularities.YEAR, Granularities.NONE, null),
      null,
      jsonMapper
  );

  announcer = EasyMock.createMock(DataSegmentAnnouncer.class);
  announcer.announceSegment(EasyMock.anyObject());
  EasyMock.expectLastCall().anyTimes();

  segmentPublisher = EasyMock.createNiceMock(SegmentPublisher.class);
  dataSegmentPusher = EasyMock.createNiceMock(DataSegmentPusher.class);
  handoffNotifierFactory = EasyMock.createNiceMock(SegmentHandoffNotifierFactory.class);
  handoffNotifier = EasyMock.createNiceMock(SegmentHandoffNotifier.class);
  EasyMock.expect(handoffNotifierFactory.createSegmentHandoffNotifier(EasyMock.anyString()))
          .andReturn(handoffNotifier)
          .anyTimes();
  EasyMock.expect(
      handoffNotifier.registerSegmentHandoffCallback(
          EasyMock.anyObject(),
          EasyMock.anyObject(),
          EasyMock.anyObject()
      )
  ).andReturn(true).anyTimes();

  emitter = EasyMock.createMock(ServiceEmitter.class);

  EasyMock.replay(announcer, segmentPublisher, dataSegmentPusher, handoffNotifierFactory, handoffNotifier, emitter);

  // maxRowsInMemory = 1 so every added row produces a new hydrant.
  tuningConfig = new RealtimeTuningConfig(
      null,
      1,
      null,
      null,
      null,
      null,
      null,
      new IntervalStartVersioningPolicy(),
      rejectionPolicy,
      null,
      null,
      null,
      null,
      true,
      0,
      0,
      false,
      null,
      null,
      null,
      null
  );

  realtimePlumberSchool = new RealtimePlumberSchool(
      emitter,
      new DefaultQueryRunnerFactoryConglomerate(new HashMap<>()),
      dataSegmentPusher,
      announcer,
      segmentPublisher,
      handoffNotifierFactory,
      Execs.directExecutor(),
      NoopJoinableFactory.INSTANCE,
      TestHelper.getTestIndexMergerV9(segmentWriteOutMediumFactory),
      TestHelper.getTestIndexIO(),
      MapCache.create(0),
      FireDepartmentTest.NO_CACHE_CONFIG,
      new CachePopulatorStats(),
      TestHelper.makeJsonMapper()
  );

  metrics = new FireDepartmentMetrics();
  plumber = (RealtimePlumber) realtimePlumberSchool.findPlumber(schema, tuningConfig, metrics);
}
@After
public void tearDown() throws Exception
{
  // Verify every mock interaction recorded in setUp(), then remove the
  // persist directory and the temp dir created for this test.
  EasyMock.verify(announcer, segmentPublisher, dataSegmentPusher, handoffNotifierFactory, handoffNotifier, emitter);
  FileUtils.deleteDirectory(
      new File(
          tuningConfig.getBasePersistDirectory(),
          schema.getDataSource()
      )
  );
  FileUtils.deleteDirectory(tmpDir);
}
@Test(timeout = 60_000L)
public void testPersist() throws Exception
{
  // Persist without commit metadata.
  testPersist(null);
}
@Test(timeout = 60_000L)
public void testPersistWithCommitMetadata() throws Exception
{
  final Object commitMetadata = "dummyCommitMetadata";
  testPersist(commitMetadata);

  // A fresh plumber restored over the persisted state must report the
  // committed metadata from startJob().
  plumber = (RealtimePlumber) realtimePlumberSchool.findPlumber(schema, tuningConfig, metrics);
  Assert.assertEquals(commitMetadata, plumber.startJob());
}
// Adds one row to a single sink and persists it, waiting until the commit
// runnable fires to prove the persist (with the given metadata) completed.
private void testPersist(final Object commitMetadata) throws Exception
{
  Sink sink = new Sink(
      Intervals.utc(0, TimeUnit.HOURS.toMillis(1)),
      schema,
      tuningConfig.getShardSpec(),
      DateTimes.of("2014-12-01T12:34:56.789").toString(),
      tuningConfig.getAppendableIndexSpec(),
      tuningConfig.getMaxRowsInMemory(),
      tuningConfig.getMaxBytesInMemoryOrDefault(),
      tuningConfig.getDedupColumn()
  );
  plumber.getSinks().put(0L, sink);
  Assert.assertNull(plumber.startJob());

  final InputRow row = EasyMock.createNiceMock(InputRow.class);
  EasyMock.expect(row.getTimestampFromEpoch()).andReturn(0L);
  EasyMock.expect(row.getDimensions()).andReturn(new ArrayList<String>());
  EasyMock.replay(row);

  // The committer's run() is invoked once the persist commits.
  final CountDownLatch doneSignal = new CountDownLatch(1);
  final Committer committer = new Committer()
  {
    @Override
    public Object getMetadata()
    {
      return commitMetadata;
    }

    @Override
    public void run()
    {
      doneSignal.countDown();
    }
  };
  plumber.add(row, Suppliers.ofInstance(committer));
  plumber.persist(committer);

  doneSignal.await();

  plumber.getSinks().clear();
  plumber.finishJob();
}
@Test(timeout = 60_000L)
public void testPersistFails() throws Exception
{
  // Persist with a committer that throws; the failure must be counted in
  // FireDepartmentMetrics.failedPersists().
  Sink sink = new Sink(
      Intervals.utc(0, TimeUnit.HOURS.toMillis(1)),
      schema,
      tuningConfig.getShardSpec(),
      DateTimes.of("2014-12-01T12:34:56.789").toString(),
      tuningConfig.getAppendableIndexSpec(),
      tuningConfig.getMaxRowsInMemory(),
      tuningConfig.getMaxBytesInMemoryOrDefault(),
      tuningConfig.getDedupColumn()
  );
  plumber.getSinks().put(0L, sink);
  plumber.startJob();
  final InputRow row = EasyMock.createNiceMock(InputRow.class);
  EasyMock.expect(row.getTimestampFromEpoch()).andReturn(0L);
  EasyMock.expect(row.getDimensions()).andReturn(new ArrayList<String>());
  EasyMock.replay(row);
  plumber.add(row, Suppliers.ofInstance(Committers.nil()));

  final CountDownLatch doneSignal = new CountDownLatch(1);

  plumber.persist(
      supplierFromRunnable(
          () -> {
            doneSignal.countDown();
            throw new RuntimeException();
          }
      ).get()
  );

  doneSignal.await();

  // Exception may need time to propagate
  while (metrics.failedPersists() < 1) {
    Thread.sleep(100);
  }

  Assert.assertEquals(1, metrics.failedPersists());
}
@Test(timeout = 60_000L)
public void testPersistHydrantGaps() throws Exception
{
  // Sinks must bootstrap correctly even when some persisted hydrant
  // directories have been deleted (gaps in the sequence).
  final Object commitMetadata = "dummyCommitMetadata";
  testPersistHydrantGapsHelper(commitMetadata);
}
// Persists five hydrants (maxRowsInMemory = 1 → one hydrant per row), deletes
// some hydrant directories to create gaps, and verifies that a restored
// plumber bootstraps the surviving hydrants with their original counts.
private void testPersistHydrantGapsHelper(final Object commitMetadata) throws Exception
{
  Interval testInterval = new Interval(DateTimes.of("1970-01-01"), DateTimes.of("1971-01-01"));

  RealtimePlumber plumber2 = (RealtimePlumber) realtimePlumberSchool.findPlumber(schema2, tuningConfig, metrics);
  Sink sink = new Sink(
      testInterval,
      schema2,
      tuningConfig.getShardSpec(),
      DateTimes.of("2014-12-01T12:34:56.789").toString(),
      tuningConfig.getAppendableIndexSpec(),
      tuningConfig.getMaxRowsInMemory(),
      tuningConfig.getMaxBytesInMemoryOrDefault(),
      tuningConfig.getDedupColumn()
  );
  plumber2.getSinks().put(0L, sink);
  Assert.assertNull(plumber2.startJob());
  final CountDownLatch doneSignal = new CountDownLatch(1);
  final Committer committer = new Committer()
  {
    @Override
    public Object getMetadata()
    {
      return commitMetadata;
    }

    @Override
    public void run()
    {
      doneSignal.countDown();
    }
  };
  plumber2.add(getTestInputRow("1970-01-01"), Suppliers.ofInstance(committer));
  plumber2.add(getTestInputRow("1970-02-01"), Suppliers.ofInstance(committer));
  plumber2.add(getTestInputRow("1970-03-01"), Suppliers.ofInstance(committer));
  plumber2.add(getTestInputRow("1970-04-01"), Suppliers.ofInstance(committer));
  plumber2.add(getTestInputRow("1970-05-01"), Suppliers.ofInstance(committer));

  plumber2.persist(committer);

  doneSignal.await();
  plumber2.getSinks().clear();
  plumber2.finishJob();

  File persistDir = plumber2.computePersistDir(schema2, testInterval);

  /* Check that all hydrants were persisted */
  for (int i = 0; i < 5; i++) {
    Assert.assertTrue(new File(persistDir, String.valueOf(i)).exists());
  }

  /* Create some gaps in the persisted hydrants and reload */
  FileUtils.deleteDirectory(new File(persistDir, "1"));
  FileUtils.deleteDirectory(new File(persistDir, "3"));
  RealtimePlumber restoredPlumber = (RealtimePlumber) realtimePlumberSchool.findPlumber(
      schema2,
      tuningConfig,
      metrics
  );
  restoredPlumber.bootstrapSinksFromDisk();

  Map<Long, Sink> sinks = restoredPlumber.getSinks();
  Assert.assertEquals(1, sinks.size());
  // Use the long literal key directly instead of the deprecated
  // `new Long(0)` boxing constructor (autoboxing yields the same key).
  List<FireHydrant> hydrants = Lists.newArrayList(sinks.get(0L));
  DateTime startTime = DateTimes.of("1970-01-01T00:00:00.000Z");
  Interval expectedInterval = new Interval(startTime, DateTimes.of("1971-01-01T00:00:00.000Z"));
  // Surviving hydrants keep their original counts (0, 2, 4) and interval.
  Assert.assertEquals(0, hydrants.get(0).getCount());
  Assert.assertEquals(
      expectedInterval,
      hydrants.get(0).getSegmentDataInterval()
  );
  Assert.assertEquals(2, hydrants.get(1).getCount());
  Assert.assertEquals(
      expectedInterval,
      hydrants.get(1).getSegmentDataInterval()
  );
  Assert.assertEquals(4, hydrants.get(2).getCount());
  Assert.assertEquals(
      expectedInterval,
      hydrants.get(2).getSegmentDataInterval()
  );

  /* Delete all the hydrants and reload, no sink should be created */
  FileUtils.deleteDirectory(new File(persistDir, "0"));
  FileUtils.deleteDirectory(new File(persistDir, "2"));
  FileUtils.deleteDirectory(new File(persistDir, "4"));
  RealtimePlumber restoredPlumber2 = (RealtimePlumber) realtimePlumberSchool.findPlumber(
      schema2,
      tuningConfig,
      metrics
  );
  restoredPlumber2.bootstrapSinksFromDisk();

  Assert.assertEquals(0, restoredPlumber2.getSinks().size());
}
@Test(timeout = 60_000L)
public void testDimOrderInheritance() throws Exception
{
  // Dimension ordering established by earlier rows must be inherited by
  // later hydrants after persist + bootstrap.
  final Object commitMetadata = "dummyCommitMetadata";
  testDimOrderInheritanceHelper(commitMetadata);
}
// Ingests six rows with the dimension sets listed in `rowDims` (one hydrant per
// row), then restores the plumber from disk and checks each hydrant reports
// exactly the dimension order in `expectedDims`.
private void testDimOrderInheritanceHelper(final Object commitMetadata) throws Exception
{
  List<List<String>> expectedDims = ImmutableList.of(
      ImmutableList.of("dimD"),
      ImmutableList.of("dimC"),
      ImmutableList.of("dimA"),
      ImmutableList.of("dimB"),
      ImmutableList.of("dimE"),
      ImmutableList.of("dimD", "dimC", "dimA", "dimB", "dimE")
  );
  QueryableIndex qindex;
  FireHydrant hydrant;
  Map<Long, Sink> sinks;

  RealtimePlumber plumber = (RealtimePlumber) realtimePlumberSchool.findPlumber(schema2, tuningConfig, metrics);
  Assert.assertNull(plumber.startJob());

  final CountDownLatch doneSignal = new CountDownLatch(1);

  final Committer committer = new Committer()
  {
    @Override
    public Object getMetadata()
    {
      return commitMetadata;
    }

    @Override
    public void run()
    {
      doneSignal.countDown();
    }
  };

  // One row per dimension set. Note the final row lists its dimensions in
  // alphabetical order (A..E), while the expected hydrant order is the
  // inherited ingestion order (D, C, A, B, E).
  final List<List<String>> rowDims = ImmutableList.of(
      ImmutableList.of("dimD"),
      ImmutableList.of("dimC"),
      ImmutableList.of("dimA"),
      ImmutableList.of("dimB"),
      ImmutableList.of("dimE"),
      ImmutableList.of("dimA", "dimB", "dimC", "dimD", "dimE")
  );
  for (List<String> dims : rowDims) {
    plumber.add(
        getTestInputRowFull("1970-01-01", dims, ImmutableList.of("1")),
        Suppliers.ofInstance(committer)
    );
  }

  plumber.persist(committer);

  doneSignal.await();
  plumber.getSinks().clear();
  plumber.finishJob();

  RealtimePlumber restoredPlumber = (RealtimePlumber) realtimePlumberSchool.findPlumber(
      schema2,
      tuningConfig,
      metrics
  );
  restoredPlumber.bootstrapSinksFromDisk();

  sinks = restoredPlumber.getSinks();
  Assert.assertEquals(1, sinks.size());
  List<FireHydrant> hydrants = Lists.newArrayList(sinks.get(0L));

  for (int i = 0; i < hydrants.size(); i++) {
    hydrant = hydrants.get(i);
    ReferenceCountingSegment segment = hydrant.getIncrementedSegment();
    try {
      qindex = segment.asQueryableIndex();
      Assert.assertEquals(i, hydrant.getCount());
      Assert.assertEquals(expectedDims.get(i), ImmutableList.copyOf(qindex.getAvailableDimensions()));
    }
    finally {
      // Always release the reference taken by getIncrementedSegment().
      segment.decrement();
    }
  }
}
// Builds a dimensionless InputRow at the given ISO timestamp. getRaw() returns
// null and getMetric() returns 0 for every key.
private InputRow getTestInputRow(final String timeStr)
{
  return new InputRow()
  {
    @Override
    public List<String> getDimensions()
    {
      return new ArrayList<>();
    }

    @Override
    public long getTimestampFromEpoch()
    {
      return DateTimes.of(timeStr).getMillis();
    }

    @Override
    public DateTime getTimestamp()
    {
      return DateTimes.of(timeStr);
    }

    @Override
    public List<String> getDimension(String dimension)
    {
      return new ArrayList<>();
    }

    @Override
    public Number getMetric(String metric)
    {
      return 0;
    }

    @Override
    public Object getRaw(String dimension)
    {
      return null;
    }

    @Override
    public int compareTo(Row o)
    {
      return 0;
    }
  };
}
// Builds an InputRow with the given timestamp, dimension names, and dimension
// values. Every dimension lookup (and getRaw) returns the same `dimVals` list.
private InputRow getTestInputRowFull(final String timeStr, final List<String> dims, final List<String> dimVals)
{
  return new InputRow()
  {
    @Override
    public List<String> getDimensions()
    {
      return dims;
    }

    @Override
    public long getTimestampFromEpoch()
    {
      return DateTimes.of(timeStr).getMillis();
    }

    @Override
    public DateTime getTimestamp()
    {
      return DateTimes.of(timeStr);
    }

    @Override
    public List<String> getDimension(String dimension)
    {
      return dimVals;
    }

    @Override
    public Number getMetric(String metric)
    {
      return 0;
    }

    @Override
    public Object getRaw(String dimension)
    {
      return dimVals;
    }

    @Override
    public int compareTo(Row o)
    {
      return 0;
    }
  };
}
// Wraps a Runnable as a constant Supplier of a metadata-less Committer whose
// run() simply delegates to the given runnable.
private static Supplier<Committer> supplierFromRunnable(final Runnable runnable)
{
  return Suppliers.ofInstance(
      new Committer()
      {
        @Override
        public Object getMetadata()
        {
          // No commit metadata for runnable-backed committers.
          return null;
        }

        @Override
        public void run()
        {
          runnable.run();
        }
      }
  );
}
}
| |
/*
* Copyright 2000-2014 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.jetbrains.python.codeInsight.override;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.intellij.codeInsight.CodeInsightUtilCore;
import com.intellij.featureStatistics.FeatureUsageTracker;
import com.intellij.featureStatistics.ProductivityFeatureNames;
import com.intellij.ide.util.MemberChooser;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.Result;
import com.intellij.openapi.command.WriteCommandAction;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.ScrollType;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.DialogWrapper;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.PsiWhiteSpace;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.ui.SpeedSearchComparator;
import com.jetbrains.python.PyNames;
import com.jetbrains.python.psi.*;
import com.jetbrains.python.psi.impl.PyFunctionBuilder;
import com.jetbrains.python.psi.impl.PyPsiUtils;
import com.jetbrains.python.psi.types.PyClassLikeType;
import com.jetbrains.python.psi.types.PyNoneType;
import com.jetbrains.python.psi.types.PyTypeUtil;
import com.jetbrains.python.psi.types.TypeEvalContext;
/**
* @author Alexey.Ivanov
*/
public class PyOverrideImplementUtil
{
/**
 * Finds the Python class surrounding the caret in the given file, or null if
 * the caret is not inside (or immediately after) a class.
 */
@Nullable
public static PyClass getContextClass(@Nonnull final Editor editor, @Nonnull final PsiFile file)
{
    int offset = editor.getCaretModel().getOffset();
    PsiElement element = file.findElementAt(offset);
    if(element == null)
    {
        // are we in whitespace after last class? PY-440
        final PsiElement lastChild = file.getLastChild();
        if(lastChild != null &&
                offset >= lastChild.getTextRange().getStartOffset() &&
                offset <= lastChild.getTextRange().getEndOffset())
        {
            element = lastChild;
        }
    }
    final PyClass pyClass = PsiTreeUtil.getParentOfType(element, PyClass.class, false);
    // Caret in whitespace directly following a class body still counts as
    // being "in" that class.
    if(pyClass == null && element instanceof PsiWhiteSpace && element.getPrevSibling() instanceof PyClass)
    {
        return (PyClass) element.getPrevSibling();
    }
    return pyClass;
}
/**
 * Entry point for the Override Methods action: records feature usage and
 * shows the method chooser for the given class.
 */
public static void chooseAndOverrideMethods(final Project project, @Nonnull final Editor editor, @Nonnull final PyClass pyClass)
{
    FeatureUsageTracker.getInstance().triggerFeatureUsed(ProductivityFeatureNames.CODEASSISTS_OVERRIDE_IMPLEMENT);
    chooseAndOverrideOrImplementMethods(project, editor, pyClass);
}
private static void chooseAndOverrideOrImplementMethods(final Project project, @Nonnull final Editor editor, @Nonnull final PyClass pyClass)
{
PyPsiUtils.assertValid(pyClass);
ApplicationManager.getApplication().assertReadAccessAllowed();
final Set<PyFunction> result = new HashSet<>();
TypeEvalContext context = TypeEvalContext.codeCompletion(project, null);
final Collection<PyFunction> superFunctions = getAllSuperFunctions(pyClass, context);
result.addAll(superFunctions);
chooseAndOverrideOrImplementMethods(project, editor, pyClass, result, "Select Methods to Override", false);
}
public static void chooseAndOverrideOrImplementMethods(@Nonnull final Project project,
@Nonnull final Editor editor,
@Nonnull final PyClass pyClass,
@Nonnull final Collection<PyFunction> superFunctions,
@Nonnull final String title,
final boolean implement)
{
List<PyMethodMember> elements = new ArrayList<>();
for(PyFunction function : superFunctions)
{
final String name = function.getName();
if(name == null || PyUtil.isClassPrivateName(name))
{
continue;
}
if(pyClass.findMethodByName(name, false, null) == null)
{
final PyMethodMember member = new PyMethodMember(function);
elements.add(member);
}
}
if(elements.size() == 0)
{
return;
}
final MemberChooser<PyMethodMember> chooser = new MemberChooser<PyMethodMember>(elements.toArray(new PyMethodMember[elements.size()]), false, true, project)
{
@Override
protected SpeedSearchComparator getSpeedSearchComparator()
{
return new SpeedSearchComparator(false)
{
@Nullable
@Override
public Iterable<TextRange> matchingFragments(@Nonnull String pattern, @Nonnull String text)
{
return super.matchingFragments(PyMethodMember.trimUnderscores(pattern), text);
}
};
}
};
chooser.setTitle(title);
chooser.setCopyJavadocVisible(false);
chooser.show();
if(chooser.getExitCode() != DialogWrapper.OK_EXIT_CODE)
{
return;
}
List<PyMethodMember> membersToOverride = chooser.getSelectedElements();
overrideMethods(editor, pyClass, membersToOverride, implement);
}
public static void overrideMethods(final Editor editor, final PyClass pyClass, final List<PyMethodMember> membersToOverride, final boolean implement)
{
if(membersToOverride == null)
{
return;
}
new WriteCommandAction(pyClass.getProject(), pyClass.getContainingFile())
{
protected void run(@Nonnull final Result result) throws Throwable
{
write(pyClass, membersToOverride, editor, implement);
}
}.execute();
}
private static void write(@Nonnull final PyClass pyClass, @Nonnull final List<PyMethodMember> newMembers, @Nonnull final Editor editor, boolean implement)
{
final PyStatementList statementList = pyClass.getStatementList();
final int offset = editor.getCaretModel().getOffset();
PsiElement anchor = null;
for(PyStatement statement : statementList.getStatements())
{
if(statement.getTextRange().getStartOffset() < offset || (statement instanceof PyExpressionStatement && ((PyExpressionStatement) statement).getExpression() instanceof
PyStringLiteralExpression))
{
anchor = statement;
}
}
PyFunction element = null;
for(PyMethodMember newMember : newMembers)
{
PyFunction baseFunction = (PyFunction) newMember.getPsiElement();
final PyFunctionBuilder builder = buildOverriddenFunction(pyClass, baseFunction, implement);
PyFunction function = builder.addFunctionAfter(statementList, anchor, LanguageLevel.forElement(statementList));
element = CodeInsightUtilCore.forcePsiPostprocessAndRestoreElement(function);
}
PyPsiUtils.removeRedundantPass(statementList);
if(element != null)
{
final PyStatementList targetStatementList = element.getStatementList();
final int start = targetStatementList.getTextRange().getStartOffset();
editor.getCaretModel().moveToOffset(start);
editor.getScrollingModel().scrollToCaret(ScrollType.RELATIVE);
editor.getSelectionModel().setSelection(start, element.getTextRange().getEndOffset());
}
}
private static PyFunctionBuilder buildOverriddenFunction(PyClass pyClass, PyFunction baseFunction, boolean implement)
{
final boolean overridingNew = PyNames.NEW.equals(baseFunction.getName());
assert baseFunction.getName() != null;
PyFunctionBuilder pyFunctionBuilder = new PyFunctionBuilder(baseFunction.getName(), baseFunction);
final PyDecoratorList decorators = baseFunction.getDecoratorList();
boolean baseMethodIsStatic = false;
if(decorators != null)
{
if(decorators.findDecorator(PyNames.CLASSMETHOD) != null)
{
pyFunctionBuilder.decorate(PyNames.CLASSMETHOD);
}
else if(decorators.findDecorator(PyNames.STATICMETHOD) != null)
{
baseMethodIsStatic = true;
pyFunctionBuilder.decorate(PyNames.STATICMETHOD);
}
else if(decorators.findDecorator(PyNames.PROPERTY) != null || decorators.findDecorator(PyNames.ABSTRACTPROPERTY) != null)
{
pyFunctionBuilder.decorate(PyNames.PROPERTY);
}
}
PyAnnotation anno = baseFunction.getAnnotation();
if(anno != null)
{
pyFunctionBuilder.annotation(anno.getText());
}
final TypeEvalContext context = TypeEvalContext.userInitiated(baseFunction.getProject(), baseFunction.getContainingFile());
final List<PyParameter> baseParams = PyUtil.getParameters(baseFunction, context);
for(PyParameter parameter : baseParams)
{
pyFunctionBuilder.parameter(parameter.getText());
}
PyClass baseClass = baseFunction.getContainingClass();
assert baseClass != null;
StringBuilder statementBody = new StringBuilder();
boolean hadStar = false;
List<String> parameters = new ArrayList<>();
for(PyParameter parameter : baseParams)
{
final PyNamedParameter pyNamedParameter = parameter.getAsNamed();
if(pyNamedParameter != null)
{
String repr = pyNamedParameter.getRepr(false);
parameters.add(hadStar && !pyNamedParameter.isKeywordContainer() ? pyNamedParameter.getName() + "=" + repr : repr);
if(pyNamedParameter.isPositionalContainer())
{
hadStar = true;
}
}
else if(parameter instanceof PySingleStarParameter)
{
hadStar = true;
}
else
{
parameters.add(parameter.getText());
}
}
if(PyNames.FAKE_OLD_BASE.equals(baseClass.getName()) || raisesNotImplementedError(baseFunction) || implement)
{
statementBody.append(PyNames.PASS);
}
else
{
if(!PyNames.INIT.equals(baseFunction.getName()) && context.getReturnType(baseFunction) != PyNoneType.INSTANCE || overridingNew)
{
statementBody.append("return ");
}
if(baseClass.isNewStyleClass(context))
{
statementBody.append(PyNames.SUPER);
statementBody.append("(");
final LanguageLevel langLevel = ((PyFile) pyClass.getContainingFile()).getLanguageLevel();
if(!langLevel.isPy3K())
{
final String baseFirstName = !baseParams.isEmpty() ? baseParams.get(0).getName() : null;
final String firstName = baseFirstName != null ? baseFirstName : PyNames.CANONICAL_SELF;
PsiElement outerClass = PsiTreeUtil.getParentOfType(pyClass, PyClass.class, true, PyFunction.class);
String className = pyClass.getName();
final List<String> nameResult = Lists.newArrayList(className);
while(outerClass != null)
{
nameResult.add(0, ((PyClass) outerClass).getName());
outerClass = PsiTreeUtil.getParentOfType(outerClass, PyClass.class, true, PyFunction.class);
}
StringUtil.join(nameResult, ".", statementBody);
statementBody.append(", ").append(firstName);
}
statementBody.append(").").append(baseFunction.getName()).append("(");
// type.__new__ is explicitly decorated as @staticmethod in our stubs, but not in real Python code
if(parameters.size() > 0 && !(baseMethodIsStatic || overridingNew))
{
parameters.remove(0);
}
}
else
{
statementBody.append(getReferenceText(pyClass, baseClass)).append(".").append(baseFunction.getName()).append("(");
}
StringUtil.join(parameters, ", ", statementBody);
statementBody.append(")");
}
pyFunctionBuilder.statement(statementBody.toString());
return pyFunctionBuilder;
}
public static boolean raisesNotImplementedError(@Nonnull PyFunction function)
{
PyStatementList statementList = function.getStatementList();
IfVisitor visitor = new IfVisitor();
statementList.accept(visitor);
return !visitor.hasReturnInside && visitor.raiseNotImplemented;
}
// TODO find a better place for this logic
private static String getReferenceText(PyClass fromClass, PyClass toClass)
{
final PyExpression[] superClassExpressions = fromClass.getSuperClassExpressions();
for(PyExpression expression : superClassExpressions)
{
if(expression instanceof PyReferenceExpression)
{
PsiElement target = ((PyReferenceExpression) expression).getReference().resolve();
if(target == toClass)
{
return expression.getText();
}
}
}
return toClass.getName();
}
/**
* Returns all super functions available through MRO.
*/
@Nonnull
public static List<PyFunction> getAllSuperFunctions(@Nonnull PyClass pyClass, @Nonnull TypeEvalContext context)
{
final Map<String, PyFunction> functions = Maps.newLinkedHashMap();
for(final PyClassLikeType type : pyClass.getAncestorTypes(context))
{
if(type != null)
{
for(PyFunction function : PyTypeUtil.getMembersOfType(type, PyFunction.class, false, context))
{
final String name = function.getName();
if(name != null && !functions.containsKey(name))
{
functions.put(name, function);
}
}
}
}
return Lists.newArrayList(functions.values());
}
private static class IfVisitor extends PyRecursiveElementVisitor
{
private boolean hasReturnInside;
private boolean raiseNotImplemented;
@Override
public void visitPyReturnStatement(PyReturnStatement node)
{
hasReturnInside = true;
}
@Override
public void visitPyRaiseStatement(PyRaiseStatement node)
{
final PyExpression[] expressions = node.getExpressions();
if(expressions.length > 0)
{
final PyExpression firstExpression = expressions[0];
if(firstExpression instanceof PyCallExpression)
{
final PyExpression callee = ((PyCallExpression) firstExpression).getCallee();
if(callee != null && callee.getText().equals(PyNames.NOT_IMPLEMENTED_ERROR))
{
raiseNotImplemented = true;
}
}
else if(firstExpression.getText().equals(PyNames.NOT_IMPLEMENTED_ERROR))
{
raiseNotImplemented = true;
}
}
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/**
 * Tests various distributed aspects of transactions, e.g. locking/reservation semantics that do not
 * need multiple Region configurations. For those tests see <code>MultiVMRegionTestCase</code>.
 *
 *
 * @since GemFire 4.0
 * @see MultiVMRegionTestCase
 *
 */
package org.apache.geode.cache30;
import static org.apache.geode.distributed.ConfigurationProperties.ENABLE_NETWORK_PARTITION_DETECTION;
import static org.apache.geode.distributed.ConfigurationProperties.LOG_LEVEL;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.IOException;
import java.io.Serializable;
import java.util.HashSet;
import java.util.List;
import java.util.Properties;
import java.util.concurrent.CountDownLatch;
import org.junit.Ignore;
import org.junit.Test;
import org.apache.geode.SystemFailure;
import org.apache.geode.cache.AttributesFactory;
import org.apache.geode.cache.Cache;
import org.apache.geode.cache.CacheException;
import org.apache.geode.cache.CacheLoader;
import org.apache.geode.cache.CacheTransactionManager;
import org.apache.geode.cache.CommitConflictException;
import org.apache.geode.cache.CommitIncompleteException;
import org.apache.geode.cache.DataPolicy;
import org.apache.geode.cache.DiskAccessException;
import org.apache.geode.cache.LoaderHelper;
import org.apache.geode.cache.MirrorType;
import org.apache.geode.cache.Operation;
import org.apache.geode.cache.Region;
import org.apache.geode.cache.RegionAttributes;
import org.apache.geode.cache.Scope;
import org.apache.geode.cache.TimeoutException;
import org.apache.geode.distributed.internal.ResourceEvent;
import org.apache.geode.distributed.internal.ResourceEventsListener;
import org.apache.geode.distributed.internal.locks.DLockBatch;
import org.apache.geode.distributed.internal.locks.DLockService;
import org.apache.geode.distributed.internal.membership.InternalDistributedMember;
import org.apache.geode.distributed.internal.membership.api.MembershipManagerHelper;
import org.apache.geode.internal.cache.CommitReplyException;
import org.apache.geode.internal.cache.GemFireCacheImpl;
import org.apache.geode.internal.cache.InternalRegionArguments;
import org.apache.geode.internal.cache.LocalRegion;
import org.apache.geode.internal.cache.RegionEntry;
import org.apache.geode.internal.cache.TXManagerImpl;
import org.apache.geode.internal.cache.TXState;
import org.apache.geode.internal.cache.TXStateInterface;
import org.apache.geode.internal.cache.TXStateProxyImpl;
import org.apache.geode.internal.cache.locks.TXLockBatch;
import org.apache.geode.internal.cache.locks.TXLockService;
import org.apache.geode.internal.cache.locks.TXLockServiceImpl;
import org.apache.geode.test.awaitility.GeodeAwaitility;
import org.apache.geode.test.dunit.Assert;
import org.apache.geode.test.dunit.Host;
import org.apache.geode.test.dunit.IgnoredException;
import org.apache.geode.test.dunit.Invoke;
import org.apache.geode.test.dunit.LogWriterUtils;
import org.apache.geode.test.dunit.SerializableCallable;
import org.apache.geode.test.dunit.SerializableRunnable;
import org.apache.geode.test.dunit.VM;
import org.apache.geode.test.dunit.WaitCriterion;
import org.apache.geode.test.dunit.cache.internal.JUnit4CacheTestCase;
public class TXDistributedDUnitTest extends JUnit4CacheTestCase {
protected <K, V> RegionAttributes<K, V> getRegionAttributes() {
return this.getRegionAttributes(Scope.DISTRIBUTED_ACK);
}
protected <K, V> RegionAttributes<K, V> getRegionAttributes(Scope scope) {
AttributesFactory<K, V> factory = new AttributesFactory<>();
factory.setScope(scope);
if (scope.isDistributedAck()) {
factory.setEarlyAck(false);
}
return factory.create();
}
  /**
   * Test a remote grantor: commits a transaction in another VM so that VM
   * becomes the TX lock grantor, crashes that VM, then verifies that this VM
   * takes over grantorship and that commits still propagate to every member.
   */
  @Test
  public void testRemoteGrantor() throws Exception {
    IgnoredException.addIgnoredException("killing members ds");
    final CacheTransactionManager txMgr = this.getCache().getCacheTransactionManager();
    final String rgnName = getUniqueName();
    Region rgn = getCache().createRegion(rgnName, getRegionAttributes());
    rgn.create("key", null);
    // Create the region with an initial value in every other VM.
    Invoke.invokeInEveryVM(new SerializableRunnable("testRemoteGrantor: initial configuration") {
      @Override
      public void run() {
        try {
          Region rgn1 = getCache().createRegion(rgnName, getRegionAttributes());
          rgn1.put("key", "val0");
        } catch (CacheException e) {
          Assert.fail("While creating region", e);
        }
      }
    });

    Host host = Host.getHost(0);
    VM vm0 = host.getVM(0);
    // VM vm1 = host.getVM(1);
    // VM vm2 = host.getVM(2);

    // First commit happens in vm0, which should make vm0 the lock grantor.
    vm0.invoke(new SerializableRunnable("testRemoteGrantor: remote grantor init") {
      @Override
      public void run() {
        try {
          Region rgn1 = getCache().getRegion(rgnName);
          final CacheTransactionManager txMgr2 = getCache().getCacheTransactionManager();
          txMgr2.begin();
          rgn1.put("key", "val1");
          txMgr2.commit();
          assertNotNull(TXLockService.getDTLS());
          assertTrue(TXLockService.getDTLS().isLockGrantor());
        } catch (CacheException e) {
          fail("While performing first transaction");
        }
      }
    });

    // fix for bug 38843 causes the DTLS to be created in every TX participant
    assertNotNull(TXLockService.getDTLS());
    assertFalse(TXLockService.getDTLS().isLockGrantor());
    assertEquals("val1", rgn.getEntry("key").getValue());

    // Kill the grantor VM so grantorship must move elsewhere.
    vm0.invoke(new SerializableRunnable("Disconnect from DS, remote grantor death") {
      @Override
      public void run() {
        try {
          MembershipManagerHelper.crashDistributedSystem(getSystem());
        } finally {
          // Allow getCache() to re-establish a ds connection
          closeCache();
        }
      }
    });

    // Make this VM the remote Grantor
    txMgr.begin();
    rgn.put("key", "val2");
    txMgr.commit();
    assertNotNull(TXLockService.getDTLS());
    assertTrue(TXLockService.getDTLS().isLockGrantor());

    // Every other VM commits; none of them should become the grantor.
    SerializableRunnable remoteComm =
        new SerializableRunnable("testRemoteGrantor: remote grantor commit") {
          @Override
          public void run() {
            try {
              Cache c = getCache();
              CacheTransactionManager txMgr2 = c.getCacheTransactionManager();
              Region rgn1 = c.getRegion(rgnName);
              if (rgn1 == null) {
                // This block should only execute on VM0
                rgn1 = c.createRegion(rgnName, getRegionAttributes());
              }
              txMgr2.begin();
              rgn1.put("key", "val3");
              txMgr2.commit();
              if (TXLockService.getDTLS() != null) {
                assertTrue(!TXLockService.getDTLS().isLockGrantor());
              }
            } catch (CacheException e) {
              Assert.fail("While creating region", e);
            }
          }
        };
    Invoke.invokeInEveryVM(remoteComm);
    // vm1.invoke(remoteComm);
    // vm2.invoke(remoteComm);

    // This VM remains the grantor and sees the remotely committed value.
    assertNotNull(TXLockService.getDTLS());
    assertTrue(TXLockService.getDTLS().isLockGrantor());
    assertEquals("val3", rgn.getEntry("key").getValue());
    rgn.destroyRegion();
  }
  /**
   * Test the internal callbacks used for what else... testing.
   * Runs committed transactions over one, two, and three regions (the third
   * with distributed-no-ack scope) and, via {@link #setInternalCallbacks},
   * counts how often each internal commit-phase callback fires: the seven
   * "after"/"before" callbacks (sensor slots 0-6) are expected exactly once
   * per commit, the two "during" callbacks (slots 7-8) exactly twice.
   */
  @Test
  public void testInternalCallbacks() throws Exception {
    final CacheTransactionManager txMgr = this.getCache().getCacheTransactionManager();
    final String rgnName1 = getUniqueName() + "_1";
    final String rgnName2 = getUniqueName() + "_2";
    final String rgnName3 = getUniqueName() + "_3";
    Region rgn1 = getCache().createRegion(rgnName1, getRegionAttributes());
    Host host = Host.getHost(0);
    VM vm0 = host.getVM(0);
    VM vm1 = host.getVM(1);
    // Create all three regions (the third distributed-no-ack) in two peer VMs.
    SerializableRunnable createRgn =
        new SerializableRunnable("testInternalCallbacks: initial configuration") {
          @Override
          public void run() {
            try {
              Region rgn1a = getCache().createRegion(rgnName1, getRegionAttributes());
              Region rgn2 = getCache().createRegion(rgnName2, getRegionAttributes());
              Region rgn3 =
                  getCache().createRegion(rgnName3, getRegionAttributes(Scope.DISTRIBUTED_NO_ACK));
              rgn1a.create("key", null);
              rgn2.create("key", null);
              rgn3.create("key", null);
            } catch (CacheException e) {
              Assert.fail("While creating region", e);
            }
          }
        };
    vm0.invoke(createRgn);
    vm1.invoke(createRgn);

    // Standard commit check
    txMgr.begin();
    rgn1.put("key", "value0");
    txMgr.commit();
    SerializableRunnable checkRgn1 =
        new SerializableRunnable("testInternalCallbacks: check rgn1 valus") {
          @Override
          public void run() {
            Region rgn1a = getCache().getRegion(rgnName1);
            assertNotNull(rgn1a);
            assertEquals("value0", rgn1a.getEntry("key").getValue());
          }
        };
    vm0.invoke(checkRgn1);
    vm1.invoke(checkRgn1);

    // Single-region commit with callback sensors attached.
    {
      final byte cbSensors[] = {0, 0, 0, 0, 0, 0, 0, 0, 0};
      txMgr.begin();
      ((TXStateProxyImpl) ((TXManagerImpl) txMgr).getTXState()).forceLocalBootstrap();
      setInternalCallbacks(((TXManagerImpl) txMgr).getTXState(), cbSensors);
      rgn1.put("key", "value1");
      txMgr.commit();
      // Slots 0-6: each "after" callback fired exactly once.
      for (int i = cbSensors.length - 3; i >= 0; --i) {
        assertEquals("Internal callback " + i + " was not called the expected number of times!",
            (byte) 1, cbSensors[i]);
      }
      // Slots 7-8: the "during" callbacks fired exactly twice.
      for (int i = cbSensors.length - 1; i > cbSensors.length - 3; --i) {
        assertEquals(
            "Internal \"during\" callback " + i + " invoked an unexpected number of times!",
            (byte) 2, cbSensors[i]);
      }
    }
    SerializableRunnable checkRgn1Again =
        new SerializableRunnable("testInternalCallbacks: validate remote values") {
          @Override
          public void run() {
            Region rgn1a = getCache().getRegion(rgnName1);
            assertNotNull(rgn1a);
            assertEquals("value1", rgn1a.getEntry("key").getValue());
          }
        };
    vm0.invoke(checkRgn1Again);
    vm1.invoke(checkRgn1Again);

    // Try 2 regions
    Region rgn2 = getCache().createRegion(rgnName2, getRegionAttributes());
    txMgr.begin();
    rgn1.put("key", "value2");
    rgn2.put("key", "value2");
    txMgr.commit();
    SerializableRunnable checkRgn12 =
        new SerializableRunnable("testInternalCallbacks: check rgn1 valus") {
          @Override
          public void run() {
            Region rgn1a = getCache().getRegion(rgnName1);
            assertNotNull(rgn1a);
            assertEquals("value2", rgn1a.getEntry("key").getValue());
            Region rgn2a = getCache().getRegion(rgnName2);
            assertNotNull(rgn2a);
            assertEquals("value2", rgn2a.getEntry("key").getValue());
          }
        };
    vm0.invoke(checkRgn12);
    vm1.invoke(checkRgn12);

    // Two-region commit with callback sensors attached; same expected counts.
    {
      final byte cbSensors[] = {0, 0, 0, 0, 0, 0, 0, 0, 0};
      txMgr.begin();
      ((TXStateProxyImpl) ((TXManagerImpl) txMgr).getTXState()).forceLocalBootstrap();
      setInternalCallbacks(((TXManagerImpl) txMgr).getTXState(), cbSensors);
      rgn1.put("key", "value3");
      rgn2.put("key", "value3");
      txMgr.commit();
      for (int i = cbSensors.length - 3; i >= 0; i--) {
        assertEquals("Internal callback " + i + " was not called the expected number of times!",
            (byte) 1, cbSensors[i]);
      }
      for (int i = cbSensors.length - 1; i > cbSensors.length - 3; --i) {
        assertEquals(
            "Internal \"during\" callback " + i + " invoked an unexpected number of times!",
            (byte) 2, cbSensors[i]);
      }
    }
    SerializableRunnable checkRgn12Again =
        new SerializableRunnable("testInternalCallbacks: validate both regions remote values") {
          @Override
          public void run() {
            Region rgn1a = getCache().getRegion(rgnName1);
            assertNotNull(rgn1a);
            assertEquals("value3", rgn1a.getEntry("key").getValue());
            Region rgn2a = getCache().getRegion(rgnName2);
            assertNotNull(rgn2a);
            assertEquals("value3", rgn2a.getEntry("key").getValue());
          }
        };
    vm0.invoke(checkRgn12Again);
    vm1.invoke(checkRgn12Again);

    // Try a third region (D_NO_ACK)
    Region rgn3 = getCache().createRegion(rgnName3, getRegionAttributes(Scope.DISTRIBUTED_NO_ACK));
    txMgr.begin();
    rgn1.put("key", "value4");
    rgn2.put("key", "value4");
    rgn3.put("key", "value4");
    txMgr.commit();
    SerializableRunnable checkRgn123 =
        new SerializableRunnable("testInternalCallbacks: check rgn1 valus") {
          @Override
          public void run() {
            Region rgn1a = getCache().getRegion(rgnName1);
            assertNotNull(rgn1a);
            assertEquals("value4", rgn1a.getEntry("key").getValue());
            Region rgn2a = getCache().getRegion(rgnName2);
            assertNotNull(rgn2a);
            assertEquals("value4", rgn2a.getEntry("key").getValue());
            Region rgn3a = getCache().getRegion(rgnName3);
            assertNotNull(rgn3a);
            assertEquals("value4", rgn3a.getEntry("key").getValue());
          }
        };
    vm0.invoke(checkRgn123);
    vm1.invoke(checkRgn123);

    // Three-region commit with callback sensors attached; same expected counts.
    {
      final byte cbSensors[] = {0, 0, 0, 0, 0, 0, 0, 0, 0};
      txMgr.begin();
      ((TXStateProxyImpl) ((TXManagerImpl) txMgr).getTXState()).forceLocalBootstrap();
      setInternalCallbacks(((TXManagerImpl) txMgr).getTXState(), cbSensors);
      rgn1.put("key", "value5");
      rgn2.put("key", "value5");
      rgn3.put("key", "value5");
      txMgr.commit();
      for (int i = cbSensors.length - 3; i >= 0; i--) {
        assertEquals("Internal callback " + i + " was not called the expected number of times!",
            (byte) 1, cbSensors[i]);
      }
      for (int i = cbSensors.length - 1; i > cbSensors.length - 3; --i) {
        assertEquals(
            "Internal \"during\" callback " + i + " invoked an unexpected number of times!",
            (byte) 2, cbSensors[i]);
      }
    }
    SerializableRunnable checkRgn123Again =
        new SerializableRunnable("testInternalCallbacks: validate both regions remote values") {
          @Override
          public void run() {
            Region rgn1a = getCache().getRegion(rgnName1);
            assertNotNull(rgn1a);
            assertEquals("value5", rgn1a.getEntry("key").getValue());
            Region rgn2a = getCache().getRegion(rgnName2);
            assertNotNull(rgn2a);
            assertEquals("value5", rgn2a.getEntry("key").getValue());
            Region rgn3a = getCache().getRegion(rgnName3);
            assertNotNull(rgn3a);
            assertEquals("value5", rgn3a.getEntry("key").getValue());
          }
        };
    vm0.invoke(checkRgn123Again);
    vm1.invoke(checkRgn123Again);

    rgn1.destroyRegion();
    rgn2.destroyRegion();
  }
static void setInternalCallbacks(TXStateInterface txp, final byte[] cbSensors) {
((TXStateProxyImpl) txp).forceLocalBootstrap();
TXState tx = (TXState) ((TXStateProxyImpl) txp).getRealDeal(null, null);
assertEquals(9, cbSensors.length);
tx.setAfterReservation(new Runnable() {
@Override
public void run() {
cbSensors[0]++;
}
});
tx.setAfterConflictCheck(new Runnable() {
@Override
public void run() {
cbSensors[1]++;
}
});
tx.setAfterApplyChanges(new Runnable() {
@Override
public void run() {
cbSensors[2]++;
}
});
tx.setAfterReleaseLocalLocks(new Runnable() {
@Override
public void run() {
cbSensors[3]++;
}
});
tx.setAfterIndividualSend(new Runnable() {
@Override
public void run() {
cbSensors[4]++;
}
});
tx.setAfterIndividualCommitProcess(new Runnable() {
@Override
public void run() {
cbSensors[5]++;
}
});
tx.setAfterSend(new Runnable() {
@Override
public void run() {
cbSensors[6]++;
}
});
tx.setDuringIndividualSend(new Runnable() {
@Override
public void run() {
cbSensors[7]++;
}
});
tx.setDuringIndividualCommitProcess(new Runnable() {
@Override
public void run() {
cbSensors[8]++;
}
});
}
/**
* Test distributed ack transactions that consist only of data from loaded values
*/
@Test
public void testDACKLoadedMessage() throws Exception {
final CacheTransactionManager txMgr = this.getCache().getCacheTransactionManager();
final String rgnName = getUniqueName();
AttributesFactory factory = new AttributesFactory();
factory.setScope(Scope.DISTRIBUTED_ACK);
factory.setEarlyAck(false);
factory.setCacheLoader(new CacheLoader() {
@Override
public Object load(LoaderHelper helper) {
return "val" + helper.getArgument();
}
@Override
public void close() {}
});
Region rgn = getCache().createRegion(rgnName, factory.create());
Invoke.invokeInEveryVM(new SerializableRunnable("testDACKLoadedMessage: intial configuration") {
@Override
public void run() {
try {
AttributesFactory factory2 = new AttributesFactory();
factory2.setScope(Scope.DISTRIBUTED_ACK);
factory2.setEarlyAck(false);
// factory.setDataPolicy(DataPolicy.REPLICATE);
factory2.setMirrorType(MirrorType.KEYS);
getCache().createRegion(rgnName, factory2.create());
} catch (CacheException e) {
Assert.fail("While creating region", e);
}
}
});
// Confirm the standard case
txMgr.begin();
rgn.put("key1", "val1");
txMgr.commit();
assertEquals("val1", rgn.getEntry("key1").getValue());
Invoke
.invokeInEveryVM(new SerializableRunnable("testDACKLoadedMessage: confirm standard case") {
@Override
public void run() {
Region rgn1 = getCache().getRegion(rgnName);
assertEquals("val1", rgn1.getEntry("key1").getValue());
}
});
// Confirm loaded value case
txMgr.begin();
rgn.get("key2", new Integer(2));
txMgr.commit();
assertEquals("val2", rgn.getEntry("key2").getValue());
Invoke
.invokeInEveryVM(new SerializableRunnable("testDACKLoadedMessage: confirm standard case") {
@Override
public void run() {
Region rgn1 = getCache().getRegion(rgnName);
assertEquals("val2", rgn1.getEntry("key2").getValue());
}
});
// This should use the ack w/ the lockid
txMgr.begin();
rgn.put("key3", "val3");
rgn.get("key4", new Integer(4));
txMgr.commit();
Invoke
.invokeInEveryVM(new SerializableRunnable("testDACKLoadedMessage: confirm standard case") {
@Override
public void run() {
Region rgn1 = getCache().getRegion(rgnName);
assertEquals("val3", rgn1.getEntry("key3").getValue());
assertEquals("val4", rgn1.getEntry("key4").getValue());
}
});
}
@Override
public Properties getDistributedSystemProperties() {
Properties p = super.getDistributedSystemProperties();
p.put(LOG_LEVEL, LogWriterUtils.getDUnitLogLevel());
p.put(ENABLE_NETWORK_PARTITION_DETECTION, "false");
return p;
}
@Test
public void testHighAvailabilityFeatures() throws Exception {
IgnoredException.addIgnoredException("DistributedSystemDisconnectedException");
final String rgnName = getUniqueName();
AttributesFactory factory = new AttributesFactory();
factory.setScope(Scope.DISTRIBUTED_ACK);
factory.setEarlyAck(false);
Region rgn = getCache().createRegion(rgnName, factory.create());
Invoke.invokeInEveryVM(
new SerializableRunnable("testHighAvailabilityFeatures: intial region configuration") {
@Override
public void run() {
try {
AttributesFactory factory2 = new AttributesFactory();
factory2.setScope(Scope.DISTRIBUTED_ACK);
factory2.setEarlyAck(false);
factory2.setDataPolicy(DataPolicy.REPLICATE);
getCache().createRegion(rgnName, factory2.create());
} catch (CacheException e) {
Assert.fail("While creating region", e);
}
}
});
// create entries
rgn.put("key0", "val0_0");
rgn.put("key1", "val1_0");
Host host = Host.getHost(0);
// This test assumes that there are at least three VMs; the origin and two recipients
assertTrue(host.getVMCount() >= 3);
final VM originVM = host.getVM(0);
// Test that there is no commit after a partial commit message
// send (only sent to a minority of the recipients)
originVM.invoke(new SerializableRunnable("Flakey DuringIndividualSend Transaction") {
@Override
public void run() {
final Region rgn1 = getCache().getRegion(rgnName);
assertNotNull(rgn1);
try {
final CacheTransactionManager txMgr2 = getCache().getCacheTransactionManager();
final CacheTransactionManager txMgrImpl = txMgr2;
txMgr2.begin();
// 1. setup an internal callback on originVM that will call
// disconnectFromDS() on the 2nd duringIndividualSend
// call.
((TXStateProxyImpl) ((TXManagerImpl) txMgrImpl).getTXState()).forceLocalBootstrap();
TXState txState = (TXState) ((TXStateProxyImpl) ((TXManagerImpl) txMgrImpl).getTXState())
.getRealDeal(null, null);
txState.setDuringIndividualSend(new Runnable() {
private int numCalled = 0;
@Override
public synchronized void run() {
++numCalled;
rgn1.getCache().getLogger()
.info("setDuringIndividualSend Runnable called " + numCalled + " times");
if (numCalled > 1) {
MembershipManagerHelper.crashDistributedSystem(getSystem());
}
}
});
rgn1.put("key0", "val0_1");
rgn1.put("key1", "val1_1");
// 2. commit a transaction in originVM, it will disconnect from the DS
txMgr2.commit();
} catch (VirtualMachineError e) {
SystemFailure.initiateFailure(e);
throw e;
} catch (Throwable e) {
rgn1.getCache().getLogger().warning("Ignoring Exception", e);
} finally {
// Allow this VM to re-connect to the DS upon getCache() call
closeCache();
}
}
});
// 3. verify on all VMs that the transaction was not committed
final SerializableRunnable noChangeValidator =
new SerializableRunnable("testHighAvailabilityFeatures: validate no change in Region") {
@Override
public void run() {
Region rgn1 = getCache().getRegion(rgnName);
if (rgn1 == null) {
// Expect a null region from originVM
try {
AttributesFactory factory2 = new AttributesFactory();
factory2.setScope(Scope.DISTRIBUTED_ACK);
factory2.setEarlyAck(false);
factory2.setDataPolicy(DataPolicy.REPLICATE);
rgn1 = getCache().createRegion(rgnName, factory2.create());
} catch (CacheException e) {
Assert.fail("While creating region", e);
}
}
Region.Entry re = rgn1.getEntry("key0");
assertNotNull(re);
assertEquals("val0_0", re.getValue());
re = rgn1.getEntry("key1");
assertNotNull(re);
assertEquals("val1_0", re.getValue());
}
};
Invoke.invokeInEveryVM(noChangeValidator);
// Test that there is no commit after sending to all recipients
// but prior to sending the "commit process" message
originVM.invoke(new SerializableRunnable("Flakey AfterIndividualSend Transaction") {
@Override
public void run() {
final Region rgn1 = getCache().getRegion(rgnName);
assertNotNull(rgn1);
try {
final CacheTransactionManager txMgr2 = getCache().getCacheTransactionManager();
final CacheTransactionManager txMgrImpl = txMgr2;
txMgr2.begin();
// 1. setup an internal callback on originVM that will call
// disconnectFromDS() on AfterIndividualSend
((TXStateProxyImpl) ((TXManagerImpl) txMgrImpl).getTXState()).forceLocalBootstrap();
TXState txState = (TXState) ((TXStateProxyImpl) ((TXManagerImpl) txMgrImpl).getTXState())
.getRealDeal(null, null);
txState.setAfterIndividualSend(new Runnable() {
@Override
public synchronized void run() {
MembershipManagerHelper.crashDistributedSystem(getSystem());
}
});
rgn1.put("key0", "val0_2");
rgn1.put("key1", "val1_2");
// 2. commit a transaction in originVM, it will disconnect from the DS
txMgr2.commit();
} catch (VirtualMachineError e) {
SystemFailure.initiateFailure(e);
throw e;
} catch (Throwable e) {
rgn1.getCache().getLogger().warning("Ignoring Exception", e);
} finally {
// Allow this VM to re-connect to the DS upon getCache() call
closeCache();
}
}
});
// 3. verify on all VMs, including the origin, that the transaction was not committed
Invoke.invokeInEveryVM(noChangeValidator);
// Test commit success upon a single commit process message received.
originVM.invoke(new SerializableRunnable("Flakey DuringIndividualCommitProcess Transaction") {
@Override
public void run() {
final Region rgn1 = getCache().getRegion(rgnName);
assertNotNull(rgn1);
try {
final CacheTransactionManager txMgr2 = getCache().getCacheTransactionManager();
final CacheTransactionManager txMgrImpl = txMgr2;
txMgr2.begin();
((TXStateProxyImpl) ((TXManagerImpl) txMgrImpl).getTXState()).forceLocalBootstrap();
TXState txState = (TXState) ((TXStateProxyImpl) ((TXManagerImpl) txMgrImpl).getTXState())
.getRealDeal(null, null);
// 1. setup an internal callback on originVM that will call
// disconnectFromDS() on the 2nd internalDuringIndividualCommitProcess
// call.
txState.setDuringIndividualCommitProcess(new Runnable() {
private int numCalled = 0;
@Override
public synchronized void run() {
++numCalled;
rgn1.getCache().getLogger()
.info("setDuringIndividualCommitProcess Runnable called " + numCalled + " times");
if (numCalled > 1) {
MembershipManagerHelper.crashDistributedSystem(getSystem());
}
}
});
rgn1.put("key0", "val0_3");
rgn1.put("key1", "val1_3");
// 2. commit a transaction in originVM, it will disconnect from the DS
txMgr2.commit();
} catch (VirtualMachineError e) {
SystemFailure.initiateFailure(e);
throw e;
} catch (Throwable e) {
rgn1.getCache().getLogger().warning("Ignoring Exception", e);
} finally {
// Allow this VM to re-connect to the DS upon getCache() call
closeCache();
}
}
});
// 3. verify on all VMs that the transaction was committed (including the orgin, due to GII)
SerializableRunnable nonSoloChangeValidator1 = new SerializableRunnable(
"testHighAvailabilityFeatures: validate v1 non-solo Region changes") {
@Override
public void run() {
Region rgn1 = getCache().getRegion(rgnName);
if (rgn1 == null) {
// Expect a null region from originVM
try {
AttributesFactory factory2 = new AttributesFactory();
factory2.setScope(Scope.DISTRIBUTED_ACK);
factory2.setEarlyAck(false);
factory2.setDataPolicy(DataPolicy.REPLICATE);
rgn1 = getCache().createRegion(rgnName, factory2.create());
} catch (CacheException e) {
Assert.fail("While creating region", e);
}
}
long giveUp = System.currentTimeMillis() + 10000;
while (giveUp > System.currentTimeMillis()) {
try {
Region.Entry re = rgn1.getEntry("key0");
assertNotNull(re);
assertEquals("val0_3", re.getValue());
re = rgn1.getEntry("key1");
assertNotNull(re);
assertEquals("val1_3", re.getValue());
break;
} catch (AssertionError e) {
if (giveUp > System.currentTimeMillis()) {
throw e;
}
}
}
}
};
Invoke.invokeInEveryVM(nonSoloChangeValidator1);
// Verify successful solo region commit after duringIndividualSend
// (same as afterIndividualSend).
// Create a region that only exists on the origin and another VM
final String soloRegionName = getUniqueName() + "_solo";
SerializableRunnable createSoloRegion =
new SerializableRunnable("testHighAvailabilityFeatures: solo region configuration") {
@Override
public void run() {
try {
AttributesFactory factory2 = new AttributesFactory();
factory2.setScope(Scope.DISTRIBUTED_ACK);
factory2.setEarlyAck(false);
factory2.setDataPolicy(DataPolicy.REPLICATE);
Region rgn1 = getCache().createRegion(soloRegionName, factory2.create());
rgn1.put("soloKey0", "soloVal0_0");
rgn1.put("soloKey1", "soloVal1_0");
} catch (CacheException e) {
Assert.fail("While creating region", e);
}
}
};
final VM soloRegionVM = host.getVM(1);
originVM.invoke(createSoloRegion);
soloRegionVM.invoke(createSoloRegion);
originVM
.invoke(new SerializableRunnable("Flakey solo region DuringIndividualSend Transaction") {
@Override
public void run() {
final Region soloRgn = getCache().getRegion(soloRegionName);
assertNotNull(soloRgn);
try {
final CacheTransactionManager txMgr2 = getCache().getCacheTransactionManager();
final CacheTransactionManager txMgrImpl = txMgr2;
txMgr2.begin();
// 1. setup an internal callback on originVM that will call
// disconnectFromDS() on the 2nd duringIndividualSend
// call.
((TXStateProxyImpl) ((TXManagerImpl) txMgrImpl).getTXState()).forceLocalBootstrap();
TXState txState =
(TXState) ((TXStateProxyImpl) ((TXManagerImpl) txMgrImpl).getTXState())
.getRealDeal(null, null);
txState.setDuringIndividualSend(new Runnable() {
private int numCalled = 0;
@Override
public synchronized void run() {
++numCalled;
soloRgn.getCache().getLogger()
.info("setDuringIndividualSend Runnable called " + numCalled + " times");
if (numCalled > 1) {
MembershipManagerHelper.crashDistributedSystem(getSystem());
}
}
});
soloRgn.put("soloKey0", "soloVal0_1");
soloRgn.put("soloKey1", "soloVal1_1");
// 2. commit a transaction in originVM, it will disconnect from the DS
txMgr2.commit();
} catch (VirtualMachineError e) {
SystemFailure.initiateFailure(e);
throw e;
} catch (Throwable e) {
soloRgn.getCache().getLogger().warning("Ignoring Exception", e);
} finally {
// Allow this VM to re-connect to the DS upon getCache() call
closeCache();
}
}
});
// 3. verify on the soloRegionVM that the transaction was committed
final SerializableRunnable soloRegionCommitValidator1 = new SerializableRunnable(
"testHighAvailabilityFeatures: validate successful v1 commit in solo Region") {
@Override
public void run() {
Region soloRgn = getCache().getRegion(soloRegionName);
if (soloRgn == null) {
// Expect a null region from originVM
try {
AttributesFactory factory2 = new AttributesFactory();
factory2.setScope(Scope.DISTRIBUTED_ACK);
factory2.setEarlyAck(false);
factory2.setDataPolicy(DataPolicy.REPLICATE);
soloRgn = getCache().createRegion(soloRegionName, factory2.create());
} catch (CacheException e) {
Assert.fail("While creating region ", e);
}
}
Region.Entry re = soloRgn.getEntry("soloKey0");
assertNotNull(re);
assertEquals("soloVal0_1", re.getValue());
re = soloRgn.getEntry("soloKey1");
assertNotNull(re);
assertEquals("soloVal1_1", re.getValue());
}
};
originVM.invoke(soloRegionCommitValidator1);
soloRegionVM.invoke(soloRegionCommitValidator1);
// verify no change in nonSolo region, re-establish region in originVM
Invoke.invokeInEveryVM(nonSoloChangeValidator1);
// Verify no commit for failed send (afterIndividualSend) for solo
// Region combined with non-solo Region
originVM.invoke(new SerializableRunnable(
"Flakey mixed (solo+non-solo) region DuringIndividualSend Transaction") {
@Override
public void run() {
final Region rgn1 = getCache().getRegion(rgnName);
assertNotNull(rgn1);
final Region soloRgn = getCache().getRegion(soloRegionName);
assertNotNull(soloRgn);
try {
final CacheTransactionManager txMgr2 = getCache().getCacheTransactionManager();
final CacheTransactionManager txMgrImpl = txMgr2;
txMgr2.begin();
// 1. setup an internal callback on originVM that will call
// disconnectFromDS() on the afterIndividualSend
// call.
((TXStateProxyImpl) ((TXManagerImpl) txMgrImpl).getTXState()).forceLocalBootstrap();
TXState txState = (TXState) ((TXStateProxyImpl) ((TXManagerImpl) txMgrImpl).getTXState())
.getRealDeal(null, null);
txState.setAfterIndividualSend(new Runnable() {
@Override
public synchronized void run() {
MembershipManagerHelper.crashDistributedSystem(getSystem());
}
});
rgn1.put("key0", "val0_4");
rgn1.put("key1", "val1_4");
soloRgn.put("soloKey0", "soloVal0_2");
soloRgn.put("soloKey1", "soloVal1_2");
// 2. commit a transaction in originVM, it will disconnect from the DS
txMgr2.commit();
} catch (VirtualMachineError e) {
SystemFailure.initiateFailure(e);
throw e;
} catch (Throwable e) {
rgn1.getCache().getLogger().warning("Ignoring Exception", e);
} finally {
// Allow this VM to re-connect to the DS upon getCache() call
closeCache();
}
}
});
// Origin and Solo Region VM should be the same as last validation
originVM.invoke(soloRegionCommitValidator1);
soloRegionVM.invoke(soloRegionCommitValidator1);
Invoke.invokeInEveryVM(nonSoloChangeValidator1);
// Verify commit after sending a single
// (duringIndividualCommitProcess) commit process for solo Region
// combined with non-solo Region
originVM.invoke(new SerializableRunnable(
"Flakey mixed (solo+non-solo) region DuringIndividualCommitProcess Transaction") {
@Override
public void run() {
final Region rgn1 = getCache().getRegion(rgnName);
assertNotNull(rgn1);
final Region soloRgn = getCache().getRegion(soloRegionName);
assertNotNull(soloRgn);
try {
final CacheTransactionManager txMgr2 = getCache().getCacheTransactionManager();
final CacheTransactionManager txMgrImpl = txMgr2;
txMgr2.begin();
// 1. setup an internal callback on originVM that will call
// disconnectFromDS() on the afterIndividualSend
// call.
((TXStateProxyImpl) ((TXManagerImpl) txMgrImpl).getTXState()).forceLocalBootstrap();
TXState txState = (TXState) ((TXStateProxyImpl) ((TXManagerImpl) txMgrImpl).getTXState())
.getRealDeal(null, null);
txState.setAfterIndividualSend(new Runnable() {
private int numCalled = 0;
@Override
public synchronized void run() {
++numCalled;
rgn1.getCache().getLogger()
.info("setDuringIndividualCommitProcess Runnable called " + numCalled + " times");
if (numCalled > 1) {
MembershipManagerHelper.crashDistributedSystem(getSystem());
}
}
});
rgn1.put("key0", "val0_5");
rgn1.put("key1", "val1_5");
soloRgn.put("soloKey0", "soloVal0_3");
soloRgn.put("soloKey1", "soloVal1_3");
// 2. commit a transaction in originVM, it will disconnect from the DS
txMgr2.commit();
} catch (VirtualMachineError e) {
SystemFailure.initiateFailure(e);
throw e;
} catch (Throwable e) {
rgn1.getCache().getLogger().warning("Ignoring Exception", e);
} finally {
// Allow this VM to re-connect to the DS upon getCache() call
closeCache();
}
}
});
final SerializableRunnable soloRegionCommitValidator2 = new SerializableRunnable(
"testHighAvailabilityFeatures: validate successful v2 commit in solo Region") {
@Override
public void run() {
Region soloRgn = getCache().getRegion(soloRegionName);
if (soloRgn == null) {
// Expect a null region from originVM
try {
AttributesFactory factory2 = new AttributesFactory();
factory2.setScope(Scope.DISTRIBUTED_ACK);
factory2.setEarlyAck(false);
factory2.setDataPolicy(DataPolicy.REPLICATE);
soloRgn = getCache().createRegion(soloRegionName, factory2.create());
} catch (CacheException e) {
Assert.fail("While creating region ", e);
}
}
Region.Entry re = soloRgn.getEntry("soloKey0");
assertNotNull(re);
assertEquals("soloVal0_3", re.getValue());
re = soloRgn.getEntry("soloKey1");
assertNotNull(re);
assertEquals("soloVal1_3", re.getValue());
}
};
originVM.invoke(soloRegionCommitValidator2);
soloRegionVM.invoke(soloRegionCommitValidator2);
SerializableRunnable nonSoloChangeValidator2 = new SerializableRunnable(
"testHighAvailabilityFeatures: validate v2 non-solo Region changes") {
@Override
public void run() {
Region rgn1 = getCache().getRegion(rgnName);
if (rgn1 == null) {
// Expect a null region from originVM
try {
AttributesFactory factory2 = new AttributesFactory();
factory2.setScope(Scope.DISTRIBUTED_ACK);
factory2.setEarlyAck(false);
factory2.setDataPolicy(DataPolicy.REPLICATE);
rgn1 = getCache().createRegion(rgnName, factory2.create());
} catch (CacheException e) {
Assert.fail("While creating region", e);
}
}
Region.Entry re = rgn1.getEntry("key0");
assertNotNull(re);
assertEquals("val0_5", re.getValue());
re = rgn1.getEntry("key1");
assertNotNull(re);
assertEquals("val1_5", re.getValue());
}
};
Invoke.invokeInEveryVM(nonSoloChangeValidator2);
}
/**
 * A class used in testLockBatchParticipantsUpdate to pause a transaction in the afterReservation
 * and afterSend states. The pause/resume handshake is done via wait/notify on the
 * {@code PausibleTX.class} monitor, which the controlling test thread also synchronizes on.
 */
public static class PausibleTX implements Runnable {
  // Set to true (under the PausibleTX.class monitor) once the TX has reached
  // the afterReservation callback; the controller polls this via getIsRunning().
  public boolean isRunning = false;
  // Name of the region to operate on; must be set by the controller before start.
  public String rgnName = null;
  // Cache to use; must be set by the controller before start.
  public Cache myCache = null;
  // Key/value for the single put performed inside the transaction.
  public Object key = null;
  public Object value = null;

  /** Returns whether the transaction has reached the afterReservation pause point. */
  public boolean getIsRunning() {
    return this.isRunning;
  }

  @Override
  public void run() {
    Region rgn = this.myCache.getRegion(this.rgnName);
    final CacheTransactionManager txMgr = this.myCache.getCacheTransactionManager();
    txMgr.begin();
    // Force a local TXState so the internal test callbacks below can be installed.
    ((TXStateProxyImpl) ((TXManagerImpl) txMgr).getTXState()).forceLocalBootstrap();
    TXState txState = (TXState) ((TXStateProxyImpl) ((TXManagerImpl) txMgr).getTXState())
        .getRealDeal(null, null);
    // First pause point: after the reservation (locking) phase. Signal the
    // controller that we are running, then block until it lets us proceed.
    txState.setAfterReservation(new Runnable() {
      @Override
      public void run() {
        try {
          synchronized (PausibleTX.class) {
            PausibleTX.this.isRunning = true;
            // Notify the thread that created this, that we are ready
            PausibleTX.class.notifyAll();
            // Wait for the controller to start a GII and let us proceed
            PausibleTX.class.wait();
          }
        } catch (InterruptedException ie) {
          // PausibleTX.this.myCache.getLogger().info("Why was I interrupted? " + ie);
          fail("interrupted");
        }
      }
    });
    // Second pause point: after the TX data has been sent. Signal the controller,
    // then block until it verifies the update and allows the commit to finish.
    txState.setAfterSend(new Runnable() {
      @Override
      public void run() {
        try {
          synchronized (PausibleTX.class) {
            // Notify the controller that we have sent the TX data (and the
            // update)
            PausibleTX.class.notifyAll();
            // Wait until the controller has determined in fact the update
            // took place
            PausibleTX.class.wait();
          }
        } catch (InterruptedException ie) {
          // PausibleTX.this.myCache.getLogger().info("Why was I interrupted? " + ie);
          fail("interrupted");
        }
      }
    });
    try {
      rgn.put(key, value);
      txMgr.commit();
    } catch (CommitConflictException cce) {
      fail(
          "Did not expect commit conflict exception when sending updates to new members in PausibleTX"
              + cce);
      // } catch (CacheException ce) {
      // fail("Did not expect cache exception when sending updates to new members in PausibleTX" +
      // ce);
    }
  }
}
/**
 * Returns the distribution manager's member ID of the GemFire system connected in this VM,
 * or {@code null} if this VM is not currently connected to a distributed system.
 */
public static Serializable getSystemId() {
  if (getSystemStatic() == null) {
    return null;
  }
  return getSystemStatic().getDistributionManager().getId();
}
// Expected lock-batch participants BEFORE the new member (vm2) joins; set remotely
// on the grantor VM via setPreTXSystemIds. Raw HashSet kept for serialization across VMs.
static HashSet preTXSystemIds;

/** Remote accessor: records the expected pre-GII lock-batch participant set on this VM. */
public static void setPreTXSystemIds(HashSet ids) {
  TXDistributedDUnitTest.preTXSystemIds = ids;
}
// Expected lock-batch participants AFTER the new member (vm2) joins via GII.
static HashSet postTXSystemIds;

/** Remote accessor: records the expected post-GII lock-batch participant set on this VM. */
public static void setPostTXSystemIds(HashSet ids) {
  TXDistributedDUnitTest.postTXSystemIds = ids;
}
// Member ID of the VM hosting the paused transaction (excluded from the batch member sets).
static Serializable txHostId;

/** Remote accessor: records the transaction-hosting member's ID on this VM. */
public static void setTXHostSystemId(Serializable id) {
  TXDistributedDUnitTest.txHostId = id;
}
/**
 * Test update of lock batch participants (needed when new members are discovered between a
 * commit's locking phase and the application of the Region's data. See bug 32999
 *
 * <p>Choreography: vm0 is made the lock grantor; vm1 runs a {@link PausibleTX} that pauses
 * after its reservation phase; vm2 then joins via GII; the test verifies the grantor's lock
 * batch is updated to include vm2 before the transaction is allowed to finish.
 */
@Test
public void testLockBatchParticipantsUpdate() throws Exception {
  final String rgnName = getUniqueName();
  Region rgn = getCache().createRegion(rgnName, getRegionAttributes());
  rgn.create("key", null);
  Host host = Host.getHost(0);
  VM vm0 = host.getVM(0);
  VM vm1 = host.getVM(1);
  VM vm2 = host.getVM(2);
  // Create the same region (with an initial null entry) in vm0 and vm1.
  SerializableRunnable initRegions =
      new SerializableRunnable("testLockBatchParticipantsUpdate: initial configuration") {
        @Override
        public void run() {
          try {
            Region rgn1 = getCache().createRegion(rgnName, getRegionAttributes());
            rgn1.create("key", null);
          } catch (CacheException e) {
            Assert.fail("While creating region", e);
          }
        }
      };
  vm0.invoke(initRegions);
  vm1.invoke(initRegions);
  rgn.put("key", "val1");
  // Connect vm2 also since it may have been shutdown when logPerTest
  // is turned on
  vm2.invoke(new SerializableRunnable("connect vm2 if not connected") {
    @Override
    public void run() {
      getCache();
    }
  });
  // Make VM0 the Grantor: the first committed TX from vm0 makes it the TX lock grantor.
  vm0.invoke(new SerializableRunnable("testLockBatchParticipantsUpdate: remote grantor init") {
    @Override
    public void run() {
      try {
        Region rgn1 = getCache().getRegion(rgnName);
        final CacheTransactionManager txMgr2 = getCache().getCacheTransactionManager();
        assertEquals("val1", rgn1.getEntry("key").getValue());
        txMgr2.begin();
        rgn1.put("key", "val2");
        txMgr2.commit();
        assertNotNull(TXLockService.getDTLS());
        assertTrue(TXLockService.getDTLS().isLockGrantor());
      } catch (CacheException e) {
        fail("While performing first transaction");
      }
    }
  });
  // fix for bug 38843 causes the DTLS to be created in every TX participant
  assertNotNull(TXLockService.getDTLS());
  assertFalse(TXLockService.getDTLS().isLockGrantor());
  assertEquals("val2", rgn.getEntry("key").getValue());
  // Build sets of System Ids and set them up on VM0 for future batch member checks.
  // pre set = {controller, vm0}; post set additionally includes vm2 (added after the
  // pre set has been captured on vm0 — the remote invoke serializes a snapshot).
  HashSet txMembers = new HashSet(4);
  txMembers.add(getSystemId());
  txMembers.add(vm0.invoke(() -> TXDistributedDUnitTest.getSystemId()));
  vm0.invoke(() -> TXDistributedDUnitTest.setPreTXSystemIds(txMembers));
  txMembers.add(vm2.invoke(() -> TXDistributedDUnitTest.getSystemId()));
  vm0.invoke(() -> TXDistributedDUnitTest.setPostTXSystemIds(txMembers));
  // Don't include the tx host in the batch member set(s)
  Serializable vm1HostId = (Serializable) vm1.invoke(() -> TXDistributedDUnitTest.getSystemId());
  vm0.invoke(() -> TXDistributedDUnitTest.setTXHostSystemId(vm1HostId));
  // Create a TX on VM1 (such that it will ask for locks on VM0) that uses the callbacks
  // to pause and give us time to start a GII process on another VM
  vm1.invoke(new SerializableRunnable(
      "testLockBatchParticipantsUpdate: slow tx (one that detects new member)") {
    @Override
    public void run() {
      // fix for bug 38843 causes the DTLS to be created in every TX participant
      assertNotNull(TXLockService.getDTLS());
      assertFalse(TXLockService.getDTLS().isLockGrantor());
      PausibleTX pauseTXRunnable = new PausibleTX();
      pauseTXRunnable.rgnName = rgnName;
      pauseTXRunnable.myCache = getCache();
      pauseTXRunnable.key = "key";
      pauseTXRunnable.value = "val3";
      new Thread(pauseTXRunnable, "PausibleTX Thread").start();
      // Block until the PausibleTX thread reaches its afterReservation pause point.
      synchronized (PausibleTX.class) {
        while (!pauseTXRunnable.getIsRunning()) {
          try {
            PausibleTX.class.wait();
          } catch (InterruptedException ie) {
            fail("Did not expect " + ie);
          }
        }
      }
    }
  });
  // Verify that the lock batch exists VM0 and has the size we expect
  vm0.invoke(new SerializableRunnable(
      "testLockBatchParticipantsUpdate: Verify lock batch exists on VM0 with expected size") {
    @Override
    public void run() {
      getCache().getRegion(rgnName);
      TXLockServiceImpl dtls = (TXLockServiceImpl) TXLockService.getDTLS();
      assertNotNull(dtls);
      assertTrue(dtls.isLockGrantor());
      DLockService dLockSvc = dtls.getInternalDistributedLockService();
      assertNotNull(TXDistributedDUnitTest.txHostId);
      DLockBatch[] batches = dLockSvc.getGrantor()
          .getLockBatches((InternalDistributedMember) TXDistributedDUnitTest.txHostId);
      assertEquals(batches.length, 1);
      TXLockBatch txLockBatch = (TXLockBatch) batches[0];
      assertNotNull(txLockBatch);
      assertNotNull(TXDistributedDUnitTest.preTXSystemIds);
      // Before the GII, participants must match the pre-TX member set exactly.
      assertTrue(
          "Members in lock batch " + txLockBatch.getParticipants() + " not the same as "
              + TXDistributedDUnitTest.preTXSystemIds,
          txLockBatch.getParticipants().equals(TXDistributedDUnitTest.preTXSystemIds));
    }
  });
  // Start a GII process on VM2
  vm2.invoke(new SerializableRunnable("testLockBatchParticipantsUpdate: start GII") {
    @Override
    public void run() {
      try {
        AttributesFactory factory = new AttributesFactory();
        factory.setScope(Scope.DISTRIBUTED_ACK);
        factory.setEarlyAck(false);
        factory.setDataPolicy(DataPolicy.REPLICATE);
        getCache().createRegion(rgnName, factory.create());
      } catch (CacheException e) {
        Assert.fail("While creating region", e);
      }
    }
  });
  // Notify TX on VM1 so that it can continue
  vm1.invoke(
      new SerializableRunnable("testLockBatchParticipantsUpdate: Notfiy VM1 TX to continue") {
        @Override
        public void run() {
          synchronized (PausibleTX.class) {
            // Notify VM1 that it should proceed to the TX send
            PausibleTX.class.notifyAll();
            // Wait until VM1 has sent the TX
            try {
              PausibleTX.class.wait();
            } catch (InterruptedException ie) {
              fail("Did not expect " + ie);
            }
          }
        }
      });
  // Verify that the batch on VM0 has added VM2 into the set
  vm0.invoke(new SerializableRunnable(
      "testLockBatchParticipantsUpdate: Verify lock batch contains VM2") {
    @Override
    public void run() {
      getCache().getRegion(rgnName);
      TXLockServiceImpl dtls = (TXLockServiceImpl) TXLockService.getDTLS();
      assertNotNull(dtls);
      assertTrue(dtls.isLockGrantor());
      DLockService dLockSvc = dtls.getInternalDistributedLockService();
      assertNotNull(TXDistributedDUnitTest.txHostId);
      DLockBatch[] batches = dLockSvc.getGrantor()
          .getLockBatches((InternalDistributedMember) TXDistributedDUnitTest.txHostId);
      assertEquals(batches.length, 1);
      TXLockBatch txLockBatch = (TXLockBatch) batches[0];
      assertNotNull(txLockBatch);
      assertNotNull(TXDistributedDUnitTest.preTXSystemIds);
      // After the GII, participants must match the post-TX member set (now including vm2).
      assertTrue(
          "Members in lock batch " + txLockBatch.getParticipants() + " not the same as "
              + TXDistributedDUnitTest.postTXSystemIds,
          txLockBatch.getParticipants().equals(TXDistributedDUnitTest.postTXSystemIds));
    }
  });
  // fix for bug 38843 causes the DTLS to be created in every TX participant
  assertNotNull(TXLockService.getDTLS());
  assertFalse(TXLockService.getDTLS().isLockGrantor());
  assertEquals("val3", rgn.getEntry("key").getValue());
  // Notify TX on VM1 that it can go ahead and complete the TX
  vm1.invoke(
      new SerializableRunnable("testLockBatchParticipantsUpdate: Notfiy VM1 TX to finish") {
        @Override
        public void run() {
          synchronized (PausibleTX.class) {
            // Notify VM1 that it should finish the TX
            PausibleTX.class.notifyAll();
          }
        }
      });
  rgn.destroyRegion();
}
/**
 * Hitachi bug 38809: Applying an exception to a remote VM fails due to an IOException on a Region
 * configured for LRU Overflow
 */
// Sentinel key: TXTroubleMaker throws DiskAccessException whenever this key is written.
public static final String TROUBLE_KEY = "GoBoom";
/**
 * Test callable that simulates a disk failure: any operation on the entry whose key is
 * {@link #TROUBLE_KEY} fails with a {@link DiskAccessException}; all other entries pass through.
 */
public static class TXTroubleMaker implements LocalRegion.TestCallable {
  @Override
  public void call(LocalRegion r, Operation op, RegionEntry re) {
    if (!TROUBLE_KEY.equals(re.getKey())) {
      return;
    }
    throw new DiskAccessException(TROUBLE_KEY, r);
  }
}
/**
 * Resource listener that blocks cache shutdown (the CACHE_REMOVE event) until the test
 * explicitly releases it via {@link #unblockShutdown()}. Used to keep "trouble" VMs alive
 * long enough for the test to make its assertions.
 */
public static class ShutdownListener implements ResourceEventsListener {
  CountDownLatch latch = new CountDownLatch(1);

  @Override
  public void handleEvent(ResourceEvent event, Object resource) {
    if (event.equals(ResourceEvent.CACHE_REMOVE)) {
      try {
        // Block the shutdown until the test calls unblockShutdown().
        latch.await();
      } catch (InterruptedException e) {
        // Restore the interrupt status so the shutting-down thread can still
        // observe the interruption; previously it was silently swallowed.
        Thread.currentThread().interrupt();
        e.printStackTrace();
      }
    }
  }

  /** Releases the latch, allowing a blocked cache shutdown to proceed. */
  public void unblockShutdown() {
    this.latch.countDown();
  }
}
@Ignore("TODO: Disabled for #51260")
@Test
public void testRemoteCommitFailure() throws Exception {
  // Scenario: a TX commits on 'origin'; two remote VMs ('trouble1'/'trouble2') throw
  // DiskAccessException while applying the data (via TXTroubleMaker), which should
  // surface as a CommitIncompleteException on the origin while the unaffected VM
  // ('noTrouble') still applies the full commit.
  try {
    disconnectAllFromDS();
    final String rgnName1 = getUniqueName() + "_1";
    final String rgnName2 = getUniqueName() + "_2";
    final String diskStoreName = getUniqueName() + "_store";
    Host host = Host.getHost(0);
    VM origin = host.getVM(0);
    VM trouble1 = host.getVM(1);
    VM trouble2 = host.getVM(2);
    VM noTrouble = host.getVM(3);
    // Healthy VMs: plain persistent replicate regions backed by a disk store.
    CacheSerializableRunnable initRegions =
        new CacheSerializableRunnable("Initialize no trouble regions") {
          @Override
          public void run2() {
            getCache().createDiskStoreFactory().setDiskDirs(getDiskDirs()).create(diskStoreName);
            TXManagerImpl.ALLOW_PERSISTENT_TRANSACTIONS = true;
            AttributesFactory af = new AttributesFactory();
            af.setDataPolicy(DataPolicy.PERSISTENT_REPLICATE);
            af.setScope(Scope.DISTRIBUTED_ACK);
            af.setDiskStoreName(diskStoreName);
            getCache().createRegion(rgnName1, af.create());
            getCache().createRegion(rgnName2, af.create());
          }
        };
    origin.invoke(initRegions);
    noTrouble.invoke(initRegions);
    // Trouble VMs: same regions, but wired with TXTroubleMaker so writes to
    // TROUBLE_KEY fail with a DiskAccessException; a ShutdownListener blocks the
    // resulting cache shutdown until the test releases it.
    SerializableRunnable initTroulbeRegions =
        new CacheSerializableRunnable("Initialize regions that cause trouble") {
          @Override
          public void run2() {
            GemFireCacheImpl gfc = (GemFireCacheImpl) getCache();
            InternalRegionArguments ira =
                new InternalRegionArguments().setTestCallable(new TXTroubleMaker());
            try {
              getCache().createDiskStoreFactory().setDiskDirs(getDiskDirs())
                  .create(diskStoreName);
              TXManagerImpl.ALLOW_PERSISTENT_TRANSACTIONS = true;
              AttributesFactory af = new AttributesFactory();
              af.setDataPolicy(DataPolicy.PERSISTENT_REPLICATE);
              af.setScope(Scope.DISTRIBUTED_ACK);
              af.setDiskStoreName(diskStoreName);
              gfc.createVMRegion(rgnName1, af.create(), ira);
              gfc.createVMRegion(rgnName2, af.create(), ira);
              gfc.getInternalDistributedSystem().addResourceListener(new ShutdownListener());
            } catch (IOException ioe) {
              fail(ioe.toString());
            } catch (TimeoutException e) {
              fail(e.toString());
            } catch (ClassNotFoundException e) {
              fail(e.toString());
            }
          }
        };
    trouble1.invoke(initTroulbeRegions);
    trouble2.invoke(initTroulbeRegions);
    // The failing transaction: commit must throw CommitIncompleteException whose
    // message names each troubled region twice (once per trouble VM) and mentions
    // DiskAccessException four times (2 regions x 2 VMs).
    SerializableRunnable doTransaction =
        new CacheSerializableRunnable("Run failing transaction") {
          @Override
          public void run2() {
            Cache c = getCache();
            Region r1 = c.getRegion(rgnName1);
            assertNotNull(r1);
            Region r2 = c.getRegion(rgnName2);
            assertNotNull(r2);
            CacheTransactionManager txmgr = c.getCacheTransactionManager();
            txmgr.begin();
            r1.put("k1", "k1");
            r1.put("k2", "k2");
            r1.put(TROUBLE_KEY, TROUBLE_KEY);
            r2.put("k1", "k1");
            r2.put("k2", "k2");
            r2.put(TROUBLE_KEY, TROUBLE_KEY);
            try {
              txmgr.commit();
              fail("Expected an tx incomplete exception");
            } catch (CommitIncompleteException yay) {
              String msg = yay.getMessage();
              // getLogWriter().info("failing exception", yay);
              // Each region on a trouble VM should be mentioned (two regions per trouble VM)
              int ind = 0, match = 0;
              while ((ind = msg.indexOf(rgnName1, ind)) >= 0) {
                ind++;
                match++;
              }
              assertEquals(2, match);
              ind = match = 0;
              while ((ind = msg.indexOf(rgnName2, ind)) >= 0) {
                ind++;
                match++;
              }
              assertEquals(2, match);
              // DiskAccessExcpetions should be mentioned four times
              ind = match = 0;
              while ((ind = msg.indexOf(DiskAccessException.class.getName(), ind)) >= 0) {
                ind++;
                match++;
              }
              assertEquals(4, match);
            }
          }
        };
    IgnoredException ee = null;
    try {
      // Suppress the expected exception noise in the test logs.
      ee = IgnoredException.addIgnoredException(
          DiskAccessException.class.getName() + "|" + CommitIncompleteException.class.getName()
              + "|" + CommitReplyException.class.getName());
      origin.invoke(doTransaction);
    } finally {
      if (ee != null)
        ee.remove();
    }
    // Release the ShutdownListeners so the trouble caches can finish closing.
    SerializableCallable allowCacheToShutdown = new SerializableCallable() {
      @Override
      public Object call() throws Exception {
        GemFireCacheImpl cache = (GemFireCacheImpl) getCache();
        List<ResourceEventsListener> listeners =
            cache.getInternalDistributedSystem().getResourceListeners();
        for (ResourceEventsListener l : listeners) {
          if (l instanceof ShutdownListener) {
            ShutdownListener shutListener = (ShutdownListener) l;
            shutListener.unblockShutdown();
          }
        }
        return null;
      }
    };
    trouble1.invoke(allowCacheToShutdown);
    trouble2.invoke(allowCacheToShutdown);
    // Assert proper content on failing VMs: their caches should have shut down,
    // leaving both regions gone (waits for the async close to complete).
    SerializableRunnable assertTroubledContent =
        new CacheSerializableRunnable("Assert partail commit data") {
          @Override
          public void run2() {
            final Cache c = getCache();
            GeodeAwaitility.await().untilAsserted(new WaitCriterion() {
              @Override
              public boolean done() {
                return c.getRegion(rgnName1) == null;
              }

              @Override
              public String description() {
                return null;
              }
            });
            Region r2 = c.getRegion(rgnName2);
            assertNull(r2);
          }
        };
    trouble1.invoke(assertTroubledContent);
    trouble2.invoke(assertTroubledContent);
    // Assert proper content on successful VMs: every key including TROUBLE_KEY
    // committed in both regions.
    SerializableRunnable assertSuccessfulContent =
        new CacheSerializableRunnable("Assert complete commit of data on successful VMs") {
          @Override
          public void run2() {
            Cache c = getCache();
            {
              Region r1 = c.getRegion(rgnName1);
              assertNotNull(r1);
              assertEquals("k1", r1.getEntry("k1").getValue());
              assertEquals("k2", r1.getEntry("k2").getValue());
              assertEquals(TROUBLE_KEY, r1.getEntry(TROUBLE_KEY).getValue());
            }
            {
              Region r2 = c.getRegion(rgnName2);
              assertNotNull(r2);
              assertEquals("k1", r2.getEntry("k1").getValue());
              assertEquals("k2", r2.getEntry("k2").getValue());
              assertEquals(TROUBLE_KEY, r2.getEntry(TROUBLE_KEY).getValue());
            }
          }
        };
    noTrouble.invoke(assertSuccessfulContent);
    // Assert no content on originating VM
    SerializableRunnable assertNoContent =
        new CacheSerializableRunnable("Assert data survives on origin VM") {
          @Override
          public void run2() {
            Cache c = getCache();
            {
              Region r1 = c.getRegion(rgnName1);
              assertNotNull(r1);
              assertNotNull(r1.getEntry("k1"));
              assertNotNull(r1.getEntry("k2"));
              assertNotNull(r1.getEntry(TROUBLE_KEY));
            }
            {
              Region r2 = c.getRegion(rgnName2);
              assertNotNull(r2);
              assertNotNull(r2.getEntry("k1"));
              assertNotNull(r2.getEntry("k2"));
              assertNotNull(r2.getEntry(TROUBLE_KEY));
            }
          }
        };
    origin.invoke(assertNoContent);
  } finally {
    // Always reset the static flag so later tests are unaffected.
    Invoke.invokeInEveryVM(new SerializableCallable() {
      @Override
      public Object call() throws Exception {
        TXManagerImpl.ALLOW_PERSISTENT_TRANSACTIONS = false;
        return null;
      }
    });
  }
}
}
| |
/*
* Copyright 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.security.identity;
import android.content.Context;
import android.security.keystore.KeyGenParameterSpec;
import android.security.keystore.KeyProperties;
import androidx.annotation.NonNull;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.security.InvalidAlgorithmParameterException;
import java.security.InvalidKeyException;
import java.security.KeyPair;
import java.security.KeyPairGenerator;
import java.security.KeyStore;
import java.security.KeyStoreException;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.security.NoSuchProviderException;
import java.security.PrivateKey;
import java.security.cert.Certificate;
import java.security.cert.CertificateEncodingException;
import java.security.cert.CertificateException;
import java.security.cert.X509Certificate;
import java.util.ArrayList;
import java.util.Collection;
import co.nstant.in.cbor.CborBuilder;
import co.nstant.in.cbor.CborEncoder;
import co.nstant.in.cbor.CborException;
import co.nstant.in.cbor.builder.ArrayBuilder;
import co.nstant.in.cbor.builder.MapBuilder;
import co.nstant.in.cbor.model.DataItem;
import co.nstant.in.cbor.model.UnicodeString;
// Software-backed implementation of WritableIdentityCredential; CredentialKey lives in
// AndroidKeyStore (see ensureCredentialKey below).
class SoftwareWritableIdentityCredential extends WritableIdentityCredential {
  private static final String TAG = "SoftwareWritableIdentityCredential";

  // CredentialKey pair; null until ensureCredentialKey() has generated it.
  private KeyPair mKeyPair = null;
  // Attestation certificate chain for CredentialKey; null until generated.
  private Collection<X509Certificate> mCertificates = null;
  private String mDocType;
  private String mCredentialName;
  private Context mContext;
/**
 * Creates a writable credential backed by software keys.
 *
 * @param context        the context to use.
 * @param credentialName the name the new credential will be stored under.
 * @param docType        the document type of the credential.
 * @throws AlreadyPersonalizedException if a credential with the given name already exists.
 */
SoftwareWritableIdentityCredential(@NonNull Context context,
    @NonNull String credentialName,
    @NonNull String docType) throws AlreadyPersonalizedException {
  // Fail fast before touching instance state; also adds the @NonNull annotation on
  // 'context' for consistency with the other parameters.
  if (CredentialData.credentialAlreadyExists(context, credentialName)) {
    throw new AlreadyPersonalizedException("Credential with given name already exists");
  }
  mContext = context;
  mDocType = docType;
  mCredentialName = credentialName;
}
/**
 * Generates CredentialKey in AndroidKeyStore, deleting any stale key under the same alias.
 *
 * If called a second time on the same object, does nothing and returns null.
 *
 * @param challenge The attestation challenge; if null, an empty challenge is used.
 * @return Attestation certificate chain, or null if called a second time.
 * @throws RuntimeException if key generation or keystore access fails (the underlying
 *         checked exceptions are wrapped; this method declares no checked exceptions).
 */
private Collection<X509Certificate> ensureCredentialKey(byte[] challenge) {
  // Idempotence guard: a non-null key pair means CredentialKey was already generated.
  if (mKeyPair != null) {
    return null;
  }
  String aliasForCredential = CredentialData.getAliasFromCredentialName(mCredentialName);
  try {
    KeyStore ks = KeyStore.getInstance("AndroidKeyStore");
    ks.load(null);
    // Remove any leftover key from a previous (e.g. aborted) provisioning attempt.
    if (ks.containsAlias(aliasForCredential)) {
      ks.deleteEntry(aliasForCredential);
    }
    // TODO: We most likely want to constrain the life of CredentialKey (through
    // setKeyValidityStart() and setKeyValidityEnd()) so it's limited to e.g. 5 years
    // or how long the credential might be valid. For US driving licenses it's typically
    // up to 5 years, where it expires on your birth day).
    //
    // This is likely something the issuer would want to specify.
    KeyPairGenerator kpg = KeyPairGenerator.getInstance(
        KeyProperties.KEY_ALGORITHM_EC, "AndroidKeyStore");
    KeyGenParameterSpec.Builder builder = new KeyGenParameterSpec.Builder(
        aliasForCredential,
        KeyProperties.PURPOSE_SIGN | KeyProperties.PURPOSE_VERIFY)
        .setDigests(KeyProperties.DIGEST_SHA256, KeyProperties.DIGEST_SHA512);
    // Attestation is only available in Nougat and onwards.
    // NOTE(review): setAttestationChallenge() requires an attestation-capable device;
    // a null challenge is normalized to empty so the builder call below never sees null.
    if (challenge == null) {
      challenge = new byte[0];
    }
    builder.setAttestationChallenge(challenge);
    kpg.initialize(builder.build());
    mKeyPair = kpg.generateKeyPair();
    // Cache the attestation chain so getCredentialKeyCertificateChain() can return it.
    Certificate[] certificates = ks.getCertificateChain(aliasForCredential);
    mCertificates = new ArrayList<>();
    for (Certificate certificate : certificates) {
      mCertificates.add((X509Certificate) certificate);
    }
  } catch (InvalidAlgorithmParameterException
      | NoSuchAlgorithmException
      | NoSuchProviderException
      | CertificateException
      | KeyStoreException
      | IOException e) {
    throw new RuntimeException("Error creating CredentialKey", e);
  }
  return mCertificates;
}
@Override
public @NonNull Collection<X509Certificate> getCredentialKeyCertificateChain(
@NonNull byte[] challenge) {
Collection<X509Certificate> certificates = ensureCredentialKey(challenge);
if (certificates == null) {
throw new RuntimeException(
"getCredentialKeyCertificateChain() must be called before personalize()");
}
return certificates;
}
    // Returns COSE_Sign1 with payload set to ProofOfProvisioning.
    //
    // The payload is the CBOR array:
    //   ["ProofOfProvisioning", docType, <access control profiles array>,
    //    <namespace-name -> namespace-data map>, false]
    // NOTE(review): the trailing 'false' is a fixed flag in this payload
    // format -- confirm its meaning against the provisioning format spec.
    static DataItem buildProofOfProvisioningWithSignature(String docType,
            PersonalizationData personalizationData,
            PrivateKey key) {
        // CBOR array holding each access control profile.
        CborBuilder accessControlProfileBuilder = new CborBuilder();
        ArrayBuilder<CborBuilder> arrayBuilder = accessControlProfileBuilder.addArray();
        for (AccessControlProfile profile : personalizationData.getAccessControlProfiles()) {
            arrayBuilder.add(Util.accessControlProfileToCbor(profile));
        }
        // CBOR map from namespace name to that namespace's data entries.
        CborBuilder dataBuilder = new CborBuilder();
        MapBuilder<CborBuilder> dataMapBuilder = dataBuilder.addMap();
        for (PersonalizationData.NamespaceData namespaceData :
                personalizationData.getNamespaceDatas()) {
            dataMapBuilder.put(
                    new UnicodeString(namespaceData.getNamespaceName()),
                    Util.namespaceDataToCbor(namespaceData));
        }
        // Assemble the ProofOfProvisioning payload itself.
        CborBuilder signedDataBuilder = new CborBuilder();
        signedDataBuilder.addArray()
                .add("ProofOfProvisioning")
                .add(docType)
                .add(accessControlProfileBuilder.build().get(0))
                .add(dataBuilder.build().get(0))
                .add(false);
        DataItem signature;
        try {
            // Encode the payload to bytes, then wrap it in a COSE_Sign1
            // signature structure signed with the given key.
            ByteArrayOutputStream dtsBaos = new ByteArrayOutputStream();
            CborEncoder dtsEncoder = new CborEncoder(dtsBaos);
            dtsEncoder.encode(signedDataBuilder.build().get(0));
            byte[] dataToSign = dtsBaos.toByteArray();
            signature = Util.coseSign1Sign(key,
                    dataToSign,
                    null,
                    null);
        } catch (NoSuchAlgorithmException
                | InvalidKeyException
                | CertificateEncodingException
                | CborException e) {
            throw new RuntimeException("Error building ProofOfProvisioning", e);
        }
        return signature;
    }
    @NonNull
    @Override
    public byte[] personalize(@NonNull PersonalizationData personalizationData) {
        try {
            // Make sure CredentialKey exists; this is a no-op (returns null)
            // if it was already generated via getCredentialKeyCertificateChain().
            ensureCredentialKey(null);
            // Build and sign the ProofOfProvisioning payload with CredentialKey.
            DataItem signature = buildProofOfProvisioningWithSignature(mDocType,
                    personalizationData,
                    mKeyPair.getPrivate());
            byte[] proofOfProvisioning = Util.coseSign1GetData(signature);
            byte[] proofOfProvisioningSha256 = MessageDigest.getInstance("SHA-256").digest(
                    proofOfProvisioning);
            // Persist the credential data keyed by the credential's keystore
            // alias. NOTE(review): confirm the meaning of the trailing
            // 'false' argument against CredentialData.createCredentialData().
            CredentialData.createCredentialData(
                    mContext,
                    mDocType,
                    mCredentialName,
                    CredentialData.getAliasFromCredentialName(mCredentialName),
                    mCertificates,
                    personalizationData,
                    proofOfProvisioningSha256,
                    false);
            // The caller receives the CBOR-encoded COSE_Sign1 structure.
            return Util.cborEncode(signature);
        } catch (NoSuchAlgorithmException e) {
            throw new RuntimeException("Error digesting ProofOfProvisioning", e);
        }
    }
}
| |
/*
* Copyright 1997-2011 teatrove.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.teatrove.tea.compiler;
import java.lang.reflect.Array;
import java.lang.reflect.GenericArrayType;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.TypeVariable;
import java.lang.reflect.WildcardType;
/**
*
* @author Nick Hagan
*/
public class Generics {

    /** Non-instantiable utility class; all members are static. */
    private Generics() {
        super();
    }

    /**
     * Resolves the generic type of {@code type} when its generic declaration
     * is a type variable. The variable is first resolved against its own
     * bounds; failing that, it is matched by name against the type parameters
     * of each given parent and bound to that parent's actual type argument.
     *
     * @param type    the type whose generic declaration may be a type variable
     * @param parents candidate parent types that may bind the variable
     * @return the resolved type, or {@code null} if it cannot be resolved
     */
    @SuppressWarnings("rawtypes")
    public static Type findType(Type type, Type... parents) {
        // ignore if no parent type
        if (parents == null || parents.length == 0) {
            return null;
        }
        // get generic types and verify type variable
        java.lang.reflect.Type ttype = type.getGenericClass();
        if (!(ttype instanceof TypeVariable)) {
            return null;
        }
        // check if type can be resolved directly from a concrete bound
        TypeVariable tvar = (TypeVariable) ttype;
        for (java.lang.reflect.Type bounds : tvar.getBounds()) {
            if (bounds instanceof Class && !Object.class.equals(bounds)) {
                return new Type((Class) bounds);
            }
        }
        // search each parent type for a same-named type parameter and use the
        // corresponding actual type argument, if parameterized
        for (Type parent : parents) {
            TypeVariable[] tvars = parent.getObjectClass().getTypeParameters();
            for (int i = 0; i < tvars.length; i++) {
                if (tvar.getName().equals(tvars[i].getName())) {
                    java.lang.reflect.Type ptype = parent.getGenericClass();
                    if (ptype instanceof ParameterizedType) {
                        ParameterizedType pptype = (ParameterizedType) ptype;
                        java.lang.reflect.Type[] ptypes =
                            pptype.getActualTypeArguments();
                        if (i < ptypes.length) {
                            return getUnderlyingType(ptypes[i]);
                        }
                    }
                }
            }
        }
        // no matching type found
        return null;
    }

    /**
     * Returns the raw {@link Class} underlying a parameterized type.
     *
     * @throws IllegalStateException if the raw type is not a class
     */
    @SuppressWarnings("rawtypes")
    public static Class<?> getRawType(ParameterizedType type) {
        // get the raw object associated with the template
        java.lang.reflect.Type rawtype = type.getRawType();
        // ensure valid type
        if (rawtype instanceof Class) {
            return (Class) rawtype;
        }
        // raw type should only be classes
        else {
            throw new IllegalStateException
            (
                "ParameterizedType cannot have a non-Class " +
                "based rawType: " + rawtype
            );
        }
    }

    /**
     * Resolves a type variable to the type of its first bound.
     *
     * @throws IllegalStateException if the variable has no bounds
     */
    public static Type getBoundedType(TypeVariable<?> type) {
        // fail if missing bounds
        if (type.getBounds().length == 0) {
            throw new IllegalStateException
            (
                "TypeVariable has missing bounds information: " + type
            );
        }
        // get the bounds around the type to resolve to actual type
        java.lang.reflect.Type ttype = type.getBounds()[0];
        return getUnderlyingType(ttype);
    }

    /**
     * Resolves a wildcard type to the type of its first upper bound.
     *
     * @throws IllegalStateException if the wildcard has no upper bounds
     */
    public static Type getBoundedType(WildcardType type) {
        // fail if missing bounds
        if (type.getUpperBounds().length == 0) {
            throw new IllegalStateException
            (
                "WildcardType has missing bounds information: " + type
            );
        }
        // get the bounds around the type to resolve to actual type
        java.lang.reflect.Type ttype = type.getUpperBounds()[0];
        return getUnderlyingType(ttype);
    }

    /**
     * Converts a {@code java.lang.reflect.Type} of any flavor (class, type
     * variable, parameterized type, generic array, or wildcard) to the
     * compiler's {@link Type} representation.
     *
     * @throws IllegalStateException if the reflective type is unrecognized or
     *         is a wildcard with no usable bounds
     */
    @SuppressWarnings("rawtypes")
    public static Type getUnderlyingType(java.lang.reflect.Type ttype) {
        // handle <? extends Object> case
        if (ttype instanceof Class) {
            return new Type((Class) ttype);
        }
        // handle <? extends E> case
        else if (ttype instanceof TypeVariable) {
            // get underlying bounded type
            return getBoundedType((TypeVariable) ttype);
        }
        // handle <? extends Template<Object>> case
        else if (ttype instanceof ParameterizedType) {
            // get the underlying parameterized type
            Class<?> ptype = getRawType((ParameterizedType) ttype);
            return new Type(ptype, ttype);
        }
        // handle <? extends Template<Object>[]> case
        else if (ttype instanceof GenericArrayType) {
            // find root type
            Class<?> ctype = getComponentType((GenericArrayType) ttype);
            return new Type(ctype, ttype);
        }
        // handle <? extends E> case
        else if (ttype instanceof WildcardType) {
            // prefer upper bounds, then lower bounds
            java.lang.reflect.Type[] ubounds =
                ((WildcardType) ttype).getUpperBounds();
            if (ubounds != null && ubounds.length > 0) {
                return getUnderlyingType(ubounds[0]);
            }
            java.lang.reflect.Type[] lbounds =
                ((WildcardType) ttype).getLowerBounds();
            if (lbounds != null && lbounds.length > 0) {
                return getUnderlyingType(lbounds[0]);
            }
        }
        // unknown type
        throw new IllegalStateException
        (
            "type has invalid bounds type: " + ttype
        );
    }

    /**
     * Returns the array {@link Class} for a generic array type, preserving
     * the number of array dimensions.
     *
     * @throws IllegalStateException if the root component type is unsupported
     */
    @SuppressWarnings("rawtypes")
    public static Class<?> getComponentType(GenericArrayType type) {
        // find root type and count dimensions
        int levels = 0;
        java.lang.reflect.Type ctype = type;
        while (ctype instanceof GenericArrayType) {
            levels++;
            ctype = ((GenericArrayType) ctype).getGenericComponentType();
        }
        // handle <E extends Object[]> case
        if (ctype instanceof Class) {
            return Array.newInstance((Class) ctype, new int[levels]).getClass();
        }
        // handle <E extends Template<Object>[]> case
        else if (ctype instanceof ParameterizedType) {
            // get the raw object associated with the template
            Class<?> rawtype = getRawType((ParameterizedType) ctype);
            return Array.newInstance(rawtype, new int[levels]).getClass();
        }
        // handle <E extends F[]> case
        else if (ctype instanceof TypeVariable) {
            // get the bounded underlying type
            Type jtype = getBoundedType((TypeVariable) ctype);
            return Array.newInstance(jtype.getObjectClass(), new int[levels])
                .getClass();
        }
        // should not be any other way
        else {
            throw new IllegalStateException
            (
                "GenericArrayType has invalid component " +
                "type: " + ctype
            );
        }
    }

    /**
     * Returns the element type produced when iterating the given type
     * (i.e. the first actual type argument), or {@link Type#OBJECT_TYPE}
     * when it cannot be determined.
     */
    public static Type getIterationType(Type type) {
        return getIterationType(type.getGenericClass());
    }

    /**
     * Returns the element type produced when iterating the given reflective
     * type. For a parameterized type the first actual type argument is
     * resolved; for a plain class the generic superclass is consulted.
     * Falls back to {@link Type#OBJECT_TYPE}.
     */
    @SuppressWarnings("rawtypes")
    public static Type getIterationType(java.lang.reflect.Type generic) {
        // handle parameterized cases (List<E>)
        if (generic instanceof ParameterizedType) {
            // find and return the actual subtype
            java.lang.reflect.Type[] subtypes =
                ((ParameterizedType) generic).getActualTypeArguments();
            if (subtypes != null && subtypes.length >= 1) {
                java.lang.reflect.Type subtype = subtypes[0];
                // handle List<Object> case
                if (subtype instanceof Class) {
                    return new Type((Class) subtype);
                }
                // handle List<Template<Object>> case
                else if (subtype instanceof ParameterizedType) {
                    return new Type(getRawType((ParameterizedType) subtype),
                                    subtype);
                }
                // handle List<E> case where E is defined at the type level
                else if (subtype instanceof TypeVariable) {
                    return getBoundedType((TypeVariable) subtype);
                }
                // handle List<?> case
                else if (subtype instanceof WildcardType) {
                    return getBoundedType((WildcardType) subtype);
                }
                // handle List<Object[]> case
                else if (subtype instanceof GenericArrayType) {
                    // find root type
                    Class<?> ctype = getComponentType((GenericArrayType) subtype);
                    return new Type(ctype, subtype);
                }
            }
        }
        // handle actual classes and look for supercase
        else if (generic instanceof Class) {
            return getIterationType(((Class) generic).getGenericSuperclass());
        }
        // unknown type, so return object
        return Type.OBJECT_TYPE;
    }

    /**
     * Returns the key type of a map-like type: the first actual type
     * argument, resolved exactly like {@link #getIterationType(Type)}.
     */
    public static Type getKeyType(Type type) {
        return getIterationType(type);
    }

    /** @see #getKeyType(Type) */
    public static Type getKeyType(java.lang.reflect.Type generic) {
        return getIterationType(generic);
    }

    /**
     * Returns the value type of a map-like type: the second actual type
     * argument, or {@link Type#OBJECT_TYPE} when unavailable.
     */
    public static Type getValueType(Type type) {
        return getValueType(type.getGenericClass());
    }

    /**
     * Returns the value type (second actual type argument) of a
     * parameterized reflective type, or {@link Type#OBJECT_TYPE}.
     */
    @SuppressWarnings("rawtypes")
    public static Type getValueType(java.lang.reflect.Type generic) {
        // handle parameterized cases (Map<K, V>)
        if (generic instanceof ParameterizedType) {
            // find and return the actual subtype
            java.lang.reflect.Type[] subtypes =
                ((ParameterizedType) generic).getActualTypeArguments();
            // BUGFIX: require at least two type arguments before reading
            // subtypes[1]; the previous >= 1 check threw
            // ArrayIndexOutOfBoundsException for single-argument generics.
            if (subtypes != null && subtypes.length >= 2) {
                java.lang.reflect.Type subtype = subtypes[1];
                // handle Map<K, Object> case
                if (subtype instanceof Class) {
                    return new Type((Class) subtype);
                }
                // handle Map<K, Template<Object>> case
                else if (subtype instanceof ParameterizedType) {
                    return new Type(getRawType((ParameterizedType) subtype),
                                    subtype);
                }
                // handle Map<K, E> case where E is defined at the type level
                else if (subtype instanceof TypeVariable) {
                    return getBoundedType((TypeVariable) subtype);
                }
                // handle Map<K, ?> case
                else if (subtype instanceof WildcardType) {
                    return getBoundedType((WildcardType) subtype);
                }
                // handle Map<K, Object[]> case
                else if (subtype instanceof GenericArrayType) {
                    // find root type
                    Class<?> ctype = getComponentType((GenericArrayType) subtype);
                    return new Type(ctype, subtype);
                }
            }
        }
        // unknown type, so return object
        return Type.OBJECT_TYPE;
    }
}
| |
package org.cipres.treebase.web.controllers;
import java.io.IOException;
import java.text.ParseException;
import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.log4j.Logger;
import org.cipres.treebase.TreebaseIDString;
import org.cipres.treebase.TreebaseUtil;
import org.cipres.treebase.TreebaseIDString.MalformedTreebaseIDString;
import org.cipres.treebase.domain.search.SearchResults;
import org.cipres.treebase.domain.search.SearchResultsType;
import org.cipres.treebase.domain.search.StudySearchResults;
import org.cipres.treebase.domain.study.Study;
import org.cipres.treebase.domain.study.StudyService;
import org.cipres.treebase.domain.study.Submission;
import org.cipres.treebase.domain.study.SubmissionService;
import org.cipres.treebase.web.Constants;
import org.cipres.treebase.web.model.Identify;
import org.cipres.treebase.web.util.RequestMessageSetter;
import org.cipres.treebase.web.util.IdentifyUtil;
import org.springframework.validation.BindException;
import org.springframework.web.servlet.ModelAndView;
import org.z3950.zing.cql.CQLAndNode;
import org.z3950.zing.cql.CQLBooleanNode;
import org.z3950.zing.cql.CQLNode;
import org.z3950.zing.cql.CQLNotNode;
import org.z3950.zing.cql.CQLOrNode;
import org.z3950.zing.cql.CQLParseException;
import org.z3950.zing.cql.CQLParser;
import org.z3950.zing.cql.CQLRelation;
import org.z3950.zing.cql.CQLTermNode;
import java.text.DateFormat;
/**
* StudySearchController.java
*
* Created on Feb 14, 2007
*
* @author mjd
*
*/
public class StudySearchController extends SearchController {
    /**
     * Logger for this class
     */
    private static final Logger LOGGER = Logger.getLogger(StudySearchController.class);

    protected String mValidateTaxaView;

    // Injected bean supplying the OAI-PMH date-granularity pattern used when
    // parsing date-valued search terms.
    private Identify identify;

    public String getValidateTaxaView() {
        return mValidateTaxaView;
    }

    public void setValidateTaxaView(String validateTaxaView) {
        mValidateTaxaView = validateTaxaView;
    }

    /**
     *
     * The API right now supports only a list of authors, so we have to update the list instead of
     * individual authors
     *
     * Delete: remove associate of person from citation, but the person is _not_ deleted from the
     * database
     *
     * @param request
     * @param response
     * @param command
     * @return
     */
    /**
     * Supported kinds of study searches. NOTE(review): byReleaseDate is
     * declared but not handled in doSearch(), which throws an Error for it.
     */
    private enum SearchType {
        inAbstract,
        inCitation,
        byAuthorName,
        byID,
        byLegacyID,
        byTitle,
        byKeyword,
        byJournal,
        byCreationDate,
        byPublicationDate,
        byReleaseDate,
        byLastModifiedDate,
        byDOI
    }

    /**
     * Form-submission entry point. If a CQL "query" parameter is present it is
     * handled by {@link #handleQueryRequest}; otherwise the keyword-search
     * form is processed, or control is delegated to the superclass.
     */
    protected ModelAndView onSubmit(
            HttpServletRequest request,
            HttpServletResponse response,
            Object command,
            BindException errors) throws Exception {
        LOGGER.info("in StudySearchController.onSubmit");
        clearMessages(request);
        String formName = request.getParameter("formName");
        String query = request.getParameter("query");
        LOGGER.info("formName is '" + formName + "'");
        if ( ! TreebaseUtil.isEmpty(query) && ! query.equals("")) {
            LOGGER.info("query is '" + query + "'");
            return this.handleQueryRequest(request, response, errors, query);
        }
        // Constant-first comparison: formName may be null when the parameter
        // is absent, which previously caused a NullPointerException here.
        if ("searchKeyword".equals(formName)) {
            SearchType searchType;
            String buttonName = request.getParameter("searchButton");
            String searchTerm = convertStars(request.getParameter("searchTerm"));
            StudySearchResults oldRes;
            {
                SearchResults<?> sr = searchResults(request);
                if (sr != null) {
                    oldRes = (StudySearchResults) sr.convertToStudies();
                } else {
                    oldRes = new StudySearchResults (); // TODO: Convert existing search results to new type
                }
            }
            // Constant-first comparisons are null-safe: a missing button
            // parameter falls through to the Error case below instead of NPE.
            if ("authorKeyword".equals(buttonName)) {
                searchType = SearchType.byAuthorName;
            } else if ("studyID".equals(buttonName)) {
                searchType = SearchType.byID;
            } else if ("legacyStudyID".equals(buttonName)) {
                searchType = SearchType.byLegacyID;
            } else if ("titleKeyword".equals(buttonName)) {
                searchType = SearchType.byTitle;
            } else if ("textKeyword".equals(buttonName)) {
                searchType = SearchType.byKeyword;
            } else if ("citationKeyword".equals(buttonName)) {
                searchType = SearchType.inCitation;
            } else if ("abstractKeyword".equals(buttonName)) {
                searchType = SearchType.inAbstract;
            } else if ("doiKeyword".equals(buttonName)) {
                searchType = SearchType.byDOI;
            }
            else {
                throw new Error("Unknown search button name '" + buttonName + "'");
            }
            // XXX we now never do an exact match with terms provided through the web app. We can change
            // this, e.g. by adding a check box whose value is the boolean argument of doSearch()
            Collection<Study> matches = doSearch(request, response, searchType, errors,searchTerm,false,null);
            if ( TreebaseUtil.isEmpty(request.getParameter("format")) || ! request.getParameter("format").equals("rss1") ) {
                SearchResults<Study> newRes = intersectSearchResults(oldRes, new StudySearchResults(matches),
                        new RequestMessageSetter(request), "No matching studies found");
                saveSearchResults(request, newRes);
                return new ModelAndView("search/studySearch", Constants.RESULT_SET, newRes);
            }
            else {
                return this.searchResultsAsRDF(new StudySearchResults(matches), request, null,"study","study");
            }
        }
        else {
            return super.onSubmit(request, response, command, errors);
        }
    }

    /**
     * Recursively evaluates a parsed CQL tree. Boolean nodes combine the
     * results of their subtrees (AND = intersection, OR = union,
     * NOT = difference); term nodes dispatch to {@link #doSearch} based on
     * the index name.
     */
    protected Set<Study> doCQLQuery(
            CQLNode node,
            Set<Study> results,
            HttpServletRequest request,
            HttpServletResponse response,
            BindException errors
            ) throws InstantiationException, ParseException {
        if ( node instanceof CQLBooleanNode ) {
            Set<Study> resultsLeft = doCQLQuery(((CQLBooleanNode)node).left,results, request, response, errors);
            Set<Study> resultsRight = doCQLQuery(((CQLBooleanNode)node).right,results, request, response, errors);
            if ( node instanceof CQLNotNode ) {
                // set difference: left minus right
                Set<Study> resultsDifference = new HashSet<Study>();
                for ( Study leftStudy : resultsLeft ) {
                    if ( ! resultsRight.contains(leftStudy) )
                        resultsDifference.add(leftStudy);
                }
                resultsLeft = resultsDifference;
            }
            else if ( node instanceof CQLOrNode ) {
                // set union
                resultsLeft.addAll(resultsRight);
            }
            else if ( node instanceof CQLAndNode ) {
                // set intersection
                Set<Study> resultsUnion = new HashSet<Study>();
                for ( Study leftStudy : resultsLeft ) {
                    if ( resultsRight.contains(leftStudy) )
                        resultsUnion.add(leftStudy);
                }
                resultsLeft = resultsUnion;
            }
            results = resultsLeft;
        }
        else if ( node instanceof CQLTermNode ) {
            CQLTermNode term = (CQLTermNode)node;
            boolean exactMatch = term.getRelation().getBase().equals("==");
            CQLRelation relation = term.getRelation();
            String index = term.getIndex();
            if ( index.startsWith("tb.title") ) {
                results.addAll(doSearch(request, response, SearchType.byTitle, errors, term.getTerm(),exactMatch,relation));
            } else if ( index.equals("tb.identifier.study") ) {
                results.addAll(doSearch(request, response, SearchType.byID, errors, term.getTerm(),exactMatch,relation));
            } else if ( index.startsWith("dcterms.contributor") ) {
                results.addAll(doSearch(request, response, SearchType.byAuthorName, errors, term.getTerm(),exactMatch,relation));
            } else if ( index.startsWith("dcterms.abstract") ) {
                results.addAll(doSearch(request, response, SearchType.inAbstract, errors, term.getTerm(),exactMatch,relation));
            } else if ( index.startsWith("dcterms.subject") ) {
                results.addAll(doSearch(request, response, SearchType.byKeyword, errors, term.getTerm(),exactMatch,relation));
            } else if ( index.startsWith("dcterms.bibliographicCitation") ) {
                results.addAll(doSearch(request, response, SearchType.inCitation, errors, term.getTerm(),exactMatch,relation));
            } else if ( index.equals("tb.identifier.study.tb1") ) {
                results.addAll(doSearch(request, response, SearchType.byLegacyID, errors, term.getTerm(),exactMatch,relation));
            } else if ( index.startsWith("prism.publicationName") ) {
                results.addAll(doSearch(request, response, SearchType.byJournal, errors, term.getTerm(),exactMatch,relation));
            } else if ( index.startsWith("dc.date") ) {
                results.addAll(doSearch(request,response, SearchType.byLastModifiedDate, errors, term.getTerm(),exactMatch,relation));
            } else if ( index.startsWith("prism.creationDate") ) {
                results.addAll(doSearch(request,response, SearchType.byCreationDate, errors, term.getTerm(),exactMatch,relation));
            } else if ( index.startsWith("prism.publicationDate") ) {
                results.addAll(doSearch(request,response, SearchType.byPublicationDate, errors, term.getTerm(),exactMatch,relation));
            } else if ( index.startsWith("prism.modificationDate") ) {
                results.addAll(doSearch(request,response, SearchType.byLastModifiedDate, errors, term.getTerm(),exactMatch,relation));
            } else if ( index.startsWith("prism.doi") ) {
                results.addAll(doSearch(request,response,SearchType.byDOI, errors, term.getTerm(), exactMatch,relation));
            }
            else {
                // issue warnings
                addMessage(request, "Unsupported index: " + index);
            }
        }
        // use this class's logger for consistency (was 'logger' before)
        LOGGER.debug(node);
        return results;
    }

    /**
     * Performs a single study search of the given kind.
     *
     * @param searchTerm raw user term; wrapped in '%' wildcards for the
     *                   keyword-style searches
     * @param exactMatch only honored for byJournal searches
     * @param relation   CQL relation, used by the date-range searches
     * @return matching studies; may be null for an invalid legacy ID
     */
    @SuppressWarnings("unchecked")
    protected Collection<Study> doSearch(
            HttpServletRequest request,
            HttpServletResponse response,
            SearchType searchType,
            BindException errors,
            String searchTerm,
            boolean exactMatch, CQLRelation relation) throws InstantiationException, ParseException {
        String keywordSearchTerm = "%" + searchTerm + "%";
        Collection<Study> matches = new HashSet<Study>();
        StudyService studyService = getSearchService().getStudyService();
        SubmissionService submissionService = getSearchService().getSubmissionService();
        switch (searchType) {
            case byID:
                matches = (Collection<Study>) doSearchByIDString(request, studyService, Study.class, searchTerm);
                break;
            case byLegacyID:
            {
                TreebaseIDString legacyID = null;
                boolean malformed = false;
                try {
                    legacyID = new TreebaseIDString(searchTerm, Study.class);
                } catch (MalformedTreebaseIDString e) {
                    malformed = true;
                }
                if (malformed || legacyID.getTBClass() != Study.class) {
                    addMessage(request, "Legacy ID number '" + searchTerm + "' is not valid; try S#### or just ####");
                    matches = null;
                    break;
                }
                matches = (Collection<Study>) studyService.findByTBStudyID("S" + legacyID.getId().toString());
                break;
            }
            case inAbstract:
                matches = studyService.findByAbstract(keywordSearchTerm);
                break;
            case inCitation:
                matches = studyService.findByCitation(keywordSearchTerm);
                break;
            case byAuthorName:
                matches = studyService.findByAuthor(searchTerm);
                break;
            case byTitle:
                matches = studyService.findByTitle(keywordSearchTerm);
                break;
            case byKeyword:
                matches = studyService.findByKeyword(keywordSearchTerm);
                break;
            case byLastModifiedDate:
                matches = findByLastModified(searchTerm, relation, submissionService);
                break;
            case byPublicationDate:
                matches = findByPublicationDate(searchTerm, relation, studyService);
                break;
            case byCreationDate:
                matches = findByCreationDate(searchTerm, relation, submissionService);
                break;
            case byDOI:
            {
                Study result = studyService.findByDOI(searchTerm);
                if ( null != result ) {
                    matches.add(result);
                }
                break;
            }
            case byJournal:
            {
                if ( exactMatch ) {
                    matches = studyService.findByJournal(searchTerm, exactMatch);
                } else {
                    matches = studyService.findByJournal(keywordSearchTerm, exactMatch);
                }
                break;
            }
            default:
                throw new Error ("Unknown search type '" + searchType + "'");
        }
        return matches;
    }

    /**
     * Translates a CQL relation and date term into a [from, until] pair:
     * '>' means term..now, '<' means epoch..term; any other relation leaves
     * both endpoints null (matching the previous behavior of the three
     * date-search methods this helper was extracted from).
     */
    private Date[] resolveDateRange(String searchTerm, CQLRelation relation) throws ParseException {
        Date from = null;
        Date until = null;
        if ( relation.getBase().startsWith(">") ) {
            from = IdentifyUtil.parseGranularity(identify.getGranularityPattern(), searchTerm);
            until = new Date(); // i.e. now
        }
        else if ( relation.getBase().startsWith("<") ) {
            from = new Date(0); // i.e. epoch
            until = IdentifyUtil.parseGranularity(identify.getGranularityPattern(), searchTerm);
        }
        return new Date[] { from, until };
    }

    /** Studies whose submissions were created in the requested date range. */
    private Collection<Study> findByCreationDate(String searchTerm,
            CQLRelation relation, SubmissionService submissionService) throws ParseException {
        Date[] range = resolveDateRange(searchTerm, relation);
        Collection<Submission> submissions = submissionService.findSubmissionByCreateDateRange(range[0], range[1]);
        Collection<Study> matches = new HashSet<Study>();
        for ( Submission submission : submissions ) {
            matches.add(submission.getStudy());
        }
        return matches;
    }

    /** Studies published in the requested date range. */
    private Collection<Study> findByPublicationDate(String searchTerm,
            CQLRelation relation, StudyService studyService) throws ParseException {
        Date[] range = resolveDateRange(searchTerm, relation);
        return studyService.findByPublicationDateRange(range[0], range[1]);
    }

    /** Studies whose submissions were last modified in the requested range. */
    private Collection<Study> findByLastModified(String searchTerm,
            CQLRelation relation, SubmissionService submissionService)
            throws ParseException {
        Date[] range = resolveDateRange(searchTerm, relation);
        Collection<Submission> submissions = submissionService.findSubmissionByLastModifiedDateRange(range[0], range[1]);
        Collection<Study> matches = new HashSet<Study>();
        for ( Submission submission : submissions ) {
            matches.add(submission.getStudy());
        }
        return matches;
    }

    @Override
    SearchResultsType currentSearchType() {
        return SearchResultsType.STUDY;
    }

    @Override
    public String getDefaultViewURL() {
        return "studySearch.html";
    }

    /**
     * Parses and evaluates a CQL query, then renders the results either as a
     * regular search view or, for format=rss1, as RDF (optionally converted
     * to the requested record schema).
     */
    @Override
    protected ModelAndView handleQueryRequest(HttpServletRequest request,
            HttpServletResponse response, BindException errors,String query)
            throws CQLParseException, IOException, InstantiationException, ParseException {
        CQLParser parser = new CQLParser();
        CQLNode root = parser.parse(query);
        root = normalizeParseTree(root);
        Set<Study> queryResults = doCQLQuery(root, new HashSet<Study>(),request, response, errors);
        StudySearchResults tsr = new StudySearchResults(queryResults);
        if ( TreebaseUtil.isEmpty(request.getParameter("format")) || ! request.getParameter("format").equals("rss1") ) {
            saveSearchResults(request, tsr);
            return new ModelAndView("search/studySearch", Constants.RESULT_SET, tsr);
        }
        else {
            SearchResults<?> res = tsr;
            String schema = null;
            if ( ! TreebaseUtil.isEmpty(request.getParameter("recordSchema")) ) {
                schema = request.getParameter("recordSchema");
                if ( schema.equals("tree") ) {
                    res = tsr.convertToTrees();
                }
                else if ( schema.equals("matrix") ) {
                    res = tsr.convertToMatrices();
                }
                else if ( schema.equals("taxon") ) {
                    res = tsr.convertToTaxa();
                }
            }
            // NOTE(review): this branch is only reached when format == "rss1",
            // so the condition below is always false and the results are never
            // saved here; preserved as-is to avoid a behavior change.
            if (! request.getParameter("format").equals("rss1")) {
                this.saveSearchResults(request, res);
            }
            return this.searchResultsAsRDF(res, request, root, schema, "study");
        }
    }

    @Override
    protected Map<String, String> getPredicateMapping() {
        Map<String,String> mapping = new HashMap<String,String>();
        mapping.put("dcterms.title", "tb.title.study");
        mapping.put("dcterms.identifier", "tb.identifier.study");
        return mapping;
    }

    public Identify getIdentify() {
        return identify;
    }

    public void setIdentify(Identify identify) {
        this.identify = identify;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.drill.exec.physical.impl.scan.v3.lifecycle;
import org.apache.drill.common.exceptions.CustomErrorContext;
import org.apache.drill.exec.memory.BufferAllocator;
import org.apache.drill.exec.ops.OperatorContext;
import org.apache.drill.exec.physical.impl.scan.RowBatchReader;
import org.apache.drill.exec.physical.impl.scan.v3.ReaderFactory;
import org.apache.drill.exec.physical.impl.scan.v3.ScanLifecycleBuilder;
import org.apache.drill.exec.physical.impl.scan.v3.schema.ScanSchemaConfigBuilder;
import org.apache.drill.exec.physical.impl.scan.v3.schema.ScanSchemaTracker;
import org.apache.drill.exec.physical.resultSet.ResultSetLoader;
import org.apache.drill.exec.physical.resultSet.impl.ResultVectorCacheImpl;
import org.apache.drill.exec.record.metadata.TupleMetadata;
/**
* Basic scan framework for a set of "managed" readers and which uses the
* scan schema tracker to evolve the scan output schema.
* Readers are created and managed via a reader
* factory class unique to each type of scan. The reader factory also provides
* the scan-specific schema negotiator to be passed to the reader.
*
* <h4>Lifecycle</h4>
*
* The options provided in the {@link ScanLifecycleBuilder} are
* sufficient to drive the entire scan operator functionality.
* Schema resolution and projection is done generically and is the same for all
* data sources. Only the
* reader (created via the factory class) differs from one type of file to
* another.
* <p>
* The framework achieves the work described below by composing a
* set of detailed classes, each of which performs some specific task. This
* structure leaves the reader to simply infer schema and read data.
*
* <h4>Reader Integration</h4>
*
* The details of how a file is structured, how a schema is inferred, how
* data is decoded: all that is encapsulated in the reader. The only real
 * interaction between the reader and the framework is:
* <ul>
* <li>The reader factory creates a reader and the corresponding schema
* negotiator.</li>
* <li>The reader "negotiates" a schema with the framework. The framework
* knows the projection list from the query plan, knows something about
* data types (whether a column should be scalar, a map or an array), and
* knows about the schema already defined by prior readers. The reader knows
* what schema it can produce (if "early schema.") The schema negotiator
* class handles this task.</li>
* <li>The reader reads data from the file and populates value vectors a
* batch at a time. The framework creates the result set loader to use for
* this work. The schema negotiator returns that loader to the reader, which
* uses it during read.<p>
* A reader may be "late schema", true "schema on read." In this case, the
* reader simply tells the result set loader to create a new column reader
* on the fly. The framework will work out if that new column is to be
* projected and will return either a real column writer (projected column)
* or a dummy column writer (unprojected column.)</li>
* <li>The reader then reads batches of data until all data is read. The
* result set loader signals when a batch is full; the reader should not
* worry about this detail itself.</li>
* <li>The reader then releases its resources.</li>
* </ul>
* <p>
* See {@link ScanSchemaTracker} for details about how the scan schema
* evolves over the scan lifecycle.
*
* <h4>Lifecycle</h4>
*
* Coordinates the components that make up a scan implementation:
* <ul>
* <li>{@link ScanSchemaTracker} which resolves the scan schema over the
* lifetime of the scan.</li>
* <li>Implicit columns manager which identifies and populates implicit
* file columns, partition columns, and Drill's internal metadata
* columns.</li>
* <li>The actual readers which load (possibly a subset of) the
* columns requested from the input source.</li>
* </ul>
* <p>
* Implicit columns are unique to each storage plugin. At present, they
* are defined only for the file system plugin. To handle such variation,
* each extension defines a subclass of the {@link ScanLifecycleBuilder} class to
* create the implicit columns manager (and schema negotiator) unique to
* a certain kind of scan.
* <p>
* Each reader is tracked by a {@link ReaderLifecycle} which handles:
* <ul>
* <li>Setting up the {@link ResultSetLoader} for the reader.</li>
* <li>The concrete values for implicit columns for that reader
* (and its file, if file-based.)</li>
* <li>The missing columns handler which "makes up" values for projected
* columns not read by the reader.</li>
 * <li>Batch assembler, which combines the three sources of vectors
* to create the output batch with the schema specified by the
* schema tracker.</li>
* </ul>
*
 * <h4>Publishing the Final Result Set</h4>
*
* This class "publishes" a vector container that has the final, projected
* form of a scan. The projected schema include:
* <ul>
* <li>Columns from the reader.</li>
* <li>Static columns, such as implicit or partition columns.</li>
* <li>Null columns for items in the select list, but not found in either
* of the above two categories.</li>
* </ul>
* The order of columns is that set by the select list (or, by the reader for
 * a <tt>SELECT *</tt> query).
*
* @see ScanSchemaTracker for a description of the schema lifecycle
* which drives a scan
*/
public class ScanLifecycle {

  /** Operator context provided by the Drill execution framework. */
  private final OperatorContext context;

  /** Configuration assembled by the per-plugin scan builder. */
  private final ScanLifecycleBuilder options;

  /** Resolves the scan output schema as it evolves over the scan lifetime. */
  private final ScanSchemaTracker schemaTracker;

  /** Creates the readers that actually load data from the input source. */
  private final ReaderFactory<?> readerFactory;

  /**
   * Cache used to hand the same vector instances to each output batch: the
   * downstream Project operator depends on receiving the exact same vectors
   * from batch to batch. If Project ever changes to look vectors up rather
   * than relying on vector instances, this cache can be deprecated.
   */
  private final ResultVectorCacheImpl vectorCache;

  /** Number of batches produced so far. */
  private int batchCount;

  /** Number of rows produced so far; checked against the scan limit. */
  private long rowCount;

  public ScanLifecycle(OperatorContext context, ScanLifecycleBuilder builder) {
    this.context = context;
    this.options = builder;
    this.schemaTracker = new ScanSchemaConfigBuilder()
        .projection(builder.projection())
        .definedSchema(builder.definedSchema())
        .providedSchema(builder.providedSchema())
        .allowSchemaChange(builder.allowSchemaChange())
        .build();
    // Give the plugin a chance to reject an unsupported projection up front.
    if (builder.schemaValidator() != null) {
      builder.schemaValidator().validate(schemaTracker);
    }
    this.vectorCache = new ResultVectorCacheImpl(allocator(), false);
    this.readerFactory = builder.readerFactory();
  }

  public OperatorContext context() {
    return context;
  }

  public ScanLifecycleBuilder options() {
    return options;
  }

  public ScanSchemaTracker schemaTracker() {
    return schemaTracker;
  }

  public ResultVectorCacheImpl vectorCache() {
    return vectorCache;
  }

  public ReaderFactory<?> readerFactory() {
    return readerFactory;
  }

  /** Reports whether the scan schema has been fully resolved. */
  public boolean hasOutputSchema() {
    return schemaTracker.isResolved();
  }

  public CustomErrorContext errorContext() {
    return options.errorContext();
  }

  public BufferAllocator allocator() {
    return context.getAllocator();
  }

  public int batchCount() {
    return batchCount;
  }

  public long rowCount() {
    return rowCount;
  }

  /** Records one delivered batch and the number of rows it carried. */
  public void tallyBatch(int rows) {
    batchCount++;
    rowCount += rows;
  }

  /**
   * Supplies the lifecycle for the next reader, or {@code null} when the scan
   * is complete. Enforces the row-count limit, but always allows at least one
   * (possibly zero-row) batch so that the schema is captured.
   */
  public RowBatchReader nextReader() {
    boolean limitReached = batchCount > 0 && rowCount >= options.limit();
    if (limitReached || !readerFactory.hasNext()) {
      return null;
    }
    return new ReaderLifecycle(this, options.limit() - rowCount);
  }

  /** Extension point: plugins override to supply their own negotiator. */
  protected SchemaNegotiatorImpl newNegotiator(ReaderLifecycle readerLifecycle) {
    return new SchemaNegotiatorImpl(readerLifecycle);
  }

  public TupleMetadata outputSchema() {
    return schemaTracker.outputSchema();
  }

  /** No per-scan resources to release; subclasses may override. */
  public void close() { }
}
| |
/*
* Copyright 2019, OpenCensus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.opencensus.exporter.metrics.ocagent;
import static com.google.common.truth.Truth.assertThat;
import com.google.common.util.concurrent.MoreExecutors;
import io.grpc.BindableService;
import io.grpc.Server;
import io.grpc.ServerBuilder;
import io.grpc.netty.NettyServerBuilder;
import io.opencensus.common.Duration;
import io.opencensus.metrics.LabelKey;
import io.opencensus.metrics.LabelValue;
import io.opencensus.metrics.LongGauge;
import io.opencensus.metrics.LongGauge.LongPoint;
import io.opencensus.metrics.MetricRegistry;
import io.opencensus.metrics.Metrics;
import io.opencensus.proto.agent.common.v1.Node;
import io.opencensus.proto.agent.metrics.v1.ExportMetricsServiceRequest;
import io.opencensus.proto.metrics.v1.Metric;
import io.opencensus.stats.Aggregation;
import io.opencensus.stats.Aggregation.Distribution;
import io.opencensus.stats.BucketBoundaries;
import io.opencensus.stats.Measure.MeasureDouble;
import io.opencensus.stats.Measure.MeasureLong;
import io.opencensus.stats.Stats;
import io.opencensus.stats.StatsRecorder;
import io.opencensus.stats.View;
import io.opencensus.stats.View.Name;
import io.opencensus.stats.ViewManager;
import io.opencensus.tags.TagContext;
import io.opencensus.tags.TagKey;
import io.opencensus.tags.TagValue;
import io.opencensus.tags.Tagger;
import io.opencensus.tags.Tags;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Random;
import java.util.Set;
import java.util.concurrent.Executor;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/** End-to-end integration test for {@link OcAgentMetricsExporter}. */
@RunWith(JUnit4.class)
public class OcAgentMetricsExporterIntegrationTest {

  // Fake in-process agent; records every export request it receives.
  private Server agent;
  private FakeOcAgentMetricsServiceGrpcImpl fakeOcAgentMetricsServiceGrpc;

  private static final String SERVICE_NAME = "integration-test";
  private static final Duration RETRY_INTERVAL = Duration.create(2, 0);
  private static final Duration EXPORT_INTERVAL = Duration.create(2, 0);

  // The latency in milliseconds
  private static final MeasureDouble M_LATENCY_MS =
      MeasureDouble.create("repl/latency", "The latency in milliseconds per REPL loop", "ms");

  // Counts the number of lines read.
  private static final MeasureLong M_LINES_IN =
      MeasureLong.create("repl/lines_in", "The number of lines read in", "1");

  // Counts the number of non EOF(end-of-file) errors.
  private static final MeasureLong M_ERRORS =
      MeasureLong.create("repl/errors", "The number of errors encountered", "1");

  // Counts/groups the lengths of lines read in.
  private static final MeasureLong M_LINE_LENGTHS =
      MeasureLong.create("repl/line_lengths", "The distribution of line lengths", "By");

  // The tag "method"
  private static final TagKey KEY_METHOD = TagKey.create("method");

  // Defining the distribution aggregations
  private static final Aggregation LATENCY_DISTRIBUTION =
      Distribution.create(
          BucketBoundaries.create(
              Arrays.asList(
                  // [>=0ms, >=25ms, >=50ms, >=75ms, >=100ms, >=200ms, >=400ms, >=600ms, >=800ms,
                  // >=1s, >=2s, >=4s, >=6s]
                  0.0,
                  25.0,
                  50.0,
                  75.0,
                  100.0,
                  200.0,
                  400.0,
                  600.0,
                  800.0,
                  1000.0,
                  2000.0,
                  4000.0,
                  6000.0)));

  private static final Aggregation LENGTH_DISTRIBUTION =
      Distribution.create(
          BucketBoundaries.create(
              Arrays.asList(
                  // [>=0B, >=5B, >=10B, >=20B, >=40B, >=60B, >=80B, >=100B, >=200B, >=400B,
                  // >=600B,
                  // >=800B, >=1000B]
                  0.0,
                  5.0,
                  10.0,
                  20.0,
                  40.0,
                  60.0,
                  80.0,
                  100.0,
                  200.0,
                  400.0,
                  600.0,
                  800.0,
                  1000.0)));

  // Define the count aggregation
  private static final Aggregation COUNT = Aggregation.Count.create();

  // Empty column
  private static final List<TagKey> NO_KEYS = Collections.emptyList();

  // Define the views
  private static final List<View> VIEWS =
      Arrays.asList(
          View.create(
              Name.create("ocjavametrics/latency"),
              "The distribution of latencies",
              M_LATENCY_MS,
              LATENCY_DISTRIBUTION,
              Collections.singletonList(KEY_METHOD)),
          View.create(
              Name.create("ocjavametrics/lines_in"),
              "The number of lines read in from standard input",
              M_LINES_IN,
              COUNT,
              NO_KEYS),
          View.create(
              Name.create("ocjavametrics/errors"),
              "The number of errors encountered",
              M_ERRORS,
              COUNT,
              Collections.singletonList(KEY_METHOD)),
          View.create(
              Name.create("ocjavametrics/line_lengths"),
              "The distribution of line lengths",
              M_LINE_LENGTHS,
              LENGTH_DISTRIBUTION,
              NO_KEYS));

  private static final Random random = new Random();
  private static final Tagger tagger = Tags.getTagger();
  private static final StatsRecorder statsRecorder = Stats.getStatsRecorder();
  private static final ViewManager viewManager = Stats.getViewManager();
  private static final MetricRegistry metricRegistry = Metrics.getMetricRegistry();

  @Before
  public void setUp() {
    fakeOcAgentMetricsServiceGrpc = new FakeOcAgentMetricsServiceGrpcImpl();
    // The server is built here but deliberately NOT started: the test first
    // exercises the exporter's retry behavior while the agent is down.
    agent = getServer(OcAgentMetricsExporter.DEFAULT_END_POINT, fakeOcAgentMetricsServiceGrpc);
  }

  @After
  public void tearDown() {
    agent.shutdown();
  }

  @Test
  public void testExportMetrics() throws InterruptedException, IOException {
    // Mock a real-life scenario in production, where Agent is not enabled at first, then enabled
    // after an outage. Users should be able to see metrics shortly after Agent is up.

    registerAllViews();
    LongGauge gauge = registerGauge();

    // Register the OcAgent Exporter first.
    // Agent is not yet up and running so Exporter will just retry connection.
    OcAgentMetricsExporter.createAndRegister(
        OcAgentMetricsExporterConfiguration.builder()
            .setServiceName(SERVICE_NAME)
            .setUseInsecure(true)
            .setRetryInterval(RETRY_INTERVAL)
            .setExportInterval(EXPORT_INTERVAL)
            .build());

    doWork(
        5, gauge.getOrCreateTimeSeries(Collections.singletonList(LabelValue.create("First work"))));

    // Wait 3s so that all metrics get exported.
    Thread.sleep(3000);

    // No interaction with Agent so far.
    assertThat(fakeOcAgentMetricsServiceGrpc.getExportMetricsServiceRequests()).isEmpty();

    // Imagine that an outage happened, now start Agent. Exporter should be able to connect to Agent
    // after the next retry interval.
    agent.start();

    // Wait 3s for Exporter to start another attempt to connect to Agent.
    Thread.sleep(3000);

    doWork(
        8,
        gauge.getOrCreateTimeSeries(Collections.singletonList(LabelValue.create("Second work"))));

    // Wait 3s so that all metrics get exported.
    Thread.sleep(3000);

    List<ExportMetricsServiceRequest> exportRequests =
        fakeOcAgentMetricsServiceGrpc.getExportMetricsServiceRequests();
    // First request carries the Node identification; later ones carry metrics.
    assertThat(exportRequests.size()).isAtLeast(2);

    ExportMetricsServiceRequest firstRequest = exportRequests.get(0);
    Node expectedNode = OcAgentNodeUtils.getNodeInfo(SERVICE_NAME);
    Node actualNode = firstRequest.getNode();
    // Compare field-by-field: timestamps inside Node differ between the two.
    assertThat(actualNode.getIdentifier().getHostName())
        .isEqualTo(expectedNode.getIdentifier().getHostName());
    assertThat(actualNode.getIdentifier().getPid())
        .isEqualTo(expectedNode.getIdentifier().getPid());
    assertThat(actualNode.getLibraryInfo()).isEqualTo(expectedNode.getLibraryInfo());
    assertThat(actualNode.getServiceInfo()).isEqualTo(expectedNode.getServiceInfo());

    List<Metric> metricProtos = new ArrayList<>();
    for (int i = 1; i < exportRequests.size(); i++) {
      metricProtos.addAll(exportRequests.get(i).getMetricsList());
    }

    // There should be at least one metric exported for each view and gauge (4 + 1).
    assertThat(metricProtos.size()).isAtLeast(5);

    Set<String> expectedMetrics = new HashSet<>();
    expectedMetrics.add("jobs");
    for (View view : VIEWS) {
      expectedMetrics.add(view.getName().asString());
    }
    Set<String> actualMetrics = new HashSet<>();
    for (Metric metricProto : metricProtos) {
      actualMetrics.add(metricProto.getMetricDescriptor().getName());
    }
    assertThat(actualMetrics).containsExactlyElementsIn(expectedMetrics);
  }

  /** Registers all stats views so the recorded measures become metrics. */
  private static void registerAllViews() {
    for (View view : VIEWS) {
      viewManager.registerView(view);
    }
  }

  /** Registers the "jobs" gauge used alongside the stats views. */
  private static LongGauge registerGauge() {
    return metricRegistry.addLongGauge(
        "jobs", "Pending jobs", "1", Collections.singletonList(LabelKey.create("Name", "desc")));
  }

  /** Simulates work: counts down the gauge and records line stats per job. */
  private static void doWork(int jobs, LongPoint point) {
    for (int i = 0; i < jobs; i++) {
      point.set(jobs - i);
      String line = generateRandom(random.nextInt(128));
      processLine(line);
      recordStat(M_LINES_IN, 1L);
      recordStat(M_LINE_LENGTHS, (long) line.length());
    }
  }

  /** Produces a pseudo-random string of roughly the given byte length. */
  private static String generateRandom(int size) {
    byte[] array = new byte[size];
    random.nextBytes(array);
    // Decode with an explicit charset rather than the platform default;
    // invalid UTF-8 sequences map to the replacement char, fine for test data.
    return new String(array, StandardCharsets.UTF_8);
  }

  /** Upper-cases a line while recording latency and error stats. */
  private static String processLine(String line) {
    long startTimeNs = System.nanoTime();
    try {
      Thread.sleep(10L);
      return line.toUpperCase(Locale.US);
    } catch (Exception e) {
      // Restore the interrupt status if the sleep was interrupted, then count
      // the failure as an error rather than propagating it.
      if (e instanceof InterruptedException) {
        Thread.currentThread().interrupt();
      }
      recordTaggedStat(KEY_METHOD, "processLine", M_ERRORS, 1L);
      return "";
    } finally {
      long totalTimeNs = System.nanoTime() - startTimeNs;
      double timespentMs = totalTimeNs / 1e6;
      recordTaggedStat(KEY_METHOD, "processLine", M_LATENCY_MS, timespentMs);
    }
  }

  private static void recordStat(MeasureLong ml, Long n) {
    TagContext empty = tagger.emptyBuilder().build();
    statsRecorder.newMeasureMap().put(ml, n).record(empty);
  }

  private static void recordTaggedStat(TagKey key, String value, MeasureLong ml, long n) {
    TagContext context = tagger.emptyBuilder().put(key, TagValue.create(value)).build();
    statsRecorder.newMeasureMap().put(ml, n).record(context);
  }

  private static void recordTaggedStat(TagKey key, String value, MeasureDouble md, double d) {
    TagContext context = tagger.emptyBuilder().put(key, TagValue.create(value)).build();
    statsRecorder.newMeasureMap().put(md, d).record(context);
  }

  /** Builds (without starting) a gRPC server for the given "host:port" endpoint. */
  private static Server getServer(String endPoint, BindableService service) {
    ServerBuilder<?> builder = NettyServerBuilder.forAddress(parseEndpoint(endPoint));
    Executor executor = MoreExecutors.directExecutor();
    builder.executor(executor);
    return builder.addService(service).build();
  }

  /** Parses "host:port"; falls back to localhost:55678 (the OC Agent default) on bad input. */
  private static InetSocketAddress parseEndpoint(String endPoint) {
    try {
      int colonIndex = endPoint.indexOf(":");
      String host = endPoint.substring(0, colonIndex);
      int port = Integer.parseInt(endPoint.substring(colonIndex + 1));
      return new InetSocketAddress(host, port);
    } catch (RuntimeException e) {
      return new InetSocketAddress("localhost", 55678);
    }
  }
}
| |
/*
* Copyright 2000-2014 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package git4idea.branch;
import com.intellij.dvcs.DvcsUtil;
import com.intellij.notification.Notification;
import com.intellij.notification.NotificationListener;
import com.intellij.openapi.application.AccessToken;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.vcs.VcsNotifier;
import com.intellij.openapi.vcs.changes.Change;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.util.ArrayUtil;
import git4idea.GitUtil;
import git4idea.commands.*;
import git4idea.config.GitVcsSettings;
import git4idea.repo.GitRepository;
import git4idea.util.GitPreservingProcess;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.event.HyperlinkEvent;
import java.util.Collection;
import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;
import static git4idea.util.GitUIUtil.code;
/**
* Represents {@code git checkout} operation.
* Fails to checkout if there are unmerged files.
* Fails to checkout if there are untracked files that would be overwritten by checkout. Shows the list of files.
* If there are local changes that would be overwritten by checkout, proposes to perform a "smart checkout" which means stashing local
* changes, checking out, and then unstashing the changes back (possibly with showing the conflict resolving dialog).
*
* @author Kirill Likhodedov
*/
class GitCheckoutOperation extends GitBranchOperation {

  // Appended to error notifications that offer to roll back repositories which already succeeded.
  public static final String ROLLBACK_PROPOSAL_FORMAT = "You may rollback (checkout back to previous branch) not to let branches diverge.";

  @NotNull private final String myStartPointReference; // branch, tag or commit to check out
  private final boolean myDetach;                      // true => detached-HEAD checkout
  private final boolean myRefShouldBeValid;            // false => an unknown reference is skipped, not fatal
  @Nullable private final String myNewBranch;          // new branch to create at the start point, or null

  GitCheckoutOperation(@NotNull Project project,
                       @NotNull Git git,
                       @NotNull GitBranchUiHandler uiHandler,
                       @NotNull Collection<GitRepository> repositories,
                       @NotNull String startPointReference,
                       boolean detach,
                       boolean refShouldBeValid,
                       @Nullable String newBranch) {
    super(project, git, uiHandler, repositories);
    myStartPointReference = startPointReference;
    myDetach = detach;
    myRefShouldBeValid = refShouldBeValid;
    myNewBranch = newBranch;
  }

  /**
   * Checks out the reference in each repository in turn, stopping at the first
   * fatal error. Recoverable failures (local changes, untracked files) are
   * either resolved interactively or reported; success/skip state is tracked
   * per repository so a rollback can be offered later.
   */
  @Override
  protected void execute() {
    saveAllDocuments();
    boolean fatalErrorHappened = false;
    AccessToken token = DvcsUtil.workingTreeChangeStarted(myProject);
    try {
      while (hasMoreRepositories() && !fatalErrorHappened) {
        final GitRepository repository = next();

        VirtualFile root = repository.getRoot();
        // Detectors recognize well-known failure patterns in git's output.
        GitLocalChangesWouldBeOverwrittenDetector localChangesDetector =
          new GitLocalChangesWouldBeOverwrittenDetector(root, GitLocalChangesWouldBeOverwrittenDetector.Operation.CHECKOUT);
        GitSimpleEventDetector unmergedFiles = new GitSimpleEventDetector(GitSimpleEventDetector.Event.UNMERGED_PREVENTING_CHECKOUT);
        GitSimpleEventDetector unknownPathspec = new GitSimpleEventDetector(GitSimpleEventDetector.Event.INVALID_REFERENCE);
        GitUntrackedFilesOverwrittenByOperationDetector untrackedOverwrittenByCheckout =
          new GitUntrackedFilesOverwrittenByOperationDetector(root);

        GitCommandResult result = myGit.checkout(repository, myStartPointReference, myNewBranch, false, myDetach,
                                                 localChangesDetector, unmergedFiles, unknownPathspec, untrackedOverwrittenByCheckout);
        if (result.success()) {
          refresh(repository);
          markSuccessful(repository);
        }
        else if (unmergedFiles.hasHappened()) {
          fatalUnmergedFilesError();
          fatalErrorHappened = true;
        }
        else if (localChangesDetector.wasMessageDetected()) {
          // Local changes would be overwritten: offer "smart checkout" (stash - checkout - unstash).
          boolean smartCheckoutSucceeded = smartCheckoutOrNotify(repository, localChangesDetector);
          if (!smartCheckoutSucceeded) {
            fatalErrorHappened = true;
          }
        }
        else if (untrackedOverwrittenByCheckout.wasMessageDetected()) {
          fatalUntrackedFilesError(repository.getRoot(), untrackedOverwrittenByCheckout.getRelativeFilePaths());
          fatalErrorHappened = true;
        }
        else if (!myRefShouldBeValid && unknownPathspec.hasHappened()) {
          // The reference may legitimately be absent in this repository: skip it.
          markSkip(repository);
        }
        else {
          fatalError(getCommonErrorTitle(), result.getErrorOutputAsJoinedString());
          fatalErrorHappened = true;
        }
      }
    }
    finally {
      DvcsUtil.workingTreeChangeFinished(myProject, token);
    }

    if (!fatalErrorHappened) {
      if (wereSuccessful()) {
        if (!wereSkipped()) {
          notifySuccess();
          updateRecentBranch();
        }
        else {
          // Partial success: mention both checked-out and skipped repositories,
          // and offer a rollback link in the notification.
          String mentionSuccess = getSuccessMessage() + GitUtil.mention(getSuccessfulRepositories(), 4);
          String mentionSkipped = wereSkipped() ? "<br>Revision not found" + GitUtil.mention(getSkippedRepositories(), 4) : "";
          VcsNotifier.getInstance(myProject).notifySuccess("",
                                                           mentionSuccess +
                                                           mentionSkipped +
                                                           "<br><a href='rollback'>Rollback</a>",
                                                           new RollbackOperationNotificationListener());
          updateRecentBranch();
        }
      }
      else {
        // Nothing succeeded and no fatal error: only possible when the ref was allowed to be invalid.
        LOG.assertTrue(!myRefShouldBeValid);
        notifyError("Couldn't checkout " + myStartPointReference, "Revision not found" + GitUtil.mention(getSkippedRepositories(), 4));
      }
    }
  }

  /**
   * Asks the user how to handle local changes that checkout would overwrite:
   * smart checkout (stash/unstash), force checkout, or cancel. Returns true if
   * the chosen strategy completed successfully for all conflicting repositories.
   */
  private boolean smartCheckoutOrNotify(@NotNull GitRepository repository,
                                        @NotNull GitMessageWithFilesDetector localChangesOverwrittenByCheckout) {
    Pair<List<GitRepository>, List<Change>> conflictingRepositoriesAndAffectedChanges =
      getConflictingRepositoriesAndAffectedChanges(repository, localChangesOverwrittenByCheckout, myCurrentHeads.get(repository),
                                                   myStartPointReference);
    List<GitRepository> allConflictingRepositories = conflictingRepositoriesAndAffectedChanges.getFirst();
    List<Change> affectedChanges = conflictingRepositoriesAndAffectedChanges.getSecond();

    Collection<String> absolutePaths = GitUtil.toAbsolute(repository.getRoot(), localChangesOverwrittenByCheckout.getRelativeFilePaths());
    int smartCheckoutDecision = myUiHandler.showSmartOperationDialog(myProject, affectedChanges, absolutePaths, "checkout",
                                                                     "&Force Checkout");
    if (smartCheckoutDecision == GitSmartOperationDialog.SMART_EXIT_CODE) {
      boolean smartCheckedOutSuccessfully = smartCheckout(allConflictingRepositories, myStartPointReference, myNewBranch, getIndicator());
      if (smartCheckedOutSuccessfully) {
        for (GitRepository conflictingRepository : allConflictingRepositories) {
          markSuccessful(conflictingRepository);
          refresh(conflictingRepository);
        }
        return true;
      }
      else {
        // notification is handled in smartCheckout()
        return false;
      }
    }
    else if (smartCheckoutDecision == GitSmartOperationDialog.FORCE_EXIT_CODE) {
      boolean forceCheckoutSucceeded = checkoutOrNotify(allConflictingRepositories, myStartPointReference, myNewBranch, true);
      if (forceCheckoutSucceeded) {
        markSuccessful(ArrayUtil.toObjectArray(allConflictingRepositories, GitRepository.class));
        refresh(ArrayUtil.toObjectArray(allConflictingRepositories, GitRepository.class));
      }
      return forceCheckoutSucceeded;
    }
    else {
      // User cancelled the dialog.
      fatalLocalChangesError(myStartPointReference);
      return false;
    }
  }

  @NotNull
  @Override
  protected String getRollbackProposal() {
    return "However checkout has succeeded for the following " + repositories() + ":<br/>" +
           successfulRepositoriesJoined() + "<br/>" + ROLLBACK_PROPOSAL_FORMAT;
  }

  @NotNull
  @Override
  protected String getOperationName() {
    return "checkout";
  }

  /**
   * Rolls back successful repositories: checks out the previous HEAD and, if a
   * new branch was created during the operation, force-deletes it.
   */
  @Override
  protected void rollback() {
    GitCompoundResult checkoutResult = new GitCompoundResult(myProject);
    GitCompoundResult deleteResult = new GitCompoundResult(myProject);
    for (GitRepository repository : getSuccessfulRepositories()) {
      GitCommandResult result = myGit.checkout(repository, myCurrentHeads.get(repository), null, true, false);
      checkoutResult.append(repository, result);
      if (result.success() && myNewBranch != null) {
        /*
          force delete is needed, because we create new branch from branch other that the current one
          e.g. being on master create newBranch from feature,
          then rollback => newBranch is not fully merged to master (although it is obviously fully merged to feature).
         */
        deleteResult.append(repository, myGit.branchDelete(repository, myNewBranch, true));
      }
      refresh(repository);
    }
    if (!checkoutResult.totalSuccess() || !deleteResult.totalSuccess()) {
      StringBuilder message = new StringBuilder();
      if (!checkoutResult.totalSuccess()) {
        message.append("Errors during checkout: ");
        message.append(checkoutResult.getErrorOutputWithReposIndication());
      }
      if (!deleteResult.totalSuccess()) {
        message.append("Errors during deleting ").append(code(myNewBranch)).append(": ");
        message.append(deleteResult.getErrorOutputWithReposIndication());
      }
      VcsNotifier.getInstance(myProject).notifyError("Error during rollback",
                                                     message.toString());
    }
  }

  @NotNull
  private String getCommonErrorTitle() {
    return "Couldn't checkout " + myStartPointReference;
  }

  @NotNull
  @Override
  public String getSuccessMessage() {
    if (myNewBranch == null) {
      return String.format("Checked out <b><code>%s</code></b>", myStartPointReference);
    }
    return String.format("Checked out new branch <b><code>%s</code></b> from <b><code>%s</code></b>", myNewBranch, myStartPointReference);
  }

  // stash - checkout - unstash
  private boolean smartCheckout(@NotNull final List<GitRepository> repositories, @NotNull final String reference,
                                @Nullable final String newBranch, @NotNull ProgressIndicator indicator) {
    final AtomicBoolean result = new AtomicBoolean();
    GitPreservingProcess preservingProcess = new GitPreservingProcess(myProject, myGit,
                                                                      GitUtil.getRootsFromRepositories(repositories), "checkout", reference,
                                                                      GitVcsSettings.UpdateChangesPolicy.STASH, indicator,
                                                                      new Runnable() {
                                                                        @Override
                                                                        public void run() {
                                                                          result.set(checkoutOrNotify(repositories, reference, newBranch, false));
                                                                        }
                                                                      });
    preservingProcess.execute();
    return result.get();
  }

  /**
   * Checks out or shows an error message.
   */
  private boolean checkoutOrNotify(@NotNull List<GitRepository> repositories,
                                   @NotNull String reference, @Nullable String newBranch, boolean force) {
    GitCompoundResult compoundResult = new GitCompoundResult(myProject);
    for (GitRepository repository : repositories) {
      compoundResult.append(repository, myGit.checkout(repository, reference, newBranch, force, myDetach));
    }
    if (compoundResult.totalSuccess()) {
      return true;
    }
    notifyError("Couldn't checkout " + reference, compoundResult.getErrorOutputWithReposIndication());
    return false;
  }

  // Refreshes the VFS under each repository root and updates the cached repository state.
  private void refresh(GitRepository... repositories) {
    for (GitRepository repository : repositories) {
      refreshRoot(repository);
      // repository state will be auto-updated with this VFS refresh => in general there is no need to call GitRepository#update()
      // but to avoid problems of the asynchronous refresh, let's force update the repository info.
      repository.update();
    }
  }

  /** Triggers {@link #rollback()} when the "Rollback" link in the notification is clicked. */
  private class RollbackOperationNotificationListener implements NotificationListener {
    @Override
    public void hyperlinkUpdate(@NotNull Notification notification,
                                @NotNull HyperlinkEvent event) {
      if (event.getEventType() == HyperlinkEvent.EventType.ACTIVATED && event.getDescription().equalsIgnoreCase("rollback")) {
        rollback();
      }
    }
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.oak.query;
import java.util.Arrays;
import java.util.Comparator;
import org.apache.jackrabbit.JcrConstants;
import org.apache.jackrabbit.oak.api.PropertyValue;
import org.apache.jackrabbit.oak.api.ResultRow;
import org.apache.jackrabbit.oak.api.Tree;
import org.apache.jackrabbit.oak.query.ast.ColumnImpl;
import org.apache.jackrabbit.oak.query.ast.OrderingImpl;
import org.apache.jackrabbit.oak.plugins.memory.PropertyValues;
import org.apache.jackrabbit.oak.spi.query.QueryConstants;
/**
* A query result row that keeps all data (for this row only) in memory.
*/
public class ResultRowImpl implements ResultRow {

    private final Query query;

    // One tree per selector; an entry may be null if the selector did not match.
    private final Tree[] trees;

    /**
     * The column values.
     */
    private final PropertyValue[] values;

    /**
     * Whether the value at the given index is used for comparing rows (used
     * within hashCode and equals). If null, all columns are distinct.
     */
    private final boolean[] distinctValues;

    /**
     * The values used for ordering.
     */
    private final PropertyValue[] orderValues;

    ResultRowImpl(Query query, Tree[] trees, PropertyValue[] values, boolean[] distinctValues, PropertyValue[] orderValues) {
        this.query = query;
        this.trees = trees;
        this.values = values;
        this.distinctValues = distinctValues;
        this.orderValues = orderValues;
    }

    PropertyValue[] getOrderValues() {
        return orderValues;
    }

    @Override
    public String getPath() {
        return getPath(null);
    }

    /**
     * Returns the path of the tree for the given selector, or null if the
     * selector did not match.
     */
    @Override
    public String getPath(String selectorName) {
        Tree tree = getTree(selectorName);
        if (tree != null) {
            return tree.getPath();
        } else {
            return null;
        }
    }

    /**
     * Returns the tree for the given selector. A null selector name is only
     * allowed for single-selector queries.
     *
     * @throws IllegalArgumentException if selectorName is null and the query
     *         has zero or more than one selector
     */
    @Override
    public Tree getTree(String selectorName) {
        if (selectorName == null) {
            if (trees.length > 1) {
                throw new IllegalArgumentException("More than one selector");
            } else if (trees.length == 0) {
                throw new IllegalArgumentException("This query does not have a selector");
            }
            return trees[0];
        }
        int index = query.getSelectorIndex(selectorName);
        if (trees == null || index >= trees.length) {
            return null;
        }
        return trees[index];
    }

    /**
     * Returns the value of the given column. Also answers the pseudo-columns
     * jcr:path and rep:excerpt(...) even when they were not explicitly selected.
     *
     * @throws IllegalArgumentException if the column is unknown
     */
    @Override
    public PropertyValue getValue(String columnName) {
        int index = query.getColumnIndex(columnName);
        if (index >= 0) {
            return values[index];
        }
        if (JcrConstants.JCR_PATH.equals(columnName)) {
            return PropertyValues.newString(getPath());
        }
        // OAK-318:
        // somebody might call rep:excerpt(text)
        // even though the query doesn't contain that column
        if (columnName.startsWith(QueryConstants.REP_EXCERPT)) {
            int columnIndex = query.getColumnIndex(QueryConstants.REP_EXCERPT);
            PropertyValue indexExcerptValue = null;
            if (columnIndex >= 0) {
                indexExcerptValue = values[columnIndex];
                if (indexExcerptValue != null) {
                    if (QueryConstants.REP_EXCERPT.equals(columnName) || SimpleExcerptProvider.REP_EXCERPT_FN.equals(columnName)) {
                        return SimpleExcerptProvider.getExcerpt(indexExcerptValue);
                    }
                }
            }
            return getFallbackExcerpt(columnName, indexExcerptValue);
        }
        throw new IllegalArgumentException("Column not found: " + columnName);
    }

    // Builds an excerpt when the index did not supply a usable one; falls back
    // to the index value, and finally to the node path.
    private PropertyValue getFallbackExcerpt(String columnName, PropertyValue indexValue) {
        String ex = SimpleExcerptProvider.getExcerpt(getPath(), columnName,
                query, true);
        // NOTE(review): the "> 24" threshold presumably rejects trivially short
        // excerpts in favor of the index-provided one - confirm before changing.
        if (ex != null && ex.length() > 24) {
            return PropertyValues.newString(ex);
        } else if (indexValue != null) {
            return SimpleExcerptProvider.getExcerpt(indexValue);
        }
        return PropertyValues.newString(getPath());
    }

    /**
     * Returns a defensive copy of the column values, so callers cannot mutate
     * this row's state.
     */
    @Override
    public PropertyValue[] getValues() {
        PropertyValue[] v2 = new PropertyValue[values.length];
        System.arraycopy(values, 0, v2, 0, values.length);
        return v2;
    }

    @Override
    public String toString() {
        StringBuilder buff = new StringBuilder();
        for (String s : query.getSelectorNames()) {
            String p = getPath(s);
            if (p != null) {
                buff.append(s).append(": ").append(p).append(" ");
            }
        }
        ColumnImpl[] cols = query.getColumns();
        for (int i = 0; i < values.length; i++) {
            ColumnImpl c = cols[i];
            String n = c.getColumnName();
            if (n != null) {
                buff.append(n).append(": ").append(values[i]).append(" ");
            }
        }
        return buff.toString();
    }

    @Override
    public int hashCode() {
        int result = 1;
        result = 31 * result + Arrays.hashCode(getPaths());
        result = 31 * result + hashCodeOfValues();
        return result;
    }

    // Hash of the values participating in row comparison (see distinctValues).
    private int hashCodeOfValues() {
        int result = 1;
        for (int i = 0; i < values.length; i++) {
            if (distinctValues == null || distinctValues[i]) {
                PropertyValue v = values[i];
                result = 31 * result + (v == null ? 0 : v.hashCode());
            }
        }
        return result;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        } else if (obj == null) {
            return false;
        } else if (getClass() != obj.getClass()) {
            // Bug fix: this previously compared obj.getClass() against itself,
            // which is never unequal, so objects of a different class slipped
            // through to the cast below and threw ClassCastException.
            return false;
        }
        ResultRowImpl other = (ResultRowImpl) obj;
        if (!Arrays.equals(getPaths(), other.getPaths())) {
            return false;
        } else if (!Arrays.equals(distinctValues, other.distinctValues)) {
            return false;
        }
        // if distinctValues are equals, then the number of values
        // is also equal
        for (int i = 0; i < values.length; i++) {
            if (distinctValues == null || distinctValues[i]) {
                Object o1 = values[i];
                Object o2 = other.values[i];
                if (!(o1 == null ? o2 == null : o1.equals(o2))) {
                    return false;
                }
            }
        }
        return true;
    }

    // Paths of all selector trees, with null entries for unmatched selectors.
    private String[] getPaths() {
        String[] paths = new String[trees.length];
        for (int i = 0; i < trees.length; i++) {
            if (trees[i] != null) {
                paths[i] = trees[i].getPath();
            } else {
                paths[i] = null;
            }
        }
        return paths;
    }

    /**
     * Returns a comparator applying the given orderings in sequence, or null
     * if no orderings are given. Null order values sort first.
     */
    public static Comparator<ResultRowImpl> getComparator(
            final OrderingImpl[] orderings) {
        if (orderings == null) {
            return null;
        }
        return new Comparator<ResultRowImpl>() {

            @Override
            public int compare(ResultRowImpl o1, ResultRowImpl o2) {
                PropertyValue[] orderValues = o1.getOrderValues();
                PropertyValue[] orderValues2 = o2.getOrderValues();
                int comp = 0;
                for (int i = 0, size = orderings.length; i < size; i++) {
                    PropertyValue a = orderValues[i];
                    PropertyValue b = orderValues2[i];
                    if (a == null || b == null) {
                        if (a == b) {
                            comp = 0;
                        } else if (a == null) {
                            // TODO order by: nulls first (it looks like), or
                            // low?
                            comp = -1;
                        } else {
                            comp = 1;
                        }
                    } else {
                        comp = a.compareTo(b);
                    }
                    if (comp != 0) {
                        if (orderings[i].isDescending()) {
                            comp = -comp;
                        }
                        break;
                    }
                }
                return comp;
            }

        };
    }

}
| |
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.ide.projectView.impl;
import com.intellij.icons.AllIcons;
import com.intellij.ide.IdeBundle;
import com.intellij.ide.SelectInTarget;
import com.intellij.ide.impl.ProjectPaneSelectInTarget;
import com.intellij.ide.projectView.BaseProjectTreeBuilder;
import com.intellij.ide.projectView.ProjectView;
import com.intellij.ide.projectView.ProjectViewSettings;
import com.intellij.ide.projectView.ViewSettings;
import com.intellij.ide.projectView.impl.nodes.*;
import com.intellij.ide.scratch.ScratchUtil;
import com.intellij.ide.util.treeView.AbstractTreeBuilder;
import com.intellij.ide.util.treeView.AbstractTreeNode;
import com.intellij.ide.util.treeView.AbstractTreeUpdater;
import com.intellij.openapi.project.DumbService;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.ProjectFileIndex;
import com.intellij.openapi.roots.ProjectRootManager;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.registry.Registry;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.vfs.newvfs.ArchiveFileSystem;
import com.intellij.psi.PsiDirectory;
import com.intellij.util.PlatformUtils;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import javax.accessibility.AccessibleContext;
import javax.swing.*;
import javax.swing.tree.DefaultMutableTreeNode;
import javax.swing.tree.DefaultTreeModel;
import javax.swing.tree.TreeModel;
import java.awt.*;
import static com.intellij.openapi.module.ModuleGrouperKt.isQualifiedModuleNamesEnabled;
import static java.awt.EventQueue.isDispatchThread;
/**
 * The standard "Project" pane of the Project tool window: presents the project
 * content as a PSI-backed tree of modules, directories and files.
 */
public class ProjectViewPane extends AbstractProjectViewPSIPane {
  /** Stable identifier under which this pane and its view settings are registered. */
  @NonNls public static final String ID = "ProjectPane";

  public ProjectViewPane(Project project) {
    super(project);
  }

  @NotNull
  @Override
  public String getTitle() {
    return IdeBundle.message("title.project");
  }

  @Override
  @NotNull
  public String getId() {
    return ID;
  }

  @NotNull
  @Override
  public Icon getIcon() {
    return AllIcons.General.ProjectTab;
  }

  @NotNull
  @Override
  public SelectInTarget createSelectInTarget() {
    return new ProjectPaneSelectInTarget(myProject);
  }

  @NotNull
  @Override
  protected AbstractTreeUpdater createTreeUpdater(@NotNull AbstractTreeBuilder treeBuilder) {
    return new ProjectViewTreeUpdater(treeBuilder);
  }

  @NotNull
  @Override
  protected ProjectAbstractTreeStructureBase createStructure() {
    return new ProjectViewPaneTreeStructure();
  }

  @NotNull
  @Override
  protected ProjectViewTree createTree(@NotNull DefaultTreeModel treeModel) {
    return new ProjectViewTree(treeModel) {
      @Override
      public String toString() {
        return getTitle() + " " + super.toString();
      }

      @Override
      public void setFont(Font font) {
        // Optional registry switch that bumps the project-view font by one point.
        if (Registry.is("bigger.font.in.project.view")) {
          font = font.deriveFont(font.getSize() + 1.0f);
        }
        super.setFont(font);
      }

      @Override
      public AccessibleContext getAccessibleContext() {
        if (accessibleContext == null) {
          // Lazily created; give screen readers a meaningful name for the tree.
          accessibleContext = super.getAccessibleContext();
          accessibleContext.setAccessibleName(IdeBundle.message("project.structure.tree.accessible.name"));
        }
        return accessibleContext;
      }
    };
  }

  @NotNull
  public String getComponentName() {
    // Reuse the pane identifier instead of duplicating the "ProjectPane" literal.
    return ID;
  }

  /**
   * @return {@code true} if 'Project View' have more than one top-level module node or have top-level module group nodes
   */
  private boolean hasSeveralTopLevelModuleNodes() {
    if (!isDispatchThread()) return true; // do not check nodes during building
    // TODO: have to rewrite this logic without using walking in a tree
    TreeModel treeModel = myTree.getModel();
    Object root = treeModel.getRoot();
    int count = treeModel.getChildCount(root);
    if (count <= 1) return false;
    int moduleNodes = 0;
    for (int i = 0; i < count; i++) {
      Object child = treeModel.getChild(root, i);
      if (child instanceof DefaultMutableTreeNode) {
        Object node = ((DefaultMutableTreeNode)child).getUserObject();
        if (node instanceof ProjectViewModuleNode || node instanceof PsiDirectoryNode) {
          moduleNodes++;
          if (moduleNodes > 1) {
            return true;
          }
        }
        else if (node instanceof ModuleGroupNode) {
          return true;
        }
      }
    }
    return false;
  }

  @Override
  public boolean isFileNestingEnabled() {
    return true;
  }

  // should be first
  @Override
  public int getWeight() {
    return 0;
  }

  /** Updater that widens directory updates past hidden empty middle packages. */
  private final class ProjectViewTreeUpdater extends AbstractTreeUpdater {
    private ProjectViewTreeUpdater(final AbstractTreeBuilder treeBuilder) {
      super(treeBuilder);
    }

    @Override
    public boolean addSubtreeToUpdateByElement(@NotNull Object element) {
      if (element instanceof PsiDirectory && !myProject.isDisposed()) {
        final PsiDirectory dir = (PsiDirectory)element;
        final ProjectTreeStructure treeStructure = (ProjectTreeStructure)myTreeStructure;
        PsiDirectory dirToUpdateFrom = dir;

        // optimization
        // isEmptyMiddleDirectory can be slow when project VFS is not fully loaded (initial dumb mode).
        // It's easiest to disable the optimization in any dumb mode
        if (!treeStructure.isFlattenPackages() && treeStructure.isHideEmptyMiddlePackages() && !DumbService.isDumb(myProject)) {
          while (dirToUpdateFrom != null && ProjectViewDirectoryHelper.getInstance(myProject).isEmptyMiddleDirectory(dirToUpdateFrom, true)) {
            dirToUpdateFrom = dirToUpdateFrom.getParentDirectory();
          }
        }
        boolean addedOk;
        // Walk upwards until some ancestor (or the root) accepts the update.
        while (!(addedOk = super.addSubtreeToUpdateByElement(dirToUpdateFrom == null? myTreeStructure.getRootElement() : dirToUpdateFrom))) {
          if (dirToUpdateFrom == null) {
            break;
          }
          dirToUpdateFrom = dirToUpdateFrom.getParentDirectory();
        }
        return addedOk;
      }

      return super.addSubtreeToUpdateByElement(element);
    }
  }

  /** Tree structure for this pane; settings are delegated to {@link ProjectView}. */
  private final class ProjectViewPaneTreeStructure extends ProjectTreeStructure implements ProjectViewSettings {
    ProjectViewPaneTreeStructure() {
      super(ProjectViewPane.this.myProject, ID);
    }

    @Override
    protected AbstractTreeNode<?> createRoot(@NotNull Project project, @NotNull ViewSettings settings) {
      return new ProjectViewProjectNode(project, settings);
    }

    @Override
    public boolean isShowExcludedFiles() {
      return ProjectView.getInstance(myProject).isShowExcludedFiles(ID);
    }

    @Override
    public boolean isShowLibraryContents() {
      return true;
    }

    @Override
    public boolean isShowVisibilityIcons() {
      return ProjectView.getInstance(myProject).isShowVisibilityIcons(ID);
    }

    @Override
    public boolean isUseFileNestingRules() {
      return ProjectView.getInstance(myProject).isUseFileNestingRules(ID);
    }

    @Override
    public boolean isToBuildChildrenInBackground(@NotNull Object element) {
      return Registry.is("ide.projectView.ProjectViewPaneTreeStructure.BuildChildrenInBackground");
    }
  }

  @Override
  protected BaseProjectTreeBuilder createBuilder(@NotNull DefaultTreeModel model) {
    // Returning null means the new async tree model is used instead of a builder.
    return null;
  }

  public static boolean canBeSelectedInProjectView(@NotNull Project project, @NotNull VirtualFile file) {
    final VirtualFile archiveFile;

    // For entries inside an archive, check the archive file on the local file system.
    if(file.getFileSystem() instanceof ArchiveFileSystem)
      archiveFile = ((ArchiveFileSystem)file.getFileSystem()).getLocalByEntry(file);
    else
      archiveFile = null;

    ProjectFileIndex index = ProjectRootManager.getInstance(project).getFileIndex();
    return (archiveFile != null && index.getContentRootForFile(archiveFile, false) != null) ||
           index.getContentRootForFile(file, false) != null ||
           index.isInLibrary(file) ||
           Comparing.equal(file.getParent(), project.getBaseDir()) ||
           ScratchUtil.isScratch(file);
  }

  @Override
  public boolean supportsFlattenModules() {
    return PlatformUtils.isIntelliJ() && isQualifiedModuleNamesEnabled(myProject) && hasSeveralTopLevelModuleNodes();
  }

  @Override
  public boolean supportsShowExcludedFiles() {
    return true;
  }
}
| |
/*
* Copyright 2017 Matthew Tamlin
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.matthewtamlin.mixtape.library_tests.databinders;
import android.graphics.drawable.Drawable;
import android.support.test.runner.AndroidJUnit4;
import android.support.v4.util.LruCache;
import android.widget.ImageView;
import com.matthewtamlin.mixtape.library.data.DisplayableDefaults;
import com.matthewtamlin.mixtape.library.data.LibraryItem;
import com.matthewtamlin.mixtape.library.data.LibraryReadException;
import com.matthewtamlin.mixtape.library.databinders.ArtworkBinder;
import com.matthewtamlin.mixtape.library.databinders.TitleBinder;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.nullValue;
import static org.hamcrest.core.Is.is;
import static org.mockito.Matchers.anyInt;
import static org.mockito.Mockito.atLeastOnce;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
/**
* Tests for the {@link ArtworkBinder} class.
*/
@RunWith(AndroidJUnit4.class)
public class TestArtworkBinder {
	/**
	 * The length of time to pause for when waiting for background tasks to finish.
	 */
	private static final int PAUSE_DURATION = 500;

	/**
	 * A mock artwork item.
	 */
	private Drawable artwork;

	/**
	 * A mock default artwork item. This should be returned by the defaults.
	 */
	private Drawable defaultArtwork;

	/**
	 * A mock artwork item. This should be inserted into the cache directly.
	 */
	private Drawable cachedArtwork;

	/**
	 * A mock LibraryItem which returns artwork but no title or subtitle.
	 */
	private LibraryItem libraryItem;

	/**
	 * An actual cache for use in testing, not a mock.
	 */
	private LruCache<LibraryItem, Drawable> cache;

	/**
	 * A mock DisplayableDefaults object which returns default artwork but no title or subtitle.
	 */
	private DisplayableDefaults displayableDefaults;

	/**
	 * A mock ImageView which data can be bound to.
	 */
	private ImageView imageView;

	/**
	 * Initialises the testing objects and assigns them to member variables.
	 */
	@Before
	public void setup() throws LibraryReadException {
		artwork = mock(Drawable.class);
		when(artwork.getIntrinsicWidth()).thenReturn(100);
		when(artwork.getIntrinsicHeight()).thenReturn(100);

		defaultArtwork = mock(Drawable.class);
		when(defaultArtwork.getIntrinsicWidth()).thenReturn(100);
		when(defaultArtwork.getIntrinsicHeight()).thenReturn(100);

		cachedArtwork = mock(Drawable.class);
		when(cachedArtwork.getIntrinsicWidth()).thenReturn(100);
		when(cachedArtwork.getIntrinsicHeight()).thenReturn(100);

		libraryItem = mock(LibraryItem.class);
		when(libraryItem.getArtwork(anyInt(), anyInt())).thenReturn(artwork);

		cache = new LruCache<>(10);

		displayableDefaults = mock(DisplayableDefaults.class);
		when(displayableDefaults.getArtwork()).thenReturn(defaultArtwork);

		imageView = mock(ImageView.class);
	}

	/**
	 * Test to verify that the correct exception is thrown when the {@code cache} argument of {@link
	 * ArtworkBinder#ArtworkBinder(LruCache, DisplayableDefaults)} is null. The test will only pass
	 * if an IllegalArgumentException is thrown.
	 */
	@Test(expected = IllegalArgumentException.class)
	public void testConstructor_invalidArgs_nullCache() {
		new ArtworkBinder(null, displayableDefaults);
	}

	/**
	 * Test to verify that the correct exception is thrown when the {@code defaults} argument of
	 * {@link ArtworkBinder#ArtworkBinder(LruCache, DisplayableDefaults)} is null. The test will
	 * only pass if an IllegalArgumentException is thrown.
	 */
	@Test(expected = IllegalArgumentException.class)
	public void testConstructor_invalidArgs_nullDefaults() {
		new ArtworkBinder(cache, null);
	}

	/**
	 * Test to verify that the {@link ArtworkBinder#ArtworkBinder(LruCache, DisplayableDefaults)}
	 * constructor functions correctly when provided with valid arguments. The test will only pass
	 * if the getters return the values passed to the constructor.
	 */
	@Test // was missing, so this test was silently never executed by the runner
	public void testConstructor_validArgs() {
		final ArtworkBinder binder = new ArtworkBinder(cache, displayableDefaults);

		assertThat("Incorrect cache.", binder.getCache(), is(cache));
		assertThat("Incorrect defaults.", binder.getDefaults(), is(displayableDefaults));
	}

	/**
	 * Test to verify that the correct exception is thrown when the {@code view} argument of {@link
	 * ArtworkBinder#bind(ImageView, LibraryItem)} is null. The test will only pass if an
	 * IllegalArgumentException is thrown.
	 */
	@Test(expected = IllegalArgumentException.class)
	public void testBind_invalidArgs_nullView() {
		final ArtworkBinder binder = new ArtworkBinder(cache, displayableDefaults);

		binder.bind(null, libraryItem);
	}

	/**
	 * Test to verify that the {@link ArtworkBinder#bind(ImageView, LibraryItem)} method functions
	 * correctly when the {@code data} argument is null. The test will only pass if null is bound to
	 * the view.
	 */
	@Test
	public void testBind_nullData() {
		final ArtworkBinder binder = new ArtworkBinder(cache, displayableDefaults);

		binder.bind(imageView, null);

		waitForAsyncEventsToFinish();

		verify(imageView, atLeastOnce()).setImageDrawable(null);
		verify(imageView, never()).setImageDrawable(artwork);
		verify(imageView, never()).setImageDrawable(defaultArtwork);
	}

	/**
	 * Test to verify that the {@link ArtworkBinder#bind(ImageView, LibraryItem)} method functions
	 * correctly when the {@code data} argument is not null, but it returns null artwork. The test
	 * will only pass if null is bound to the view.
	 */
	@Test
	public void testBind_nullArtwork() throws LibraryReadException {
		final ArtworkBinder binder = new ArtworkBinder(cache, displayableDefaults);
		when(libraryItem.getArtwork(anyInt(), anyInt())).thenReturn(null);

		binder.bind(imageView, libraryItem);

		waitForAsyncEventsToFinish();

		verify(imageView, atLeastOnce()).setImageDrawable(null);
		verify(imageView, never()).setImageDrawable(artwork);
		verify(imageView, never()).setImageDrawable(defaultArtwork);
	}

	/**
	 * Test to verify that the {@link ArtworkBinder#bind(ImageView, LibraryItem)} method functions
	 * correctly when the cache already contains artwork for the bound LibraryItem. The test will
	 * only pass if the cached artwork is bound to the view.
	 */
	@Test
	public void testBind_dataCached() {
		final ArtworkBinder binder = new ArtworkBinder(cache, displayableDefaults);
		cache.put(libraryItem, cachedArtwork);

		binder.bind(imageView, libraryItem);

		waitForAsyncEventsToFinish();

		verify(imageView).setImageDrawable(cachedArtwork);
		assertThat("Artwork was removed from the cache.", cache.get(libraryItem),
				is(cachedArtwork));
	}

	/**
	 * Test to verify that the {@link ArtworkBinder#bind(ImageView, LibraryItem)} method functions
	 * correctly when the cache does not contain artwork for the bound LibraryItem, and the
	 * LibraryItem provides access to artwork. The test will only pass if the item's artwork is
	 * bound to the view.
	 */
	@Test
	public void testBind_dataNotCached_dataAccessible() {
		final ArtworkBinder binder = new ArtworkBinder(cache, displayableDefaults);

		binder.bind(imageView, libraryItem);

		waitForAsyncEventsToFinish();

		verify(imageView).setImageDrawable(artwork);
		assertThat("Artwork was not added to the cache.", cache.get(libraryItem), is(artwork));
	}

	/**
	 * Test to verify that the {@link ArtworkBinder#bind(ImageView, LibraryItem)} method functions
	 * correctly when the cache does not contain artwork for the bound LibraryItem, and the
	 * LibraryItem fails to provide access to artwork. The test will only pass if the default
	 * artwork is bound to the view.
	 */
	@Test
	public void testBind_dataNotCached_dataInaccessible() throws LibraryReadException {
		final ArtworkBinder binder = new ArtworkBinder(cache, displayableDefaults);
		final LibraryItem inaccessibleItem = mock(LibraryItem.class);
		when(inaccessibleItem.getArtwork(anyInt(), anyInt())).thenThrow(new LibraryReadException());

		binder.bind(imageView, inaccessibleItem);

		waitForAsyncEventsToFinish();

		verify(imageView).setImageDrawable(defaultArtwork);
		assertThat("Something was added to the cache.", cache.get(libraryItem), is(nullValue()));
	}

	/**
	 * Suspends execution of the current thread. The duration is defined by the {@code
	 * PAUSE_DURATION} constant.
	 */
	private void waitForAsyncEventsToFinish() {
		try {
			Thread.sleep(PAUSE_DURATION);
		} catch (final InterruptedException e) {
			throw new RuntimeException("Wait interrupted, test aborted.");
		}
	}
}
| |
/*
* Copyright 2021 Apollo Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.ctrip.framework.apollo.core;
import com.ctrip.framework.apollo.core.enums.Env;
import java.util.Collections;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import com.ctrip.framework.apollo.core.utils.DeferredLoggerFactory;
import org.slf4j.Logger;
import com.ctrip.framework.apollo.core.spi.MetaServerProvider;
import com.ctrip.framework.apollo.core.utils.ApolloThreadFactory;
import com.ctrip.framework.apollo.core.utils.NetUtil;
import com.ctrip.framework.apollo.tracer.Tracer;
import com.ctrip.framework.apollo.tracer.spi.Transaction;
import com.ctrip.framework.foundation.internals.ServiceBootstrap;
import com.google.common.base.Strings;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
/**
* The meta domain will try to load the meta server address from MetaServerProviders, the default
* ones are:
*
* <ul>
* <li>com.ctrip.framework.apollo.core.internals.LegacyMetaServerProvider</li>
* </ul>
* <p>
* If no provider could provide the meta server url, the default meta url will be used(http://apollo.meta).
* <br />
* <p>
* 3rd party MetaServerProvider could be injected by typical Java Service Loader pattern.
*
* @see com.ctrip.framework.apollo.core.internals.LegacyMetaServerProvider
*/
public class MetaDomainConsts {

  public static final String DEFAULT_META_URL = "http://apollo.meta";

  // env -> meta server address cache
  private static final Map<Env, String> metaServerAddressCache = Maps.newConcurrentMap();
  private static volatile List<MetaServerProvider> metaServerProviders = null;

  private static final long REFRESH_INTERVAL_IN_SECOND = 60;// 1 min
  private static final Logger logger = DeferredLoggerFactory.getLogger(MetaDomainConsts.class);
  // comma separated meta server address -> selected single meta server address cache
  private static final Map<String, String> selectedMetaServerAddressCache = Maps.newConcurrentMap();
  private static final AtomicBoolean periodicRefreshStarted = new AtomicBoolean(false);

  private static final Object LOCK = new Object();

  /**
   * Utility holder of static state only; not meant to be instantiated.
   */
  private MetaDomainConsts() {
  }

  /**
   * Return one meta server address. If multiple meta server addresses are configured, will select
   * one.
   */
  public static String getDomain(Env env) {
    String metaServerAddress = getMetaServerAddress(env);
    // if there is more than one address, need to select one
    if (metaServerAddress.contains(",")) {
      return selectMetaServerAddress(metaServerAddress);
    }
    return metaServerAddress;
  }

  /**
   * Return meta server address. If multiple meta server addresses are configured, will return the
   * comma separated string.
   */
  public static String getMetaServerAddress(Env env) {
    if (!metaServerAddressCache.containsKey(env)) {
      initMetaServerAddress(env);
    }

    return metaServerAddressCache.get(env);
  }

  /**
   * Resolves the meta server address for {@code env} by asking each provider in priority order,
   * falling back to {@link #DEFAULT_META_URL}, and caches the result.
   */
  private static void initMetaServerAddress(Env env) {
    // Double-checked locking on a volatile list to load providers exactly once.
    if (metaServerProviders == null) {
      synchronized (LOCK) {
        if (metaServerProviders == null) {
          metaServerProviders = initMetaServerProviders();
        }
      }
    }

    String metaAddress = null;

    for (MetaServerProvider provider : metaServerProviders) {
      metaAddress = provider.getMetaServerAddress(env);
      if (!Strings.isNullOrEmpty(metaAddress)) {
        logger.info("Located meta server address {} for env {} from {}", metaAddress, env,
            provider.getClass().getName());
        break;
      }
    }

    if (Strings.isNullOrEmpty(metaAddress)) {
      // Fallback to default meta address
      metaAddress = DEFAULT_META_URL;
      logger.warn(
          "Meta server address fallback to {} for env {}, because it is not available in all MetaServerProviders",
          metaAddress, env);
    }

    metaServerAddressCache.put(env, metaAddress.trim());
  }

  /**
   * Loads all {@link MetaServerProvider} implementations via the service loader and sorts them so
   * that providers with a smaller order value are consulted first.
   */
  private static List<MetaServerProvider> initMetaServerProviders() {
    Iterator<MetaServerProvider> metaServerProviderIterator = ServiceBootstrap
        .loadAll(MetaServerProvider.class);

    List<MetaServerProvider> metaServerProviders = Lists.newArrayList(metaServerProviderIterator);

    Collections.sort(metaServerProviders, new Comparator<MetaServerProvider>() {
      @Override
      public int compare(MetaServerProvider o1, MetaServerProvider o2) {
        // the smaller order has higher priority
        return Integer.compare(o1.getOrder(), o2.getOrder());
      }
    });

    return metaServerProviders;
  }

  /**
   * Select one available meta server from the comma separated meta server addresses, e.g.
   * http://1.2.3.4:8080,http://2.3.4.5:8080
   * <p>
   * <br />
   * <p>
   * In production environment, we still suggest using one single domain like
   * http://config.xxx.com(backed by software load balancers like nginx) instead of multiple ip
   * addresses
   */
  private static String selectMetaServerAddress(String metaServerAddresses) {
    String metaAddressSelected = selectedMetaServerAddressCache.get(metaServerAddresses);
    if (metaAddressSelected == null) {
      // initialize
      if (periodicRefreshStarted.compareAndSet(false, true)) {
        schedulePeriodicRefresh();
      }
      updateMetaServerAddresses(metaServerAddresses);
      metaAddressSelected = selectedMetaServerAddressCache.get(metaServerAddresses);
    }

    return metaAddressSelected;
  }

  /**
   * Pings each configured address (shuffled for random load balancing) and caches the first one
   * whose {@code /services/config} endpoint is reachable; when none responds, the first address is
   * cached anyway so callers always get a non-null value.
   */
  private static void updateMetaServerAddresses(String metaServerAddresses) {
    logger.debug("Selecting meta server address for: {}", metaServerAddresses);

    Transaction transaction = Tracer
        .newTransaction("Apollo.MetaService", "refreshMetaServerAddress");
    transaction.addData("Url", metaServerAddresses);

    try {
      List<String> metaServers = Lists.newArrayList(metaServerAddresses.split(","));
      // random load balancing
      Collections.shuffle(metaServers);

      boolean serverAvailable = false;

      for (String address : metaServers) {
        address = address.trim();
        //check whether /services/config is accessible
        if (NetUtil.pingUrl(address + "/services/config")) {
          // select the first available meta server
          selectedMetaServerAddressCache.put(metaServerAddresses, address);
          serverAvailable = true;
          logger.debug("Selected meta server address {} for {}", address, metaServerAddresses);
          break;
        }
      }

      // we need to make sure the map is not empty, e.g. the first update might be failed
      if (!selectedMetaServerAddressCache.containsKey(metaServerAddresses)) {
        selectedMetaServerAddressCache.put(metaServerAddresses, metaServers.get(0).trim());
      }

      if (!serverAvailable) {
        logger.warn(
            "Could not find available meta server for configured meta server addresses: {}, fallback to: {}",
            metaServerAddresses, selectedMetaServerAddressCache.get(metaServerAddresses));
      }

      transaction.setStatus(Transaction.SUCCESS);
    } catch (Throwable ex) {
      transaction.setStatus(ex);
      throw ex;
    } finally {
      transaction.complete();
    }
  }

  /**
   * Starts a single daemon scheduler that re-validates every cached meta server selection at a
   * fixed interval, so a failed server is eventually replaced by an available one.
   */
  private static void schedulePeriodicRefresh() {
    ScheduledExecutorService scheduledExecutorService =
        Executors.newScheduledThreadPool(1, ApolloThreadFactory.create("MetaServiceLocator", true));

    scheduledExecutorService.scheduleAtFixedRate(new Runnable() {
      @Override
      public void run() {
        try {
          for (String metaServerAddresses : selectedMetaServerAddressCache.keySet()) {
            updateMetaServerAddresses(metaServerAddresses);
          }
        } catch (Throwable ex) {
          logger
              .warn(String.format("Refreshing meta server address failed, will retry in %d seconds",
                  REFRESH_INTERVAL_IN_SECOND), ex);
        }
      }
    }, REFRESH_INTERVAL_IN_SECOND, REFRESH_INTERVAL_IN_SECOND, TimeUnit.SECONDS);
  }
}
| |
/*
* Copyright (c) 2002-2007 Sun Microsystems, Inc. All rights reserved.
*
* The Sun Project JXTA(TM) Software License
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* 3. The end-user documentation included with the redistribution, if any, must
* include the following acknowledgment: "This product includes software
* developed by Sun Microsystems, Inc. for JXTA(TM) technology."
* Alternately, this acknowledgment may appear in the software itself, if
* and wherever such third-party acknowledgments normally appear.
*
* 4. The names "Sun", "Sun Microsystems, Inc.", "JXTA" and "Project JXTA" must
* not be used to endorse or promote products derived from this software
* without prior written permission. For written permission, please contact
* Project JXTA at http://www.jxta.org.
*
* 5. Products derived from this software may not be called "JXTA", nor may
* "JXTA" appear in their name, without prior written permission of Sun.
*
* THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED WARRANTIES,
* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
* FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL SUN
* MICROSYSTEMS OR ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
* OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
* EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* JXTA is a registered trademark of Sun Microsystems, Inc. in the United
* States and other countries.
*
* Please see the license information page at :
* <http://www.jxta.org/project/www/license.html> for instructions on use of
* the license in source files.
*
* ====================================================================
*
* This software consists of voluntary contributions made by many individuals
* on behalf of Project JXTA. For more information on Project JXTA, please see
* http://www.jxta.org.
*
* This license is based on the BSD license adopted by the Apache Foundation.
*/
package net.jxta.impl.util;
import java.util.HashMap;
import java.util.Map;
/**
* A Cache which is similar to {@link java.util.LinkedHashMap}
*
* <p/>LinkedList cannot be used efficiently because it
* cannot remove an element efficiently from the middle. For that, we need
* the externally referenced element (the thing to be removed) to
* be the list entry itself, rather than referenced by an invisible
* list entry. That is why we use the DLink/Dlist family.
*/
public class Cache {
/**
* CacheEntryImpl objects are both part of a doubly linked list and
* inserted in a HashMap. They refer to the thing mapped which is what
* users of this class want to get, and to the key. The reason is
* that we need the key to remove from the map
* an entry that we found in list. The otherway around is made easy by
* the nature of the dlinked structure.
**/
class CacheEntryImpl extends Dlink implements CacheEntry {
private final Object value;
private final Object key;
// The application interface.
public CacheEntryImpl(Object k, Object v) {
key = k;
value = v;
}
/**
* {@inheritDoc}
**/
public Object getKey() {
return key;
}
/**
* {@inheritDoc}
**/
public Object getValue() {
return value;
}
}
private final long maxSize;
private long size;
private final Map map = new HashMap();
private final Dlist lru = new Dlist();
private final CacheEntryListener listener;
/**
* Creates a cache whih will keep at most maxSize purgeable entries.
* Every new entry is purgeable by default.
*
* <p/>Entries that are not purgeable are not counted and are never removed
* unless clear() or remove() is called. Purgeable entries are removed
* silently as needed to make room for new entries so that the number
* of purgeable entries remains < maxSize.
*
* <p/>Entries prugeability is controlled by invoking the sticky() method
* or the stickyCacheEntry() method.
*
* <p/>For now, purged entries are abandonned to the GC which is probably not
* so bad. To permit acceleration of the collection of resources, a
* purge listener will be added soon.
*/
public Cache(long maxSize, CacheEntryListener listener) {
this.maxSize = maxSize;
this.size = 0;
this.listener = listener;
}
/**
* Empties the cache completely.
* The entries are abandonned to the GC.
*/
public void clear() {
lru.clear();
map.clear();
}
/**
* Purges some of the cache.
* The entries are cleaned-up properly.
*/
public void purge(int fraction) {
if (size == 0) {
return;
}
if (fraction == 0) {
fraction = 1;
}
long nbToPurge = size / fraction;
if (nbToPurge == 0) {
nbToPurge = 1;
}
while (nbToPurge-- > 0) {
CacheEntryImpl toRm = (CacheEntryImpl) lru.next();
map.remove(toRm.getKey());
toRm.unlink();
--size;
if (listener != null) {
listener.purged(toRm);
}
}
}
/**
* Inserts the given cache entry directly.
* Returns the previous cache entry associated with the given key, if any.
* Not exposed yet. Should not be a problem to expose it, but it is not
* needed yet.
*/
protected CacheEntry putCacheEntry(Object key, CacheEntry value) {
if (size == maxSize) {
CacheEntryImpl toRm = (CacheEntryImpl) lru.next();
map.remove(toRm.getKey());
toRm.unlink();
--size;
if (listener != null) {
listener.purged(toRm);
}
}
lru.putLast((CacheEntryImpl) value);
++size;
CacheEntryImpl oldEntry = (CacheEntryImpl) map.put(key, value);
if (oldEntry == null) {
return null;
}
if (oldEntry.isLinked()) {
oldEntry.unlink();
--size;
}
return oldEntry;
}
/**
* Create a cache entry to hold the given value, and insert it.
* Returns the previous value associated with the given key, if any.
*/
public Object put(Object key, Object value) {
CacheEntry oldEntry = putCacheEntry(key, new CacheEntryImpl(key, value));
if (oldEntry == null) {
return null;
}
return oldEntry.getValue();
}
/**
* Remove the value, if any, and cacheEntry associated with the given key.
* return the cacheEntry that has been removed.
* Not exposed yet. Should not be a problem to expose it, but it is not
* needed yet.
*/
protected CacheEntry removeCacheEntry(Object key) {
CacheEntryImpl oldEntry = (CacheEntryImpl) map.remove(key);
if (oldEntry == null) {
return null;
}
if (oldEntry.isLinked()) {
oldEntry.unlink();
--size;
}
return oldEntry;
}
/**
* Remove the value, if any, and cacheEntry associated with the given key.
* returns the value that has been removed.
*/
public Object remove(Object key) {
CacheEntry oldEntry = removeCacheEntry(key);
if (oldEntry == null) {
return null;
}
return oldEntry.getValue();
}
/**
* Return the cache entry, if any, associated with the given key.
* This is public; it improves performance by letting the application
* do a single lookup instead of two when it needs to find an object in
* the cache and then change its purgeability.
*/
public CacheEntry getCacheEntry(Object key) {
CacheEntryImpl foundEntry = (CacheEntryImpl) map.get(key);
if (foundEntry == null) {
return null;
}
// Leave the purgeability status alone but manage lru position if
// purgeable.
if (foundEntry.isLinked()) {
lru.putLast(foundEntry);
}
return foundEntry;
}
/**
* Return the value, if any associated with the given key.
*/
public Object get(Object key) {
CacheEntry foundEntry = getCacheEntry(key);
if (foundEntry == null) {
return null;
}
return foundEntry.getValue();
}
/**
 * Change the purgeability of the given cacheEntry.
 * If sticky is true, the entry cannot be purged.
 * Note: if the CacheEntry is known, it is more efficient to use this
 * method than sticky(), since sticky will perform a hashmap lookup
 * to locate the cache entry.
 */
public void stickyCacheEntry(CacheEntry ce, boolean sticky) {
    CacheEntryImpl target = (CacheEntryImpl) ce;
    if (sticky) {
        // Sticky => not purgeable: remove the entry from the LRU list so the
        // purge logic can never reach it. `size` counts linked entries only.
        if (!target.isLinked()) {
            // Already sticky: nothing to do.
            return;
        }
        target.unlink();
        --size;
    } else {
        // ! Sticky => purgeable: the entry must join the LRU list.
        if (target.isLinked()) {
            // Already purgeable: nothing to do.
            return;
        }
        if (size == maxSize) {
            // The purgeable set is full: evict the least-recently-used entry
            // to make room, notifying the purge listener if one is set.
            CacheEntryImpl toRm = (CacheEntryImpl) lru.next();
            map.remove(toRm.getKey());
            toRm.unlink();
            if (listener != null) {
                listener.purged(toRm);
            }
            --size;
        }
        lru.putLast(target);
        ++size;
    }
}
/**
 * Force the value associated with the given key to be purgeable or
 * non-purgeable from the cache (non-sticky vs. sticky).
 * Note: Most often, a call to the get() method will be performed
 * before it can be decided to invoke sticky(). Whenever this is the case
 * it is better to invoke getCacheEntry() + getValue() and then
 * stickyCacheEntry() since that eliminates one hashmap lookup.
 */
public void sticky(Object key, boolean sticky) {
    // Direct map lookup: unlike getCacheEntry(), this does not refresh the
    // entry's LRU position.
    final CacheEntry entry = (CacheEntry) map.get(key);
    if (entry != null) {
        stickyCacheEntry(entry, sticky);
    }
}
}
| |
/*
* $Id$
* This file is a part of the Arakhne Foundation Classes, http://www.arakhne.org/afc
*
* Copyright (c) 2000-2012 Stephane GALLAND.
* Copyright (c) 2005-10, Multiagent Team, Laboratoire Systemes et Transports,
* Universite de Technologie de Belfort-Montbeliard.
* Copyright (c) 2013-2020 The original authors, and other authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.arakhne.afc.nodefx;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.logging.Logger;
import javafx.application.ConditionalFeature;
import javafx.application.Platform;
import javafx.beans.binding.Bindings;
import javafx.beans.property.BooleanProperty;
import javafx.beans.property.DoubleProperty;
import javafx.beans.property.IntegerProperty;
import javafx.beans.property.ObjectProperty;
import javafx.beans.property.ReadOnlyObjectProperty;
import javafx.beans.property.SimpleObjectProperty;
import javafx.beans.value.ChangeListener;
import javafx.beans.value.ObservableValue;
import javafx.css.CssMetaData;
import javafx.css.PseudoClass;
import javafx.css.Styleable;
import javafx.css.StyleableBooleanProperty;
import javafx.css.StyleableDoubleProperty;
import javafx.css.StyleableObjectProperty;
import javafx.css.StyleableProperty;
import javafx.css.converter.BooleanConverter;
import javafx.css.converter.EnumConverter;
import javafx.css.converter.SizeConverter;
import javafx.event.EventDispatchChain;
import javafx.event.EventDispatcher;
import javafx.event.EventHandler;
import javafx.geometry.Orientation;
import javafx.scene.AccessibleRole;
import javafx.scene.Cursor;
import javafx.scene.control.Control;
import javafx.scene.control.ScrollBar;
import javafx.scene.input.DragEvent;
import javafx.scene.input.MouseButton;
import javafx.scene.input.MouseEvent;
import javafx.scene.input.ScrollEvent;
import javafx.scene.layout.BorderPane;
import javafx.scene.layout.StackPane;
import javafx.scene.paint.Color;
import javafx.scene.shape.Rectangle;
import javafx.scene.shape.StrokeType;
import javafx.util.Duration;
import org.eclipse.xtext.xbase.lib.Pure;
import org.arakhne.afc.math.MathUtil;
import org.arakhne.afc.math.geometry.d2.afp.BoundedElement2afp;
import org.arakhne.afc.math.geometry.d2.afp.Rectangle2afp;
import org.arakhne.afc.util.InformedIterable;
import org.arakhne.afc.vmutil.asserts.AssertMessages;
/**
* Panel that is displaying the document elements and supporting zooming.
*
* @param <T> the type of the document.
* @author $Author: sgalland$
* @version $FullVersion$
* @mavengroupid $GroupId$
* @mavenartifactid $ArtifactId$
* @since 15.0
*/
@SuppressWarnings("checkstyle:methodcount")
public class ZoomablePane<T extends InformedIterable<?> & BoundedElement2afp<?>> extends BorderPane
implements ZoomableViewer<T> {
/**
 * A static final reference to whether the platform we are on supports touch.
 */
protected static final boolean IS_TOUCH_SUPPORTED = Platform.isSupported(ConditionalFeature.INPUT_TOUCH);
// Rotation angles (degrees) applied to a scroll bar: 0 for the normal axis
// direction, 180 when the corresponding document axis is inverted.
private static final double NO_TURN = 0.;
private static final double TURN = 180.;
private static final boolean DEFAULT_PANNABLE = true;
// Multipliers applied to a scroll bar's block increment for the keyboard
// viewport moves (see moveLeft/moveRight/moveUp/moveDown).
private static final double VERY_LARGE_MOVE_FACTOR = 100.;
private static final double LARGE_MOVE_FACTOR = 10.;
private static final double STANDARD_MOVE_FACTOR = 1.;
// Minimal mouse motion (pixels) before a drag is applied as a pan move.
private static final double PAN_THRESHOLD = 10.;
private static final MouseButton DEFAULT_PAN_BUTTON = MouseButton.PRIMARY;
private static final double DEFAULT_PAN_SENSITIVITY = 1.;
private static final double MIN_PAN_SENSITIVITY = 0.01;
// JavaFX bean names of the styleable properties declared below.
private static final String PANNABLE_PROPERTY = "pannable"; //$NON-NLS-1$
private static final String PAN_BUTTON_PROPERTY = "panButton"; //$NON-NLS-1$
private static final String PAN_SENSITIVITY_PROPERTY = "panSensitivity"; //$NON-NLS-1$
private static final String LOGGER_PROPERTY = "logger"; //$NON-NLS-1$
/**
 * Initialize the style class to 'zoomable-view'.
 *
 * <p>This is the selector class from which CSS can be used to style
 * this control.
 */
private static final String DEFAULT_STYLE_CLASS = "zoomable-view"; //$NON-NLS-1$
/** Specifies whether the user should be able to pan the viewport by using
 * the mouse. If mouse events reach the {@code ZoomablePane} (that is, if mouse
 * events are not blocked by the contained node or one of its children)
 * then {@link #pannableProperty() pannable} is consulted to determine if the events should be
 * used for panning.
 */
private BooleanProperty pannable;
/** The button that serves for starting the panning with mouse.
 */
private ObjectProperty<MouseButton> panButton;
/** The sensitivity of the panning moves. The sensitivity is a strictly positive number
 * that multiplied by the distance covered by the mouse for obtaining the amount of move
 * to apply to the document.
 * By default, the sensitivity is 1.
 */
private DoubleProperty panSensitivity;
// Logger property; lazily created by loggerProperty().
private ObjectProperty<Logger> logger;
private final ZoomableCanvas<T> documentCanvas;
private final ScrollBar vbar;
private final ScrollBar hbar;
private final ColorSquare corner;
// Mouse position and scroll-bar values captured on mouse press; used as the
// reference point by the pan (drag) handlers.
private double pressX;
private double pressY;
private double hbarValue;
private double vbarValue;
// true while a pan gesture is in progress.
private boolean dragDetected;
// Cursor to restore when the pan gesture ends (null when no gesture active).
private Cursor savedCursor;
// true while a scroll gesture is in progress (SCROLL_STARTED..SCROLL_FINISHED).
private boolean scrollDetected;
/** Construct a pane with a newly created, empty canvas.
 *
 * @since 16.0
 */
public ZoomablePane() {
    this(new ZoomableCanvas<>());
}
/** Constructor with the model to be displayed.
 *
 * @param model the source of the elements.
 */
public ZoomablePane(T model) {
    this(new ZoomableCanvas<>(model));
}
/** Constructor with the canvas.
 *
 * <p>Builds the pane layout (canvas at the center, scroll bars at the right
 * and bottom), binds the scroll bars to the document/viewport properties of
 * the canvas, and installs the key and mouse handlers.
 *
 * @param canvas the pre-created canvas with the model to display inside.
 */
@SuppressWarnings({"unchecked", "checkstyle:methodlength", "checkstyle:executablestatementcount"})
public ZoomablePane(ZoomableCanvas<T> canvas) {
    assert canvas != null : AssertMessages.notNullParameter();
    this.documentCanvas = canvas;
    //
    getStyleClass().setAll(DEFAULT_STYLE_CLASS);
    setAccessibleRole(AccessibleRole.SCROLL_PANE);
    // focusTraversable is styleable through css. Calling setFocusTraversable
    // makes it look to css like the user set the value and css will not
    // override. Initializing focusTraversable by calling applyStyle with
    // null StyleOrigin ensures that css will be able to override the value.
    ((StyleableProperty<Boolean>) focusTraversableProperty()).applyStyle(null, Boolean.TRUE);
    // Build the layout: vertical bar at the right; horizontal bar plus the
    // corner square at the bottom. The corner is sized from the bars.
    this.vbar = new ScrollBar();
    this.vbar.setOrientation(Orientation.VERTICAL);
    this.hbar = new ScrollBar();
    this.hbar.setOrientation(Orientation.HORIZONTAL);
    this.corner = new ColorSquare();
    this.corner.minWidthProperty().bind(this.vbar.widthProperty());
    this.corner.maxWidthProperty().bind(this.vbar.widthProperty());
    this.corner.minHeightProperty().bind(this.hbar.prefHeightProperty());
    this.corner.maxHeightProperty().bind(this.hbar.prefHeightProperty());
    final BorderPane bottomGroup = new BorderPane();
    bottomGroup.setCenter(this.hbar);
    bottomGroup.setRight(this.corner);
    setCenter(getDocumentCanvas());
    setRight(this.vbar);
    setBottom(bottomGroup);
    // Bind the scroll bars properties: range follows the document bounds,
    // the visible amount follows the viewport, and the value is bidirectionally
    // tied to the viewport center. The bar is rotated 180° when its axis is
    // inverted so the thumb moves in the expected direction.
    this.vbar.minProperty().bind(Bindings.createDoubleBinding(
            () -> getDocumentBounds().getMinY(),
            documentBoundsProperty()));
    this.vbar.maxProperty().bind(Bindings.createDoubleBinding(
            () -> getDocumentBounds().getMaxY(),
            documentBoundsProperty()));
    this.vbar.visibleAmountProperty().bind(Bindings.createDoubleBinding(
            () -> getViewportBounds().getHeight(),
            viewportBoundsProperty()));
    this.vbar.valueProperty().bindBidirectional(viewportCenterYProperty());
    if (isInvertedAxisY()) {
        this.vbar.setRotate(TURN);
    }
    getDocumentCanvas().invertedAxisYProperty().addListener(new ChangeListener<Boolean>() {
        @Override
        public void changed(ObservableValue<? extends Boolean> observable, Boolean oldValue, Boolean newValue) {
            if (newValue != null && newValue.booleanValue()) {
                ZoomablePane.this.vbar.setRotate(TURN);
            } else {
                ZoomablePane.this.vbar.setRotate(NO_TURN);
            }
        }
    });
    // Same bindings for the horizontal bar.
    this.hbar.minProperty().bind(Bindings.createDoubleBinding(
            () -> getDocumentBounds().getMinX(),
            documentBoundsProperty()));
    this.hbar.maxProperty().bind(Bindings.createDoubleBinding(
            () -> getDocumentBounds().getMaxX(),
            documentBoundsProperty()));
    this.hbar.visibleAmountProperty().bind(Bindings.createDoubleBinding(
            () -> getViewportBounds().getWidth(),
            viewportBoundsProperty()));
    this.hbar.valueProperty().bindBidirectional(viewportCenterXProperty());
    if (isInvertedAxisX()) {
        this.hbar.setRotate(TURN);
    }
    getDocumentCanvas().invertedAxisXProperty().addListener(new ChangeListener<Boolean>() {
        @Override
        public void changed(ObservableValue<? extends Boolean> observable, Boolean oldValue, Boolean newValue) {
            if (newValue != null && newValue.booleanValue()) {
                ZoomablePane.this.hbar.setRotate(TURN);
            } else {
                ZoomablePane.this.hbar.setRotate(NO_TURN);
            }
        }
    });
    // don't allow the ScrollBar to handle the ScrollEvent,
    // In a view pane a vertical scroll should scroll on the vertical only,
    // whereas in a horizontal ScrollBar it can scroll horizontally.
    // block the event from being passed down to children
    final EventDispatcher blockEventDispatcher = (event, tail) -> event;
    // block ScrollEvent from being passed down to scrollbar's skin
    final EventDispatcher oldHsbEventDispatcher = this.hbar.getEventDispatcher();
    this.hbar.setEventDispatcher((event, tail) -> {
        if (event.getEventType() == ScrollEvent.SCROLL && !((ScrollEvent) event).isDirect()) {
            EventDispatchChain tail0 = tail.prepend(blockEventDispatcher);
            tail0 = tail0.prepend(oldHsbEventDispatcher);
            return tail0.dispatchEvent(event);
        }
        return oldHsbEventDispatcher.dispatchEvent(event, tail);
    });
    // block ScrollEvent from being passed down to scrollbar's skin
    final EventDispatcher oldVsbEventDispatcher = this.vbar.getEventDispatcher();
    this.vbar.setEventDispatcher((event, tail) -> {
        if (event.getEventType() == ScrollEvent.SCROLL && !((ScrollEvent) event).isDirect()) {
            EventDispatchChain tail0 = tail.prepend(blockEventDispatcher);
            tail0 = tail0.prepend(oldVsbEventDispatcher);
            return tail0.dispatchEvent(event);
        }
        return oldVsbEventDispatcher.dispatchEvent(event, tail);
    });
    // Install the keyboard, mouse and drawing-listener behaviors.
    setupKeying();
    setupMousing();
    setupListeners();
}
/** Replies the CssMetaData associated with this class, which may include the
 * CssMetaData of its super classes.
 *
 * @return the metadata.
 */
public static List<CssMetaData<? extends Styleable, ?>> getClassCssMetaData() {
    return StyleableProperties.STYLEABLES;
}
/** {@inheritDoc}
 *
 * <p>Delegates to {@link #getClassCssMetaData()}.
 */
@Override
public List<CssMetaData<? extends Styleable, ?>> getCssMetaData() {
    return getClassCssMetaData();
}
/** Setup the response based on listeners.
 *
 * <p>Installs a drawing listener that shows a colored hint in the corner
 * square while rendering is in progress and logs the rendering duration.
 */
protected void setupListeners() {
    addDrawingListener(new DrawingListener() {
        private long startTime;
        @Override
        public void onDrawingStart() {
            // Remember when the rendering started and show the visual hint.
            this.startTime = System.currentTimeMillis();
            getCorner().setColor(Color.ORANGERED);
        }
        @Override
        public void onDrawingEnd() {
            // Clear the hint, then log how long the rendering took.
            getCorner().setColor(null);
            final long elapsed = System.currentTimeMillis() - this.startTime;
            getLogger().fine("Rendering duration: " + Duration.millis(elapsed).toString()); //$NON-NLS-1$
        }
    });
}
/** Replies the default event handler that may be used for starting the panning action on the panel.
 *
 * @return the event handler.
 * @since 15.0
 */
public EventHandler<MouseEvent> getDefaultOnDragDetectedEventHandler() {
    return event -> {
        final boolean startPan = isPannable()
                && event.getButton() == getPanButton()
                && !event.isPopupTrigger();
        if (startPan) {
            this.dragDetected = true;
            if (this.savedCursor == null) {
                // Remember the current cursor and switch to the "move" cursor
                // for the duration of the pan gesture.
                final ZoomableCanvas<T> target = getDocumentCanvas();
                final Cursor current = target.getCursor();
                this.savedCursor = current == null ? Cursor.DEFAULT : current;
                target.setCursor(Cursor.MOVE);
                requestLayout();
            }
        }
    };
}
/** Replies the default event handler that pans the document when the mouse
 * is dragged over the panel.
 *
 * <p>A move is applied on an axis only when the mouse motion on that axis
 * reaches {@code PAN_THRESHOLD} pixels, and only when part of the document
 * is outside the viewport on that axis.
 *
 * @return the event handler.
 * @since 15.0
 */
public EventHandler<MouseEvent> getDefaultOnMouseDraggedEventHandler() {
    return e -> {
        // for mobile-touch we allow drag, even if not pannable
        if (this.dragDetected || IS_TOUCH_SUPPORTED) {
            if (!this.dragDetected) {
                // Touch-initiated drag: mark the gesture and switch to the
                // "move" cursor, remembering the cursor to restore afterwards.
                this.dragDetected = true;
                if (this.savedCursor == null) {
                    final ZoomableCanvas<T> canvas0 = getDocumentCanvas();
                    this.savedCursor = canvas0.getCursor();
                    if (this.savedCursor == null) {
                        this.savedCursor = Cursor.DEFAULT;
                    }
                    canvas0.setCursor(Cursor.MOVE);
                    requestLayout();
                }
            }
            final double sensitivity = getPanSensitivity(e.isShiftDown(), e.isControlDown());
            // we only drag if not all of the content is visible.
            if (this.hbar.getVisibleAmount() > 0. && this.hbar.getVisibleAmount() < this.hbar.getMax()) {
                double deltaX = this.pressX - e.getX();
                if (isInvertedAxisX()) {
                    deltaX = -deltaX;
                }
                // Inclusive threshold on both axes (the original used ">" here
                // and ">=" for the Y axis; the tests are now consistent).
                if (Math.abs(deltaX) >= PAN_THRESHOLD) {
                    final double delta = getDocumentCanvas().getDocumentGraphicsContext2D()
                            .fx2docSize(deltaX) * sensitivity;
                    double newHVal = this.hbarValue + delta;
                    newHVal = MathUtil.clamp(this.hbar.getMin(), newHVal, this.hbar.getMax());
                    this.hbar.setValue(newHVal);
                }
            }
            // we only drag if not all of the content is visible.
            if (this.vbar.getVisibleAmount() > 0. && this.vbar.getVisibleAmount() < this.vbar.getMax()) {
                double deltaY = this.pressY - e.getY();
                if (isInvertedAxisY()) {
                    deltaY = -deltaY;
                }
                if (Math.abs(deltaY) >= PAN_THRESHOLD) {
                    final double delta = getDocumentCanvas().getDocumentGraphicsContext2D()
                            .fx2docSize(deltaY) * sensitivity;
                    double newVVal = this.vbarValue + delta;
                    newVVal = MathUtil.clamp(this.vbar.getMin(), newVVal, this.vbar.getMax());
                    this.vbar.setValue(newVVal);
                }
            }
            // we need to consume drag events, as we don't want the view pane itself to be dragged on every mouse click
            e.consume();
        }
    };
}
/** Setup the response of the pane to mouse events.
 *
 * <p>Registers the press/drag/release handlers that implement panning, and
 * the scroll handlers that implement wheel zooming.
 */
@SuppressWarnings({"checkstyle:cyclomaticcomplexity", "checkstyle:npathcomplexity", "checkstyle:nestedifdepth"})
protected void setupMousing() {
    final ZoomableCanvas<T> canvas = getDocumentCanvas();
    // Record the press point and the scroll-bar values: they are the
    // reference used by the drag handler for computing the pan move.
    canvas.addEventHandler(MouseEvent.MOUSE_PRESSED, e -> {
        this.pressX = e.getX();
        this.pressY = e.getY();
        this.hbarValue = this.hbar.getValue();
        this.vbarValue = this.vbar.getValue();
    });
    setOnDragDetected(getDefaultOnDragDetectedEventHandler());
    // A pan gesture ends either on mouse release or when a drag-and-drop
    // terminates; both share the same clean-up (previously duplicated):
    // reset the gesture flag and restore the saved cursor.
    final Runnable endPanGesture = () -> {
        if (this.dragDetected) {
            this.dragDetected = false;
            final Cursor scurs = this.savedCursor;
            this.savedCursor = null;
            if (scurs != null) {
                getDocumentCanvas().setCursor(scurs);
                requestLayout();
            }
        }
    };
    addEventFilter(MouseEvent.MOUSE_RELEASED, e -> endPanGesture.run());
    addEventHandler(DragEvent.DRAG_DONE, event -> endPanGesture.run());
    addEventHandler(MouseEvent.MOUSE_DRAGGED, getDefaultOnMouseDraggedEventHandler());
    // Track scroll gestures so a touch scroll is not interpreted as a zoom.
    addEventHandler(ScrollEvent.SCROLL_STARTED, event -> {
        this.scrollDetected = true;
    });
    addEventHandler(ScrollEvent.SCROLL_FINISHED, event -> {
        this.scrollDetected = false;
    });
    addEventHandler(ScrollEvent.SCROLL, event -> {
        if (!this.scrollDetected) {
            // Plain wheel scroll: zoom in or out depending on the direction.
            event.consume();
            final double delta;
            if (event.getDeltaY() != 0.) {
                delta = event.getDeltaY();
            } else {
                delta = event.getDeltaX();
            }
            if (delta < 0) {
                zoomOut();
            } else {
                zoomIn();
            }
        }
    });
}
/** Setup the response of the pane to key events.
 *
 * <p>Arrow keys move the viewport (Alt+Up/Down zooms), and
 * PageUp/PageDown move by a very large amount (horizontally with Ctrl).
 */
protected void setupKeying() {
    setOnKeyPressed(event -> {
        final boolean shift = event.isShiftDown();
        final boolean control = event.isControlDown();
        switch (event.getCode()) {
        case LEFT:
            moveLeft(shift, control, false);
            event.consume();
            break;
        case RIGHT:
            moveRight(shift, control, false);
            event.consume();
            break;
        case UP:
            // Alt+Up zooms in; a plain Up moves the viewport up.
            if (event.isAltDown()) {
                zoomIn();
            } else {
                moveUp(shift, control, false);
            }
            event.consume();
            break;
        case DOWN:
            // Alt+Down zooms out; a plain Down moves the viewport down.
            if (event.isAltDown()) {
                zoomOut();
            } else {
                moveDown(shift, control, false);
            }
            event.consume();
            break;
        case PAGE_UP:
            // Ctrl+PageUp pages left; PageUp pages up.
            if (control) {
                moveLeft(false, false, true);
            } else {
                moveUp(false, false, true);
            }
            event.consume();
            break;
        case PAGE_DOWN:
            // Ctrl+PageDown pages right; PageDown pages down.
            if (control) {
                moveRight(false, false, true);
            } else {
                moveDown(false, false, true);
            }
            event.consume();
            break;
        //$CASES-OMITTED$
        default:
        }
    });
}
/** Move the viewport left.
 *
 * @param isUnit indicates if the move is a unit move. If {@code true}, this argument has precedence to the other arguments.
 * @param isLarge indicates if the move is a large move. If {@code true}, this argument has precedence
 *     to the very large argument.
 * @param isVeryLarge indicates if the move is a very large move.
 */
public void moveLeft(boolean isUnit, boolean isLarge, boolean isVeryLarge) {
    scrollBy(this.hbar, isUnit, isLarge, isVeryLarge, !isInvertedAxisX());
}
/** Move the viewport right.
 *
 * @param isUnit indicates if the move is a unit move. If {@code true}, this argument has precedence to the other arguments.
 * @param isLarge indicates if the move is a large move. If {@code true}, this argument has precedence
 *     to the very large argument.
 * @param isVeryLarge indicates if the move is a very large move.
 */
public void moveRight(boolean isUnit, boolean isLarge, boolean isVeryLarge) {
    scrollBy(this.hbar, isUnit, isLarge, isVeryLarge, isInvertedAxisX());
}
/** Move the viewport up.
 *
 * @param isUnit indicates if the move is a unit move. If {@code true}, this argument has precedence to the other arguments.
 * @param isLarge indicates if the move is a large move. If {@code true}, this argument has precedence
 *     to the very large argument.
 * @param isVeryLarge indicates if the move is a very large move.
 */
public void moveUp(boolean isUnit, boolean isLarge, boolean isVeryLarge) {
    scrollBy(this.vbar, isUnit, isLarge, isVeryLarge, !isInvertedAxisY());
}
/** Move the viewport down.
 *
 * @param isUnit indicates if the move is a unit move. If {@code true}, this argument has precedence to the other arguments.
 * @param isLarge indicates if the move is a large move. If {@code true}, this argument has precedence
 *     to the very large argument.
 * @param isVeryLarge indicates if the move is a very large move.
 */
public void moveDown(boolean isUnit, boolean isLarge, boolean isVeryLarge) {
    scrollBy(this.vbar, isUnit, isLarge, isVeryLarge, isInvertedAxisY());
}
/** Shift the given scroll bar's value, clamped to its [min, max] range.
 *
 * <p>Shared implementation of the four public move methods (their bodies
 * were previously duplicated).
 *
 * @param bar the scroll bar to shift.
 * @param isUnit if {@code true}, move by the bar's unit increment (has precedence).
 * @param isLarge if {@code true}, use the large factor (precedence over very large).
 * @param isVeryLarge if {@code true}, use the very large factor.
 * @param negate if {@code true}, move towards the bar's minimum.
 */
private void scrollBy(ScrollBar bar, boolean isUnit, boolean isLarge, boolean isVeryLarge, boolean negate) {
    double inc = isUnit ? bar.getUnitIncrement()
            : (isLarge ? LARGE_MOVE_FACTOR
            : (isVeryLarge ? VERY_LARGE_MOVE_FACTOR : STANDARD_MOVE_FACTOR)) * bar.getBlockIncrement();
    if (negate) {
        inc = -inc;
    }
    bar.setValue(MathUtil.clamp(bar.getMin(), bar.getValue() + inc, bar.getMax()));
}
/** Replies the property that contains the logger.
 *
 * <p>The property is lazily created. Setting it to {@code null} resets it
 * to a default logger named after this pane's concrete class.
 *
 * @return the logger property.
 */
public ObjectProperty<Logger> loggerProperty() {
    if (this.logger == null) {
        this.logger = new SimpleObjectProperty<>(this, LOGGER_PROPERTY, Logger.getLogger(getClass().getName())) {
            @Override
            protected void invalidated() {
                final Logger log = get();
                if (log == null) {
                    // Name the fallback logger after the pane's class. A plain
                    // getClass() here would refer to this anonymous property
                    // subclass, producing a differently-named logger than the
                    // initial default above.
                    set(Logger.getLogger(ZoomablePane.this.getClass().getName()));
                }
            }
        };
    }
    return this.logger;
}
/** Replies the logger associated to this pane.
 *
 * @return the logger, never {@code null}.
 */
public Logger getLogger() {
    return loggerProperty().get();
}
/** Change the logger associated to this pane.
 *
 * @param logger the logger; {@code null} resets the property to a default logger.
 */
public void setLogger(Logger logger) {
    loggerProperty().set(logger);
}
/** Replies the property that indicates if the user is able to pan the viewport by using
 * the mouse. If mouse events reach the {@code ZoomablePane} (that is, if mouse
 * events are not blocked by the contained node or one of its children)
 * then {@link #pannableProperty() pannable} is consulted to determine if the events should be
 * used for panning.
 *
 * <p>The property is lazily created and styleable through CSS.
 *
 * @return the property.
 */
public BooleanProperty pannableProperty() {
    if (this.pannable == null) {
        this.pannable = new StyleableBooleanProperty(DEFAULT_PANNABLE) {
            @Override
            public void invalidated() {
                // Keep the CSS pseudo-class state in sync with the value.
                pseudoClassStateChanged(StyleableProperties.PANNABLE_PSEUDOCLASS_STATE, get());
            }
            @Override
            public CssMetaData<ZoomablePane<?>, Boolean> getCssMetaData() {
                return StyleableProperties.PANNABLE;
            }
            @Override
            public Object getBean() {
                return ZoomablePane.this;
            }
            @Override
            public String getName() {
                return PANNABLE_PROPERTY;
            }
        };
    }
    return this.pannable;
}
/** Change the flag that indicates if the user is able to pan the viewport by using
 * the mouse. If mouse events reach the {@code ZoomablePane} (that is, if mouse
 * events are not blocked by the contained node or one of its children)
 * then {@link #pannableProperty() pannable} is consulted to determine if the events should be
 * used for panning.
 *
 * @param value {@code true} to enable the panning, {@code false} to disable the panning.
 */
public final void setPannable(boolean value) {
    pannableProperty().set(value);
}
/** Replies the flag that indicates if the user is able to pan the viewport by using
 * the mouse. If mouse events reach the {@code ZoomablePane} (that is, if mouse
 * events are not blocked by the contained node or one of its children)
 * then {@link #pannableProperty() pannable} is consulted to determine if the events should be
 * used for panning.
 *
 * @return {@code true} when the panning is enabled, {@code false} when disabled.
 */
public final boolean isPannable() {
    // Avoid creating the property just to read the default value.
    return this.pannable == null ? DEFAULT_PANNABLE : this.pannable.get();
}
/** Replies the property for the button that serves for starting the mouse scrolling.
 *
 * <p>The property is lazily created and styleable through CSS. Setting it to
 * {@code null} resets it to the default pan button.
 *
 * @return the property.
 */
public ObjectProperty<MouseButton> panButtonProperty() {
    if (this.panButton == null) {
        this.panButton = new StyleableObjectProperty<>(DEFAULT_PAN_BUTTON) {
            @Override
            protected void invalidated() {
                // Never keep a null button: fall back to the default.
                final MouseButton button = get();
                if (button == null) {
                    set(DEFAULT_PAN_BUTTON);
                }
            }
            @Override
            public CssMetaData<ZoomablePane<?>, MouseButton> getCssMetaData() {
                return StyleableProperties.PAN_BUTTON;
            }
            @Override
            public Object getBean() {
                return ZoomablePane.this;
            }
            @Override
            public String getName() {
                return PAN_BUTTON_PROPERTY;
            }
        };
    }
    return this.panButton;
}
/** Replies the button that serves for starting the mouse scrolling.
 *
 * @return the mouse button that permits to start the panning.
 */
public final MouseButton getPanButton() {
    // Avoid creating the property just to read the default value.
    return this.panButton == null ? DEFAULT_PAN_BUTTON : this.panButton.get();
}
/** Change the button that serves for starting the mouse scrolling.
 *
 * @param button the mouse button that permits to start the panning; {@code null} resets the default button.
 */
public final void setPanButton(MouseButton button) {
    panButtonProperty().set(button);
}
/** Replies the property that indicates the sensibility of the panning moves.
 * The sensibility is a strictly positive number that is multiplied to the
 * distance covered by the mouse motion for obtaining the move to
 * apply to the document.
 * The default value is 1.
 *
 * <p>The property is lazily created and styleable through CSS. Values at or
 * below the minimum sensitivity are clamped to the minimum.
 *
 * @return the property.
 */
public DoubleProperty panSensitivityProperty() {
    if (this.panSensitivity == null) {
        this.panSensitivity = new StyleableDoubleProperty(DEFAULT_PAN_SENSITIVITY) {
            @Override
            public void invalidated() {
                // Enforce the strictly-positive contract.
                if (get() <= MIN_PAN_SENSITIVITY) {
                    set(MIN_PAN_SENSITIVITY);
                }
            }
            @Override
            public CssMetaData<ZoomablePane<?>, Number> getCssMetaData() {
                return StyleableProperties.PAN_SENSITIVITY;
            }
            @Override
            public Object getBean() {
                return ZoomablePane.this;
            }
            @Override
            public String getName() {
                return PAN_SENSITIVITY_PROPERTY;
            }
        };
    }
    return this.panSensitivity;
}
/** Change the sensibility of the panning moves.
 * The sensibility is a strictly positive number that is multiplied to the
 * distance covered by the mouse motion for obtaining the move to
 * apply to the document.
 * The default value is 1.
 *
 * @param value the sensitivity; values at or below the minimum are clamped.
 */
public final void setPanSensitivity(double value) {
    panSensitivityProperty().set(value);
}
/** Replies the sensibility of the panning moves.
 * The sensibility is a strictly positive number that is multiplied to the
 * distance covered by the mouse motion for obtaining the move to
 * apply to the document.
 * The default value is 1.
 *
 * @return the sensitivity.
 */
public final double getPanSensitivity() {
    // Avoid creating the property just to read the default value.
    return this.panSensitivity == null ? DEFAULT_PAN_SENSITIVITY : this.panSensitivity.get();
}
/** Replies the sensibility of the panning moves after applying dynamic user interaction modifiers.
 * The sensibility is a strictly positive number that is multiplied to the
 * distance covered by the mouse motion for obtaining the move to
 * apply to the document.
 * The default value is 1.
 *
 * <p>This function is usually used for computing the sensibility within mouse handlers.
 * The Shift and Control key flags may be used as the modifiers.
 *
 * <p>If {@code unitSensitivityModifier} is {@code true}, the sensibility is always {@code 1}.
 * If {@code hugeSensivityModifier} is {@code true}, the sensibility is multiplied by {@link #LARGE_MOVE_FACTOR}.
 * Otherwise, the value returned by {@link #getPanSensitivity()} is returned.
 *
 * @param unitSensitivityModifier the user chooses the unit sensitivity dynamically. If {@code true}, this
 *     parameter has precedence to the other parameters.
 * @param hugeSensivityModifier the user chooses a huge sensitivity dynamically.
 * @return the sensitivity.
 */
public double getPanSensitivity(boolean unitSensitivityModifier, boolean hugeSensivityModifier) {
    if (unitSensitivityModifier) {
        // The unit modifier has precedence over everything else.
        return DEFAULT_PAN_SENSITIVITY;
    }
    final double baseSensitivity = getPanSensitivity();
    return hugeSensivityModifier ? baseSensitivity * LARGE_MOVE_FACTOR : baseSensitivity;
}
/** Replies the document canvas within this pane.
 *
 * @return the document canvas, never {@code null} (set at construction).
 */
@Pure
public ZoomableCanvas<T> getDocumentCanvas() {
    return this.documentCanvas;
}
/** Replies the corner pane.
 *
 * <p>The corner is the small square between the two scroll bars; it is also
 * used as a rendering-activity indicator (see {@code setupListeners()}).
 *
 * @return the corner pane.
 */
@Pure
public ColorSquare getCorner() {
    return this.corner;
}
// ------------------------------------------------------------------------
// ZoomableViewer implementation: the operations below delegate to the
// embedded ZoomableCanvas (see getDocumentCanvas()).
// ------------------------------------------------------------------------
@Override
public final ObjectProperty<T> documentModelProperty() {
    return getDocumentCanvas().documentModelProperty();
}
@Override
public final T getDocumentModel() {
    return getDocumentCanvas().getDocumentModel();
}
@Override
public final void setDocumentModel(T model) {
    getDocumentCanvas().setDocumentModel(model);
}
@Override
public final ObjectProperty<Drawer<? super T>> documentDrawerProperty() {
    return getDocumentCanvas().documentDrawerProperty();
}
@Override
public final Drawer<? super T> getDocumentDrawer() {
    return getDocumentCanvas().getDocumentDrawer();
}
@Override
public final void setDocumentDrawer(Drawer<? super T> drawer) {
    getDocumentCanvas().setDocumentDrawer(drawer);
}
@Override
public final ReadOnlyObjectProperty<Rectangle2afp<?, ?, ?, ?, ?, ?>> documentBoundsProperty() {
    return getDocumentCanvas().documentBoundsProperty();
}
@Override
public final Rectangle2afp<?, ?, ?, ?, ?, ?> getDocumentBounds() {
    return getDocumentCanvas().getDocumentBounds();
}
@Override
public final DoubleProperty scaleValueProperty() {
    return getDocumentCanvas().scaleValueProperty();
}
@Override
public final double getScaleValue() {
    return getDocumentCanvas().getScaleValue();
}
@Override
public final void setScaleValue(double value) {
    getDocumentCanvas().setScaleValue(value);
}
@Override
public final void setScaleValue(double scaleValue, double centerX, double centerY) {
    getDocumentCanvas().setScaleValue(scaleValue, centerX, centerY);
}
@Override
public final DoubleProperty minScaleValueProperty() {
    return getDocumentCanvas().minScaleValueProperty();
}
@Override
public final double getMinScaleValue() {
    return getDocumentCanvas().getMinScaleValue();
}
@Override
public final void setMinScaleValue(double value) {
    getDocumentCanvas().setMinScaleValue(value);
}
@Override
public final DoubleProperty maxScaleValueProperty() {
    return getDocumentCanvas().maxScaleValueProperty();
}
@Override
public final double getMaxScaleValue() {
    return getDocumentCanvas().getMaxScaleValue();
}
@Override
public final void setMaxScaleValue(double value) {
    getDocumentCanvas().setMaxScaleValue(value);
}
// Note: the viewport-center properties below are also bidirectionally bound
// to the scroll bars (see the constructor).
@Override
public final DoubleProperty viewportCenterXProperty() {
    return getDocumentCanvas().viewportCenterXProperty();
}
@Override
public final double getViewportCenterX() {
    return getDocumentCanvas().getViewportCenterX();
}
@Override
public final void setViewportCenterX(double x) {
    getDocumentCanvas().setViewportCenterX(x);
}
@Override
public final DoubleProperty viewportCenterYProperty() {
    return getDocumentCanvas().viewportCenterYProperty();
}
@Override
public final double getViewportCenterY() {
    return getDocumentCanvas().getViewportCenterY();
}
@Override
public final void setViewportCenterY(double y) {
    getDocumentCanvas().setViewportCenterY(y);
}
@Override
public final void setViewportCenter(double x, double y) {
    getDocumentCanvas().setViewportCenter(x, y);
}
@Override
public final ReadOnlyObjectProperty<Rectangle2afp<?, ?, ?, ?, ?, ?>> viewportBoundsProperty() {
    return getDocumentCanvas().viewportBoundsProperty();
}
@Override
public final Rectangle2afp<?, ?, ?, ?, ?, ?> getViewportBounds() {
    return getDocumentCanvas().getViewportBounds();
}
@Override
public final double getScaleValueToFit(boolean minimizeOnly) {
return getDocumentCanvas().getScaleValueToFit(minimizeOnly);
}
@Override
public final void zoomOut(double centerX, double centerY) {
getDocumentCanvas().zoomOut(centerX, centerY);
}
@Override
public final void zoomIn(double centerX, double centerY) {
getDocumentCanvas().zoomIn(centerX, centerY);
}
@Override
public final DoubleProperty scaleChangeProperty() {
return getDocumentCanvas().scaleChangeProperty();
}
@Override
public final double getScaleChange() {
return getDocumentCanvas().getScaleChange();
}
@Override
public final void setScaleChange(double change) {
getDocumentCanvas().setScaleChange(change);
}
@Override
public final BooleanProperty invertedAxisXProperty() {
return getDocumentCanvas().invertedAxisXProperty();
}
@Override
public final boolean isInvertedAxisX() {
return getDocumentCanvas().isInvertedAxisX();
}
@Override
public final void setInvertedAxisX(boolean inverted) {
getDocumentCanvas().setInvertedAxisX(inverted);
}
@Override
public final BooleanProperty invertedAxisYProperty() {
return getDocumentCanvas().invertedAxisYProperty();
}
@Override
public final boolean isInvertedAxisY() {
return getDocumentCanvas().isInvertedAxisY();
}
@Override
public final void setInvertedAxisY(boolean inverted) {
getDocumentCanvas().setInvertedAxisY(inverted);
}
@Override
public final IntegerProperty drawableElementBudgetProperty() {
return getDocumentCanvas().drawableElementBudgetProperty();
}
/** Replies the drawable-element budget; delegates to the document canvas.
 *
 * @return the drawable-element budget.
 */
@Override
public final int getDrawableElementBudget() {
    return getDocumentCanvas().getDrawableElementBudget();
}
/** Set the drawable-element budget; delegates to the document canvas.
 *
 * @param budget the new drawable-element budget.
 */
@Override
public final void setDrawableElementBudget(int budget) {
    getDocumentCanvas().setDrawableElementBudget(budget);
}
/** Register a drawing listener; delegates to the document canvas.
 *
 * @param listener the listener to add.
 */
@Override
public void addDrawingListener(DrawingListener listener) {
    getDocumentCanvas().addDrawingListener(listener);
}
/** Unregister a drawing listener; delegates to the document canvas.
 *
 * @param listener the listener to remove.
 */
@Override
public void removeDrawingListener(DrawingListener listener) {
    getDocumentCanvas().removeDrawingListener(listener);
}
/** Convert a screen X coordinate into a document X coordinate; delegates to the document canvas.
 *
 * @param x the screen X coordinate.
 * @return the document X coordinate.
 */
@Override
public double toDocumentPositionX(double x) {
    return getDocumentCanvas().toDocumentPositionX(x);
}
/** Convert a screen Y coordinate into a document Y coordinate; delegates to the document canvas.
 *
 * @param y the screen Y coordinate.
 * @return the document Y coordinate.
 */
@Override
public double toDocumentPositionY(double y) {
    return getDocumentCanvas().toDocumentPositionY(y);
}
/** Convert a screen size into a document size; delegates to the document canvas.
 *
 * @param size the size in screen units.
 * @return the size in document units.
 */
@Override
public double toDocumentSize(double size) {
    return getDocumentCanvas().toDocumentSize(size);
}
/** Convert a document X coordinate into a screen X coordinate; delegates to the document canvas.
 *
 * @param x the document X coordinate.
 * @return the screen X coordinate.
 */
@Override
public double toScreenPositionX(double x) {
    return getDocumentCanvas().toScreenPositionX(x);
}
/** Convert a document Y coordinate into a screen Y coordinate; delegates to the document canvas.
 *
 * @param y the document Y coordinate.
 * @return the screen Y coordinate.
 */
@Override
public double toScreenPositionY(double y) {
    return getDocumentCanvas().toScreenPositionY(y);
}
/** Convert a document size into a screen size; delegates to the document canvas.
 *
 * @param size the size in document units.
 * @return the size in screen units.
 */
@Override
public double toScreenSize(double size) {
    return getDocumentCanvas().toScreenSize(size);
}
/** Redraw the document content; delegates to the document canvas. */
@Override
public void drawContent() {
    getDocumentCanvas().drawContent();
}
/** The color square at the corner of a {@code ZoomablePane}.
 *
 * @author $Author: sgalland$
 * @version $FullVersion$
 * @mavengroupid $GroupId$
 * @mavenartifactid $ArtifactId$
 * @since 15.0
 */
public static class ColorSquare extends StackPane {

    /** Edge length, in pixels, of the rendered square. */
    private static final int SQUARE_SIZE = 15;

    /** Shape that actually renders the color. */
    private final Rectangle rectangle;

    /** {@code true} when no specific color is assigned. */
    private boolean isEmpty;

    /** Constructor of empty square.
     */
    public ColorSquare() {
        this(null);
    }

    /** Constructor with the given color.
     *
     * @param color the color of the square.
     */
    public ColorSquare(Color color) {
        setFocusTraversable(false);
        // Add style class to handle selected color square
        getStyleClass().add("color-square"); //$NON-NLS-1$
        final Rectangle rect = new Rectangle(SQUARE_SIZE, SQUARE_SIZE);
        rect.getStyleClass().add("color-rect"); //$NON-NLS-1$
        rect.setStrokeType(StrokeType.INSIDE);
        this.rectangle = rect;
        setColor(color);
        getChildren().add(rect);
    }

    @Override
    public boolean isResizable() {
        return false;
    }

    /** Replies if the square is empty and has not specific color.
     *
     * @return {@code true} if the square is empty.
     */
    public boolean isEmpty() {
        return this.isEmpty;
    }

    /** Replies the color.
     *
     * @return the color of the square.
     */
    public Color getPaint() {
        return (Color) this.rectangle.getFill();
    }

    /** Change the color.
     *
     * @param color the color of the square; {@code null} marks the square as empty
     *     and renders it transparent.
     */
    public void setColor(Color color) {
        this.isEmpty = color == null;
        this.rectangle.setFill(this.isEmpty ? Color.TRANSPARENT : color);
    }
}
/** Super-lazy instantiation pattern, inspired by the one from Bill Pugh.
 *
 * @author $Author: sgalland$
 * @version $FullVersion$
 * @mavengroupid $GroupId$
 * @mavenartifactid $ArtifactId$
 * @since 15.0
 * @treatAsPrivate
 */
private static class StyleableProperties {

    /** Pannable pseudo class state.
     */
    public static final PseudoClass PANNABLE_PSEUDOCLASS_STATE = PseudoClass.getPseudoClass("pannable"); //$NON-NLS-1$

    /** All CSS metadata supported by the pane, including those inherited from {@code Control}. */
    public static final List<CssMetaData<? extends Styleable, ?>> STYLEABLES;

    /** Pannable CSS metadata.
     */
    public static final CssMetaData<ZoomablePane<?>, Boolean> PANNABLE = new CssMetaData<>(
            "-afc-pannable", //$NON-NLS-1$
            BooleanConverter.getInstance(), Boolean.FALSE) {
        @Override
        public boolean isSettable(ZoomablePane<?> pane) {
            return pane.pannable == null || !pane.pannable.isBound();
        }

        @SuppressWarnings("unchecked")
        @Override
        public StyleableProperty<Boolean> getStyleableProperty(ZoomablePane<?> pane) {
            return (StyleableProperty<Boolean>) pane.pannableProperty();
        }
    };

    /** PanButton CSS metadata.
     */
    public static final CssMetaData<ZoomablePane<?>, MouseButton> PAN_BUTTON =
            new CssMetaData<>(
            "-afc-panbutton", //$NON-NLS-1$
            new EnumConverter<>(MouseButton.class), DEFAULT_PAN_BUTTON) {
        @Override
        public boolean isSettable(ZoomablePane<?> pane) {
            // NOTE(review): tests the "pannable" property rather than the pan-button
            // property; looks copy-pasted from PANNABLE — confirm intent.
            return pane.pannable == null || !pane.pannable.isBound();
        }

        @SuppressWarnings("unchecked")
        @Override
        public StyleableProperty<MouseButton> getStyleableProperty(ZoomablePane<?> pane) {
            return (StyleableProperty<MouseButton>) pane.panButtonProperty();
        }
    };

    /** PanSensitivity CSS metadata.
     */
    public static final CssMetaData<ZoomablePane<?>, Number> PAN_SENSITIVITY = new CssMetaData<>(
            // FIX: was "-afc-panbutton", which collided with PAN_BUTTON's CSS property
            // name and made the pan sensitivity unreachable from CSS.
            "-afc-pansensitivity", //$NON-NLS-1$
            SizeConverter.getInstance(), DEFAULT_PAN_SENSITIVITY) {
        @Override
        public boolean isSettable(ZoomablePane<?> pane) {
            // NOTE(review): tests the "pannable" property rather than the pan-sensitivity
            // property; looks copy-pasted from PANNABLE — confirm intent.
            return pane.pannable == null || !pane.pannable.isBound();
        }

        @SuppressWarnings("unchecked")
        @Override
        public StyleableProperty<Number> getStyleableProperty(ZoomablePane<?> pane) {
            return (StyleableProperty<Number>) pane.panSensitivityProperty();
        }
    };

    static {
        final List<CssMetaData<? extends Styleable, ?>> styleables = new ArrayList<>(Control.getClassCssMetaData());
        styleables.add(PANNABLE);
        styleables.add(PAN_BUTTON);
        styleables.add(PAN_SENSITIVITY);
        STYLEABLES = Collections.unmodifiableList(styleables);
    }

    @Override
    public String toString() {
        return STYLEABLES.toString();
    }
}
}
| |
/*---------------------------------------------------------------------------*\
This software is released under a BSD license, adapted from
<http://opensource.org/licenses/bsd-license.php>
Copyright © 2004-2012 Brian M. Clapper.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name "clapper.org", "curn", nor the names of the project's
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
\*---------------------------------------------------------------------------*/
package org.clapper.curn.output.script;
import org.clapper.curn.Constants;
import org.clapper.curn.CurnConfig;
import org.clapper.curn.ConfiguredOutputHandler;
import org.clapper.curn.CurnException;
import org.clapper.curn.FeedInfo;
import org.clapper.curn.Version;
import org.clapper.curn.output.FileOutputHandler;
import org.clapper.curn.parser.RSSChannel;
import org.clapper.util.config.ConfigurationException;
import org.clapper.util.config.NoSuchSectionException;
import org.clapper.util.io.FileUtil;
import org.clapper.util.logging.Logger;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.io.Reader;
import java.io.StringWriter;
import java.util.ArrayList;
import java.util.Collection;
import javax.script.ScriptEngineManager;
import javax.script.ScriptEngine;
import javax.script.ScriptException;
import org.clapper.curn.CurnUtil;
/**
 * Provides an output handler that calls a script via the Apache Jakarta
* {@link <a href="http://jakarta.apache.org/bsf/">Bean Scripting Framework</a>}
* (BSF). This handler supports any scripting language supported by BSF. In
* addition to the configuration parameters supported by the
* {@link FileOutputHandler} base class, this handler supports the
* following additional configuration variables, which must be specified in
* the handler's configuration section.
*
* <table border="1" align="center">
* <tr>
* <th>Parameter</th>
* <th>Explanation</th>
* </tr>
*
* <tr>
* <td><tt>Script</tt></td>
* <td>Path to the script to be invoked. The script will be called
* as if from the command line, except that additional objects will
* be available via BSF.
* </td>
* </tr>
*
* <tr>
* <td><tt>Language</tt></td>
* <td><p>The scripting language, as recognized by BSF. This handler
* supports all the scripting language engines that are registered
* with the BSF software. (Those predefined engines are configured in
* a properties file within the BSF software.) Some of the
* scripting language engines are actually bundled with BSF. Some
* are not. Regardless, of
 * course, the actual jar files for the scripting
* languages themselves must be in the CLASSPATH at runtime, for those
* languages to be available.</p>
*
* <p>If you want to use a BSF scripting language engine that isn't
* automatically registered with BSF, simply extend this class and
* override the {@link #registerAdditionalScriptingEngines} method.
* In that method, call <tt>BSFManager.registerScriptingEngine()</tt>
* for each additional language you want to support. For example,
* to provide a handler that supports
* {@link <a href="http://www.judoscript.com/">JudoScript</a>},
* you might write an output handler that looks like this:</p>
* <blockquote><pre>
* import org.clapper.curn.CurnException;
* import org.clapper.curn.output.script.ScriptOutputHandler;
* import org.apache.bsf.BSFManager;
*
* public class MyOutputHandler extends ScriptOutputHandler
* {
* public JudoScriptOutputHandler()
* {
* super();
* }
*
* public void registerAdditionalScriptingEngines()
* throws CurnException
* {
* BSFManager.registerScriptingEngine ("mylang",
* "com.example.BSFMyLangEngine",
* new String[] {"ml", "myl"});
* }
* }
* </pre></blockquote>
*
* Then, simply use your class instead of <tt>ScriptOutputHandler</tt>
* in your configuration file.
* </td>
* </tr>
* </table>
*
* <p>This handler's {@link #displayChannel displayChannel()} method does
* not invoke the script; instead, it buffers up all the channels so that
* the {@link #flush} method can invoke the script. That way, the overhead
* of invoking the script only occurs once. Via the BSF engine, this
* handler makes available an iterator of special objects that wrap both
* the {@link RSSChannel} and {@link FeedInfo} objects for a given channel.
* See below for a more complete description.</p>
*
* <p>The complete list of objects bound into the BSF beanspace follows.</p>
*
* <table border="0">
* <tr valign="top">
* <th>Bound name</th>
* <th>Java type</th>
* <th>Explanation</th>
* </tr>
*
* <tr valign="top">
* <td>channels</td>
* <td><tt>java.util.Collection</tt></td>
* <td>An <tt>Collection</tt> of special internal objects that wrap
* both {@link RSSChannel} and {@link FeedInfo} objects. The
* wrapper objects provide two methods:</td>
*
* <ul>
* <li><tt>getChannel()</tt> gets the <tt>RSSChannel</tt> object
* <li><tt>getFeedInfo()</tt> gets the <tt>FeedInfo</tt> object
* </ul>
* </tr>
*
* <tr valign="top">
* <td>outputPath</td>
* <td><tt>java.lang.String</tt></td>
* <td>The path to an output file. The script should write its output
* to that file. Overwriting the file is fine. If the script generates
* no output, then it can ignore the file.</td>
* </tr>
*
* <tr valign="top">
* <td>config</td>
* <td><tt>{@link CurnConfig}</tt></td>
* <td>The <tt>org.clapper.curn.CurnConfig</tt> object that represents
* the parsed configuration data. Useful in conjunction with the
* "configSection" object, to parse additional parameters from
* the configuration.</td>
* </tr>
*
* <tr valign="top">
* <td>configSection</td>
* <td><tt>java.lang.String</tt></td>
* <td>The name of the configuration file section in which the output
* handler was defined. Useful if the script wants to access
* additional script-specific configuration data.</td>
* </tr>
*
* <tr valign="top">
* <td>mimeType</td>
* <td><tt>java.io.PrintWriter</tt></td>
* <td>A <tt>PrintWriter</tt> object to which the script should print
* the MIME type that corresponds to the generated output.
* If the script generates no output, then it can ignore this
* object.</td>
* </tr>
*
* <tr valign="top">
* <td>logger</td>
* <td>{@link Logger org.clapper.util.logging.Logger}</td>
* <td>A <tt>Logger</tt> object, useful for logging messages to
* the <i>curn</i> log file.</td>
* </tr>
*
* <tr valign="top">
* <td>version</td>
* <td><tt>java.lang.String</tt></td>
* <td>Full <i>curn</i> version string, in case the script wants to
* include it in the generated output
* </tr>
* </table>
*
* <p>For example, the following Jython script can be used as a template
* for a Jython output handler.</p>
*
* <blockquote>
* <pre>
* import sys
*
* def __init__ (self):
* """
* Initialize a new TextOutputHandler object.
* """
* self.__channels = bsf.lookupBean ("channels")
* self.__outputPath = bsf.lookupBean ("outputPath")
* self.__mimeTypeOut = bsf.lookupBean ("mimeType")
* self.__config = bsf.lookupBean ("config")
* self.__sectionName = bsf.lookupBean ("configSection")
* self.__logger = bsf.lookupBean ("logger");
* self.__version = bsf.lookupBean ("version")
* self.__message = None
*
* def processChannels (self):
* """
* Process the channels passed in through the Bean Scripting Framework.
* """
*
* out = open (self.__outputPath, "w")
* msg = self.__config.getOptionalStringValue (self.__sectionName,
* "Message",
* None)
*
* totalNew = 0
*
* # First, count the total number of new items
*
* iterator = self.__channels.iterator()
* while iterator.hasNext():
* channel_wrapper = iterator.next()
* channel = channel_wrapper.getChannel()
* totalNew = totalNew + channel.getItems().size()
*
* if totalNew > 0:
* # If the config file specifies a message for this handler,
* # display it.
*
* if msg != None:
* out.println (msg)
* out.println ()
*
* # Now, process the items
*
* iterator = self.__channels.iterator()
* while iterator.hasNext():
* channel_wrapper = iterator.next()
* channel = channel_wrapper.getChannel()
* feed_info = channel_wrapper.getFeedInfo()
* self.__process_channel (out, channel, feed_info, indentation)
*
* self.__mimeTypeBuf.print ("text/plain")
*
* # Output a footer
*
* self.__indent (out, indentation)
* out.write ("\n")
* out.write (self.__version + "\n")
* out.close ()
*
* def process_channel (channel, feed_info):
* item_iterator = channel.getItems().iterator()
* while item_iterator.hasNext():
* # Do output for item
* ...
*
* main()
* </pre>
* </blockquote>
*
* @see org.clapper.curn.OutputHandler
* @see FileOutputHandler
* @see org.clapper.curn.Curn
* @see org.clapper.curn.parser.RSSChannel
*
* @version <tt>$Revision$</tt>
*/
public class ScriptOutputHandler extends FileOutputHandler
{
    /*----------------------------------------------------------------------*\
                             Private Constants
    \*----------------------------------------------------------------------*/

    /*----------------------------------------------------------------------*\
                                Inner Classes
    \*----------------------------------------------------------------------*/

    /**
     * Wraps an RSSChannel object and its FeedInfo object, so a script can
     * retrieve both through a single element of the "channels" collection.
     */
    public class ChannelWrapper
    {
        private RSSChannel channel;
        private FeedInfo feedInfo;

        ChannelWrapper(RSSChannel channel, FeedInfo feedInfo)
        {
            this.channel = channel;
            this.feedInfo = feedInfo;
        }

        public RSSChannel getChannel()
        {
            return this.channel;
        }

        public FeedInfo getFeedInfo()
        {
            return this.feedInfo;
        }
    }

    /**
     * Type alias for the buffered collection of channel wrappers.
     */
    private class ChannelList extends ArrayList<ChannelWrapper>
    {
        ChannelList()
        {
            super();
        }
    }

    /**
     * Container for the objects exported to the script. Bound into the
     * script engine under the name "curn" (see initOutputHandler).
     */
    public class CurnScriptObjects
    {
        /** Collection of ChannelWrapper objects (raw type kept for script
            compatibility). */
        public Collection channels = null;
        /** Path of the output file the script should write. */
        public String outputPath = null;
        /** Parsed curn configuration. */
        public CurnConfig config = null;
        /** Name of this handler's configuration section. */
        public String configSection = null;
        /** Logger the script may use. */
        public Logger logger = null; // NOPMD

        /** MIME type reported back by the script via setMIMEType(). */
        String mimeType = null;

        CurnScriptObjects()
        {
            // Nothing to do
        }

        /**
         * Called by the script to report the MIME type of its output.
         *
         * @param mimeType the MIME type
         */
        public void setMIMEType(String mimeType)
        {
            this.mimeType = mimeType;
        }

        /**
         * Get the full curn version string, for scripts that want to embed
         * it in their output.
         *
         * @return the version string
         */
        public String getVersion()
        {
            return Version.getInstance().getFullVersion();
        }
    }

    /*----------------------------------------------------------------------*\
                            Private Data Items
    \*----------------------------------------------------------------------*/

    private ScriptEngineManager scriptManager = null;
    private ScriptEngine scriptEngine = null;
    private Collection<ChannelWrapper> channels = new ChannelList();
    private String scriptPath = null;
    private String scriptString = null;
    private String mimeType = null;
    private String language = null;
    private Logger scriptLogger = null; // NOPMD
    private CurnScriptObjects scriptObjects = null;
    private boolean allowEmbeddedHTML = false;

    /**
     * For logging
     */
    private static final Logger log = new Logger(ScriptOutputHandler.class);

    /*----------------------------------------------------------------------*\
                                Constructor
    \*----------------------------------------------------------------------*/

    /**
     * Construct a new <tt>ScriptOutputHandler</tt>.
     */
    public ScriptOutputHandler()
    {
        // Nothing to do.
    }

    /*----------------------------------------------------------------------*\
                              Public Methods
    \*----------------------------------------------------------------------*/

    /**
     * Initializes the output handler for another set of RSS channels.
     *
     * @param config     the parsed <i>curn</i> configuration data
     * @param cfgHandler the <tt>ConfiguredOutputHandler</tt> wrapper
     *                   containing this object; the wrapper has some useful
     *                   metadata, such as the object's configuration section
     *                   name and extra variables.
     *
     * @throws ConfigurationException configuration error
     * @throws CurnException          some other initialization error
     */
    public final void initOutputHandler(CurnConfig config,
                                        ConfiguredOutputHandler cfgHandler)
        throws ConfigurationException,
               CurnException
    {
        // Parse handler-specific configuration variables

        String section = cfgHandler.getSectionName();

        try
        {
            // NOTE(review): if section is null, scriptPath stays null and the
            // existence check below will operate on a null path — confirm
            // whether a null section is possible for this handler.
            if (section != null)
            {
                scriptPath = config.getConfigurationValue(section, "Script");
                language = config.getConfigurationValue(section, "Language");
                allowEmbeddedHTML =
                    config.getOptionalBooleanValue
                        (section,
                         CurnConfig.CFG_ALLOW_EMBEDDED_HTML,
                         false);
            }
        }

        catch (NoSuchSectionException ex)
        {
            throw new ConfigurationException (ex);
        }

        // Verify that the script exists.

        File scriptFile = CurnUtil.mapConfiguredPathName (scriptPath);
        if (! scriptFile.exists())
        {
            scriptPath = null;
            throw new ConfigurationException(section,
                                             "Script file \"" +
                                             scriptFile.getPath() +
                                             "\" does not exist.");
        }

        if (! scriptFile.isFile())
        {
            scriptPath = null;
            throw new ConfigurationException(section,
                                             "Script file \"" +
                                             scriptFile.getPath() +
                                             "\" is not a regular file.");
        }

        // Allocate the script engine manager.

        try
        {
            scriptManager = new ScriptEngineManager();
        }

        catch (Throwable ex)
        {
            throw new CurnException(ex);
        }

        // Next, get the scripting engine itself.

        try
        {
            scriptEngine = scriptManager.getEngineByName(language);
        }

        catch (Throwable ex)
        {
            throw new CurnException("Unable to load scripting engine for \"" +
                                    language + "\" language",
                                    ex);
        }

        // Set up a logger for the script. The logger name can't have dots
        // in it, because the underlying logging API strips them out,
        // thinking they're class/package delimiters. That means we have to
        // strip the extension or change it to something else. Since the
        // extension conveys information (i.e., the language), we just
        // convert it to an underscore.

        StringBuilder scriptLoggerName = new StringBuilder(128);
        String scriptName = scriptFile.getName();
        scriptLoggerName.append(FileUtil.getFileNameNoExtension(scriptName));
        scriptLoggerName.append('_');
        scriptLoggerName.append(FileUtil.getFileNameExtension(scriptName));
        scriptLogger = new Logger(scriptLoggerName.toString());

        // Declare the script object. We'll fill it partially now; the rest
        // will be filled later. Also, for backward compatibility, register
        // individual BSF beans.

        this.scriptObjects = new CurnScriptObjects();

        try
        {
            scriptEngine.put("curn", scriptObjects);
        }

        catch (Throwable ex)
        {
            throw new CurnException ("Can't register script 'curn' object",
                                     ex);
        }

        scriptObjects.config = config;
        scriptObjects.configSection = section;
        scriptObjects.logger = scriptLogger;

        // Load the contents of the script into an in-memory buffer.

        scriptString = loadScript(scriptFile);

        channels.clear();
    }

    /**
     * Display the list of <tt>RSSItem</tt> news items to whatever output
     * is defined for the underlying class. This handler simply buffers up
     * the channel, so that {@link #flush} can pass all the channels to the
     * script.
     *
     * @param channel  The channel containing the items to emit. The method
     *                 should emit all the items in the channel; the caller
     *                 is responsible for clearing out any items that should
     *                 not be seen.
     * @param feedInfo Information about the feed, from the configuration
     *
     * @throws CurnException unable to write output
     */
    public final void displayChannel(RSSChannel channel,
                                     FeedInfo feedInfo)
        throws CurnException
    {
        // Save the channel.

        if (! allowEmbeddedHTML)
            channel.stripHTML();

        channels.add (new ChannelWrapper (channel, feedInfo));
    }

    /**
     * Flush any buffered-up output. Invokes the configured script once for
     * all buffered channels, then records the MIME type the script reported.
     *
     * @throws CurnException unable to write output
     */
    public final void flush() throws CurnException
    {
        try
        {
            // Put the channels and output path in the global object.

            scriptObjects.channels = channels;
            scriptObjects.outputPath = getOutputFile().getPath();

            // Run the script

            log.debug ("Invoking " + scriptPath);
            scriptEngine.eval(scriptString);

            // Handle the MIME type.

            mimeType = scriptObjects.mimeType;
        }

        catch (ScriptException ex)
        {
            Throwable realException = ex.getCause();
            // FIX: previously tested "ex == null" (always false here), so a
            // ScriptException without a cause was reported as a null
            // exception. Fall back to the ScriptException itself.
            if (realException == null)
                realException = ex;
            log.error ("Error interacting with scripting framework",
                       realException);
            throw new CurnException (Constants.BUNDLE_NAME,
                                     "ScriptOutputHandler.bsfError",
                                     "Error interacting with scripting " +
                                     "framework: {0}",
                                     new Object[] {ex.getMessage()},
                                     realException);
        }
    }

    /**
     * Get the content (i.e., MIME) type for output produced by this output
     * handler.
     *
     * @return the content type
     */
    public final String getContentType()
    {
        return mimeType;
    }

    /**
     * Register additional scripting language engines that are not
     * supported by this class. By default, this method does nothing.
     * Subclasses that wish to register additional BSF scripting engine
     * bindings should override this method and use
     * <tt>BSFManager.registerScriptingEngine()</tt> to register the
     * engines. See the class documentation, above, for additional details.
     *
     * @throws CurnException on error
     *
     * @deprecated as of <i>curn</i> 3.1.
     */
    public void registerAdditionalScriptingEngines()
        throws CurnException
    {
        // Nothing to do.
    }

    /*----------------------------------------------------------------------*\
                              Private Methods
    \*----------------------------------------------------------------------*/

    /**
     * Load the contents of the external script (any file, really) into an
     * in-memory buffer.
     *
     * @param scriptFile the script file
     *
     * @return the string representing the loaded script
     *
     * @throws CurnException on error
     */
    private String loadScript (File scriptFile)
        throws CurnException
    {
        Reader r = null;
        try
        {
            r = new BufferedReader (new FileReader (scriptFile));
            StringWriter w = new StringWriter();

            // Bulk copy via a buffer (the old char-by-char loop was slow).
            char[] buf = new char[8192];
            int n;
            while ((n = r.read (buf)) != -1)
                w.write (buf, 0, n);

            return w.toString();
        }

        catch (IOException ex)
        {
            throw new CurnException (Constants.BUNDLE_NAME,
                                     "ScriptOutputHandler.cantLoadScript",
                                     "Failed to load script \"{0}\" into " +
                                     "memory.",
                                     new Object[] {scriptFile.getPath()},
                                     ex);
        }

        finally
        {
            // FIX: the reader was previously leaked when read() threw.
            if (r != null)
            {
                try
                {
                    r.close();
                }
                catch (IOException ex)
                {
                    log.error ("Failed to close script file", ex);
                }
            }
        }
    }
}
| |
// Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University
// Copyright (c) 2011, 2012 Open Networking Foundation
// Copyright (c) 2012, 2013 Big Switch Networks, Inc.
// This library was generated by the LoxiGen Compiler.
// See the file LICENSE.txt which should have been included in the source distribution
// Automatically generated by LOXI from template of_class.java
// Do not modify
package org.projectfloodlight.openflow.protocol.ver10;
import org.projectfloodlight.openflow.protocol.*;
import org.projectfloodlight.openflow.protocol.action.*;
import org.projectfloodlight.openflow.protocol.actionid.*;
import org.projectfloodlight.openflow.protocol.bsntlv.*;
import org.projectfloodlight.openflow.protocol.errormsg.*;
import org.projectfloodlight.openflow.protocol.meterband.*;
import org.projectfloodlight.openflow.protocol.instruction.*;
import org.projectfloodlight.openflow.protocol.instructionid.*;
import org.projectfloodlight.openflow.protocol.match.*;
import org.projectfloodlight.openflow.protocol.oxm.*;
import org.projectfloodlight.openflow.protocol.queueprop.*;
import org.projectfloodlight.openflow.types.*;
import org.projectfloodlight.openflow.util.*;
import org.projectfloodlight.openflow.exceptions.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Set;
import com.google.common.collect.ImmutableSet;
import java.util.List;
import com.google.common.collect.ImmutableList;
import org.jboss.netty.buffer.ChannelBuffer;
import com.google.common.hash.PrimitiveSink;
import com.google.common.hash.Funnel;
class OFTableStatsReplyVer10 implements OFTableStatsReply {
// NOTE: this class is generated by LoxiGen (see file header); hand edits
// will be overwritten on regeneration.
private static final Logger logger = LoggerFactory.getLogger(OFTableStatsReplyVer10.class);
// version: 1.0
final static byte WIRE_VERSION = 1;
final static int MINIMUM_LENGTH = 12;

// Defaults used by Builder when a field was never set.
private final static long DEFAULT_XID = 0x0L;
private final static Set<OFStatsReplyFlags> DEFAULT_FLAGS = ImmutableSet.<OFStatsReplyFlags>of();
private final static List<OFTableStatsEntry> DEFAULT_ENTRIES = ImmutableList.<OFTableStatsEntry>of();

// OF message fields
private final long xid;
private final Set<OFStatsReplyFlags> flags;
private final List<OFTableStatsEntry> entries;
//
// Immutable default instance
final static OFTableStatsReplyVer10 DEFAULT = new OFTableStatsReplyVer10(
    DEFAULT_XID, DEFAULT_FLAGS, DEFAULT_ENTRIES
);
// package private constructor - used by readers, builders, and factory
// Rejects null flags/entries eagerly so every constructed instance is valid.
OFTableStatsReplyVer10(long xid, Set<OFStatsReplyFlags> flags, List<OFTableStatsEntry> entries) {
    if(flags == null) {
        throw new NullPointerException("OFTableStatsReplyVer10: property flags cannot be null");
    }
    if(entries == null) {
        throw new NullPointerException("OFTableStatsReplyVer10: property entries cannot be null");
    }
    this.xid = xid;
    this.flags = flags;
    this.entries = entries;
}
// Accessors for OF message fields
// Version/type/statsType are fixed for this generated class; the remaining
// accessors return the immutable fields set in the constructor.
@Override
public OFVersion getVersion() {
    return OFVersion.OF_10;
}

@Override
public OFType getType() {
    return OFType.STATS_REPLY;
}

@Override
public long getXid() {
    return xid;
}

@Override
public OFStatsType getStatsType() {
    return OFStatsType.TABLE;
}

@Override
public Set<OFStatsReplyFlags> getFlags() {
    return flags;
}

@Override
public List<OFTableStatsEntry> getEntries() {
    return entries;
}

// Creates a builder pre-populated with this message's field values.
public OFTableStatsReply.Builder createBuilder() {
    return new BuilderWithParent(this);
}
// Builder seeded from a parent message: build() takes each field from this
// builder if it was explicitly set, otherwise from the parent message.
// (Generated by LoxiGen; do not hand-edit.)
static class BuilderWithParent implements OFTableStatsReply.Builder {
    final OFTableStatsReplyVer10 parentMessage;

    // OF message fields
    private boolean xidSet;
    private long xid;
    private boolean flagsSet;
    private Set<OFStatsReplyFlags> flags;
    private boolean entriesSet;
    private List<OFTableStatsEntry> entries;

    BuilderWithParent(OFTableStatsReplyVer10 parentMessage) {
        this.parentMessage = parentMessage;
    }

    @Override
    public OFVersion getVersion() {
        return OFVersion.OF_10;
    }

    @Override
    public OFType getType() {
        return OFType.STATS_REPLY;
    }

    @Override
    public long getXid() {
        return xid;
    }

    @Override
    public OFTableStatsReply.Builder setXid(long xid) {
        this.xid = xid;
        this.xidSet = true;
        return this;
    }

    @Override
    public OFStatsType getStatsType() {
        return OFStatsType.TABLE;
    }

    @Override
    public Set<OFStatsReplyFlags> getFlags() {
        return flags;
    }

    @Override
    public OFTableStatsReply.Builder setFlags(Set<OFStatsReplyFlags> flags) {
        this.flags = flags;
        this.flagsSet = true;
        return this;
    }

    @Override
    public List<OFTableStatsEntry> getEntries() {
        return entries;
    }

    @Override
    public OFTableStatsReply.Builder setEntries(List<OFTableStatsEntry> entries) {
        this.entries = entries;
        this.entriesSet = true;
        return this;
    }

    @Override
    public OFTableStatsReply build() {
        // Fall back to the parent message for any field that was not set.
        long xid = this.xidSet ? this.xid : parentMessage.xid;
        Set<OFStatsReplyFlags> flags = this.flagsSet ? this.flags : parentMessage.flags;
        if(flags == null)
            throw new NullPointerException("Property flags must not be null");
        List<OFTableStatsEntry> entries = this.entriesSet ? this.entries : parentMessage.entries;
        if(entries == null)
            throw new NullPointerException("Property entries must not be null");
        //
        return new OFTableStatsReplyVer10(
            xid,
            flags,
            entries
        );
    }
}
// Standalone builder: build() takes each field from this builder if it was
// explicitly set, otherwise from the class-level DEFAULT_* constants.
// (Generated by LoxiGen; do not hand-edit.)
static class Builder implements OFTableStatsReply.Builder {
    // OF message fields
    private boolean xidSet;
    private long xid;
    private boolean flagsSet;
    private Set<OFStatsReplyFlags> flags;
    private boolean entriesSet;
    private List<OFTableStatsEntry> entries;

    @Override
    public OFVersion getVersion() {
        return OFVersion.OF_10;
    }

    @Override
    public OFType getType() {
        return OFType.STATS_REPLY;
    }

    @Override
    public long getXid() {
        return xid;
    }

    @Override
    public OFTableStatsReply.Builder setXid(long xid) {
        this.xid = xid;
        this.xidSet = true;
        return this;
    }

    @Override
    public OFStatsType getStatsType() {
        return OFStatsType.TABLE;
    }

    @Override
    public Set<OFStatsReplyFlags> getFlags() {
        return flags;
    }

    @Override
    public OFTableStatsReply.Builder setFlags(Set<OFStatsReplyFlags> flags) {
        this.flags = flags;
        this.flagsSet = true;
        return this;
    }

    @Override
    public List<OFTableStatsEntry> getEntries() {
        return entries;
    }

    @Override
    public OFTableStatsReply.Builder setEntries(List<OFTableStatsEntry> entries) {
        this.entries = entries;
        this.entriesSet = true;
        return this;
    }

    //
    @Override
    public OFTableStatsReply build() {
        // Fall back to the declared defaults for any field that was not set.
        long xid = this.xidSet ? this.xid : DEFAULT_XID;
        Set<OFStatsReplyFlags> flags = this.flagsSet ? this.flags : DEFAULT_FLAGS;
        if(flags == null)
            throw new NullPointerException("Property flags must not be null");
        List<OFTableStatsEntry> entries = this.entriesSet ? this.entries : DEFAULT_ENTRIES;
        if(entries == null)
            throw new NullPointerException("Property entries must not be null");
        return new OFTableStatsReplyVer10(
            xid,
            flags,
            entries
        );
    }
}
final static Reader READER = new Reader();

// Deserializer: validates the fixed header fields (version, type, length,
// statsType), then reads the variable-length entry list. Returns null when
// the buffer does not yet contain the whole message (reader index is reset
// so the caller can retry). (Generated by LoxiGen; do not hand-edit.)
static class Reader implements OFMessageReader<OFTableStatsReply> {
    @Override
    public OFTableStatsReply readFrom(ChannelBuffer bb) throws OFParseError {
        int start = bb.readerIndex();
        // fixed value property version == 1
        byte version = bb.readByte();
        if(version != (byte) 0x1)
            throw new OFParseError("Wrong version: Expected=OFVersion.OF_10(1), got="+version);
        // fixed value property type == 17
        byte type = bb.readByte();
        if(type != (byte) 0x11)
            throw new OFParseError("Wrong type: Expected=OFType.STATS_REPLY(17), got="+type);
        int length = U16.f(bb.readShort());
        if(length < MINIMUM_LENGTH)
            throw new OFParseError("Wrong length: Expected to be >= " + MINIMUM_LENGTH + ", was: " + length);
        if(bb.readableBytes() + (bb.readerIndex() - start) < length) {
            // Buffer does not have all data yet
            bb.readerIndex(start);
            return null;
        }
        if(logger.isTraceEnabled())
            logger.trace("readFrom - length={}", length);
        long xid = U32.f(bb.readInt());
        // fixed value property statsType == 3
        short statsType = bb.readShort();
        if(statsType != (short) 0x3)
            throw new OFParseError("Wrong statsType: Expected=OFStatsType.TABLE(3), got="+statsType);
        Set<OFStatsReplyFlags> flags = OFStatsReplyFlagsSerializerVer10.readFrom(bb);
        List<OFTableStatsEntry> entries = ChannelUtils.readList(bb, length - (bb.readerIndex() - start), OFTableStatsEntryVer10.READER);
        OFTableStatsReplyVer10 tableStatsReplyVer10 = new OFTableStatsReplyVer10(
                  xid,
                  flags,
                  entries
                );
        if(logger.isTraceEnabled())
            logger.trace("readFrom - read={}", tableStatsReplyVer10);
        return tableStatsReplyVer10;
    }
}
// Feeds this message's identity into a Guava hasher via the shared funnel.
public void putTo(PrimitiveSink sink) {
    FUNNEL.funnel(this, sink);
}
final static OFTableStatsReplyVer10Funnel FUNNEL = new OFTableStatsReplyVer10Funnel();

// Funnel mirrors the wire layout (minus the length field) so equal messages
// hash equally. (Generated by LoxiGen; do not hand-edit.)
static class OFTableStatsReplyVer10Funnel implements Funnel<OFTableStatsReplyVer10> {
    private static final long serialVersionUID = 1L;
    @Override
    public void funnel(OFTableStatsReplyVer10 message, PrimitiveSink sink) {
        // fixed value property version = 1
        sink.putByte((byte) 0x1);
        // fixed value property type = 17
        sink.putByte((byte) 0x11);
        // FIXME: skip funnel of length
        sink.putLong(message.xid);
        // fixed value property statsType = 3
        sink.putShort((short) 0x3);
        OFStatsReplyFlagsSerializerVer10.putTo(message.flags, sink);
        FunnelUtils.putList(message.entries, sink);
    }
}
// Serializes this message to the given buffer in OpenFlow 1.0 wire format.
public void writeTo(ChannelBuffer bb) {
WRITER.write(bb, this);
}
// Shared, stateless writer instance.
final static Writer WRITER = new Writer();
// Wire-format writer: emits fields in the exact on-wire order and patches
// the 16-bit length field once the total message size is known.
static class Writer implements OFMessageWriter<OFTableStatsReplyVer10> {
@Override
public void write(ChannelBuffer bb, OFTableStatsReplyVer10 message) {
int startIndex = bb.writerIndex();
// fixed value property version = 1
bb.writeByte((byte) 0x1);
// fixed value property type = 17
bb.writeByte((byte) 0x11);
// length is length of variable message, will be updated at the end
int lengthIndex = bb.writerIndex();
bb.writeShort(U16.t(0));
bb.writeInt(U32.t(message.xid));
// fixed value property statsType = 3
bb.writeShort((short) 0x3);
OFStatsReplyFlagsSerializerVer10.writeTo(bb, message.flags);
ChannelUtils.writeList(bb, message.entries);
// update length field: total bytes written for this message, patched
// back into the placeholder reserved above
int length = bb.writerIndex() - startIndex;
bb.setShort(lengthIndex, length);
}
}
/**
 * Renders this message as
 * {@code OFTableStatsReplyVer10(xid=..., flags=..., entries=...)}.
 */
@Override
public String toString() {
    return "OFTableStatsReplyVer10("
            + "xid=" + xid
            + ", flags=" + flags
            + ", entries=" + entries
            + ")";
}
/**
 * Two replies are equal iff they are of the exact same class and their
 * xid, flags and entries fields are all equal (null-safe).
 */
@Override
public boolean equals(Object obj) {
    if (this == obj)
        return true;
    if (obj == null || getClass() != obj.getClass())
        return false;
    OFTableStatsReplyVer10 that = (OFTableStatsReplyVer10) obj;
    if (xid != that.xid)
        return false;
    if (flags == null ? that.flags != null : !flags.equals(that.flags))
        return false;
    return entries == null ? that.entries == null : entries.equals(that.entries);
}
/**
 * Hash consistent with {@link #equals(Object)}: folds xid, flags and
 * entries with the standard 31-based recipe.
 */
@Override
public int hashCode() {
    final int prime = 31;
    // Fold the 64-bit xid into 32 bits as the hash seed. The generated
    // code initialized an accumulator to 1 and immediately overwrote it
    // ("result = prime * (int)(...)" ignored the initial value); the dead
    // store is removed here while keeping hash values exactly the same.
    int result = prime * (int) (xid ^ (xid >>> 32));
    result = prime * result + ((flags == null) ? 0 : flags.hashCode());
    result = prime * result + ((entries == null) ? 0 : entries.hashCode());
    return result;
}
}
| |
/*
* Copyright 2013 Romain Gilles
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.javabits.yar.guice.osgi;
import com.google.common.collect.ImmutableList;
import com.google.inject.*;
import com.google.inject.Module;
import com.google.inject.name.Names;
import org.javabits.yar.BlockingSupplierRegistry;
import org.javabits.yar.guice.RegistrationHandler;
import org.javabits.yar.guice.RegistryListenerHandler;
import org.osgi.framework.*;
import static com.google.common.base.Preconditions.checkNotNull;
import static java.util.Arrays.asList;
import static org.javabits.yar.guice.YarGuices.newRegistryDeclarationModule;
/**
* This class provides utility methods to help you to handle Guice Injector creation.
* <h2>Injector creation</h2>
* <p>
* You have several {@code YarGuices.newInjector(...)} methods that try to mimic the {@code Guice.createInjector(...)}
* methods.
* <pre>
* YarGuices.newInjector(bundleContext, PRODUCTION
* , new AbstractModule() {...}
* , new AbstractModule() {...}
* , ...
* , new AbstractRegistryModule() {...}
* , new AbstractRegistryModule() {...}
* , ...
* , new RegistryModule() {...});
* </pre>
 * These methods chain the creation of the injector with the
 * {@link #start(org.osgi.framework.BundleContext, com.google.inject.Injector)} method to initialize Yar properly.
* </p>
* <p>
* <b>Warning:</b> You must add one and only one instance of {@link org.javabits.yar.guice.RegistryModule}
* but as many as you want {@link org.javabits.yar.guice.AbstractRegistryModule}.
* </p>
* <p>
 * To create your injector by yourself you must add to the list of modules provided to the {@code Guice#createInjector()} method the
 * module provided by the {@link YarOSGis#newYarOSGiModule(org.osgi.framework.BundleContext)}. This module binds:
* <ul>
* <li>the {@code Bundle}</li>
* <li>the {@code BundleContext}</li>
* <li>the {@code Registry}</li>
* <li>the {@code BlockingSupplierRegistry}</li>
* </ul>
 * Then you have to start the registry by calling the {@code start(...)} method.
* </p>
*
* @author Romain Gilles
*/
public final class YarOSGis {
// Message used when no BlockingSupplierRegistry service is available in the
// OSGi service registry (see getBlockingSupplierRegistry()).
private static final String SERVICE_REGISTRY_ERROR_MESSAGE = "no BlockingSupplierRegistry service reference found in OSGi service registry";
// Binding key of the bundle listener that cleans up registry entries when
// the owning bundle stops; bound to BundleStoppingListener in newYarOSGiModule().
public static final Key<BundleListener> CLEANUP_BUNDLE_LISTENER = Key.get(BundleListener.class, Names.named("cleanup"));
// Static utility class: instantiation is forbidden.
private YarOSGis() {
throw new AssertionError("not for you!");
}
// Mirrors Guice.createInjector(Module...) and then starts Yar (see start()).
public static Injector newInjector(BundleContext bundleContext, Module... modules) {
return start(bundleContext, Guice.createInjector(getModules(bundleContext, asList(modules))));
}
// Mirrors Guice.createInjector(Stage, Module...) and then starts Yar.
public static Injector newInjector(BundleContext bundleContext, Stage stage, Module... modules) {
return start(bundleContext, Guice.createInjector(stage, getModules(bundleContext, asList(modules))));
}
// Mirrors Guice.createInjector(Iterable) and then starts Yar.
public static Injector newInjector(BundleContext bundleContext, Iterable<Module> modules) {
return start(bundleContext, Guice.createInjector(getModules(bundleContext, modules)));
}
// Mirrors Guice.createInjector(Stage, Iterable) and then starts Yar.
public static Injector newInjector(BundleContext bundleContext, Stage stage, Iterable<Module> modules) {
return start(bundleContext, Guice.createInjector(stage, getModules(bundleContext, modules)));
}
/**
 * The start method is responsible to 'start' the given injector.
 * <p>More formally, it:
 * <ul>
 * <li>registers the injector as an OSGi service</li>
 * <li>gets the injector's supplier registration handler and registry listener handler</li>
 * <li>saves the handlers into the OSGi registry</li>
 * <li>initializes the handlers</li>
 * <li>adds a cleaner to remove all the registered suppliers and listeners on bundle shutdown</li>
 * </ul>
 * </p>
 * <p><b>Warning:</b>This injector must have been created with
 * one and only one {@link org.javabits.yar.guice.RegistryModule}
 * to ensure bind registry handlers for Guice. But you can add/use as many as you want
 * {@link org.javabits.yar.guice.AbstractRegistryModule}.</p>
 *
 * @param bundleContext the bundle context from where the injector is created.
 * @param injector the injector that must be started.
 * @return the given injector.
 */
public static Injector start(BundleContext bundleContext, Injector injector) {
// NOTE(review): the stopping listener is attached before the handlers are
// initialized — presumably so a bundle stop during init cannot miss the
// cleanup; confirm before reordering these calls.
registerInjector(bundleContext, injector);
RegistrationHandler registrationHandler = getRegistrationHandler(injector);
registerRegistrationHandler(bundleContext, registrationHandler);
RegistryListenerHandler registryListenerHandler = getRegistryListenerHandler(injector);
registerListenerHandler(bundleContext, registryListenerHandler);
attachStoppingListener(bundleContext, injector);
initHandlers(registrationHandler, registryListenerHandler);
return injector;
}
// Initializes both handlers (publishes pending registrations/listeners).
private static void initHandlers(RegistrationHandler registrationHandler, RegistryListenerHandler registryListenerHandler) {
registrationHandler.init();
registryListenerHandler.init();
}
// Publishes the injector itself as an OSGi service. The returned
// ServiceRegistration is intentionally not kept: the service lives for the
// lifetime of the bundle.
private static void registerInjector(BundleContext bundleContext, Injector injector) {
bundleContext.registerService(Injector.class, injector, null);
}
// Publishes the supplier registration handler as an OSGi service.
private static ServiceRegistration<RegistrationHandler> registerRegistrationHandler(BundleContext bundleContext, RegistrationHandler registrationHandler) {
return bundleContext.registerService(RegistrationHandler.class, registrationHandler, null);
}
// Looks up the supplier registration handler bound in the injector.
private static RegistrationHandler getRegistrationHandler(Injector injector) {
return injector.getInstance(RegistrationHandler.class);
}
// Publishes the registry listener handler as an OSGi service.
private static ServiceRegistration<RegistryListenerHandler> registerListenerHandler(BundleContext bundleContext, RegistryListenerHandler registryListenerHandler) {
return bundleContext.registerService(RegistryListenerHandler.class, registryListenerHandler, null);
}
// Looks up the registry listener handler bound in the injector.
private static RegistryListenerHandler getRegistryListenerHandler(Injector injector) {
return injector.getInstance(RegistryListenerHandler.class);
}
// Attaches the cleanup listener so the bundle's registry entries are
// removed when the bundle stops.
private static void attachStoppingListener(BundleContext bundleContext, Injector injector) {
BundleListener listener = getCleanupBundleListener(injector);
bundleContext.addBundleListener(listener);
}
/**
 * Returns the bundle listener responsible to cleanup any remaining
 * registry entries associated to the current bundle.
 * <p>This method should be used with caution when you want to handle the life-cycle
 * of the injector associated to a bundle without going through a bundle restart. It allows
 * you to remove the bundle listener to avoid some leaks.</p>
 *
 * @param injector associated to the current bundle
 * @return the cleanup bundle listener.
 */
public static BundleListener getCleanupBundleListener(Injector injector) {
return injector.getInstance(CLEANUP_BUNDLE_LISTENER);
}
// Prepends the Yar/OSGi bootstrap module to the caller-supplied modules.
private static Iterable<Module> getModules(BundleContext bundleContext, Iterable<Module> modules) {
ImmutableList.Builder<Module> modulesBuilder = ImmutableList.builder();
modulesBuilder.add(newYarOSGiModule(bundleContext));
modulesBuilder.addAll(modules);
return modulesBuilder.build();
}
/**
 * Create a new module that binds the OSGi elements: {@code Bundle} and {@code BundleContext}, and
 * the Yar elements: {@code Registry} and {@code BlockingSupplierRegistry}.
 * This module is required to make Yar work properly in the registry.
 * This method returns an instance of {@code AbstractModule} and not an instance of
 * {@link org.javabits.yar.guice.RegistryModule}.
 *
 * @param bundleContext the bundle context associated to the injector.
 * @return A module that binds all the {@code Registry} interfaces + the {@code Bundle} and the {@code BundleContext}
 */
public static Module newYarOSGiModule(final BundleContext bundleContext) {
// Resolve the shared registry service eagerly so a missing service fails
// fast here rather than at injection time.
final BundleRegistry blockingSupplierRegistry = getBlockingSupplierRegistry(bundleContext);
return new AbstractModule() {
@Override
protected void configure() {
Key<BundleRegistry> registryKey = Key.get(BundleRegistry.class);
bind(registryKey).toInstance(blockingSupplierRegistry);
bind(OSGiRegistry.class).to(registryKey);
bind(CLEANUP_BUNDLE_LISTENER).to(BundleStoppingListener.class);
install(newRegistryDeclarationModule(registryKey));
install(newOSGiModule(bundleContext));
}
};
}
// Binds the raw OSGi handles (BundleContext and owning Bundle).
private static Module newOSGiModule(final BundleContext bundleContext) {
return new AbstractModule() {
@Override
protected void configure() {
bind(BundleContext.class).toInstance(bundleContext);
bind(Bundle.class).toInstance(bundleContext.getBundle());
}
};
}
// Fetches the shared BlockingSupplierRegistry service from the OSGi service
// registry and wraps it for this bundle; throws NPE with a descriptive
// message when the service (or its reference) is absent.
private static BundleRegistry getBlockingSupplierRegistry(BundleContext bundleContext) {
ServiceReference<BlockingSupplierRegistry> serviceReference = checkNotNull(bundleContext.getServiceReference(BlockingSupplierRegistry.class)
, SERVICE_REGISTRY_ERROR_MESSAGE);
return new BundleRegistry(checkNotNull(bundleContext.getService(serviceReference), "BlockingSupplierRegistry service not available"), bundleContext.getBundle());
}
// Synchronous listener that clears this bundle's suppliers, listeners and
// remaining registrations when the owning bundle reaches STOPPING.
@Singleton
static class BundleStoppingListener implements SynchronousBundleListener {
private final RegistrationHandler registrationHandler;
private final RegistryListenerHandler registryListenerHandler;
// Id of the owning bundle; events from other bundles are ignored.
private final long bundleId;
private final OSGiRegistry forwardingRegistryWrapper;
@Inject
BundleStoppingListener(RegistrationHandler registrationHandler, RegistryListenerHandler registryListenerHandler, OSGiRegistry forwardingRegistryWrapper, Bundle bundle) {
this.registrationHandler = registrationHandler;
this.registryListenerHandler = registryListenerHandler;
this.forwardingRegistryWrapper = forwardingRegistryWrapper;
this.bundleId = bundle.getBundleId();
}
@Override
public void bundleChanged(BundleEvent bundleEvent) {
// Only react to the STOPPING event of our own bundle.
if (isNotStopping(bundleEvent)) {
return;
}
clearSupplierRegistration();
clearListenerRegistration();
clearMissingRegistrations();
}
// True unless this event is STOPPING and targets the owning bundle.
private boolean isNotStopping(BundleEvent bundleEvent) {
return BundleEvent.STOPPING != bundleEvent.getType() || bundleEvent.getBundle().getBundleId() != bundleId;
}
private void clearSupplierRegistration() {
registrationHandler.clear();
}
private void clearListenerRegistration() {
registryListenerHandler.clear();
}
private void clearMissingRegistrations() {
forwardingRegistryWrapper.clear();
}
}
}
| |
/*
* Copyright (C) 2007 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.dx.rop.cst;
import com.android.dx.rop.type.Type;
import java.util.HashMap;
/**
* Constants that represent an arbitrary type (reference or primitive).
*/
public final class CstType extends TypedConstant {
    /** {@code non-null;} map of interned types */
    private static final HashMap<Type, CstType> interns =
        new HashMap<Type, CstType>(100);
    /** {@code non-null;} instance corresponding to the class {@code Object} */
    public static final CstType OBJECT = intern(Type.OBJECT);
    /** {@code non-null;} instance corresponding to the class {@code Boolean} */
    public static final CstType BOOLEAN = intern(Type.BOOLEAN_CLASS);
    /** {@code non-null;} instance corresponding to the class {@code Byte} */
    public static final CstType BYTE = intern(Type.BYTE_CLASS);
    /** {@code non-null;} instance corresponding to the class {@code Character} */
    public static final CstType CHARACTER = intern(Type.CHARACTER_CLASS);
    /** {@code non-null;} instance corresponding to the class {@code Double} */
    public static final CstType DOUBLE = intern(Type.DOUBLE_CLASS);
    /** {@code non-null;} instance corresponding to the class {@code Float} */
    public static final CstType FLOAT = intern(Type.FLOAT_CLASS);
    /** {@code non-null;} instance corresponding to the class {@code Long} */
    public static final CstType LONG = intern(Type.LONG_CLASS);
    /** {@code non-null;} instance corresponding to the class {@code Integer} */
    public static final CstType INTEGER = intern(Type.INTEGER_CLASS);
    /** {@code non-null;} instance corresponding to the class {@code Short} */
    public static final CstType SHORT = intern(Type.SHORT_CLASS);
    /** {@code non-null;} instance corresponding to the class {@code Void} */
    public static final CstType VOID = intern(Type.VOID_CLASS);
    /** {@code non-null;} instance corresponding to the type {@code boolean[]} */
    public static final CstType BOOLEAN_ARRAY = intern(Type.BOOLEAN_ARRAY);
    /** {@code non-null;} instance corresponding to the type {@code byte[]} */
    public static final CstType BYTE_ARRAY = intern(Type.BYTE_ARRAY);
    /** {@code non-null;} instance corresponding to the type {@code char[]} */
    public static final CstType CHAR_ARRAY = intern(Type.CHAR_ARRAY);
    /** {@code non-null;} instance corresponding to the type {@code double[]} */
    public static final CstType DOUBLE_ARRAY = intern(Type.DOUBLE_ARRAY);
    /** {@code non-null;} instance corresponding to the type {@code float[]} */
    public static final CstType FLOAT_ARRAY = intern(Type.FLOAT_ARRAY);
    /** {@code non-null;} instance corresponding to the type {@code long[]} */
    public static final CstType LONG_ARRAY = intern(Type.LONG_ARRAY);
    /** {@code non-null;} instance corresponding to the type {@code int[]} */
    public static final CstType INT_ARRAY = intern(Type.INT_ARRAY);
    /** {@code non-null;} instance corresponding to the type {@code short[]} */
    public static final CstType SHORT_ARRAY = intern(Type.SHORT_ARRAY);
    /** {@code non-null;} the underlying type */
    private final Type type;
    /**
     * {@code null-ok;} the type descriptor corresponding to this instance, if
     * calculated
     */
    private CstString descriptor;
    /**
     * Returns an instance of this class that represents the wrapper
     * class corresponding to a given primitive type. For example, if
     * given {@link Type#INT}, this method returns the class reference
     * {@code java.lang.Integer}.
     *
     * @param primitiveType {@code non-null;} the primitive type
     * @return {@code non-null;} the corresponding wrapper class
     * @throws IllegalArgumentException if {@code primitiveType} is not
     * actually a primitive type
     */
    public static CstType forBoxedPrimitiveType(Type primitiveType) {
        switch (primitiveType.getBasicType()) {
            case Type.BT_BOOLEAN: return BOOLEAN;
            case Type.BT_BYTE:    return BYTE;
            case Type.BT_CHAR:    return CHARACTER;
            case Type.BT_DOUBLE:  return DOUBLE;
            case Type.BT_FLOAT:   return FLOAT;
            case Type.BT_INT:     return INTEGER;
            case Type.BT_LONG:    return LONG;
            case Type.BT_SHORT:   return SHORT;
            case Type.BT_VOID:    return VOID;
        }
        throw new IllegalArgumentException("not primitive: " + primitiveType);
    }
    /**
     * Returns an interned instance of this class for the given type.
     *
     * @param type {@code non-null;} the underlying type
     * @return {@code non-null;} an appropriately-constructed instance
     */
    public static CstType intern(Type type) {
        // Synchronize on the map: interning must be race-free so a given
        // Type always maps to exactly one CstType instance.
        synchronized (interns) {
            CstType cst = interns.get(type);
            if (cst == null) {
                cst = new CstType(type);
                interns.put(type, cst);
            }
            return cst;
        }
    }
    /**
     * Constructs an instance.
     *
     * @param type {@code non-null;} the underlying type
     */
    public CstType(Type type) {
        if (type == null) {
            throw new NullPointerException("type == null");
        }
        // Refer to the static constant via the class (the original read it
        // through the instance as type.KNOWN_NULL, which obscured that
        // KNOWN_NULL is a static member of Type).
        if (type == Type.KNOWN_NULL) {
            throw new UnsupportedOperationException(
                    "KNOWN_NULL is not representable");
        }
        this.type = type;
        this.descriptor = null;
    }
    /** {@inheritDoc} */
    @Override
    public boolean equals(Object other) {
        if (!(other instanceof CstType)) {
            return false;
        }
        // Reference comparison is sufficient: Type instances are interned.
        return type == ((CstType) other).type;
    }
    /** {@inheritDoc} */
    @Override
    public int hashCode() {
        return type.hashCode();
    }
    /** {@inheritDoc} */
    @Override
    protected int compareTo0(Constant other) {
        // Order by descriptor string, consistent across constant pools.
        String thisDescriptor = type.getDescriptor();
        String otherDescriptor = ((CstType) other).type.getDescriptor();
        return thisDescriptor.compareTo(otherDescriptor);
    }
    /** {@inheritDoc} */
    @Override
    public String toString() {
        return "type{" + toHuman() + '}';
    }
    /** {@inheritDoc} */
    public Type getType() {
        // As a constant, any CstType is of type Class (use getClassType()
        // for the type this constant refers to).
        return Type.CLASS;
    }
    /** {@inheritDoc} */
    @Override
    public String typeName() {
        return "type";
    }
    /** {@inheritDoc} */
    @Override
    public boolean isCategory2() {
        return false;
    }
    /** {@inheritDoc} */
    public String toHuman() {
        return type.toHuman();
    }
    /**
     * Gets the underlying type (as opposed to the type corresponding
     * to this instance as a constant, which is always
     * {@code Class}).
     *
     * @return {@code non-null;} the type corresponding to the name
     */
    public Type getClassType() {
        return type;
    }
    /**
     * Gets the type descriptor for this instance.
     *
     * @return {@code non-null;} the descriptor
     */
    public CstString getDescriptor() {
        // NOTE(review): lazy initialization without synchronization —
        // concurrent callers may each construct a CstString; presumed
        // benign since the value is deterministic, but confirm if this
        // class is used across threads.
        if (descriptor == null) {
            descriptor = new CstString(type.getDescriptor());
        }
        return descriptor;
    }
    /**
     * Returns a human readable package name for this type, like "java.util".
     * If this is an array type, this returns the package name of the array's
     * component type. If this is a primitive type, this returns "default".
     */
    public String getPackageName() {
        // descriptor is a string like "[[Ljava/util/String;"
        String descriptor = getDescriptor().getString();
        int lastSlash = descriptor.lastIndexOf('/');
        int lastLeftSquare = descriptor.lastIndexOf('['); // -1 unless this is an array
        if (lastSlash == -1) {
            // No '/' means the class is in the unnamed (default) package,
            // or the type is primitive.
            return "default";
        } else {
            // +2 to skip the '[' (or position -1 for non-arrays) and the
            // 'L' prefix of the object descriptor.
            return descriptor.substring(lastLeftSquare + 2, lastSlash).replace('/', '.');
        }
    }
}
| |
/*
* Copyright 2015 Terence Doerksen
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ca.coffeeshopstudio.icegl.gl;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.opengl.GLES20;
import android.opengl.GLUtils;
import java.nio.IntBuffer;
/**
* Texture used by the game
*/
public class GLTexture
{
    private static final int MAX_TEXTURES = 1; //per atlas we allow, for future proofing maybe
    // Design-time dimensions of the atlas, in pixels.
    private float targetWidth = 0;
    private float targetHeight = 0;
    // Buffer receiving the GL-generated texture name(s).
    private IntBuffer t;
    private int textureID = -1;
    // Size of one sprite cell in the atlas (0 when the atlas is a single image).
    private float spriteSize = -1;
    // Actual dimensions of the loaded bitmap, in pixels.
    private float textureWidth = -1;
    private float textureHeight = -1;
    // Per-sprite texture-coordinate extents (spriteSize / texture dimension).
    private float textureUratio = -1;
    private float textureVratio = -1;

    /**
     * Constructor for the texture atlas we are loading - does not initialize a sprite size,
     * useful for loading complete images as opposed to a sprite map.
     * NOTE(review): spriteSize is passed as 0 here, so getUratio()/getVratio()
     * return 0 and getGridWith()/getGridHeight() divide by zero — only use the
     * grid/ratio accessors with the sprite-map constructors.
     * @param context active activity context
     * @param textureAtlas file name of the atlas stored under drawable
     * @param targetWidth width, in pixels, that the atlas was drawn in
     * @param targetHeight height, in pixels, that the atlas was drawn in
     */
    public GLTexture(Context context, String textureAtlas, float targetWidth, float targetHeight) {
        loadFromContext(context, textureAtlas, 0.0f, targetWidth, targetHeight);
    }

    /**
     * Constructor for the texture atlas we are loading
     * @param context active activity context
     * @param textureAtlas file name of the atlas stored under drawable
     * @param spriteSize ideal size of each sprite
     * @param targetWidth width, in pixels, that the atlas was drawn in
     * @param targetHeight height, in pixels, that the atlas was drawn in
     */
    public GLTexture(Context context, String textureAtlas, float spriteSize, float targetWidth, float targetHeight)
    {
        loadFromContext(context, textureAtlas, spriteSize, targetWidth, targetHeight);
    }

    /**
     * Constructor for the texture atlas we are loading
     * @param bmp Bitmap we are using for the texture
     * @param spriteSize ideal size of each sprite
     * @param targetWidth width, in pixels, that the atlas was drawn in
     * @param targetHeight height, in pixels, that the atlas was drawn in
     */
    public GLTexture(Bitmap bmp, float spriteSize, float targetWidth, float targetHeight) {
        loadFromBmp(bmp, spriteSize, targetWidth, targetHeight);
    }

    /**
     * Decodes the named drawable and hands it to loadFromBmp().
     * BUG FIX: the original additionally called generateTexture(bmp) here,
     * BEFORE initTexture() had allocated a texture name (textureID was still
     * -1) — and generateTexture() recycles the bitmap, so the second upload
     * inside loadFromBmp() then operated on a recycled bitmap and crashed.
     * The premature call is removed; loadFromBmp() performs the single,
     * correctly-ordered init + upload.
     */
    private void loadFromContext(Context context, String textureAtlas, float spriteSize, float targetWidth, float targetHeight) {
        if (context == null)
            throw new IllegalArgumentException("NULL Context passed in for texture atlas");
        Bitmap bmp = getBitmap(context, textureAtlas);
        loadFromBmp(bmp, spriteSize, targetWidth, targetHeight);
    }

    /**
     * Validates the parameters, allocates a texture name and uploads the
     * bitmap to it. The bitmap is recycled by generateTexture().
     */
    private void loadFromBmp(Bitmap bmp, float spriteSize, float targetWidth, float targetHeight) {
        if (spriteSize < 0)
            throw new IllegalArgumentException("Sprite size must be positive value");
        if (targetHeight < 0)
            throw new IllegalArgumentException("target height must be a positive value");
        if (targetWidth < 0)
            throw new IllegalArgumentException("target width must be positive value");
        this.targetHeight = targetHeight;
        this.targetWidth = targetWidth;
        this.spriteSize = spriteSize;
        t = IntBuffer.allocate(MAX_TEXTURES);
        initTexture();
        generateTexture(bmp);
    }

    /**
     * Build our bitmap and get it's resource ID
     * @param context application context
     * @param atlasName name of the atlas to load in
     * @return A valid Bitmap object
     */
    private Bitmap getBitmap(Context context, String atlasName) {
        //now create a bitmap from the drawable resource
        atlasName = "drawable/" + atlasName;
        // Retrieve our image from resources.
        int id = context.getResources().getIdentifier(atlasName, null, context.getPackageName());
        // Temporary create a bitmap
        return BitmapFactory.decodeResource(context.getResources(), id);
    }

    /**
     * Initialize our texture object: asks GL for one texture name and caches it.
     */
    private void initTexture()
    {
        //add our texture and initialize the buffer
        GLES20.glGenTextures(1, t);
        textureID = t.get(0);
    }

    /**
     * Regenerate our texture for the passed in bitmap object
     * @param bmp bitmap we will be loading in (recycled by this call)
     */
    public void rebuildTexture(Bitmap bmp) {
        generateTexture(bmp);
    }

    /**
     * Generate our texture for the passed in bitmap object. The bitmap is
     * recycled once uploaded — callers must not reuse it afterwards.
     * @param bmp bitmap we will be loading in
     */
    private void generateTexture(Bitmap bmp) {
        // NOTE(review): GL_TEXTURE0 + textureID treats the texture NAME as a
        // texture UNIT index; these are different namespaces and large names
        // would exceed the available units — confirm the intended unit here.
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + textureID);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureID); // Bind the texture to this unit
        // Set filtering
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
        // Set wrapping mode
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
        // Load the bitmap into the bound texture.
        GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bmp, 0);
        textureWidth = bmp.getWidth();
        textureHeight = bmp.getHeight();
        textureUratio = spriteSize / textureWidth;
        textureVratio = spriteSize / textureHeight;
        // We are done using the bitmap so we should recycle it.
        bmp.recycle();
    }

    /**
     * The texture ID of this atlas
     * @return valid texture ID
     */
    public int getTextureID() {
        return textureID;
    }

    /**
     * How wide each sprite is, measured between 0 to 1
     * @return ratio of the sprites width
     */
    public float getUratio() {
        return textureUratio;
    }

    /**
     * How high each sprite is, measured between 0 to 1
     * @return ratio of the sprites height
     */
    public float getVratio() {
        return textureVratio;
    }

    /**
     * The height of the texture that we designed for
     * @return target height
     */
    public float getTargetHeight()
    {
        return targetHeight;
    }

    /**
     * The width of the texture that we designed for
     * @return target width
     */
    public float getTargetWidth()
    {
        return targetWidth;
    }

    /**
     * Returns how wide our grid is measured in sprites.
     * (Method name kept as-is — "Width" is misspelled — because callers
     * depend on it; undefined when spriteSize is 0.)
     *
     * @return how many grid units across our texture is
     */
    public float getGridWith() {
        return targetWidth / spriteSize;
    }

    /**
     * Returns how tall our grid is measured in sprites
     *
     * @return how many grid units vertically our texture is
     */
    public float getGridHeight() {
        return targetHeight / spriteSize;
    }
}
| |
package org.grobid.trainer.stax;
import com.ctc.wstx.stax.WstxInputFactory;
import org.apache.commons.lang3.StringUtils;
import org.codehaus.stax2.XMLStreamReader2;
import org.grobid.core.analyzers.GrobidAnalyzer;
import org.grobid.core.lang.Language;
import org.grobid.core.lexicon.NERLexicon;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.xml.stream.XMLEventReader;
import javax.xml.stream.XMLEventWriter;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.events.XMLEvent;
import java.io.*;
import java.util.List;
import static org.apache.commons.lang3.StringUtils.*;
/**
* Created by lfoppiano on 29/08/16.
*/
public class INRIALeMondeCorpusStaxHandler implements StaxParserContentHandler {
private static Logger LOGGER = LoggerFactory.getLogger(INRIALeMondeCorpusStaxHandler.class);
// Destination for the generated CoNLL-style output, one document at a time.
private Writer writer;
// Accumulates the output rows for the document currently being parsed.
private StringBuilder sb;
// Parser state: which element kind we are currently inside.
private boolean inSentence = false;
private boolean inDocument = false;
private boolean inNamedEntity = false;
// Attributes of the ENAMEX annotation currently being processed.
private String entityType = null;
private String disambiguatedName = null;
private String entitySubType = null;
private String uri = null;
//Ignored for the moment as they are too specific
private String comment = null;
private String gender = null;
// Tokenizer used to split sentence text into individual output tokens.
private GrobidAnalyzer analyzer = GrobidAnalyzer.getInstance();
// Creates a handler that only accumulates output in memory (no writer).
public INRIALeMondeCorpusStaxHandler() {
this.sb = new StringBuilder();
}
// Creates a handler that writes each completed document to the given writer.
public INRIALeMondeCorpusStaxHandler(Writer writer) {
this();
this.writer = writer;
}
@Override
public void onStartDocument(XMLStreamReader2 xmlStreamReader2) {
// Intentionally empty: no setup is needed at the XML-document level.
}
@Override
public void onEndDocument(XMLStreamReader2 xmlStreamReader2) {
// Intentionally empty: output is flushed per <document> element instead
// (see onEndElement).
}
/**
 * Handles opening tags: a new {@code document} element emits a
 * "-DOCSTART- " marker followed by the document id, {@code sentence}
 * and {@code ENAMEX} elements raise the corresponding state flags, and
 * entity attributes are captured for the latter.
 */
@Override
public void onStartElement(XMLStreamReader2 reader) {
    String element = reader.getName().getLocalPart();
    if ("document".equals(element)) {
        inDocument = true;
        sb.append("-DOCSTART- ").append(reader.getAttributeValue("", "id")).append("\n");
        return;
    }
    if ("sentence".equals(element)) {
        inSentence = true;
        return;
    }
    if ("ENAMEX".equals(element)) {
        inNamedEntity = true;
        readOtherAttributes(reader);
    }
}
/**
 * Captures the attributes of an opening ENAMEX tag into the handler
 * state. The secondary attributes (name, sub_type, uri) are only read
 * when a non-blank entity type is present.
 */
private void readOtherAttributes(XMLStreamReader2 reader) {
    entityType = getAttributeFiltered(reader, "type");
    if (isNotBlank(entityType)) {
        disambiguatedName = getAttributeFiltered(reader, "name");
        entitySubType = getAttributeFiltered(reader, "sub_type");
        uri = getAttributeFiltered(reader, "uri");
    }
}
/**
 * Reads the named attribute, mapping the literal string {@code "null"}
 * to a real {@code null}. The original looked the attribute up twice;
 * it is now read once into a local.
 */
private String getAttributeFiltered(XMLStreamReader2 reader, String name) {
    String value = reader.getAttributeValue("", name);
    return StringUtils.equals(value, "null") ? null : value;
}
/**
 * Handles closing tags: flushes the completed document to the writer
 * (followed by an empty separator line), ends a sentence with a newline,
 * and resets the per-entity state when an ENAMEX annotation closes.
 */
@Override
public void onEndElement(XMLStreamReader2 reader) {
    String element = reader.getName().getLocalPart();
    if (element.equals("document")) {
        try {
            writer.write(sb.append("\n").toString());
        } catch (IOException e) {
            // Preserve the cause: the original threw a bare RuntimeException,
            // discarding the underlying IOException entirely.
            throw new RuntimeException("Failed to write document output", e);
        }
        sb = new StringBuilder();
    } else if (element.equals("sentence")) {
        inSentence = false;
        sb.append("\n");
    } else if (element.equals("ENAMEX")) {
        inNamedEntity = false;
        // Reset ALL per-entity fields (the original left disambiguatedName
        // and uri stale) so no state can leak into the next annotation.
        entityType = null;
        entitySubType = null;
        disambiguatedName = null;
        uri = null;
    }
}
/**
 * Emits one output line per token of the current text node. Tokens inside
 * an ENAMEX annotation carry the translated NER label plus optional
 * disambiguated-name and URI columns; all other tokens are labelled "O".
 */
@Override
public void onCharacter(XMLStreamReader2 reader) {
if (inSentence || inNamedEntity) {
String text = reader.getText();
//text = trim(text);
if (isEmpty(text)) {
return;
}
List<String> tokens = null;
try {
// Tokenize as French text.
tokens = analyzer.tokenize(text, new Language(Language.FR, 1.0));
} catch(Exception e) {
LOGGER.error("Tokenization failed", e);
}
// Tokenization failure: skip this text node rather than aborting.
if (tokens == null)
return;
for(String token : tokens) {
// Pure-whitespace tokens would produce empty rows; skip them.
if (token.equals(" ") || token.equals("\t") || token.equals("\n") || token.equals("\r")) {
continue;
}
if ((inNamedEntity) && (isNotEmpty(entityType))) {
// Entity token: token <TAB> label [<TAB> name] [<TAB> uri]
sb.append(token).append("\t").append(translate(entityType, entitySubType));
/*if (isNotEmpty(entitySubType)) {
sb.append("\t").append(entitySubType);
}*/
if (isNotBlank(disambiguatedName)) {
sb.append("\t").append(disambiguatedName);
}
if (isNotBlank(uri)) {
sb.append("\t").append(uri);
}
sb.append("\n");
} else {
// Token outside any named entity: label "O".
sb.append(token).append("\t").append("O").append("\n");
}
}
}
}
@Override
public void onAttribute(XMLStreamReader2 reader) {
// Intentionally empty: attributes are read in onStartElement via
// readOtherAttributes().
}
/**
 * Consumes the next event from the reader, echoes it to the writer, and
 * returns its trimmed character data (empty string when absent).
 *
 * @throws XMLStreamException if the next event cannot be read
 */
private String extractTagContent(XMLEventReader reader, XMLEventWriter writer) throws XMLStreamException {
    XMLEvent event = reader.nextEvent();
    // Extract the text BEFORE forwarding the event, so a non-character
    // event fails without having been written.
    String data = event.asCharacters().getData();
    String trimmed = (data == null) ? "" : data.trim();
    writer.add(event);
    return trimmed;
}
/**
 * Maps an INRIA Le Monde corpus entity type (and optional sub-type) onto
 * a Grobid NER label name. Unrecognized types map to "O" (outside).
 * The original also computed a {@code senseOutput} local ("business/N1")
 * that was never used or returned; the dead store has been removed.
 *
 * @param type the corpus entity type (may be null; comparison is case-insensitive)
 * @param subType the corpus entity sub-type, only used to refine organizations
 * @return the NER label name, or "O" when the type is unknown
 */
protected String translate(String type, String subType) {
    String labelOutput = "O";
    if (StringUtils.equalsIgnoreCase(type, "Company")) {
        labelOutput = NERLexicon.NER_Type.BUSINESS.getName();
    } else if (StringUtils.equalsIgnoreCase(type, "FictionCharacter")) {
        labelOutput = NERLexicon.NER_Type.PERSON.getName();
    } else if (StringUtils.equalsIgnoreCase(type, "organization")) {
        // Organizations are refined by their sub-type.
        if (StringUtils.equalsIgnoreCase(subType, "InstitutionalOrganization")) {
            labelOutput = NERLexicon.NER_Type.INSTITUTION.getName();
        } else if (StringUtils.endsWithIgnoreCase(subType, "company")) {
            labelOutput = NERLexicon.NER_Type.BUSINESS.getName();
        } else {
            labelOutput = NERLexicon.NER_Type.ORGANISATION.getName();
        }
    } else if (StringUtils.equalsIgnoreCase(type, "Person")) {
        labelOutput = NERLexicon.NER_Type.PERSON.getName();
    } else if (StringUtils.equalsIgnoreCase(type, "Location")
            || StringUtils.equalsIgnoreCase(type, "Poi")) {
        // Both plain locations and points of interest map to LOCATION.
        labelOutput = NERLexicon.NER_Type.LOCATION.getName();
    } else if (StringUtils.equalsIgnoreCase(type, "Product")) {
        labelOutput = NERLexicon.NER_Type.ARTIFACT.getName();
    }
    return labelOutput;
}
/**
 * How to use it
 * <p>
 * This class requires a single parameter which is the input file containing the French
 * corpus from Le Monde manually annotated.
 * <p>
 * The class will output the CoNLL 2013 format in a file having the same name as the input
 * suffixed with .output.
 */
public static void main(String[] args) throws IOException, XMLStreamException {
    if (args.length == 0) {
        System.out.println("Missing input file. First parameter.");
        System.exit(-1);
    }
    WstxInputFactory inputFactory = new WstxInputFactory();
    // try-with-resources: the previous version leaked both streams when
    // parsing failed, and never closed the input stream at all.
    try (Writer writer = new FileWriter(args[0] + ".output");
         InputStream is = new FileInputStream(args[0])) {
        INRIALeMondeCorpusStaxHandler inriaLeMondeCorpusStaxHandler = new INRIALeMondeCorpusStaxHandler(writer);
        XMLStreamReader2 reader = (XMLStreamReader2) inputFactory.createXMLStreamReader(is);
        StaxUtils.traverse(reader, inriaLeMondeCorpusStaxHandler);
    }
}
}
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.index.shard;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.Assertions;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.FutureUtils;
import java.io.Closeable;
import java.io.IOException;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.Executor;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.stream.Collectors;
import static org.elasticsearch.index.seqno.SequenceNumbers.NO_OPS_PERFORMED;
import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO;
/**
 * Represents a collection of global checkpoint listeners. This collection can be added to, and all listeners present at the time of an
 * update will be notified together. All listeners will be notified when the shard is closed.
 */
public class GlobalCheckpointListeners implements Closeable {
    /**
     * A global checkpoint listener consisting of a callback that is notified when the global checkpoint is updated or the shard is closed.
     */
    public interface GlobalCheckpointListener {
        /**
         * The executor on which the listener is notified.
         *
         * @return the executor
         */
        Executor executor();
        /**
         * Callback when the global checkpoint is updated or the shard is closed. If the shard is closed, the value of the global checkpoint
         * will be set to {@link org.elasticsearch.index.seqno.SequenceNumbers#UNASSIGNED_SEQ_NO} and the exception will be non-null and an
         * instance of {@link IndexShardClosedException }. If the listener timed out waiting for notification then the exception will be
         * non-null and an instance of {@link TimeoutException}. If the global checkpoint is updated, the exception will be null.
         *
         * @param globalCheckpoint the updated global checkpoint
         * @param e if non-null, the shard is closed or the listener timed out
         */
        void accept(long globalCheckpoint, Exception e);
    }
    // guarded by this
    private boolean closed;
    // insertion-ordered so listeners are notified in registration order; the tuple pairs the
    // checkpoint the listener waits for with its (nullable) scheduled timeout future
    private final Map<GlobalCheckpointListener, Tuple<Long, ScheduledFuture<?>>> listeners = new LinkedHashMap<>();
    private long lastKnownGlobalCheckpoint = UNASSIGNED_SEQ_NO;
    private final ShardId shardId;
    private final ScheduledExecutorService scheduler;
    private final Logger logger;
    /**
     * Construct a global checkpoint listeners collection.
     *
     * @param shardId   the shard ID on which global checkpoint updates can be listened to
     * @param scheduler the executor used for scheduling timeouts
     * @param logger    a shard-level logger
     */
    GlobalCheckpointListeners(
            final ShardId shardId,
            final ScheduledExecutorService scheduler,
            final Logger logger) {
        this.shardId = Objects.requireNonNull(shardId, "shardId");
        this.scheduler = Objects.requireNonNull(scheduler, "scheduler");
        this.logger = Objects.requireNonNull(logger, "logger");
    }
    /**
     * Add a global checkpoint listener. If the global checkpoint is equal to or above the global checkpoint the listener is waiting for,
     * then the listener will be asynchronously notified on the executor used to construct this collection of global checkpoint listeners.
     * If the shard is closed then the listener will be asynchronously notified on the executor used to construct this collection of global
     * checkpoint listeners. The listener will only be notified of at most one event, either the global checkpoint is updated above the
     * global checkpoint the listener is waiting for, or the shard is closed. A listener must re-register after one of these events to
     * receive subsequent events. Callers may add a timeout to be notified after if the timeout elapses. In this case, the listener will be
     * notified with a {@link TimeoutException}. Passing null for the timeout means no timeout will be associated to the listener.
     *
     * @param waitingForGlobalCheckpoint the current global checkpoint known to the listener
     * @param listener                   the listener
     * @param timeout                    the listener timeout, or null if no timeout
     */
    synchronized void add(final long waitingForGlobalCheckpoint, final GlobalCheckpointListener listener, final TimeValue timeout) {
        if (closed) {
            // shard already closed: notify immediately instead of registering
            notifyListener(listener, UNASSIGNED_SEQ_NO, new IndexShardClosedException(shardId));
            return;
        }
        if (lastKnownGlobalCheckpoint >= waitingForGlobalCheckpoint) {
            // notify directly
            notifyListener(listener, lastKnownGlobalCheckpoint, null);
        } else {
            if (timeout == null) {
                listeners.put(listener, Tuple.tuple(waitingForGlobalCheckpoint, null));
            } else {
                // register together with a scheduled timeout task that fires unless the
                // listener is notified (and thus removed from the map) first
                listeners.put(
                        listener,
                        Tuple.tuple(
                                waitingForGlobalCheckpoint,
                                scheduler.schedule(
                                        () -> {
                                            final boolean removed;
                                            synchronized (this) {
                                                /*
                                                 * We know that this listener has a timeout associated with it (otherwise we would not be
                                                 * here) so the future component of the return value from remove being null is an indication
                                                 * that we are not in the map. This can happen if a notification collected us into listeners
                                                 * to be notified and removed us from the map, and then our scheduled execution occurred
                                                 * before we could be cancelled by the notification. In this case, our listener here would
                                                 * not be in the map and we should not fire the timeout logic.
                                                 */
                                                removed = listeners.remove(listener) != null;
                                            }
                                            if (removed) {
                                                final TimeoutException e = new TimeoutException(timeout.getStringRep());
                                                logger.trace("global checkpoint listener timed out", e);
                                                notifyListener(listener, UNASSIGNED_SEQ_NO, e);
                                            }
                                        },
                                        timeout.nanos(),
                                        TimeUnit.NANOSECONDS)));
            }
        }
    }
    @Override
    public synchronized void close() throws IOException {
        if (closed) {
            // idempotent: a prior close must already have notified and removed all listeners
            assert listeners.isEmpty() : listeners;
        }
        closed = true;
        notifyListeners(UNASSIGNED_SEQ_NO, new IndexShardClosedException(shardId));
    }
    /**
     * The number of listeners currently pending for notification.
     *
     * @return the number of listeners pending notification
     */
    synchronized int pendingListeners() {
        return listeners.size();
    }
    /**
     * The scheduled future for a listener that has a timeout associated with it, otherwise null.
     *
     * @param listener the listener to get the scheduled future for
     * @return a scheduled future representing the timeout future for the listener, otherwise null
     */
    synchronized ScheduledFuture<?> getTimeoutFuture(final GlobalCheckpointListener listener) {
        return listeners.get(listener).v2();
    }
    /**
     * Invoke to notify all registered listeners of an updated global checkpoint.
     *
     * @param globalCheckpoint the updated global checkpoint
     */
    synchronized void globalCheckpointUpdated(final long globalCheckpoint) {
        assert globalCheckpoint >= NO_OPS_PERFORMED;
        assert globalCheckpoint > lastKnownGlobalCheckpoint
                : "updated global checkpoint [" + globalCheckpoint + "]"
                + " is not more than the last known global checkpoint [" + lastKnownGlobalCheckpoint + "]";
        lastKnownGlobalCheckpoint = globalCheckpoint;
        notifyListeners(globalCheckpoint, null);
    }
    // Notifies (and removes) every listener whose waited-for checkpoint has been reached,
    // or every listener when the shard is closed (globalCheckpoint == UNASSIGNED_SEQ_NO).
    private void notifyListeners(final long globalCheckpoint, final IndexShardClosedException e) {
        assert Thread.holdsLock(this) : Thread.currentThread();
        // early return if there are no listeners
        if (listeners.isEmpty()) {
            return;
        }
        final Map<GlobalCheckpointListener, Tuple<Long, ScheduledFuture<?>>> listenersToNotify;
        if (globalCheckpoint != UNASSIGNED_SEQ_NO) {
            listenersToNotify =
                    listeners
                            .entrySet()
                            .stream()
                            .filter(entry -> entry.getValue().v1() <= globalCheckpoint)
                            .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
            listenersToNotify.keySet().forEach(listeners::remove);
        } else {
            // shard closed: every pending listener is notified
            listenersToNotify = new HashMap<>(listeners);
            listeners.clear();
        }
        if (listenersToNotify.isEmpty() == false) {
            listenersToNotify
                    .forEach((listener, t) -> {
                        /*
                         * We do not want to interrupt any timeouts that fired, these will detect that the listener has been notified and not
                         * trigger the timeout.
                         */
                        FutureUtils.cancel(t.v2());
                        notifyListener(listener, globalCheckpoint, e);
                    });
        }
    }
    // Dispatches a single notification on the listener's own executor, logging (never
    // propagating) any exception thrown by the listener callback.
    private void notifyListener(final GlobalCheckpointListener listener, final long globalCheckpoint, final Exception e) {
        assertNotification(globalCheckpoint, e);
        listener.executor().execute(() -> {
            try {
                listener.accept(globalCheckpoint, e);
            } catch (final Exception caught) {
                if (globalCheckpoint != UNASSIGNED_SEQ_NO) {
                    logger.warn(
                            new ParameterizedMessage(
                                    "error notifying global checkpoint listener of updated global checkpoint [{}]",
                                    globalCheckpoint),
                            caught);
                } else if (e instanceof IndexShardClosedException) {
                    logger.warn("error notifying global checkpoint listener of closed shard", caught);
                } else {
                    logger.warn("error notifying global checkpoint listener of timeout", caught);
                }
            }
        });
    }
    // Assertion-only consistency check: a real checkpoint implies no exception,
    // UNASSIGNED_SEQ_NO implies a closed-shard or timeout exception.
    private void assertNotification(final long globalCheckpoint, final Exception e) {
        if (Assertions.ENABLED) {
            assert globalCheckpoint >= UNASSIGNED_SEQ_NO : globalCheckpoint;
            if (globalCheckpoint != UNASSIGNED_SEQ_NO) {
                assert e == null : e;
            } else {
                assert e != null;
                assert e instanceof IndexShardClosedException || e instanceof TimeoutException : e;
            }
        }
    }
}
| |
/*
* Copyright 2012 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.uberfire.security.server.util;
import java.util.ArrayList;
import java.util.List;
import java.util.StringTokenizer;
/**
 * PathMatcher implementation for Ant-style path patterns.
 * <p/>
 * This code has been borrowed from <a href="http://camel.apache.org">Apache Camel</a>.
 * <p/>
 * Supported wildcards: '?' matches one character, '*' matches zero or more
 * characters within a path segment, and '**' matches zero or more path segments.
 */
public class AntPathMatcher {
    /**
     * Default path separator: "/"
     */
    public static final String DEFAULT_PATH_SEPARATOR = "/";
    private String pathSeparator = DEFAULT_PATH_SEPARATOR;
    /**
     * Set the path separator to use for pattern parsing. Default is "/", as in
     * Ant.
     */
    public void setPathSeparator(final String pathSeparator) {
        this.pathSeparator = pathSeparator != null ? pathSeparator : DEFAULT_PATH_SEPARATOR;
    }
    // Returns true if the path contains any wildcard character ('*' or '?').
    public boolean isPattern(final String path) {
        return path.indexOf('*') != -1 || path.indexOf('?') != -1;
    }
    // Full match: the pattern must cover the entire path.
    public boolean match(final String pattern, final String path) {
        return doMatch(pattern, path, true);
    }
    // Prefix match: succeeds as soon as the path is a valid start of the pattern.
    public boolean matchStart(final String pattern, final String path) {
        return doMatch(pattern, path, false);
    }
    /**
     * Actually match the given <code>path</code> against the given
     * <code>pattern</code>.
     * @param pattern the pattern to match against
     * @param path the path String to test
     * @param fullMatch whether a full pattern match is required (else a pattern
     * match as far as the given base path goes is sufficient)
     * @return <code>true</code> if the supplied <code>path</code> matched,
     * <code>false</code> if it didn't
     */
    protected boolean doMatch(String pattern, String path, boolean fullMatch) {
        // an absolute pattern can only match an absolute path, and vice versa
        if (path.startsWith(this.pathSeparator) != pattern.startsWith(this.pathSeparator)) {
            return false;
        }
        String[] pattDirs = tokenizeToStringArray(pattern, this.pathSeparator);
        String[] pathDirs = tokenizeToStringArray(path, this.pathSeparator);
        int pattIdxStart = 0;
        int pattIdxEnd = pattDirs.length - 1;
        int pathIdxStart = 0;
        int pathIdxEnd = pathDirs.length - 1;
        // Match all elements up to the first **
        while (pattIdxStart <= pattIdxEnd && pathIdxStart <= pathIdxEnd) {
            String patDir = pattDirs[pattIdxStart];
            if ("**".equals(patDir)) {
                break;
            }
            if (!matchStrings(patDir, pathDirs[pathIdxStart])) {
                return false;
            }
            pattIdxStart++;
            pathIdxStart++;
        }
        if (pathIdxStart > pathIdxEnd) {
            // Path is exhausted, only match if rest of pattern is * or **'s
            if (pattIdxStart > pattIdxEnd) {
                // both exhausted: trailing separators must agree
                return pattern.endsWith(this.pathSeparator) ? path.endsWith(this.pathSeparator) : !path
                        .endsWith(this.pathSeparator);
            }
            if (!fullMatch) {
                return true;
            }
            if (pattIdxStart == pattIdxEnd && pattDirs[pattIdxStart].equals("*")
                    && path.endsWith(this.pathSeparator)) {
                return true;
            }
            // remaining pattern segments must all be "**" (which can match nothing)
            for (int i = pattIdxStart; i <= pattIdxEnd; i++) {
                if (!pattDirs[i].equals("**")) {
                    return false;
                }
            }
            return true;
        } else if (pattIdxStart > pattIdxEnd) {
            // String not exhausted, but pattern is. Failure.
            return false;
        } else if (!fullMatch && "**".equals(pattDirs[pattIdxStart])) {
            // Path start definitely matches due to "**" part in pattern.
            return true;
        }
        // up to last '**'
        while (pattIdxStart <= pattIdxEnd && pathIdxStart <= pathIdxEnd) {
            String patDir = pattDirs[pattIdxEnd];
            if (patDir.equals("**")) {
                break;
            }
            if (!matchStrings(patDir, pathDirs[pathIdxEnd])) {
                return false;
            }
            pattIdxEnd--;
            pathIdxEnd--;
        }
        if (pathIdxStart > pathIdxEnd) {
            // String is exhausted
            // remaining middle pattern segments must all be "**"
            for (int i = pattIdxStart; i <= pattIdxEnd; i++) {
                if (!pattDirs[i].equals("**")) {
                    return false;
                }
            }
            return true;
        }
        // Both ends anchored; greedily consume the segments between consecutive '**' groups.
        while (pattIdxStart != pattIdxEnd && pathIdxStart <= pathIdxEnd) {
            int patIdxTmp = -1;
            // locate the next '**' after pattIdxStart
            for (int i = pattIdxStart + 1; i <= pattIdxEnd; i++) {
                if (pattDirs[i].equals("**")) {
                    patIdxTmp = i;
                    break;
                }
            }
            if (patIdxTmp == pattIdxStart + 1) {
                // '**/**' situation, so skip one
                pattIdxStart++;
                continue;
            }
            // Find the pattern between padIdxStart & padIdxTmp in str between
            // strIdxStart & strIdxEnd
            int patLength = patIdxTmp - pattIdxStart - 1;
            int strLength = pathIdxEnd - pathIdxStart + 1;
            int foundIdx = -1;
            strLoop:
            for (int i = 0; i <= strLength - patLength; i++) {
                for (int j = 0; j < patLength; j++) {
                    String subPat = pattDirs[pattIdxStart + j + 1];
                    String subStr = pathDirs[pathIdxStart + i + j];
                    if (!matchStrings(subPat, subStr)) {
                        continue strLoop;
                    }
                }
                foundIdx = pathIdxStart + i;
                break;
            }
            if (foundIdx == -1) {
                return false;
            }
            pattIdxStart = patIdxTmp;
            pathIdxStart = foundIdx + patLength;
        }
        // trailing pattern segments (if any) must all be "**"
        for (int i = pattIdxStart; i <= pattIdxEnd; i++) {
            if (!pattDirs[i].equals("**")) {
                return false;
            }
        }
        return true;
    }
    /**
     * Tests whether or not a string matches against a pattern. The pattern may
     * contain two special characters:<br>
     * '*' means zero or more characters<br>
     * '?' means one and only one character
     * @param pattern pattern to match against. Must not be <code>null</code>.
     * @param str string which must be matched against the pattern. Must not be
     * <code>null</code>.
     * @return <code>true</code> if the string matches against the pattern, or
     * <code>false</code> otherwise.
     */
    private boolean matchStrings(String pattern, String str) {
        char[] patArr = pattern.toCharArray();
        char[] strArr = str.toCharArray();
        int patIdxStart = 0;
        int patIdxEnd = patArr.length - 1;
        int strIdxStart = 0;
        int strIdxEnd = strArr.length - 1;
        char ch;
        boolean containsStar = false;
        for (char c : patArr) {
            if (c == '*') {
                containsStar = true;
                break;
            }
        }
        if (!containsStar) {
            // No '*'s, so we make a shortcut
            if (patIdxEnd != strIdxEnd) {
                return false; // Pattern and string do not have the same size
            }
            for (int i = 0; i <= patIdxEnd; i++) {
                ch = patArr[i];
                if (ch != '?') {
                    if (ch != strArr[i]) {
                        return false;
                        // Character mismatch
                    }
                }
            }
            return true; // String matches against pattern
        }
        if (patIdxEnd == 0) {
            return true; // Pattern contains only '*', which matches anything
        }
        // Process characters before first star
        while ((ch = patArr[patIdxStart]) != '*' && strIdxStart <= strIdxEnd) {
            if (ch != '?') {
                if (ch != strArr[strIdxStart]) {
                    return false;
                    // Character mismatch
                }
            }
            patIdxStart++;
            strIdxStart++;
        }
        if (strIdxStart > strIdxEnd) {
            // All characters in the string are used. Check if only '*'s are
            // left in the pattern. If so, we succeeded. Otherwise failure.
            for (int i = patIdxStart; i <= patIdxEnd; i++) {
                if (patArr[i] != '*') {
                    return false;
                }
            }
            return true;
        }
        // Process characters after last star
        while ((ch = patArr[patIdxEnd]) != '*' && strIdxStart <= strIdxEnd) {
            if (ch != '?') {
                if (ch != strArr[strIdxEnd]) {
                    return false;
                    // Character mismatch
                }
            }
            patIdxEnd--;
            strIdxEnd--;
        }
        if (strIdxStart > strIdxEnd) {
            // All characters in the string are used. Check if only '*'s are
            // left in the pattern. If so, we succeeded. Otherwise failure.
            for (int i = patIdxStart; i <= patIdxEnd; i++) {
                if (patArr[i] != '*') {
                    return false;
                }
            }
            return true;
        }
        // process pattern between stars. padIdxStart and patIdxEnd point
        // always to a '*'.
        while (patIdxStart != patIdxEnd && strIdxStart <= strIdxEnd) {
            int patIdxTmp = -1;
            for (int i = patIdxStart + 1; i <= patIdxEnd; i++) {
                if (patArr[i] == '*') {
                    patIdxTmp = i;
                    break;
                }
            }
            if (patIdxTmp == patIdxStart + 1) {
                // Two stars next to each other, skip the first one.
                patIdxStart++;
                continue;
            }
            // Find the pattern between padIdxStart & padIdxTmp in str between
            // strIdxStart & strIdxEnd
            int patLength = patIdxTmp - patIdxStart - 1;
            int strLength = strIdxEnd - strIdxStart + 1;
            int foundIdx = -1;
            strLoop:
            for (int i = 0; i <= strLength - patLength; i++) {
                for (int j = 0; j < patLength; j++) {
                    ch = patArr[patIdxStart + j + 1];
                    if (ch != '?') {
                        if (ch != strArr[strIdxStart + i + j]) {
                            continue strLoop;
                        }
                    }
                }
                foundIdx = strIdxStart + i;
                break;
            }
            if (foundIdx == -1) {
                return false;
            }
            patIdxStart = patIdxTmp;
            strIdxStart = foundIdx + patLength;
        }
        // All characters in the string are used. Check if only '*'s are left
        // in the pattern. If so, we succeeded. Otherwise failure.
        for (int i = patIdxStart; i <= patIdxEnd; i++) {
            if (patArr[i] != '*') {
                return false;
            }
        }
        return true;
    }
    /**
     * Given a pattern and a full path, determine the pattern-mapped part.
     * <p/>
     * For example:
     * <ul>
     * <li>'<code>/docs/cvs/commit.html</code>' and '
     * <code>/docs/cvs/commit.html</code> -> ''</li>
     * <li>'<code>/docs/*</code>' and '<code>/docs/cvs/commit</code> -> '
     * <code>cvs/commit</code>'</li>
     * <li>'<code>/docs/cvs/*.html</code>' and '
     * <code>/docs/cvs/commit.html</code> -> '<code>commit.html</code>'</li>
     * <li>'<code>/docs/**</code>' and '<code>/docs/cvs/commit</code> -> '
     * <code>cvs/commit</code>'</li>
     * <li>'<code>/docs/**\/*.html</code>' and '
     * <code>/docs/cvs/commit.html</code> -> '<code>cvs/commit.html</code>'</li>
     * <li>'<code>/*.html</code>' and '<code>/docs/cvs/commit.html</code> -> '
     * <code>docs/cvs/commit.html</code>'</li>
     * <li>'<code>*.html</code>' and '<code>/docs/cvs/commit.html</code> -> '
     * <code>/docs/cvs/commit.html</code>'</li>
     * <li>'<code>*</code>' and '<code>/docs/cvs/commit.html</code> -> '
     * <code>/docs/cvs/commit.html</code>'</li>
     * </ul>
     * <p/>
     * Assumes that {@link #match} returns <code>true</code> for '
     * <code>pattern</code>' and '<code>path</code>', but does
     * <strong>not</strong> enforce this.
     */
    public String extractPathWithinPattern(String pattern, String path) {
        final String[] patternParts = tokenizeToStringArray(pattern, this.pathSeparator);
        final String[] pathParts = tokenizeToStringArray(path, this.pathSeparator);
        final StringBuilder buffer = new StringBuilder();
        // Add any path parts that have a wildcarded pattern part.
        int puts = 0;
        for (int i = 0; i < patternParts.length; i++) {
            final String patternPart = patternParts[i];
            if ((patternPart.indexOf('*') > -1 || patternPart.indexOf('?') > -1) && pathParts.length >= i + 1) {
                if (puts > 0 || (i == 0 && !pattern.startsWith(this.pathSeparator))) {
                    buffer.append(this.pathSeparator);
                }
                buffer.append(pathParts[i]);
                puts++;
            }
        }
        // Append any trailing path parts.
        for (int i = patternParts.length; i < pathParts.length; i++) {
            if (puts > 0 || i > 0) {
                buffer.append(this.pathSeparator);
            }
            buffer.append(pathParts[i]);
        }
        return buffer.toString();
    }
    /**
     * Tokenize the given String into a String array via a StringTokenizer.
     * Trims tokens and omits empty tokens.
     * <p/>
     * The given delimiters string is supposed to consist of any number of
     * delimiter characters. Each of those characters can be used to separate
     * tokens. A delimiter is always a single character; for multi-character
     * delimiters, consider using <code>delimitedListToStringArray</code>
     * @param str the String to tokenize
     * @param delimiters the delimiter characters, assembled as String (each of
     * those characters is individually considered as delimiter).
     * @return an array of the tokens, or <code>null</code> if the input String
     * was <code>null</code>
     * @see java.util.StringTokenizer
     * @see java.lang.String#trim()
     */
    public static String[] tokenizeToStringArray(String str, String delimiters) {
        if (str == null) {
            return null;
        }
        final StringTokenizer st = new StringTokenizer(str, delimiters);
        final List<String> tokens = new ArrayList<String>();
        while (st.hasMoreTokens()) {
            final String token = st.nextToken().trim();
            if (token.length() > 0) {
                tokens.add(token);
            }
        }
        return tokens.toArray(new String[tokens.size()]);
    }
}
| |
/*
* Copyright 2015 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.datastore;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import com.google.cloud.Timestamp;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import org.junit.Before;
import org.junit.Test;
import java.util.Calendar;
import java.util.Collections;
import java.util.List;
import java.util.Set;
/**
 * Unit tests for the typed accessors of {@code BaseEntity} and its builder.
 * The shared {@code builder} is re-populated before each test in {@link #setUp()};
 * individual tests mutate it further, so statement order within a test matters.
 */
public class BaseEntityTest {
    private static final Blob BLOB = Blob.copyFrom(new byte[]{1, 2});
    private static final Timestamp TIMESTAMP = Timestamp.now();
    private static final LatLng LAT_LNG = new LatLng(37.422035, -122.084124);
    private static final Key KEY = Key.newBuilder("ds1", "k1", "n1").build();
    private static final Entity ENTITY = Entity.newBuilder(KEY).set("name", "foo").build();
    private static final IncompleteKey INCOMPLETE_KEY = IncompleteKey.newBuilder("ds1", "k1").build();
    private static final FullEntity<IncompleteKey> PARTIAL_ENTITY =
            Entity.newBuilder(INCOMPLETE_KEY).build();
    private Builder builder;
    // Minimal concrete builder so the abstract BaseEntity can be instantiated in tests.
    private class Builder extends BaseEntity.Builder<Key, Builder> {
        @Override public BaseEntity<Key> build() {
            return new BaseEntity<Key>(this) {};
        }
    }
    @Before
    public void setUp() {
        // populate the builder with one property of every supported value type,
        // plus list properties in the several supported forms
        builder = new Builder();
        builder.set("blob", BLOB).set("boolean", true).set("timestamp", TIMESTAMP);
        builder.set("double", 1.25).set("key", KEY).set("string", "hello world");
        builder.set("long", 125).setNull("null").set("entity", ENTITY).set("latLng", LAT_LNG);
        builder.set("partialEntity", PARTIAL_ENTITY).set("stringValue", StringValue.of("bla"));
        builder.set("list1", NullValue.of(), StringValue.of("foo"), LatLngValue.of(LAT_LNG));
        builder.set("list2", ImmutableList.of(LongValue.of(10), DoubleValue.of(2)));
        builder.set("list3", Collections.singletonList(BooleanValue.of(true)));
        builder.set(
                "blobList", BLOB, Blob.copyFrom(new byte[] {3, 4}), Blob.copyFrom(new byte[] {5, 6}));
        builder.set("booleanList", true, false, true);
        builder.set("timestampList", Timestamp.now(), Timestamp.now(), Timestamp.now());
        builder.set("doubleList", 12.3, 4.56, .789);
        builder.set("keyList", KEY, Key.newBuilder("ds2", "k2", "n2").build(),
                Key.newBuilder("ds3", "k3", "n3").build());
        builder.set("entityList", ENTITY, PARTIAL_ENTITY);
        builder.set("stringList", "s1", "s2", "s3");
        builder.set("longList", 1, 23, 456);
        builder.set("latLngList", LAT_LNG, LAT_LNG);
    }
    // contains() reflects the builder contents, and clear() removes everything
    @Test
    public void testContains() throws Exception {
        BaseEntity<Key> entity = builder.build();
        assertTrue(entity.contains("list1"));
        assertFalse(entity.contains("bla"));
        entity = builder.clear().build();
        assertFalse(entity.contains("list1"));
    }
    @Test
    public void testGetValue() throws Exception {
        BaseEntity<Key> entity = builder.build();
        assertEquals(BlobValue.of(BLOB), entity.getValue("blob"));
    }
    // accessing a missing property throws DatastoreException
    @Test(expected = DatastoreException.class)
    public void testGetValueNotFound() throws Exception {
        BaseEntity<Key> entity = builder.clear().build();
        entity.getValue("blob");
    }
    @Test
    public void testIsNull() throws Exception {
        BaseEntity<Key> entity = builder.build();
        assertTrue(entity.isNull("null"));
        assertFalse(entity.isNull("blob"));
        entity = builder.setNull("blob").build();
        assertTrue(entity.isNull("blob"));
    }
    @Test(expected = DatastoreException.class)
    public void testIsNullNotFound() throws Exception {
        BaseEntity<Key> entity = builder.clear().build();
        entity.isNull("null");
    }
    // getString works for both raw values and StringValue-wrapped values
    @Test
    public void testGetString() throws Exception {
        BaseEntity<Key> entity = builder.build();
        assertEquals("hello world", entity.getString("string"));
        assertEquals("bla", entity.getString("stringValue"));
        entity = builder.set("string", "foo").build();
        assertEquals("foo", entity.getString("string"));
    }
    @Test
    public void testGetLong() throws Exception {
        BaseEntity<Key> entity = builder.build();
        assertEquals(125, entity.getLong("long"));
        entity = builder.set("long", LongValue.of(10)).build();
        assertEquals(10, entity.getLong("long"));
    }
    @Test
    public void testGetDouble() throws Exception {
        BaseEntity<Key> entity = builder.build();
        assertEquals(1.25, entity.getDouble("double"), 0);
        entity = builder.set("double", DoubleValue.of(10)).build();
        assertEquals(10, entity.getDouble("double"), 0);
    }
    @Test
    public void testGetBoolean() throws Exception {
        BaseEntity<Key> entity = builder.build();
        assertTrue(entity.getBoolean("boolean"));
        entity = builder.set("boolean", BooleanValue.of(false)).build();
        assertFalse(entity.getBoolean("boolean"));
    }
    @Test
    public void testGetTimestamp() throws Exception {
        BaseEntity<Key> entity = builder.build();
        assertEquals(TIMESTAMP, entity.getTimestamp("timestamp"));
        // replace the value with yesterday's timestamp and re-read it
        Calendar cal = Calendar.getInstance();
        cal.add(Calendar.DATE, -1);
        Timestamp timestamp = Timestamp.of(cal.getTime());
        entity = builder.set("timestamp", TimestampValue.of(timestamp)).build();
        assertEquals(timestamp, entity.getTimestamp("timestamp"));
    }
    @Test
    public void testGetLatLng() throws Exception {
        BaseEntity<Key> entity = builder.build();
        assertEquals(LAT_LNG, entity.getLatLng("latLng"));
    }
    @Test
    public void testGetKey() throws Exception {
        BaseEntity<Key> entity = builder.build();
        assertEquals(KEY, entity.getKey("key"));
        Key key = Key.newBuilder(KEY).setName("BLA").build();
        entity = builder.set("key", key).build();
        assertEquals(key, entity.getKey("key"));
    }
    @Test
    public void testGetEntity() throws Exception {
        BaseEntity<Key> entity = builder.build();
        assertEquals(ENTITY, entity.getEntity("entity"));
        assertEquals(PARTIAL_ENTITY, entity.getEntity("partialEntity"));
        entity = builder.set("entity", EntityValue.of(PARTIAL_ENTITY)).build();
        assertEquals(PARTIAL_ENTITY, entity.getEntity("entity"));
    }
    // exercises the heterogeneous list set via values, ImmutableList, and singletonList
    @Test
    public void testGetList() throws Exception {
        BaseEntity<Key> entity = builder.build();
        List<? extends Value<?>> list = entity.getList("list1");
        assertEquals(3, list.size());
        assertEquals(NullValue.of(), list.get(0));
        assertEquals("foo", list.get(1).get());
        assertEquals(LAT_LNG, list.get(2).get());
        list = entity.getList("list2");
        assertEquals(2, list.size());
        assertEquals(Long.valueOf(10), list.get(0).get());
        assertEquals(Double.valueOf(2), list.get(1).get());
        list = entity.getList("list3");
        assertEquals(1, list.size());
        assertEquals(Boolean.TRUE, list.get(0).get());
        entity = builder.set("list1", ListValue.of(list)).build();
        assertEquals(list, entity.getList("list1"));
        List<Value<?>> stringList = entity.getList("stringList");
        assertEquals(
                ImmutableList.of(StringValue.of("s1"), StringValue.of("s2"), StringValue.of("s3")),
                stringList);
        List<Value<Double>> doubleList = entity.getList("doubleList");
        assertEquals(
                ImmutableList.of(DoubleValue.of(12.3), DoubleValue.of(4.56), DoubleValue.of(.789)),
                doubleList);
        List<EntityValue> entityList = entity.getList("entityList");
        assertEquals(
                ImmutableList.of(EntityValue.of(ENTITY), EntityValue.of(PARTIAL_ENTITY)), entityList);
    }
    @Test
    public void testGetBlob() throws Exception {
        BaseEntity<Key> entity = builder.build();
        assertEquals(BLOB, entity.getBlob("blob"));
        Blob blob = Blob.copyFrom(new byte[] {});
        entity = builder.set("blob", BlobValue.of(blob)).build();
        assertEquals(blob, entity.getBlob("blob"));
    }
    // getNames() must list every property name populated in setUp()
    @Test
    public void testNames() throws Exception {
        Set<String> names =
                ImmutableSet.<String>builder()
                        .add("string", "stringValue", "boolean", "double", "long", "list1", "list2", "list3")
                        .add("entity", "partialEntity", "null", "timestamp", "blob", "key", "blobList")
                        .add("booleanList", "timestampList", "doubleList", "keyList", "entityList", "stringList")
                        .add("longList", "latLng", "latLngList")
                        .build();
        BaseEntity<Key> entity = builder.build();
        assertEquals(names, entity.getNames());
    }
    @Test
    public void testKey() throws Exception {
        builder.setKey(KEY);
        BaseEntity<Key> entity = builder.build();
        assertEquals(KEY, entity.getKey());
    }
}
| |
package de.mvbonline.jmeteranalyzer;
import org.xml.sax.Attributes;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.helpers.DefaultHandler;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.parsers.SAXParser;
import javax.xml.parsers.SAXParserFactory;
import java.io.*;
import java.sql.Connection;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* Progressing action that imports the jtl file into a sqlite database
*/
/**
 * Progressing action that imports a JMeter .jtl (XML) result file into a SQLite database.
 *
 * <p>One table is created per JMeter thread group (the first space-separated token of the
 * sample's {@code tn} attribute). Samples are buffered into multi-row INSERT statements that
 * are flushed every 500 samples and once more at the end of the document. When
 * {@code createTable} is {@code false} the file is only scanned (e.g. to discover table
 * names) and the connection is never touched.</p>
 */
class ImportFile implements ProgressingRunnable {

    /** Number of samples read so far; also published as the progress string. */
    private int counter = 0;

    /** Per-table INSERT statement currently being accumulated. */
    private Map<String, StringBuilder> statements = new HashMap<>();

    /** Per-table delimiter: "" before the first VALUES tuple, ", " afterwards. */
    private Map<String, String> delim = new HashMap<>();

    private String status = "";

    /** True while inside a &lt;sample&gt; element, so nested httpSample children are skipped. */
    private boolean sample = false;

    private String source;
    private boolean createTable;
    private Connection c;
    private List<String> tables = new ArrayList<>();

    /**
     * @param source      path of the .jtl file to import
     * @param createTable whether tables should be created and rows written; when false the
     *                    file is only parsed to collect table names
     * @param c           target database connection (only used when createTable is true)
     */
    public ImportFile(String source, boolean createTable, Connection c) {
        this.source = source;
        this.createTable = createTable;
        this.c = c;
    }

    @Override
    public String getProgress() {
        return status;
    }

    /** Names of the per-thread-group tables discovered while parsing. */
    public List<String> getTables() {
        return tables;
    }

    /**
     * Escapes single quotes for safe embedding of a value inside a SQL string literal.
     * A null input passes through unchanged (it is rendered as the text "null", matching
     * the previous behaviour for absent attributes).
     */
    private static String esc(String value) {
        return value == null ? null : value.replace("'", "''");
    }

    @Override
    public void run() {
        // try-with-resources so the input file is always closed, on success and on failure.
        try (BufferedReader reader = new BufferedReader(new InputStreamReader(new FileInputStream(source)))) {
            SAXParser saxParser = SAXParserFactory.newInstance().newSAXParser();
            DefaultHandler dh = new DefaultHandler() {

                @Override
                public void startElement(String uri, String localName, String qName, Attributes attributes) throws SAXException {
                    if (("httpSample".equals(localName) || "httpSample".equals(qName) || "sample".equals(localName) || "sample".equals(qName)) && !sample) {
                        if ("sample".equals(localName) || "sample".equals(qName)) {
                            sample = true;
                        }
                        // NOTE(review): assumes every sample element carries a "tn"
                        // attribute; a missing attribute would raise a NullPointerException.
                        String threadName = attributes.getValue("tn").split(" ")[0];
                        if (!tables.contains(threadName)) {
                            try {
                                if (createTable) {
                                    // NOTE(review): threadName is used verbatim as a table
                                    // identifier (identifiers cannot be parameterized), so
                                    // unusual thread group names could still break this DDL.
                                    Statement s = c.createStatement();
                                    s.executeUpdate("create table " + threadName + " (" +
                                            "timestamp int not null," +
                                            "name text not null," +
                                            "success int not null," +
                                            "responseCode int not null," +
                                            "responseMessage text not null," +
                                            "duration int not null," +
                                            "responseSize int not null" +
                                            ")");
                                    s.close();
                                }
                                statements.put(threadName, new StringBuilder("insert into " + threadName + " (" +
                                        "timestamp, " +
                                        "name, " +
                                        "success, " +
                                        "responseCode, " +
                                        "responseMessage, " +
                                        "duration, " +
                                        "responseSize" +
                                        ") VALUES "));
                                delim.put(threadName, "");
                            } catch (SQLException e) {
                                e.printStackTrace();
                            }
                            tables.add(threadName);
                        }
                        // Map JMeter's "true"/"false" success flag to a 1/0 int column.
                        String success = attributes.getValue("s");
                        if ("true".equals(success)) {
                            success = "1";
                        } else {
                            success = "0";
                        }
                        try {
                            // Values are escaped so labels/messages containing a single
                            // quote no longer break the generated INSERT statement.
                            statements.get(threadName).append(delim.get(threadName)).append("('").append(
                                    esc(attributes.getValue("ts"))).append("','").append(
                                    esc(attributes.getValue("lb"))).append("','").append(
                                    success).append("','").append(
                                    esc(attributes.getValue("rc"))).append("','").append(
                                    esc(attributes.getValue("rm"))).append("','").append(
                                    esc(attributes.getValue("t"))).append("','").append(
                                    esc(attributes.getValue("by"))).append(
                                    "')");
                            delim.put(threadName, ", ");
                            counter++;
                            status = Integer.toString(counter);
                            // Flush the buffered rows in batches of 500 samples.
                            if (counter % 500 == 0 && createTable) {
                                for (String table : tables) {
                                    String update = statements.get(table).toString();
                                    if (update.endsWith(" VALUES ")) {
                                        // Nothing buffered for this table since the last flush.
                                        continue;
                                    }
                                    Statement s = c.createStatement();
                                    s.executeUpdate(update);
                                    s.close();
                                    statements.put(table, new StringBuilder("insert into " + table + " (" +
                                            "timestamp, " +
                                            "name, " +
                                            "success, " +
                                            "responseCode, " +
                                            "responseMessage, " +
                                            "duration, " +
                                            "responseSize" +
                                            ") VALUES "));
                                    delim.put(table, "");
                                }
                            }
                        } catch (SQLException e) {
                            throw new SAXException(e);
                        }
                    }
                    super.startElement(uri, localName, qName, attributes);
                }

                @Override
                public void endElement(String uri, String localName, String qName) throws SAXException {
                    if ("sample".equals(localName) || "sample".equals(qName)) {
                        sample = false;
                    }
                    super.endElement(uri, localName, qName);
                }

                @Override
                public void endDocument() throws SAXException {
                    super.endDocument();
                    try {
                        // Final flush of any rows still buffered below the 500 threshold.
                        if (createTable) {
                            for (String table : tables) {
                                String update = statements.get(table).toString();
                                if (update.endsWith(" VALUES ")) {
                                    continue;
                                }
                                Statement s = c.createStatement();
                                s.executeUpdate(update);
                                s.close();
                            }
                        }
                    } catch (SQLException e) {
                        throw new SAXException(e);
                    }
                    status = "OK";
                }
            };
            saxParser.parse(new InputSource(reader), dh);
        } catch (SAXException | ParserConfigurationException | IOException e) {
            // Any parse/IO/SQL failure aborts the whole import (exits the JVM).
            status = "FAIL";
            e.printStackTrace();
            System.exit(-1);
        }
    }

    @Override
    public void end() {
    }
}
| |
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.flowable.job.service.impl.asyncexecutor;
import java.util.Calendar;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.Map;
import org.apache.commons.lang3.StringUtils;
import org.flowable.common.engine.api.FlowableException;
import org.flowable.common.engine.api.FlowableIllegalArgumentException;
import org.flowable.common.engine.api.delegate.event.FlowableEngineEventType;
import org.flowable.common.engine.api.delegate.event.FlowableEventDispatcher;
import org.flowable.common.engine.impl.calendar.BusinessCalendar;
import org.flowable.common.engine.impl.cfg.TransactionContext;
import org.flowable.common.engine.impl.cfg.TransactionState;
import org.flowable.common.engine.impl.context.Context;
import org.flowable.common.engine.impl.interceptor.CommandContext;
import org.flowable.job.api.HistoryJob;
import org.flowable.job.api.Job;
import org.flowable.job.api.JobInfo;
import org.flowable.job.service.HistoryJobHandler;
import org.flowable.job.service.HistoryJobProcessor;
import org.flowable.job.service.HistoryJobProcessorContext;
import org.flowable.job.service.JobHandler;
import org.flowable.job.service.JobProcessor;
import org.flowable.job.service.JobProcessorContext;
import org.flowable.job.service.JobServiceConfiguration;
import org.flowable.job.service.event.impl.FlowableJobEventBuilder;
import org.flowable.job.service.impl.HistoryJobProcessorContextImpl;
import org.flowable.job.service.impl.JobProcessorContextImpl;
import org.flowable.job.service.impl.history.async.AsyncHistorySession;
import org.flowable.job.service.impl.history.async.TriggerAsyncHistoryExecutorTransactionListener;
import org.flowable.job.service.impl.persistence.entity.AbstractJobEntity;
import org.flowable.job.service.impl.persistence.entity.AbstractRuntimeJobEntity;
import org.flowable.job.service.impl.persistence.entity.DeadLetterJobEntity;
import org.flowable.job.service.impl.persistence.entity.HistoryJobEntity;
import org.flowable.job.service.impl.persistence.entity.JobByteArrayRef;
import org.flowable.job.service.impl.persistence.entity.JobEntity;
import org.flowable.job.service.impl.persistence.entity.JobInfoEntity;
import org.flowable.job.service.impl.persistence.entity.SuspendedJobEntity;
import org.flowable.job.service.impl.persistence.entity.TimerJobEntity;
import org.flowable.job.service.impl.persistence.entity.TimerJobEntityManager;
import org.flowable.job.service.impl.util.CommandContextUtil;
import org.flowable.variable.api.delegate.VariableScope;
import org.flowable.variable.service.impl.el.NoExecutionVariableScope;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.databind.JsonNode;
/**
 * Default {@link JobManager} implementation. Creates, schedules, moves and executes the
 * different job types (async message jobs, timer jobs, suspended jobs, dead letter jobs and
 * async history jobs) and hints the async executor(s) when new executable work is inserted.
 */
public class DefaultJobManager implements JobManager {

    private static final Logger LOGGER = LoggerFactory.getLogger(DefaultJobManager.class);

    /** Business calendar name used when no explicit calendar is configured on a timer. */
    public static final String CYCLE_TYPE = "cycle";

    protected JobServiceConfiguration jobServiceConfiguration;

    public DefaultJobManager() {
    }

    public DefaultJobManager(JobServiceConfiguration jobServiceConfiguration) {
        this.jobServiceConfiguration = jobServiceConfiguration;
    }

    @Override
    public void createAsyncJob(JobEntity jobEntity, boolean exclusive) {
        // When the async executor is activated, the job is directly passed on to the async executor thread
        if (isAsyncExecutorActive()) {
            internalCreateLockedAsyncJob(jobEntity, exclusive);
        } else {
            internalCreateAsyncJob(jobEntity, exclusive);
        }
    }

    /** Runs the BEFORE_CREATE job processors, persists the job and hints the executor if active. */
    @Override
    public void scheduleAsyncJob(JobEntity jobEntity) {
        callJobProcessors(JobProcessorContext.Phase.BEFORE_CREATE, jobEntity);
        jobServiceConfiguration.getJobEntityManager().insert(jobEntity);
        triggerExecutorIfNeeded(jobEntity);
    }

    protected void triggerExecutorIfNeeded(JobEntity jobEntity) {
        // When the async executor is activated, the job is directly passed on to the async executor thread
        if (isAsyncExecutorActive()) {
            hintAsyncExecutor(jobEntity);
        }
    }

    @Override
    public void scheduleTimerJob(TimerJobEntity timerJob) {
        scheduleTimer(timerJob);
        sendTimerScheduledEvent(timerJob);
    }

    private void scheduleTimer(TimerJobEntity timerJob) {
        if (timerJob == null) {
            throw new FlowableException("Empty timer job can not be scheduled");
        }
        callJobProcessors(JobProcessorContext.Phase.BEFORE_CREATE, timerJob);
        jobServiceConfiguration.getTimerJobEntityManager().insert(timerJob);
    }

    /** Dispatches a TIMER_SCHEDULED entity event when an event dispatcher is enabled. */
    private void sendTimerScheduledEvent(TimerJobEntity timerJob) {
        FlowableEventDispatcher eventDispatcher = CommandContextUtil.getEventDispatcher();
        if (eventDispatcher != null && eventDispatcher.isEnabled()) {
            eventDispatcher.dispatchEvent(
                    FlowableJobEventBuilder.createEntityEvent(FlowableEngineEventType.TIMER_SCHEDULED, timerJob));
        }
    }

    /**
     * Copies a due timer job into the executable job table and deletes the timer job.
     * Returns null when the insert did not succeed (e.g. another node won the race).
     */
    @Override
    public JobEntity moveTimerJobToExecutableJob(TimerJobEntity timerJob) {
        if (timerJob == null) {
            throw new FlowableException("Empty timer job can not be scheduled");
        }

        JobEntity executableJob = createExecutableJobFromOtherJob(timerJob);
        boolean insertSuccessful = jobServiceConfiguration.getJobEntityManager().insertJobEntity(executableJob);
        if (insertSuccessful) {
            jobServiceConfiguration.getTimerJobEntityManager().delete(timerJob);
            triggerExecutorIfNeeded(executableJob);
            return executableJob;
        }
        return null;
    }

    /** Moves an async or suspended job back to the timer job table; null when insert failed. */
    @Override
    public TimerJobEntity moveJobToTimerJob(AbstractRuntimeJobEntity job) {
        TimerJobEntity timerJob = createTimerJobFromOtherJob(job);
        boolean insertSuccessful = jobServiceConfiguration.getTimerJobEntityManager().insertTimerJobEntity(timerJob);
        if (insertSuccessful) {
            if (job instanceof JobEntity) {
                jobServiceConfiguration.getJobEntityManager().delete((JobEntity) job);
            } else if (job instanceof SuspendedJobEntity) {
                jobServiceConfiguration.getSuspendedJobEntityManager().delete((SuspendedJobEntity) job);
            }

            return timerJob;
        }
        return null;
    }

    /** Copies a timer or async job into the suspended job table and deletes the original. */
    @Override
    public SuspendedJobEntity moveJobToSuspendedJob(AbstractRuntimeJobEntity job) {
        SuspendedJobEntity suspendedJob = createSuspendedJobFromOtherJob(job);
        jobServiceConfiguration.getSuspendedJobEntityManager().insert(suspendedJob);
        if (job instanceof TimerJobEntity) {
            jobServiceConfiguration.getTimerJobEntityManager().delete((TimerJobEntity) job);

        } else if (job instanceof JobEntity) {
            jobServiceConfiguration.getJobEntityManager().delete((JobEntity) job);
        }

        return suspendedJob;
    }

    /**
     * Re-activates a suspended job: timers go back to the timer table, everything else
     * becomes an executable job again (which may immediately hint the executor).
     */
    @Override
    public AbstractRuntimeJobEntity activateSuspendedJob(SuspendedJobEntity job) {
        AbstractRuntimeJobEntity activatedJob = null;
        if (Job.JOB_TYPE_TIMER.equals(job.getJobType())) {
            activatedJob = createTimerJobFromOtherJob(job);
            jobServiceConfiguration.getTimerJobEntityManager().insert((TimerJobEntity) activatedJob);

        } else {
            activatedJob = createExecutableJobFromOtherJob(job);
            JobEntity jobEntity = (JobEntity) activatedJob;
            jobServiceConfiguration.getJobEntityManager().insert(jobEntity);
            triggerExecutorIfNeeded(jobEntity);
        }

        jobServiceConfiguration.getSuspendedJobEntityManager().delete(job);
        return activatedJob;
    }

    /** Moves a failed job (no retries left) to the dead letter table and deletes the original. */
    @Override
    public DeadLetterJobEntity moveJobToDeadLetterJob(AbstractRuntimeJobEntity job) {
        DeadLetterJobEntity deadLetterJob = createDeadLetterJobFromOtherJob(job);
        jobServiceConfiguration.getDeadLetterJobEntityManager().insert(deadLetterJob);
        if (job instanceof TimerJobEntity) {
            jobServiceConfiguration.getTimerJobEntityManager().delete((TimerJobEntity) job);

        } else if (job instanceof JobEntity) {
            jobServiceConfiguration.getJobEntityManager().delete((JobEntity) job);
        }

        return deadLetterJob;
    }

    /**
     * Resurrects a dead letter job as an executable job with the given number of retries;
     * returns null when the insert did not succeed.
     */
    @Override
    public JobEntity moveDeadLetterJobToExecutableJob(DeadLetterJobEntity deadLetterJobEntity, int retries) {
        if (deadLetterJobEntity == null) {
            throw new FlowableIllegalArgumentException("Null job provided");
        }

        JobEntity executableJob = createExecutableJobFromOtherJob(deadLetterJobEntity);
        executableJob.setRetries(retries);
        boolean insertSuccessful = jobServiceConfiguration.getJobEntityManager().insertJobEntity(executableJob);
        if (insertSuccessful) {
            jobServiceConfiguration.getDeadLetterJobEntityManager().delete(deadLetterJobEntity);
            triggerExecutorIfNeeded(executableJob);
            return executableJob;
        }
        return null;
    }

    /**
     * Executes a job: history jobs via their history handler, runtime jobs via the message
     * or timer path depending on the job type. Job processors run before execution.
     */
    @Override
    public void execute(JobInfo job) {
        if (job instanceof HistoryJobEntity) {
            callHistoryJobProcessors(HistoryJobProcessorContext.Phase.BEFORE_EXECUTE, (HistoryJobEntity) job);
            executeHistoryJob((HistoryJobEntity) job);

        } else if (job instanceof JobEntity) {
            callJobProcessors(JobProcessorContext.Phase.BEFORE_EXECUTE, (JobEntity) job);
            if (Job.JOB_TYPE_MESSAGE.equals(((Job) job).getJobType())) {
                executeMessageJob((JobEntity) job);
            } else if (Job.JOB_TYPE_TIMER.equals(((Job) job).getJobType())) {
                executeTimerJob((JobEntity) job);
            }

        } else {
            throw new FlowableException("Only jobs with type JobEntity are supported to be executed");
        }
    }

    /**
     * Releases an acquired job by re-inserting it under a new id with the lock cleared.
     * The re-insert (instead of a plain unlock) avoids the same executor immediately
     * re-acquiring it. Jobs that are neither history nor runtime jobs are simply unlocked.
     */
    @Override
    public void unacquire(JobInfo job) {

        if (job instanceof HistoryJob) {

            HistoryJobEntity jobEntity = (HistoryJobEntity) job;

            HistoryJobEntity newJobEntity = jobServiceConfiguration.getHistoryJobEntityManager().create();
            copyHistoryJobInfo(newJobEntity, jobEntity);
            newJobEntity.setId(null); // We want a new id to be assigned to this job
            newJobEntity.setLockExpirationTime(null);
            newJobEntity.setLockOwner(null);
            jobServiceConfiguration.getHistoryJobEntityManager().insert(newJobEntity);
            jobServiceConfiguration.getHistoryJobEntityManager().deleteNoCascade(jobEntity);

        } else if (job instanceof JobEntity) {

            // Deleting the old job and inserting it again with another id,
            // will avoid that the job is immediately is picked up again (for example
            // when doing lots of exclusive jobs for the same process instance)

            JobEntity jobEntity = (JobEntity) job;

            JobEntity newJobEntity = jobServiceConfiguration.getJobEntityManager().create();
            copyJobInfo(newJobEntity, jobEntity);
            newJobEntity.setId(null); // We want a new id to be assigned to this job
            newJobEntity.setLockExpirationTime(null);
            newJobEntity.setLockOwner(null);
            jobServiceConfiguration.getJobEntityManager().insert(newJobEntity);
            jobServiceConfiguration.getJobEntityManager().delete(jobEntity.getId());

            // We're not calling triggerExecutorIfNeeded here after the insert. The unacquire happened
            // for a reason (eg queue full or exclusive lock failure). No need to try it immediately again,
            // as the chance of failure will be high.

        } else {
            if (job != null) {
                // It could be a v5 job, so simply unlock it.
                jobServiceConfiguration.getJobEntityManager().resetExpiredJob(job.getId());
            } else {
                throw new FlowableException("Programmatic error: null job passed");
            }
        }

    }

    /**
     * Same as {@link #unacquire(JobInfo)} but decrements the retry counter; jobs whose
     * retries are exhausted are deleted (history jobs) or dead-lettered (runtime jobs).
     */
    @Override
    public void unacquireWithDecrementRetries(JobInfo job) {
        if (job instanceof HistoryJob) {
            HistoryJobEntity historyJobEntity = (HistoryJobEntity) job;

            if (historyJobEntity.getRetries() > 0) {
                HistoryJobEntity newHistoryJobEntity = jobServiceConfiguration.getHistoryJobEntityManager().create();
                copyHistoryJobInfo(newHistoryJobEntity, historyJobEntity);
                newHistoryJobEntity.setId(null); // We want a new id to be assigned to this job
                newHistoryJobEntity.setLockExpirationTime(null);
                newHistoryJobEntity.setLockOwner(null);
                newHistoryJobEntity.setCreateTime(jobServiceConfiguration.getClock().getCurrentTime());

                newHistoryJobEntity.setRetries(newHistoryJobEntity.getRetries() - 1);
                jobServiceConfiguration.getHistoryJobEntityManager().insert(newHistoryJobEntity);
                jobServiceConfiguration.getHistoryJobEntityManager().deleteNoCascade(historyJobEntity);

            } else {
                jobServiceConfiguration.getHistoryJobEntityManager().delete(historyJobEntity);
            }

        } else {
            JobEntity jobEntity = (JobEntity) job;

            JobEntity newJobEntity = jobServiceConfiguration.getJobEntityManager().create();
            copyJobInfo(newJobEntity, jobEntity);
            newJobEntity.setId(null); // We want a new id to be assigned to this job
            newJobEntity.setLockExpirationTime(null);
            newJobEntity.setLockOwner(null);

            if (newJobEntity.getRetries() > 0) {
                newJobEntity.setRetries(newJobEntity.getRetries() - 1);
                jobServiceConfiguration.getJobEntityManager().insert(newJobEntity);

            } else {
                DeadLetterJobEntity deadLetterJob = createDeadLetterJobFromOtherJob(newJobEntity);
                jobServiceConfiguration.getDeadLetterJobEntityManager().insert(deadLetterJob);
            }

            jobServiceConfiguration.getJobEntityManager().delete(jobEntity.getId());

            // We're not calling triggerExecutorIfNeeded here after the insert. The unacquire happened
            // for a reason (eg queue full or exclusive lock failure). No need to try it immediately again,
            // as the chance of failure will be high.
        }
    }

    /** Executes an async message job and deletes it afterwards (if it still has an id). */
    protected void executeMessageJob(JobEntity jobEntity) {
        executeJobHandler(jobEntity);
        if (jobEntity.getId() != null) {
            CommandContextUtil.getJobEntityManager().delete(jobEntity);
        }
    }

    /** Executes an async history job and deletes it afterwards (if it still has an id). */
    protected void executeHistoryJob(HistoryJobEntity historyJobEntity) {
        executeHistoryJobHandler(historyJobEntity);
        if (historyJobEntity.getId() != null) {
            CommandContextUtil.getHistoryJobEntityManager().delete(historyJobEntity);
        }
    }

    /**
     * Executes a timer job: validates the due date against the timer's business calendar,
     * runs the handler, deletes the fired timer and, for repeating timers, schedules the
     * next occurrence.
     */
    protected void executeTimerJob(JobEntity timerEntity) {
        TimerJobEntityManager timerJobEntityManager = jobServiceConfiguration.getTimerJobEntityManager();

        VariableScope variableScope = jobServiceConfiguration.getInternalJobManager().resolveVariableScope(timerEntity);

        if (variableScope == null) {
            variableScope = NoExecutionVariableScope.getSharedInstance();
        }

        jobServiceConfiguration.getInternalJobManager().preTimerJobDelete(timerEntity, variableScope);

        if (timerEntity.getDuedate() != null && !isValidTime(timerEntity, timerEntity.getDuedate(), variableScope)) {
            if (LOGGER.isDebugEnabled()) {
                LOGGER.debug("Timer {} fired. but the dueDate is after the endDate.  Deleting timer.", timerEntity.getId());
            }
            jobServiceConfiguration.getJobEntityManager().delete(timerEntity);
            return;
        }

        executeJobHandler(timerEntity);
        jobServiceConfiguration.getJobEntityManager().delete(timerEntity);

        if (LOGGER.isDebugEnabled()) {
            LOGGER.debug("Timer {} fired. Deleting timer.", timerEntity.getId());
        }

        if (timerEntity.getRepeat() != null) {
            TimerJobEntity newTimerJobEntity = timerJobEntityManager.createAndCalculateNextTimer(timerEntity, variableScope);
            if (newTimerJobEntity != null) {
                jobServiceConfiguration.getInternalJobManager().preRepeatedTimerSchedule(newTimerJobEntity, variableScope);
                scheduleTimerJob(newTimerJobEntity);
            }
        }
    }

    /**
     * Looks up the registered {@link JobHandler} for the job's handler type and invokes it.
     *
     * @throws FlowableException when no handler type is set or no handler is registered
     */
    protected void executeJobHandler(JobEntity jobEntity) {
        VariableScope variableScope = jobServiceConfiguration.getInternalJobManager().resolveVariableScope(jobEntity);

        Map<String, JobHandler> jobHandlers = jobServiceConfiguration.getJobHandlers();
        if (jobEntity.getJobHandlerType() != null) {
            if (jobHandlers != null) {
                JobHandler jobHandler = jobHandlers.get(jobEntity.getJobHandlerType());
                if (jobHandler != null) {
                    jobHandler.execute(jobEntity, jobEntity.getJobHandlerConfiguration(), variableScope, getCommandContext());
                } else {
                    throw new FlowableException("No job handler registered for type " + jobEntity.getJobHandlerType());
                }
            } else {
                throw new FlowableException("No job handler registered for type " + jobEntity.getJobHandlerType());
            }
        } else {
            throw new FlowableException("Job has no job handler type");
        }
    }

    /**
     * Looks up the registered {@link HistoryJobHandler} for the job's handler type and
     * invokes it.
     *
     * @throws FlowableException when no handler type is set or no handler is registered
     */
    protected void executeHistoryJobHandler(HistoryJobEntity historyJobEntity) {
        Map<String, HistoryJobHandler> jobHandlers = jobServiceConfiguration.getHistoryJobHandlers();
        if (historyJobEntity.getJobHandlerType() != null) {
            if (jobHandlers != null) {
                HistoryJobHandler jobHandler = jobHandlers.get(historyJobEntity.getJobHandlerType());
                if (jobHandler != null) {
                    jobHandler.execute(historyJobEntity, historyJobEntity.getJobHandlerConfiguration(), getCommandContext());
                } else {
                    throw new FlowableException("No history job handler registered for type " + historyJobEntity.getJobHandlerType());
                }
            } else {
                throw new FlowableException("No history job handler registered for type " + historyJobEntity.getJobHandlerType());
            }
        } else {
            throw new FlowableException("Async history job has no job handler type");
        }
    }

    /** Validates a new timer date against the timer's business calendar (repeat/end date). */
    protected boolean isValidTime(JobEntity timerEntity, Date newTimerDate, VariableScope variableScope) {
        BusinessCalendar businessCalendar = jobServiceConfiguration.getBusinessCalendarManager().getBusinessCalendar(
                getBusinessCalendarName(timerEntity, variableScope));
        return businessCalendar.validateDuedate(timerEntity.getRepeat(), timerEntity.getMaxIterations(), timerEntity.getEndDate(), newTimerDate);
    }

    protected void hintAsyncExecutor(JobEntity job) {
        // Verify that correct properties have been set when the async executor will be hinted
        if (job.getLockOwner() == null || job.getLockExpirationTime() == null) {
            createAsyncJob(job, job.isExclusive());
        }
        createHintListeners(getAsyncExecutor(), job);
    }

    /**
     * Registers a listener that offers the job to the async executor once the current
     * transaction commits (or, without a transaction context, when the command closes).
     */
    protected void createHintListeners(AsyncExecutor asyncExecutor, JobInfoEntity job) {
        CommandContext commandContext = CommandContextUtil.getCommandContext();
        if (Context.getTransactionContext() != null) {
            JobAddedTransactionListener jobAddedTransactionListener = new JobAddedTransactionListener(job, asyncExecutor,
                    CommandContextUtil.getJobServiceConfiguration(commandContext).getCommandExecutor());
            Context.getTransactionContext().addTransactionListener(TransactionState.COMMITTED, jobAddedTransactionListener);

        } else {
            AsyncJobAddedNotification jobAddedNotification = new AsyncJobAddedNotification(job, asyncExecutor);
            commandContext.addCloseListener(jobAddedNotification);
        }
    }

    /**
     * Extracts the "calendarName" entry from the timer's JSON handler configuration, if any,
     * and resolves the business calendar name from it.
     */
    @Override
    public String getBusinessCalendarName(JobEntity timerEntity, VariableScope variableScope) {
        String calendarValue = null;
        if (StringUtils.isNotEmpty(timerEntity.getJobHandlerConfiguration())) {
            try {
                JsonNode jobConfigNode = jobServiceConfiguration.getObjectMapper().readTree(timerEntity.getJobHandlerConfiguration());
                JsonNode calendarNameNode = jobConfigNode.get("calendarName");
                if (calendarNameNode != null && !calendarNameNode.isNull()) {
                    calendarValue = calendarNameNode.asText();
                }

            } catch (Exception e) {
                // ignore JSON exception
            }
        }

        return getBusinessCalendarName(calendarValue, variableScope);
    }

    /**
     * Evaluates the calendar name as an expression against the variable scope; falls back
     * to {@link #CYCLE_TYPE} when no calendar name is given.
     */
    protected String getBusinessCalendarName(String calendarName, VariableScope variableScope) {
        String businessCalendarName = CYCLE_TYPE;
        if (StringUtils.isNotEmpty(calendarName)) {
            businessCalendarName = (String) CommandContextUtil.getJobServiceConfiguration().getExpressionManager()
                    .createExpression(calendarName).getValue(variableScope);
        }
        return businessCalendarName;
    }

    /** Persists a new async history job and hints the async history executor when active. */
    @Override
    public HistoryJobEntity scheduleHistoryJob(HistoryJobEntity historyJobEntity) {
        callHistoryJobProcessors(HistoryJobProcessorContext.Phase.BEFORE_CREATE, historyJobEntity);
        jobServiceConfiguration.getHistoryJobEntityManager().insert(historyJobEntity);
        triggerAsyncHistoryExecutorIfNeeded(historyJobEntity);
        return historyJobEntity;
    }

    protected void triggerAsyncHistoryExecutorIfNeeded(HistoryJobEntity historyJobEntity) {
        if (isAsyncHistoryExecutorActive()) {
            hintAsyncHistoryExecutor(historyJobEntity);
        }
    }

    protected void hintAsyncHistoryExecutor(HistoryJobEntity historyJobEntity) {
        if (historyJobEntity.getLockOwner() == null || historyJobEntity.getLockExpirationTime() == null) {
            setLockTimeAndOwner(getAsyncHistoryExecutor(), historyJobEntity);
        }
        createAsyncHistoryHintListeners(historyJobEntity);
    }

    /** Triggers the async history executor after the async history session's transaction commits. */
    protected void createAsyncHistoryHintListeners(HistoryJobEntity historyJobEntity) {
        CommandContext commandContext = CommandContextUtil.getCommandContext();
        AsyncHistorySession asyncHistorySession = commandContext.getSession(AsyncHistorySession.class);
        if (asyncHistorySession != null) {
            TransactionContext transactionContext = asyncHistorySession.getTransactionContext();
            if (transactionContext != null) {
                transactionContext.addTransactionListener(TransactionState.COMMITTED, new TriggerAsyncHistoryExecutorTransactionListener(commandContext, historyJobEntity));
            }
        }
    }

    protected void internalCreateAsyncJob(JobEntity jobEntity, boolean exclusive) {
        fillDefaultAsyncJobInfo(jobEntity, exclusive);
    }

    protected void internalCreateLockedAsyncJob(JobEntity jobEntity, boolean exclusive) {
        fillDefaultAsyncJobInfo(jobEntity, exclusive);
        setLockTimeAndOwner(getAsyncExecutor(), jobEntity);
    }

    /** Stamps the executor's lock owner and a lock expiration based on the configured clock. */
    protected void setLockTimeAndOwner(AsyncExecutor asyncExecutor, JobInfoEntity jobInfoEntity) {
        GregorianCalendar gregorianCalendar = new GregorianCalendar();
        gregorianCalendar.setTime(jobServiceConfiguration.getClock().getCurrentTime());
        gregorianCalendar.add(Calendar.MILLISECOND, asyncExecutor.getAsyncJobLockTimeInMillis());
        jobInfoEntity.setLockExpirationTime(gregorianCalendar.getTime());
        jobInfoEntity.setLockOwner(asyncExecutor.getLockOwner());
    }

    protected void fillDefaultAsyncJobInfo(JobEntity jobEntity, boolean exclusive) {
        jobEntity.setJobType(JobEntity.JOB_TYPE_MESSAGE);
        jobEntity.setRevision(1);
        jobEntity.setRetries(jobServiceConfiguration.getAsyncExecutorNumberOfRetries());
        jobEntity.setExclusive(exclusive);
    }

    /**
     * Creates an executable copy of the given job; when the async executor is active the
     * copy is immediately locked for this executor using the timer lock time.
     */
    @Override
    public JobEntity createExecutableJobFromOtherJob(AbstractRuntimeJobEntity job) {
        JobEntity executableJob = jobServiceConfiguration.getJobEntityManager().create();
        copyJobInfo(executableJob, job);

        if (isAsyncExecutorActive()) {
            GregorianCalendar gregorianCalendar = new GregorianCalendar();
            gregorianCalendar.setTime(jobServiceConfiguration.getClock().getCurrentTime());
            gregorianCalendar.add(Calendar.MILLISECOND, getAsyncExecutor().getTimerLockTimeInMillis());
            executableJob.setLockExpirationTime(gregorianCalendar.getTime());
            executableJob.setLockOwner(getAsyncExecutor().getLockOwner());
        }

        return executableJob;
    }

    @Override
    public TimerJobEntity createTimerJobFromOtherJob(AbstractRuntimeJobEntity otherJob) {
        TimerJobEntity timerJob = jobServiceConfiguration.getTimerJobEntityManager().create();
        copyJobInfo(timerJob, otherJob);
        return timerJob;
    }

    @Override
    public SuspendedJobEntity createSuspendedJobFromOtherJob(AbstractRuntimeJobEntity otherJob) {
        SuspendedJobEntity suspendedJob = jobServiceConfiguration.getSuspendedJobEntityManager().create();
        copyJobInfo(suspendedJob, otherJob);
        return suspendedJob;
    }

    @Override
    public DeadLetterJobEntity createDeadLetterJobFromOtherJob(AbstractRuntimeJobEntity otherJob) {
        DeadLetterJobEntity deadLetterJob = jobServiceConfiguration.getDeadLetterJobEntityManager().create();
        copyJobInfo(deadLetterJob, otherJob);
        return deadLetterJob;
    }

    /** Copies all runtime-job fields (including the id) from one entity onto another. */
    @Override
    public AbstractRuntimeJobEntity copyJobInfo(AbstractRuntimeJobEntity copyToJob, AbstractRuntimeJobEntity copyFromJob) {
        copyToJob.setDuedate(copyFromJob.getDuedate());
        copyToJob.setEndDate(copyFromJob.getEndDate());
        copyToJob.setExclusive(copyFromJob.isExclusive());
        copyToJob.setExecutionId(copyFromJob.getExecutionId());
        copyToJob.setId(copyFromJob.getId());
        copyToJob.setJobHandlerConfiguration(copyFromJob.getJobHandlerConfiguration());
        copyToJob.setCustomValues(copyFromJob.getCustomValues());
        copyToJob.setJobHandlerType(copyFromJob.getJobHandlerType());
        copyToJob.setJobType(copyFromJob.getJobType());
        copyToJob.setExceptionMessage(copyFromJob.getExceptionMessage());
        copyToJob.setExceptionStacktrace(copyFromJob.getExceptionStacktrace());
        copyToJob.setMaxIterations(copyFromJob.getMaxIterations());
        copyToJob.setProcessDefinitionId(copyFromJob.getProcessDefinitionId());
        copyToJob.setProcessInstanceId(copyFromJob.getProcessInstanceId());
        copyToJob.setScopeId(copyFromJob.getScopeId());
        copyToJob.setSubScopeId(copyFromJob.getSubScopeId());
        copyToJob.setScopeType(copyFromJob.getScopeType());
        copyToJob.setScopeDefinitionId(copyFromJob.getScopeDefinitionId());
        copyToJob.setRepeat(copyFromJob.getRepeat());
        copyToJob.setRetries(copyFromJob.getRetries());
        copyToJob.setRevision(copyFromJob.getRevision());
        copyToJob.setTenantId(copyFromJob.getTenantId());

        return copyToJob;
    }

    /** Copies all history-job fields, duplicating the byte-array references. */
    protected HistoryJobEntity copyHistoryJobInfo(HistoryJobEntity copyToJob, HistoryJobEntity copyFromJob) {
        copyToJob.setId(copyFromJob.getId());
        copyToJob.setJobHandlerConfiguration(copyFromJob.getJobHandlerConfiguration());
        if (copyFromJob.getAdvancedJobHandlerConfigurationByteArrayRef() != null) {
            JobByteArrayRef configurationByteArrayRefCopy = copyFromJob.getAdvancedJobHandlerConfigurationByteArrayRef().copy();
            copyToJob.setAdvancedJobHandlerConfigurationByteArrayRef(configurationByteArrayRefCopy);
        }
        if (copyFromJob.getExceptionByteArrayRef() != null) {
            // Copy the reference, consistent with the configuration and custom-values refs
            // above: sharing the same JobByteArrayRef between the old and new entity would
            // tie the surviving job's bytes to the deleted job's lifecycle.
            JobByteArrayRef exceptionByteArrayRefCopy = copyFromJob.getExceptionByteArrayRef().copy();
            copyToJob.setExceptionByteArrayRef(exceptionByteArrayRefCopy);
        }
        if (copyFromJob.getCustomValuesByteArrayRef() != null) {
            JobByteArrayRef customValuesByteArrayRefCopy = copyFromJob.getCustomValuesByteArrayRef().copy();
            copyToJob.setCustomValuesByteArrayRef(customValuesByteArrayRefCopy);
        }
        copyToJob.setJobHandlerType(copyFromJob.getJobHandlerType());
        copyToJob.setExceptionMessage(copyFromJob.getExceptionMessage());
        copyToJob.setExceptionStacktrace(copyFromJob.getExceptionStacktrace());
        copyToJob.setCustomValues(copyFromJob.getCustomValues());
        copyToJob.setRetries(copyFromJob.getRetries());
        copyToJob.setRevision(copyFromJob.getRevision());
        copyToJob.setScopeType(copyFromJob.getScopeType());
        copyToJob.setTenantId(copyFromJob.getTenantId());

        return copyToJob;
    }

    public JobServiceConfiguration getJobServiceConfiguration() {
        return jobServiceConfiguration;
    }

    @Override
    public void setJobServiceConfiguration(JobServiceConfiguration jobServiceConfiguration) {
        this.jobServiceConfiguration = jobServiceConfiguration;
    }

    protected boolean isAsyncExecutorActive() {
        return isExecutorActive(jobServiceConfiguration.getAsyncExecutor());
    }

    protected boolean isAsyncHistoryExecutorActive() {
        return isExecutorActive(jobServiceConfiguration.getAsyncHistoryExecutor());
    }

    protected boolean isExecutorActive(AsyncExecutor asyncExecutor) {
        return asyncExecutor != null && asyncExecutor.isActive();
    }

    protected CommandContext getCommandContext() {
        return Context.getCommandContext();
    }

    protected AsyncExecutor getAsyncExecutor() {
        return jobServiceConfiguration.getAsyncExecutor();
    }

    protected AsyncExecutor getAsyncHistoryExecutor() {
        return jobServiceConfiguration.getAsyncHistoryExecutor();
    }

    /** Invokes all configured {@link JobProcessor}s for the given phase and job entity. */
    protected void callJobProcessors(JobProcessorContext.Phase processorType, AbstractJobEntity abstractJobEntity) {
        if (jobServiceConfiguration.getJobProcessors() != null) {
            JobProcessorContextImpl jobProcessorContext = new JobProcessorContextImpl(processorType, abstractJobEntity);
            for (JobProcessor jobProcessor : jobServiceConfiguration.getJobProcessors()) {
                jobProcessor.process(jobProcessorContext);
            }
        }
    }

    /** Invokes all configured {@link HistoryJobProcessor}s for the given phase and job entity. */
    protected void callHistoryJobProcessors(HistoryJobProcessorContext.Phase processorType, HistoryJobEntity historyJobEntity) {
        if (jobServiceConfiguration.getHistoryJobProcessors() != null) {
            HistoryJobProcessorContextImpl historyJobProcessorContext = new HistoryJobProcessorContextImpl(processorType, historyJobEntity);
            for (HistoryJobProcessor historyJobProcessor : jobServiceConfiguration.getHistoryJobProcessors()) {
                historyJobProcessor.process(historyJobProcessorContext);
            }
        }
    }
}
| |
package com.example.eric.popularmovies.Layouts;
import android.content.Intent;
import android.content.res.Configuration;
import android.database.Cursor;
import android.net.Uri;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v4.app.LoaderManager;
import android.support.v4.content.Loader;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.GridLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.support.v7.widget.Toolbar;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.widget.ImageView;
import com.example.eric.popularmovies.Adapters.FavoriteAdapter;
import com.example.eric.popularmovies.Models.Movie;
import com.example.eric.popularmovies.R;
import com.example.eric.popularmovies.Utils.data.FavoriteContract;
import com.example.eric.popularmovies.Utils.data.FavoriteDataLoader;
import java.util.ArrayList;
import java.util.List;
import butterknife.BindView;
import butterknife.ButterKnife;
import static com.example.eric.popularmovies.Utils.data.FavoriteContract.FavEntryList.COLUMN_BACKDROP;
import static com.example.eric.popularmovies.Utils.data.FavoriteContract.FavEntryList.COLUMN_DATE;
import static com.example.eric.popularmovies.Utils.data.FavoriteContract.FavEntryList.COLUMN_GENRE_ID;
import static com.example.eric.popularmovies.Utils.data.FavoriteContract.FavEntryList.COLUMN_OVERVIEW;
import static com.example.eric.popularmovies.Utils.data.FavoriteContract.FavEntryList.COLUMN_POSTER;
import static com.example.eric.popularmovies.Utils.data.FavoriteContract.FavEntryList.COLUMN_RATE;
import static com.example.eric.popularmovies.Utils.data.FavoriteContract.FavEntryList.COLUMN_TITLE;
public class FavoriteActivity extends AppCompatActivity implements FavoriteAdapter.ListItemClickListener {
@BindView(R.id.fav_rv)
RecyclerView recyclerView;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_favorite);
ButterKnife.bind(this);
Toolbar toolbar = findViewById(R.id.fav_toolbar);
if (toolbar != null) {
setSupportActionBar(toolbar);
setTitle(R.string.title_favorites);
getSupportActionBar().setDisplayHomeAsUpEnabled(true);
getSupportActionBar().setDisplayShowHomeEnabled(true);
}
getSupportLoaderManager().initLoader(1, null, cursorLoader);
setLayoutManager();
}
@Override
protected void onResume() {
super.onResume();
getSupportLoaderManager().restartLoader(1, null, cursorLoader);
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
MenuInflater inflater = getMenuInflater();
inflater.inflate(R.menu.fav_menu, menu);
return super.onCreateOptionsMenu(menu);
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
switch (item.getItemId()) {
case android.R.id.home:
onBackPressed();
break;
case R.id.delete_all:
deleteAllData();
break;
}
return true;
}
public void setLayoutManager() {
boolean isTablet = getResources().getBoolean(R.bool.isTablet);
GridLayoutManager layoutManager;
if (!isTablet) {
if (getResources().getConfiguration().orientation == Configuration.ORIENTATION_PORTRAIT) {
layoutManager = new GridLayoutManager(FavoriteActivity.this, 2);
recyclerView.setLayoutManager(layoutManager);
} else {
layoutManager = new GridLayoutManager(FavoriteActivity.this, 4);
recyclerView.setLayoutManager(layoutManager);
}
} else {
if (getResources().getConfiguration().orientation == Configuration.ORIENTATION_PORTRAIT) {
layoutManager = new GridLayoutManager(FavoriteActivity.this, 4);
recyclerView.setLayoutManager(layoutManager);
} else {
layoutManager = new GridLayoutManager(FavoriteActivity.this, 6);
recyclerView.setLayoutManager(layoutManager);
}
}
}
//deletes all favorites data from the database
public void deleteAllData() {
Uri uri = FavoriteContract.FavEntryList.FAVORITES_CONTENT_URI;
int fb = getContentResolver().delete(uri, null, null);
recyclerView.invalidate();
if (fb != 0) {
Intent intent = getIntent();
finish();
overridePendingTransition(0, 0);
startActivity(intent);
overridePendingTransition(0, 0);
}
getSupportLoaderManager().restartLoader(1, null, cursorLoader);
}
/**
* @param position the adapter position
* @param mdata Lists of Movie data
* @description sends data to MovieDetails class
*/
private void sendFavIntentData(int position, List<Movie> mdata) {
Movie movie = mdata.get(position);
Intent intent = new Intent(this, MovieDetails.class);
intent.putExtra("original_title", movie.getOriginalTitle());
intent.putExtra("vote_average", movie.getVoteAverage());
intent.putExtra("overview", movie.getOverview());
intent.putExtra("release_date", movie.getReleaseDate());
intent.putExtra("poster_path", movie.getPosterPath());
intent.putExtra("backdrop_path", movie.getBackdropPath());
intent.putExtra("total_pages", movie.getTotalPages());
intent.putExtra("genre_ids", movie.getGenreIds());
intent.putExtra("movies_id", movie.getId());
startActivity(intent);
}
//FavoriteDataLoader<Cursor> Callbacks
private LoaderManager.LoaderCallbacks<Cursor> cursorLoader = new LoaderManager.LoaderCallbacks<Cursor>() {
@Override
public Loader<Cursor> onCreateLoader(int id, Bundle args) {
return new FavoriteDataLoader(FavoriteActivity.this);
}
@Override
public void onLoadFinished(Loader<Cursor> loader, Cursor data) {
List<Movie> favModel = new ArrayList<>();
if (data != null) {
while (data.moveToNext()) {
int id_index = data.getColumnIndex(FavoriteContract.FavEntryList._ID);
int title_index = data.getColumnIndex(COLUMN_TITLE);
int genre_index = data.getColumnIndex(COLUMN_GENRE_ID);
int date_index = data.getColumnIndex(COLUMN_DATE);
int rate_index = data.getColumnIndex(COLUMN_RATE);
int overview_index = data.getColumnIndex(COLUMN_OVERVIEW);
int poster_index = data.getColumnIndex(COLUMN_POSTER);
int backdrop_index = data.getColumnIndex(COLUMN_BACKDROP);
int mID = data.getInt(id_index);
String mTitle = data.getString(title_index);
int mGenre = data.getInt(genre_index);
String mDate = data.getString(date_index);
String mRate = data.getString(rate_index);
String mOverview = data.getString(overview_index);
String mPoster = data.getString(poster_index);
String mBackDrop = data.getString(backdrop_index);
Movie model = new Movie(mTitle, mRate, mPoster, mOverview, mDate, mID, mGenre, mBackDrop);
favModel.add(model);
FavoriteAdapter adapter = new FavoriteAdapter(FavoriteActivity.this, FavoriteActivity.this, favModel);
recyclerView.setAdapter(adapter);
recyclerView.setHasFixedSize(true);
}
data.close();
}
}
@Override
public void onLoaderReset(Loader<Cursor> loader) {
}
};
@Override
public void onClick(int position, List<Movie> movies) {
sendFavIntentData(position, movies);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.oak.jcr.xml;
import static org.apache.jackrabbit.oak.plugins.nodetype.NodeTypeConstants.NODE_TYPES_PATH;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.Stack;
import java.util.UUID;
import javax.annotation.CheckForNull;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.jcr.ImportUUIDBehavior;
import javax.jcr.ItemExistsException;
import javax.jcr.PathNotFoundException;
import javax.jcr.PropertyType;
import javax.jcr.RepositoryException;
import javax.jcr.Value;
import javax.jcr.lock.LockException;
import javax.jcr.nodetype.ConstraintViolationException;
import javax.jcr.nodetype.NodeDefinition;
import javax.jcr.nodetype.PropertyDefinition;
import javax.jcr.version.VersionException;
import javax.jcr.version.VersionManager;
import com.google.common.base.Function;
import com.google.common.base.Predicates;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import org.apache.jackrabbit.JcrConstants;
import org.apache.jackrabbit.oak.api.ContentSession;
import org.apache.jackrabbit.oak.api.PropertyState;
import org.apache.jackrabbit.oak.api.Root;
import org.apache.jackrabbit.oak.api.Tree;
import org.apache.jackrabbit.oak.api.Type;
import org.apache.jackrabbit.oak.commons.PathUtils;
import org.apache.jackrabbit.oak.jcr.delegate.SessionDelegate;
import org.apache.jackrabbit.oak.jcr.security.AccessManager;
import org.apache.jackrabbit.oak.jcr.session.SessionContext;
import org.apache.jackrabbit.oak.jcr.session.WorkspaceImpl;
import org.apache.jackrabbit.oak.plugins.identifier.IdentifierManager;
import org.apache.jackrabbit.oak.plugins.memory.PropertyStates;
import org.apache.jackrabbit.oak.plugins.nodetype.DefinitionProvider;
import org.apache.jackrabbit.oak.plugins.nodetype.EffectiveNodeTypeProvider;
import org.apache.jackrabbit.oak.spi.security.authorization.permission.Permissions;
import org.apache.jackrabbit.oak.spi.xml.Importer;
import org.apache.jackrabbit.oak.spi.xml.NodeInfo;
import org.apache.jackrabbit.oak.spi.xml.PropInfo;
import org.apache.jackrabbit.oak.spi.xml.ProtectedItemImporter;
import org.apache.jackrabbit.oak.spi.xml.ProtectedNodeImporter;
import org.apache.jackrabbit.oak.spi.xml.ProtectedPropertyImporter;
import org.apache.jackrabbit.oak.spi.xml.ReferenceChangeTracker;
import org.apache.jackrabbit.oak.util.TreeUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Default {@link Importer} implementation used for both session and workspace
 * XML imports. The XML handler drives it through {@link #start()},
 * {@link #startNode(NodeInfo, List)}, {@link #endNode(NodeInfo)} and
 * {@link #end()}; imported content is materialized as {@code Tree} items below
 * the import target. Protected nodes/properties are delegated to the
 * configured {@link ProtectedItemImporter}s, and UUID collisions are resolved
 * according to the configured {@link ImportUUIDBehavior} constant.
 */
public class ImporterImpl implements Importer {
    private static final Logger log = LoggerFactory.getLogger(ImporterImpl.class);
    // Node below which the XML content is imported.
    private final Tree importTargetTree;
    // Tree at NODE_TYPES_PATH, passed to TreeUtil for node-type lookups.
    private final Tree ntTypesRoot;
    // One of the ImportUUIDBehavior constants controlling collision handling.
    private final int uuidBehavior;
    // User id of the importing session; recorded as creator by TreeUtil.
    private final String userID;
    private final AccessManager accessManager;
    private final EffectiveNodeTypeProvider effectiveNodeTypeProvider;
    private final DefinitionProvider definitionProvider;
    // Resolves jcr:uuid values to existing trees (see IdResolver below).
    private final IdResolver idLookup;
    // Stack of parents for the node currently being imported. A null entry
    // marks a skipped/protected subtree (see startNode).
    private final Stack<Tree> parents;
    /**
     * helper object that keeps track of remapped uuid's and imported reference
     * properties that might need correcting depending on the uuid mappings
     */
    private final ReferenceChangeTracker refTracker;
    private final List<ProtectedItemImporter> pItemImporters = new ArrayList<ProtectedItemImporter>();
    /**
     * Currently active importer for protected nodes.
     */
    private ProtectedNodeImporter pnImporter;
    /**
     * Creates a new importer instance.
     *
     * @param absPath The absolute JCR paths such as passed to the JCR call.
     * @param sessionContext The context of the editing session
     * @param root The write {@code Root}, which in case of a workspace import
     * is different from the {@code Root} associated with the editing session.
     * @param uuidBehavior The uuid behavior
     * @param isWorkspaceImport {@code true} if this is a workspace import,
     * {@code false} otherwise.
     * @throws javax.jcr.RepositoryException If the initial validation of the
     * path or the state of target node/session fails.
     */
    public ImporterImpl(String absPath,
                        SessionContext sessionContext,
                        Root root,
                        int uuidBehavior,
                        boolean isWorkspaceImport) throws RepositoryException {
        String oakPath = sessionContext.getOakPath(absPath);
        if (oakPath == null) {
            throw new RepositoryException("Invalid name or path: " + absPath);
        }
        if (!PathUtils.isAbsolute(oakPath)) {
            throw new RepositoryException("Not an absolute path: " + absPath);
        }
        SessionDelegate sd = sessionContext.getSessionDelegate();
        // Workspace imports write through a separate Root; transient session
        // changes would be at risk of getting mixed in, so refuse them.
        if (isWorkspaceImport && sd.hasPendingChanges()) {
            throw new RepositoryException("Pending changes on session. Cannot run workspace import.");
        }
        this.uuidBehavior = uuidBehavior;
        userID = sd.getAuthInfo().getUserID();
        importTargetTree = root.getTree(oakPath);
        if (!importTargetTree.exists()) {
            throw new PathNotFoundException(absPath);
        }
        // Target must be checked out and (unless newly created) unlocked.
        WorkspaceImpl wsp = sessionContext.getWorkspace();
        VersionManager vMgr = wsp.getVersionManager();
        if (!vMgr.isCheckedOut(absPath)) {
            throw new VersionException("Target node is checked in.");
        }
        if (importTargetTree.getStatus() != Tree.Status.NEW && wsp.getLockManager().isLocked(absPath)) {
            throw new LockException("Target node is locked.");
        }
        effectiveNodeTypeProvider = wsp.getNodeTypeManager();
        definitionProvider = wsp.getNodeTypeManager();
        ntTypesRoot = root.getTree(NODE_TYPES_PATH);
        accessManager = sessionContext.getAccessManager();
        idLookup = new IdResolver(root, sd.getContentSession());
        refTracker = new ReferenceChangeTracker();
        parents = new Stack<Tree>();
        parents.push(importTargetTree);
        // Collect the protected-item importers that accept this import setup.
        pItemImporters.clear();
        for (ProtectedItemImporter importer : sessionContext.getProtectedItemImporters()) {
            // FIXME this passes the session scoped name path mapper also for workspace imports
            if (importer.init(sessionContext.getSession(), root, sessionContext, isWorkspaceImport, uuidBehavior, refTracker, sessionContext.getSecurityProvider())) {
                pItemImporters.add(importer);
            }
        }
    }
    /**
     * Adds a child tree for the given node info below {@code parent}, applying
     * primary type, optional jcr:uuid and mixins. Node-type management
     * permission is checked when an explicit primary type was supplied.
     */
    private Tree createTree(@Nonnull Tree parent, @Nonnull NodeInfo nInfo, @CheckForNull String uuid) throws RepositoryException {
        String ntName = nInfo.getPrimaryTypeName();
        Tree child = TreeUtil.addChild(
                parent, nInfo.getName(), ntName, ntTypesRoot, userID);
        if (ntName != null) {
            accessManager.checkPermissions(child, child.getProperty(JcrConstants.JCR_PRIMARYTYPE), Permissions.NODE_TYPE_MANAGEMENT);
        }
        if (uuid != null) {
            child.setProperty(JcrConstants.JCR_UUID, uuid);
        }
        for (String mixin : nInfo.getMixinTypeNames()) {
            TreeUtil.addMixin(child, mixin, ntTypesRoot, userID);
        }
        return child;
    }
    /**
     * Sets a regular (non-protected) property on {@code tree}, single- or
     * multi-valued according to the definition, and registers REFERENCE /
     * WEAKREFERENCE values with the tracker for later uuid adjustment.
     */
    private void createProperty(Tree tree, PropInfo pInfo, PropertyDefinition def) throws RepositoryException {
        List<Value> values = pInfo.getValues(pInfo.getTargetType(def));
        PropertyState propertyState;
        String name = pInfo.getName();
        int type = pInfo.getType();
        if (values.size() == 1 && !def.isMultiple()) {
            propertyState = PropertyStates.createProperty(name, values.get(0));
        } else {
            propertyState = PropertyStates.createProperty(name, values);
        }
        tree.setProperty(propertyState);
        if (type == PropertyType.REFERENCE || type == PropertyType.WEAKREFERENCE) {
            // store reference for later resolution
            refTracker.processedReference(new Reference(tree, name));
        }
    }
    /**
     * Resolves a jcr:uuid collision between the incoming node and an existing
     * tree according to {@link #uuidBehavior}: create with a fresh uuid, throw,
     * remove the existing node, or replace it in place.
     */
    private Tree resolveUUIDConflict(Tree parent,
                                     Tree conflicting,
                                     String conflictingId,
                                     NodeInfo nodeInfo) throws RepositoryException {
        Tree tree;
        if (uuidBehavior == ImportUUIDBehavior.IMPORT_UUID_CREATE_NEW) {
            // create new with new uuid
            tree = createTree(parent, nodeInfo, UUID.randomUUID().toString());
            // remember uuid mapping
            if (isNodeType(tree, JcrConstants.MIX_REFERENCEABLE)) {
                refTracker.put(nodeInfo.getUUID(), TreeUtil.getString(tree, JcrConstants.JCR_UUID));
            }
        } else if (uuidBehavior == ImportUUIDBehavior.IMPORT_UUID_COLLISION_THROW) {
            // if conflicting node is shareable, then clone it
            String msg = "a node with uuid " + nodeInfo.getUUID() + " already exists!";
            log.debug(msg);
            throw new ItemExistsException(msg);
        } else if (uuidBehavior == ImportUUIDBehavior.IMPORT_UUID_COLLISION_REMOVE_EXISTING) {
            if (conflicting == null) {
                // since the conflicting node can't be read,
                // we can't remove it
                String msg = "node with uuid " + conflictingId + " cannot be removed";
                log.debug(msg);
                throw new RepositoryException(msg);
            }
            // make sure conflicting node is not importTargetNode or an ancestor thereof
            if (importTargetTree.getPath().startsWith(conflicting.getPath())) {
                String msg = "cannot remove ancestor node";
                log.debug(msg);
                throw new ConstraintViolationException(msg);
            }
            // remove conflicting
            conflicting.remove();
            // create new with given uuid
            tree = createTree(parent, nodeInfo, nodeInfo.getUUID());
        } else if (uuidBehavior == ImportUUIDBehavior.IMPORT_UUID_COLLISION_REPLACE_EXISTING) {
            if (conflicting == null) {
                // since the conflicting node can't be read,
                // we can't replace it
                String msg = "node with uuid " + conflictingId + " cannot be replaced";
                log.debug(msg);
                throw new RepositoryException(msg);
            }
            if (conflicting.isRoot()) {
                String msg = "root node cannot be replaced";
                log.debug(msg);
                throw new RepositoryException(msg);
            }
            // 'replace' current parent with parent of conflicting
            parent = conflicting.getParent();
            // replace child node
            //TODO ordering! (what happened to replace?)
            conflicting.remove();
            tree = createTree(parent, nodeInfo, nodeInfo.getUUID());
        } else {
            String msg = "unknown uuidBehavior: " + uuidBehavior;
            log.debug(msg);
            throw new RepositoryException(msg);
        }
        return tree;
    }
    /**
     * Imports the given property infos on {@code tree}. Protected properties
     * are offered to the ProtectedPropertyImporters; regular properties are
     * created directly unless {@code ignoreRegular} is set (used when the node
     * itself already existed as a protected node).
     */
    private void importProperties(@Nonnull Tree tree,
                                  @Nonnull List<PropInfo> propInfos,
                                  boolean ignoreRegular) throws RepositoryException {
        // process properties
        for (PropInfo pi : propInfos) {
            // find applicable definition
            //TODO find better heuristics?
            PropertyDefinition def = pi.getPropertyDef(effectiveNodeTypeProvider.getEffectiveNodeType(tree));
            if (def.isProtected()) {
                // skip protected property
                log.debug("Protected property {}", pi.getName());
                // notify the ProtectedPropertyImporter.
                for (ProtectedPropertyImporter ppi : getPropertyImporters()) {
                    if (ppi.handlePropInfo(tree, pi, def)) {
                        log.debug("Protected property -> delegated to ProtectedPropertyImporter");
                        break;
                    } /* else: p-i-Importer isn't able to deal with this property. try next pp-importer */
                }
            } else if (!ignoreRegular) {
                // regular property -> create the property
                createProperty(tree, pi, def);
            }
        }
        for (ProtectedPropertyImporter ppi : getPropertyImporters()) {
            ppi.propertiesCompleted(tree);
        }
    }
    // View of pItemImporters narrowed to the property importers.
    private Iterable<ProtectedPropertyImporter> getPropertyImporters() {
        return Iterables.filter(Iterables.transform(pItemImporters, new Function<ProtectedItemImporter, ProtectedPropertyImporter>() {
            @Nullable
            @Override
            public ProtectedPropertyImporter apply(@Nullable ProtectedItemImporter importer) {
                if (importer instanceof ProtectedPropertyImporter) {
                    return (ProtectedPropertyImporter) importer;
                } else {
                    return null;
                }
            }
        }), Predicates.notNull());
    }
    // View of pItemImporters narrowed to the node importers.
    private Iterable<ProtectedNodeImporter> getNodeImporters() {
        return Iterables.filter(Iterables.transform(pItemImporters, new Function<ProtectedItemImporter, ProtectedNodeImporter>() {
            @Nullable
            @Override
            public ProtectedNodeImporter apply(@Nullable ProtectedItemImporter importer) {
                if (importer instanceof ProtectedNodeImporter) {
                    return (ProtectedNodeImporter) importer;
                } else {
                    return null;
                }
            }
        }), Predicates.notNull());
    }
    //-----------------------------------------------------------< Importer >---
    @Override
    public void start() throws RepositoryException {
        // nop
    }
    @Override
    public void startNode(NodeInfo nodeInfo, List<PropInfo> propInfos)
            throws RepositoryException {
        Tree parent = parents.peek();
        Tree tree = null;
        String id = nodeInfo.getUUID();
        String nodeName = nodeInfo.getName();
        String ntName = nodeInfo.getPrimaryTypeName();
        if (parent == null) {
            log.debug("Skipping node: {}", nodeName);
            // parent node was skipped, skip this child node too
            parents.push(null); // push null onto stack for skipped node
            // notify the p-i-importer
            if (pnImporter != null) {
                pnImporter.startChildInfo(nodeInfo, propInfos);
            }
            return;
        }
        NodeDefinition parentDef = getDefinition(parent);
        if (parentDef.isProtected()) {
            // skip protected node
            parents.push(null);
            log.debug("Skipping protected node: {}", nodeName);
            if (pnImporter != null) {
                // pnImporter was already started (current nodeInfo is a sibling)
                // notify it about this child node.
                pnImporter.startChildInfo(nodeInfo, propInfos);
            } else {
                // no importer defined yet:
                // test if there is a ProtectedNodeImporter among the configured
                // importers that can handle this.
                // if there is one, notify the ProtectedNodeImporter about the
                // start of a item tree that is protected by this parent. If it
                // potentially is able to deal with it, notify it about the child node.
                for (ProtectedNodeImporter pni : getNodeImporters()) {
                    if (pni.start(parent)) {
                        log.debug("Protected node -> delegated to ProtectedNodeImporter");
                        pnImporter = pni;
                        pnImporter.startChildInfo(nodeInfo, propInfos);
                        break;
                    } /* else: p-i-Importer isn't able to deal with the protected tree.
                         try next. and if none can handle the passed parent the
                         tree below will be skipped */
                }
            }
            return;
        }
        if (parent.hasChild(nodeName)) {
            // a node with that name already exists...
            Tree existing = parent.getChild(nodeName);
            NodeDefinition def = getDefinition(existing);
            if (!def.allowsSameNameSiblings()) {
                // existing doesn't allow same-name siblings,
                // check for potential conflicts
                if (def.isProtected() && isNodeType(existing, ntName)) {
                    /*
                     use the existing node as parent for the possible subsequent
                     import of a protected tree, that the protected node importer
                     may or may not be able to deal with.
                     -> upon the next 'startNode' the check for the parent being
                        protected will notify the protected node importer.
                     -> if the importer is able to deal with that node it needs
                        to care of the complete subtree until it is notified
                        during the 'endNode' call.
                     -> if the import can't deal with that node or if that node
                        is the a leaf in the tree to be imported 'end' will
                        not have an effect on the importer, that was never started.
                    */
                    log.debug("Skipping protected node: {}", existing);
                    parents.push(existing);
                    /**
                     * let ProtectedPropertyImporters handle the properties
                     * associated with the imported node. this may include overwriting,
                     * merging or just adding missing properties.
                     */
                    importProperties(existing, propInfos, true);
                    return;
                }
                if (def.isAutoCreated() && isNodeType(existing, ntName)) {
                    // this node has already been auto-created, no need to create it
                    tree = existing;
                } else {
                    // edge case: colliding node does have same uuid
                    // (see http://issues.apache.org/jira/browse/JCR-1128)
                    String existingIdentifier = IdentifierManager.getIdentifier(existing);
                    if (!(existingIdentifier.equals(id)
                            && (uuidBehavior == ImportUUIDBehavior.IMPORT_UUID_COLLISION_REMOVE_EXISTING
                            || uuidBehavior == ImportUUIDBehavior.IMPORT_UUID_COLLISION_REPLACE_EXISTING))) {
                        throw new ItemExistsException(
                                "Node with the same UUID exists:" + existing);
                    }
                    // fall through
                }
            }
        }
        if (tree == null) {
            // create node
            if (id == null) {
                // no potential uuid conflict, always add new node
                tree = createTree(parent, nodeInfo, null);
            } else if (uuidBehavior == ImportUUIDBehavior.IMPORT_UUID_CREATE_NEW) {
                // always create a new UUID even if no
                // conflicting node exists. see OAK-1244
                tree = createTree(parent, nodeInfo, UUID.randomUUID().toString());
                // remember uuid mapping
                if (isNodeType(tree, JcrConstants.MIX_REFERENCEABLE)) {
                    refTracker.put(nodeInfo.getUUID(), TreeUtil.getString(tree, JcrConstants.JCR_UUID));
                }
            } else {
                Tree conflicting = idLookup.getConflictingTree(id);
                if (conflicting != null && conflicting.exists()) {
                    // resolve uuid conflict
                    tree = resolveUUIDConflict(parent, conflicting, id, nodeInfo);
                    if (tree == null) {
                        // no new node has been created, so skip this node
                        parents.push(null); // push null onto stack for skipped node
                        log.debug("Skipping existing node {}", nodeInfo.getName());
                        return;
                    }
                } else {
                    // create new with given uuid
                    tree = createTree(parent, nodeInfo, id);
                }
            }
        }
        // process properties
        importProperties(tree, propInfos, false);
        if (tree.exists()) {
            parents.push(tree);
        }
    }
    @Override
    public void endNode(NodeInfo nodeInfo) throws RepositoryException {
        Tree parent = parents.pop();
        if (parent == null) {
            if (pnImporter != null) {
                pnImporter.endChildInfo();
            }
        } else if (getDefinition(parent).isProtected()) {
            if (pnImporter != null) {
                pnImporter.end(parent);
                // and reset the pnImporter field waiting for the next protected
                // parent -> selecting again from available importers
                pnImporter = null;
            }
        }
        // Record any jcr:uuid created below this node for later conflict lookup.
        idLookup.rememberImportedUUIDs(parent);
    }
    @Override
    public void end() throws RepositoryException {
        /**
         * adjust references that refer to uuids which have been mapped to
         * newly generated uuids on import
         */
        // 1. let protected property/node importers handle protected ref-properties
        //    and (protected) properties underneath a protected parent node.
        for (ProtectedItemImporter ppi : pItemImporters) {
            ppi.processReferences();
        }
        // 2. regular non-protected properties.
        Iterator<Object> iter = refTracker.getProcessedReferences();
        while (iter.hasNext()) {
            Object ref = iter.next();
            if (!(ref instanceof Reference)) {
                continue;
            }
            Reference reference = (Reference) ref;
            if (reference.isMultiple()) {
                Iterable<String> values = reference.property.getValue(Type.STRINGS);
                List<String> newValues = Lists.newArrayList();
                for (String original : values) {
                    String adjusted = refTracker.get(original);
                    if (adjusted != null) {
                        newValues.add(adjusted);
                    } else {
                        // reference doesn't need adjusting, just copy old value
                        newValues.add(original);
                    }
                }
                reference.setProperty(newValues);
            } else {
                String original = reference.property.getValue(Type.STRING);
                String adjusted = refTracker.get(original);
                if (adjusted != null) {
                    reference.setProperty(adjusted);
                }
            }
        }
        refTracker.clear();
    }
    private boolean isNodeType(Tree tree, String ntName) throws RepositoryException {
        return effectiveNodeTypeProvider.isNodeType(tree, ntName);
    }
    // Definition of a tree: root has its own definition; any other tree is
    // looked up relative to its parent.
    private NodeDefinition getDefinition(Tree tree) throws RepositoryException {
        if (tree.isRoot()) {
            return definitionProvider.getRootDefinition();
        } else {
            return definitionProvider.getDefinition(tree.getParent(), tree);
        }
    }
    /**
     * A (WEAK)REFERENCE property remembered during import so its value(s) can
     * be rewritten in {@link #end()} once all uuid remappings are known.
     */
    private static final class Reference {
        private final Tree tree;
        private final PropertyState property;
        private Reference(Tree tree, String propertyName) {
            this.tree = tree;
            this.property = tree.getProperty(propertyName);
        }
        private boolean isMultiple() {
            return property.isArray();
        }
        private void setProperty(String newValue) {
            PropertyState prop = PropertyStates.createProperty(property.getName(), newValue, property.getType().tag());
            tree.setProperty(prop);
        }
        private void setProperty(Iterable<String> newValues) {
            PropertyState prop = PropertyStates.createProperty(property.getName(), newValues, property.getType());
            tree.setProperty(prop);
        }
    }
    /**
     * Resolves 'uuid' property values to {@code Tree} objects and optionally
     * keeps track of newly imported UUIDs.
     */
    private static final class IdResolver {
        /**
         * There are two IdentifierManagers used.
         *
         * 1) currentStateIdManager - Associated with current root on which all import
         * operations are being performed
         *
         * 2) baseStateIdManager - Associated with the initial root on which
         * no modifications are performed
         */
        private final IdentifierManager currentStateIdManager;
        private final IdentifierManager baseStateIdManager;
        /**
         * Set of newly created uuid from nodes which are
         * created in this import, which are only remembered if the editing
         * session doesn't have any pending transient changes preventing this
         * performance optimisation from working properly (see OAK-2246).
         */
        private final Set<String> importedUUIDs;
        private IdResolver(@Nonnull Root root, @Nonnull ContentSession contentSession) {
            currentStateIdManager = new IdentifierManager(root);
            baseStateIdManager = new IdentifierManager(contentSession.getLatestRoot());
            if (!root.hasPendingChanges()) {
                importedUUIDs = new HashSet<String>();
            } else {
                importedUUIDs = null;
            }
        }
        @CheckForNull
        private Tree getConflictingTree(@Nonnull String id) {
            //1. First check from base state that tree corresponding to
            //this id exist
            Tree conflicting = baseStateIdManager.getTree(id);
            if (conflicting == null && importedUUIDs != null) {
                //1.a. Check if id is found in newly created nodes
                if (importedUUIDs.contains(id)) {
                    conflicting = currentStateIdManager.getTree(id);
                }
            } else {
                //1.b Re obtain the conflicting tree from Id Manager
                //associated with current root. Such that any operation
                //on it gets reflected in later operations
                //In case a tree with same id was removed earlier then it
                //would return null
                conflicting = currentStateIdManager.getTree(id);
            }
            return conflicting;
        }
        // Recursively records the jcr:uuid of the given tree and all of its
        // children; no-op when tracking is disabled (pending changes case).
        private void rememberImportedUUIDs(@CheckForNull Tree tree) {
            if (tree == null || importedUUIDs == null) {
                return;
            }
            String uuid = TreeUtil.getString(tree, JcrConstants.JCR_UUID);
            if (uuid != null) {
                importedUUIDs.add(uuid);
            }
            for (Tree child : tree.getChildren()) {
                rememberImportedUUIDs(child);
            }
        }
    }
}
| |
/*
* Copyright 2009-2013 by The Regents of the University of California
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hyracks.storage.am.rtree.impls;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
import org.apache.hyracks.storage.am.common.api.ICursorInitialState;
import org.apache.hyracks.storage.am.common.api.ISearchPredicate;
import org.apache.hyracks.storage.am.common.api.ITreeIndexCursor;
import org.apache.hyracks.storage.am.common.api.ITreeIndexTupleReference;
import org.apache.hyracks.storage.am.common.ophelpers.MultiComparator;
import org.apache.hyracks.storage.am.rtree.api.IRTreeInteriorFrame;
import org.apache.hyracks.storage.am.rtree.api.IRTreeLeafFrame;
import org.apache.hyracks.storage.common.buffercache.IBufferCache;
import org.apache.hyracks.storage.common.buffercache.ICachedPage;
import org.apache.hyracks.storage.common.file.BufferedFileHandle;
public class RTreeSearchCursor implements ITreeIndexCursor {
private int fileId = -1;
private ICachedPage page = null;
private IRTreeInteriorFrame interiorFrame = null;
protected IRTreeLeafFrame leafFrame = null;
private IBufferCache bufferCache = null;
private SearchPredicate pred;
private PathList pathList;
private int rootPage;
protected ITupleReference searchKey;
private int tupleIndex = 0;
private int tupleIndexInc = 0;
private int currentTupleIndex = 0;
private int pageId = -1;
protected MultiComparator cmp;
private ITreeIndexTupleReference frameTuple;
private boolean readLatched = false;
public RTreeSearchCursor(IRTreeInteriorFrame interiorFrame, IRTreeLeafFrame leafFrame) {
this.interiorFrame = interiorFrame;
this.leafFrame = leafFrame;
this.frameTuple = leafFrame.createTupleReference();
}
@Override
public void close() throws HyracksDataException {
if (readLatched) {
page.releaseReadLatch();
bufferCache.unpin(page);
readLatched = false;
}
tupleIndex = 0;
tupleIndexInc = 0;
page = null;
pathList = null;
}
@Override
public ITupleReference getTuple() {
return frameTuple;
}
public int getTupleOffset() {
return leafFrame.getTupleOffset(currentTupleIndex);
}
public int getPageId() {
return pageId;
}
@Override
public ICachedPage getPage() {
return page;
}
protected boolean fetchNextLeafPage() throws HyracksDataException {
boolean succeeded = false;
if (readLatched) {
page.releaseReadLatch();
bufferCache.unpin(page);
readLatched = false;
}
while (!pathList.isEmpty()) {
int pageId = pathList.getLastPageId();
long parentLsn = pathList.getLastPageLsn();
pathList.moveLast();
ICachedPage node = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
node.acquireReadLatch();
readLatched = true;
try {
interiorFrame.setPage(node);
boolean isLeaf = interiorFrame.isLeaf();
long pageLsn = interiorFrame.getPageLsn();
if (pageId != rootPage && parentLsn < interiorFrame.getPageNsn()) {
// Concurrent split detected, we need to visit the right
// page
int rightPage = interiorFrame.getRightPage();
if (rightPage != -1) {
pathList.add(rightPage, parentLsn, -1);
}
}
if (!isLeaf) {
// We do DFS so that we get the tuples ordered (for disk
// RTrees only) in the case we we are using total order
// (such as Hilbert order)
if (searchKey != null) {
for (int i = interiorFrame.getTupleCount() - 1; i >= 0; i--) {
int childPageId = interiorFrame.getChildPageIdIfIntersect(searchKey, i, cmp);
if (childPageId != -1) {
pathList.add(childPageId, pageLsn, -1);
}
}
} else {
for (int i = interiorFrame.getTupleCount() - 1; i >= 0; i--) {
int childPageId = interiorFrame.getChildPageId(i);
pathList.add(childPageId, pageLsn, -1);
}
}
} else {
page = node;
this.pageId = pageId; // This is only needed for the
// LSMRTree flush operation
leafFrame.setPage(page);
tupleIndex = 0;
succeeded = true;
return true;
}
} finally {
if (!succeeded) {
if (readLatched) {
node.releaseReadLatch();
readLatched = false;
bufferCache.unpin(node);
}
}
}
}
return false;
}
@Override
public boolean hasNext() throws HyracksDataException {
if (page == null) {
return false;
}
if (tupleIndex == leafFrame.getTupleCount()) {
if (!fetchNextLeafPage()) {
return false;
}
}
do {
for (int i = tupleIndex; i < leafFrame.getTupleCount(); i++) {
if (searchKey != null) {
if (leafFrame.intersect(searchKey, i, cmp)) {
frameTuple.resetByTupleIndex(leafFrame, i);
currentTupleIndex = i; // This is only needed for the
// LSMRTree flush operation
tupleIndexInc = i + 1;
return true;
}
} else {
frameTuple.resetByTupleIndex(leafFrame, i);
currentTupleIndex = i; // This is only needed for the
// LSMRTree
// flush operation
tupleIndexInc = i + 1;
return true;
}
}
} while (fetchNextLeafPage());
return false;
}
@Override
public void next() throws HyracksDataException {
tupleIndex = tupleIndexInc;
}
@Override
public void open(ICursorInitialState initialState, ISearchPredicate searchPred) throws HyracksDataException {
    // in case open is called multiple times without closing:
    // release the latch and unpin the previously held page before restarting.
    if (this.page != null) {
        this.page.releaseReadLatch();
        readLatched = false;
        bufferCache.unpin(this.page);
        pathList.clear();
    }
    pathList = ((RTreeCursorInitialState) initialState).getPathList();
    rootPage = ((RTreeCursorInitialState) initialState).getRootPage();
    pred = (SearchPredicate) searchPred;
    cmp = pred.getLowKeyComparator();
    searchKey = pred.getSearchKey();
    if (searchKey != null) {
        // The key tuple holds the low point in fields [0, maxFieldPos) and the
        // high point in fields [maxFieldPos, 2*maxFieldPos); validate that
        // low <= high on every dimension before starting the traversal.
        int maxFieldPos = cmp.getKeyFieldCount() / 2;
        for (int i = 0; i < maxFieldPos; i++) {
            int j = maxFieldPos + i;
            int c = cmp.getComparators()[i].compare(searchKey.getFieldData(i), searchKey.getFieldStart(i),
                    searchKey.getFieldLength(i), searchKey.getFieldData(j), searchKey.getFieldStart(j),
                    searchKey.getFieldLength(j));
            if (c > 0) {
                throw new IllegalArgumentException(
                        "The low key point has larger coordinates than the high key point.");
            }
        }
    }
    // Seed the traversal with the root page; -1 marks an unknown parent LSN / page index.
    pathList.add(this.rootPage, -1, -1);
    tupleIndex = 0;
    fetchNextLeafPage();
}
@Override
public void reset() throws HyracksDataException {
    // Resetting just releases any held page; a subsequent open() restarts the search.
    close();
}
@Override
public void setBufferCache(IBufferCache bufferCache) {
    // Buffer cache used to pin/unpin tree pages during the search.
    this.bufferCache = bufferCache;
}
@Override
public void setFileId(int fileId) {
    // Identifier of the on-disk tree file this cursor reads from.
    this.fileId = fileId;
}
@Override
public boolean exclusiveLatchNodes() {
    // This is a read-only search cursor, so pages are latched in shared (read) mode.
    return false;
}
@Override
public void markCurrentTupleAsUpdated() throws HyracksDataException {
    // Read-only cursor: marking tuples as updated in place is not supported.
    throw new HyracksDataException("Updating tuples is not supported with this cursor.");
}
}
| |
package application;
import application.Constants.GUIconstants;
import javafx.geometry.Point2D;
import javafx.scene.Group;
import javafx.scene.image.Image;
import javafx.scene.image.ImageView;
import javafx.scene.shape.Line;
/**
*
* @author Wesley Valentine
* @author Nick Balkisoon
*
*/
public class Turtle {
    protected double myXLocation;
    protected double myYLocation;
    // Orientation in degrees following the unit circle (0 = east, counter-clockwise positive).
    protected double myOrientation;
    protected SLogoPen myPen;
    private ImageView myImage;

    public Turtle(double x, double y) {
        myXLocation = x;
        myYLocation = y;
        myOrientation = 0;
        myPen = new SLogoPen();
        myImage = new ImageView();
        this.myImage.setX(x);
        this.myImage.setY(y);
        myImage.setRotate(GUIconstants.TURTLE_IMAGE_START_ROTATION);
    }

    /**
     * Enter a value of the distance to move. A positive value will move
     * forward, a negative value will move backwards. A line is returned,
     * corresponding to the correct drawing. Turtle will wrap around the
     * SLogoCanvas; an empty Line is returned for a wrapping step so no line is
     * drawn across the canvas boundary.
     *
     * @param distance distance in pixels; positive moves forward, negative backwards
     * @return the Line drawn by the pen, or an empty Line when the turtle wrapped
     */
    public Line step(double distance) {
        Point2D start = this.getLocation();
        double radian = Math.toRadians(myOrientation);
        myXLocation += distance * Math.cos(radian);
        // Screen y grows downwards, so a positive unit-circle direction decreases y.
        myYLocation -= distance * Math.sin(radian);
        // Non-short-circuit OR: both axes must be wrapped even if the first one did.
        // (The original returned after the X wrap, leaving Y possibly out of range.)
        boolean wrapped = wrapX() | wrapY();
        // Keep the sprite in sync with the logical position, including after a wrap
        // (the original early returns skipped this and left the image stale).
        this.myImage.setX(this.myXLocation);
        this.myImage.setY(this.myYLocation);
        if (wrapped) {
            return new Line();
        }
        return myPen.drawLine(start, this.getLocation());
    }

    /**
     * Wrap the x-coordinate around the canvas width.
     *
     * @return true if a wrap occurred
     */
    private boolean wrapX() {
        if (myXLocation > GUIconstants.DEFAULT_CANVAS_WIDTH) {
            myXLocation = myXLocation % GUIconstants.DEFAULT_CANVAS_WIDTH;
            return true;
        }
        if (myXLocation < 0) {
            // myXLocation is negative here, so adding it wraps to the right edge.
            // (Fixes the original "WIDTH - x", which produced an off-canvas value.)
            myXLocation = GUIconstants.DEFAULT_CANVAS_WIDTH + myXLocation;
            return true;
        }
        return false;
    }

    /**
     * Wrap the y-coordinate around the canvas height.
     *
     * @return true if a wrap occurred
     */
    private boolean wrapY() {
        if (myYLocation > GUIconstants.DEFAULT_CANVAS_HEIGHT) {
            myYLocation = myYLocation % GUIconstants.DEFAULT_CANVAS_HEIGHT;
            return true;
        }
        if (myYLocation < 0) {
            // Same fix as wrapX: add the negative overshoot to wrap to the bottom edge.
            myYLocation = GUIconstants.DEFAULT_CANVAS_HEIGHT + myYLocation;
            return true;
        }
        return false;
    }

    /**
     * Moves the turtle using a series of steps. The step size depends on the
     * pen style (dashed/dotted/solid) so that the pen pattern stays visible.
     * The use of steps could potentially be used for animation.
     *
     * @param distance
     *            number of pixels to move the turtle, positive value moves
     *            forward and negative moves backwards
     * @return returns a Group containing all of the lines generated from each
     *         individual step
     */
    public Group move(double distance) {
        Group root = new Group();
        if (distance == 0) {
            // Nothing to draw; also avoids a NaN direction from 0 / |0| below.
            return root;
        }
        double direction = Math.signum(distance);
        double remainder = Math.abs(distance);
        while (remainder > 0) {
            if (this.getPen().isDashed()
                    && remainder >= GUIconstants.TURTLE_DASH_MOVE_DISTANCE) {
                root.getChildren().add(
                        this.step(GUIconstants.TURTLE_DASH_MOVE_DISTANCE
                                * direction));
                // Use the named constant instead of the original magic number 30.
                remainder -= GUIconstants.TURTLE_DASH_MOVE_DISTANCE;
            } else if (this.getPen().isDotted()
                    && remainder >= GUIconstants.TURTLE_DOT_MOVE_DISTANCE) {
                root.getChildren().add(
                        this.step(GUIconstants.TURTLE_DOT_MOVE_DISTANCE
                                * direction));
                // Use the named constant instead of the original magic number 5.
                remainder -= GUIconstants.TURTLE_DOT_MOVE_DISTANCE;
            } else {
                root.getChildren().add(
                        this.step(GUIconstants.TURTLE_SOLID_MOVE_DISTANCE
                                * direction));
                remainder -= GUIconstants.TURTLE_SOLID_MOVE_DISTANCE;
            }
        }
        return root;
    }

    /**
     * Enter the value of degrees to rotate. The degrees follow the unit circle.
     * Positive degrees will rotate in the counter-clockwise direction, and
     * negative degrees will rotate in the clockwise direction.
     *
     * @param degrees
     *            degrees, based on unit circle, to rotate
     */
    public void rotate(double degrees) {
        myOrientation += degrees;
        // Normalize the logical orientation into [0, 360).
        if (myOrientation >= GUIconstants.UNIT_CIRCLE_DEGREES) {
            myOrientation -= GUIconstants.UNIT_CIRCLE_DEGREES;
        }
        if (myOrientation < 0) {
            myOrientation += GUIconstants.UNIT_CIRCLE_DEGREES;
        }
        // JavaFX rotates clockwise for positive angles, hence the sign flip.
        myImage.setRotate(myImage.getRotate() - degrees);
    }

    /**
     * Set the Turtle's pen to the "up" position
     */
    public void penUp() {
        myPen.setPenUp();
    }

    /**
     * Set the Turtle's pen position to the "down" position
     */
    public void penDown() {
        myPen.setPenDown();
    }

    /**
     * check if the pen is in down position.
     *
     * @return boolean where 'true' represents the pen being down, and 'false'
     *         represents the pen being up
     */
    public boolean isPenDown() {
        return myPen.getPenDownStatus();
    }

    /**
     * return the current location of the turtle.
     *
     * @return Point2D containing the x,y coordinates of this turtle
     */
    public Point2D getLocation() {
        return new Point2D(myXLocation, myYLocation);
    }

    /**
     * return the orientation (in degrees) of the turtle.
     *
     * @return orientation in degrees, based on the unit circle
     */
    public double getOrientation() {
        return myOrientation;
    }

    /**
     * Return this Turtle's SLogoPen
     *
     * @return the pen used by this turtle for drawing
     */
    public SLogoPen getPen() {
        return myPen;
    }

    /**
     * Set the orientation of the turtle
     *
     * @param degree
     *            direction in degrees, based on the unit circle, to point the
     *            turtle in
     */
    public void setDirection(double degree) {
        myOrientation = degree;
        myImage.setRotate(-degree + GUIconstants.TURTLE_IMAGE_CORRECTION_VALUE);
    }

    /**
     * Display the turtle's image
     *
     * @return the ImageView representing this turtle on screen
     */
    public ImageView display() {
        return myImage;
    }

    /**
     * set the image of the turtle
     *
     * @param imageFileName
     *            classpath resource name of the image to load
     */
    public void setImage(String imageFileName) {
        try {
            myImage.setImage(new Image(this.getClass()
                    .getResource(imageFileName).toExternalForm()));
        } catch (IllegalArgumentException | NullPointerException e) {
            // turn this into error
            // NullPointerException covers a missing resource (getResource == null).
            System.out.println("Turtle image not found.\n" + e.getMessage());
        }
    }

    /**
     * set the x-location of the turtle
     *
     * @param x new x-coordinate in pixels
     */
    public void setX(double x) {
        myXLocation = x;
        myImage.setX(x);
    }

    /**
     * set the y-location of the turtle
     *
     * @param y new y-coordinate in pixels
     */
    public void setY(double y) {
        myYLocation = y;
        myImage.setY(y);
    }
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.
package com.azure.resourcemanager.appservice.fluent.models;
import com.azure.core.annotation.Fluent;
import com.azure.core.annotation.JsonFlatten;
import com.azure.core.management.Resource;
import com.azure.core.util.CoreUtils;
import com.azure.core.util.logging.ClientLogger;
import com.azure.resourcemanager.appservice.models.HostingEnvironmentProfile;
import com.azure.resourcemanager.appservice.models.KeyVaultSecretStatus;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.time.OffsetDateTime;
import java.util.List;
/** SSL certificate for an app. */
@JsonFlatten
@Fluent
public class CertificateInner extends Resource {
    @JsonIgnore private final ClientLogger logger = new ClientLogger(CertificateInner.class);

    /*
     * Friendly name of the certificate.
     */
    @JsonProperty(value = "properties.friendlyName", access = JsonProperty.Access.WRITE_ONLY)
    private String friendlyName;

    /*
     * Subject name of the certificate.
     */
    @JsonProperty(value = "properties.subjectName", access = JsonProperty.Access.WRITE_ONLY)
    private String subjectName;

    /*
     * Host names the certificate applies to.
     */
    @JsonProperty(value = "properties.hostNames")
    private List<String> hostNames;

    /*
     * Pfx blob.
     */
    @JsonProperty(value = "properties.pfxBlob")
    private byte[] pfxBlob;

    /*
     * App name.
     */
    @JsonProperty(value = "properties.siteName", access = JsonProperty.Access.WRITE_ONLY)
    private String siteName;

    /*
     * Self link.
     */
    @JsonProperty(value = "properties.selfLink", access = JsonProperty.Access.WRITE_ONLY)
    private String selfLink;

    /*
     * Certificate issuer.
     */
    @JsonProperty(value = "properties.issuer", access = JsonProperty.Access.WRITE_ONLY)
    private String issuer;

    /*
     * Certificate issue Date.
     */
    @JsonProperty(value = "properties.issueDate", access = JsonProperty.Access.WRITE_ONLY)
    private OffsetDateTime issueDate;

    /*
     * Certificate expiration date.
     */
    @JsonProperty(value = "properties.expirationDate", access = JsonProperty.Access.WRITE_ONLY)
    private OffsetDateTime expirationDate;

    /*
     * Certificate password.
     */
    @JsonProperty(value = "properties.password")
    private String password;

    /*
     * Certificate thumbprint.
     */
    @JsonProperty(value = "properties.thumbprint", access = JsonProperty.Access.WRITE_ONLY)
    private String thumbprint;

    /*
     * Is the certificate valid?.
     */
    @JsonProperty(value = "properties.valid", access = JsonProperty.Access.WRITE_ONLY)
    private Boolean valid;

    /*
     * Raw bytes of .cer file
     */
    @JsonProperty(value = "properties.cerBlob", access = JsonProperty.Access.WRITE_ONLY)
    private byte[] cerBlob;

    /*
     * Public key hash.
     */
    @JsonProperty(value = "properties.publicKeyHash", access = JsonProperty.Access.WRITE_ONLY)
    private String publicKeyHash;

    /*
     * Specification for the App Service Environment to use for the
     * certificate.
     */
    @JsonProperty(value = "properties.hostingEnvironmentProfile", access = JsonProperty.Access.WRITE_ONLY)
    private HostingEnvironmentProfile hostingEnvironmentProfile;

    /*
     * Key Vault Csm resource Id.
     */
    @JsonProperty(value = "properties.keyVaultId")
    private String keyVaultId;

    /*
     * Key Vault secret name.
     */
    @JsonProperty(value = "properties.keyVaultSecretName")
    private String keyVaultSecretName;

    /*
     * Status of the Key Vault secret.
     */
    @JsonProperty(value = "properties.keyVaultSecretStatus", access = JsonProperty.Access.WRITE_ONLY)
    private KeyVaultSecretStatus keyVaultSecretStatus;

    /*
     * Resource ID of the associated App Service plan, formatted as:
     * "/subscriptions/{subscriptionID}/resourceGroups/{groupName}/providers/Microsoft.Web/serverfarms"
     + "/{appServicePlanName}".
     */
    @JsonProperty(value = "properties.serverFarmId")
    private String serverFarmId;

    /*
     * CNAME of the certificate to be issued via free certificate
     */
    @JsonProperty(value = "properties.canonicalName")
    private String canonicalName;

    /*
     * Kind of resource.
     */
    @JsonProperty(value = "kind")
    private String kind;

    /**
     * Get the friendlyName property: Friendly name of the certificate.
     *
     * @return the friendlyName value.
     */
    public String friendlyName() {
        return this.friendlyName;
    }

    /**
     * Get the subjectName property: Subject name of the certificate.
     *
     * @return the subjectName value.
     */
    public String subjectName() {
        return this.subjectName;
    }

    /**
     * Get the hostNames property: Host names the certificate applies to.
     *
     * @return the hostNames value.
     */
    public List<String> hostNames() {
        return this.hostNames;
    }

    /**
     * Set the hostNames property: Host names the certificate applies to.
     *
     * @param hostNames the hostNames value to set.
     * @return the CertificateInner object itself.
     */
    public CertificateInner withHostNames(List<String> hostNames) {
        this.hostNames = hostNames;
        return this;
    }

    /**
     * Get the pfxBlob property: Pfx blob.
     *
     * @return the pfxBlob value.
     */
    public byte[] pfxBlob() {
        // Defensive copy: callers must not be able to mutate the stored blob.
        return CoreUtils.clone(this.pfxBlob);
    }

    /**
     * Set the pfxBlob property: Pfx blob.
     *
     * @param pfxBlob the pfxBlob value to set.
     * @return the CertificateInner object itself.
     */
    public CertificateInner withPfxBlob(byte[] pfxBlob) {
        // Defensive copy: later mutation of the caller's array must not leak in.
        this.pfxBlob = CoreUtils.clone(pfxBlob);
        return this;
    }

    /**
     * Get the siteName property: App name.
     *
     * @return the siteName value.
     */
    public String siteName() {
        return this.siteName;
    }

    /**
     * Get the selfLink property: Self link.
     *
     * @return the selfLink value.
     */
    public String selfLink() {
        return this.selfLink;
    }

    /**
     * Get the issuer property: Certificate issuer.
     *
     * @return the issuer value.
     */
    public String issuer() {
        return this.issuer;
    }

    /**
     * Get the issueDate property: Certificate issue Date.
     *
     * @return the issueDate value.
     */
    public OffsetDateTime issueDate() {
        return this.issueDate;
    }

    /**
     * Get the expirationDate property: Certificate expiration date.
     *
     * @return the expirationDate value.
     */
    public OffsetDateTime expirationDate() {
        return this.expirationDate;
    }

    /**
     * Get the password property: Certificate password.
     *
     * @return the password value.
     */
    public String password() {
        return this.password;
    }

    /**
     * Set the password property: Certificate password.
     *
     * @param password the password value to set.
     * @return the CertificateInner object itself.
     */
    public CertificateInner withPassword(String password) {
        this.password = password;
        return this;
    }

    /**
     * Get the thumbprint property: Certificate thumbprint.
     *
     * @return the thumbprint value.
     */
    public String thumbprint() {
        return this.thumbprint;
    }

    /**
     * Get the valid property: Is the certificate valid?.
     *
     * @return the valid value.
     */
    public Boolean valid() {
        return this.valid;
    }

    /**
     * Get the cerBlob property: Raw bytes of .cer file.
     *
     * @return the cerBlob value.
     */
    public byte[] cerBlob() {
        // Defensive copy: callers must not be able to mutate the stored blob.
        return CoreUtils.clone(this.cerBlob);
    }

    /**
     * Get the publicKeyHash property: Public key hash.
     *
     * @return the publicKeyHash value.
     */
    public String publicKeyHash() {
        return this.publicKeyHash;
    }

    /**
     * Get the hostingEnvironmentProfile property: Specification for the App Service Environment to use for the
     * certificate.
     *
     * @return the hostingEnvironmentProfile value.
     */
    public HostingEnvironmentProfile hostingEnvironmentProfile() {
        return this.hostingEnvironmentProfile;
    }

    /**
     * Get the keyVaultId property: Key Vault Csm resource Id.
     *
     * @return the keyVaultId value.
     */
    public String keyVaultId() {
        return this.keyVaultId;
    }

    /**
     * Set the keyVaultId property: Key Vault Csm resource Id.
     *
     * @param keyVaultId the keyVaultId value to set.
     * @return the CertificateInner object itself.
     */
    public CertificateInner withKeyVaultId(String keyVaultId) {
        this.keyVaultId = keyVaultId;
        return this;
    }

    /**
     * Get the keyVaultSecretName property: Key Vault secret name.
     *
     * @return the keyVaultSecretName value.
     */
    public String keyVaultSecretName() {
        return this.keyVaultSecretName;
    }

    /**
     * Set the keyVaultSecretName property: Key Vault secret name.
     *
     * @param keyVaultSecretName the keyVaultSecretName value to set.
     * @return the CertificateInner object itself.
     */
    public CertificateInner withKeyVaultSecretName(String keyVaultSecretName) {
        this.keyVaultSecretName = keyVaultSecretName;
        return this;
    }

    /**
     * Get the keyVaultSecretStatus property: Status of the Key Vault secret.
     *
     * @return the keyVaultSecretStatus value.
     */
    public KeyVaultSecretStatus keyVaultSecretStatus() {
        return this.keyVaultSecretStatus;
    }

    /**
     * Get the serverFarmId property: Resource ID of the associated App Service plan, formatted as:
     * "/subscriptions/{subscriptionID}/resourceGroups/{groupName}/providers/Microsoft.Web/serverfarms"
     + "/{appServicePlanName}".
     *
     * @return the serverFarmId value.
     */
    public String serverFarmId() {
        return this.serverFarmId;
    }

    /**
     * Set the serverFarmId property: Resource ID of the associated App Service plan, formatted as:
     * "/subscriptions/{subscriptionID}/resourceGroups/{groupName}/providers/Microsoft.Web/serverfarms"
     + "/{appServicePlanName}".
     *
     * @param serverFarmId the serverFarmId value to set.
     * @return the CertificateInner object itself.
     */
    public CertificateInner withServerFarmId(String serverFarmId) {
        this.serverFarmId = serverFarmId;
        return this;
    }

    /**
     * Get the canonicalName property: CNAME of the certificate to be issued via free certificate.
     *
     * @return the canonicalName value.
     */
    public String canonicalName() {
        return this.canonicalName;
    }

    /**
     * Set the canonicalName property: CNAME of the certificate to be issued via free certificate.
     *
     * @param canonicalName the canonicalName value to set.
     * @return the CertificateInner object itself.
     */
    public CertificateInner withCanonicalName(String canonicalName) {
        this.canonicalName = canonicalName;
        return this;
    }

    /**
     * Get the kind property: Kind of resource.
     *
     * @return the kind value.
     */
    public String kind() {
        return this.kind;
    }

    /**
     * Set the kind property: Kind of resource.
     *
     * @param kind the kind value to set.
     * @return the CertificateInner object itself.
     */
    public CertificateInner withKind(String kind) {
        this.kind = kind;
        return this;
    }

    /**
     * Validates the instance.
     *
     * @throws IllegalArgumentException thrown if the instance is not valid.
     */
    public void validate() {
        // Only the nested complex property carries its own validation;
        // the scalar properties have no constraints to check here.
        if (hostingEnvironmentProfile() != null) {
            hostingEnvironmentProfile().validate();
        }
    }
}
| |
/*
* Copyright 2015 sourcestream GmbH
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.sourcestream.movieDB.controller;
import android.app.Fragment;
import android.app.FragmentManager;
import android.app.FragmentTransaction;
import android.os.AsyncTask;
import android.os.Bundle;
import android.os.Parcelable;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AbsListView;
import android.widget.AdapterView;
import android.widget.ProgressBar;
import android.widget.Toast;
import com.google.android.gms.analytics.HitBuilders;
import com.google.android.gms.analytics.Tracker;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import java.util.ArrayList;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import de.sourcestream.movieDB.MainActivity;
import de.sourcestream.movieDB.MovieDB;
import de.sourcestream.movieDB.R;
import de.sourcestream.movieDB.adapter.GenresAdapter;
import de.sourcestream.movieDB.model.GenresModel;
/**
* Genres list.
*/
public class GenresList extends Fragment implements AdapterView.OnItemClickListener {
    private MainActivity activity;
    private ProgressBar spinner;
    private AbsListView listView;
    private ArrayList<GenresModel> genresList;
    private GenresAdapter genresAdapter;
    private MovieList movieList;
    // 0 = list must be (re)fetched from the server, 1 = cached data can be reused on back navigation.
    private int backState;
    private HttpURLConnection conn;
    // State bundle restored in onCreate; non-null only after an orientation change / recreation.
    private Bundle save;

    public GenresList() {
    }

    /**
     * Called to do initial creation of a fragment.
     * This is called after onAttach(Activity) and before onCreateView(LayoutInflater, ViewGroup, Bundle).
     *
     * @param savedInstanceState If the fragment is being re-created from a previous saved state, this is the state.
     */
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        if (savedInstanceState != null)
            save = savedInstanceState.getBundle("save");
    }

    /**
     * Called to have the fragment instantiate its user interface view.
     * Also reports a "Genres" screen view to Google Analytics.
     *
     * @param inflater sets the layout for the current view.
     * @param container the container which holds the current view.
     * @param savedInstanceState If non-null, this fragment is being re-constructed from a previous saved state as given here.
     *                           Return the View for the fragment's UI, or null.
     */
    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
                             Bundle savedInstanceState) {
        super.onCreateView(inflater, container, savedInstanceState);
        View rootView = inflater.inflate(R.layout.genreslist, container, false);
        activity = ((MainActivity) getActivity());
        spinner = (ProgressBar) rootView.findViewById(R.id.progressBar);
        Tracker t = ((MovieDB) activity.getApplication()).getTracker();
        t.setScreenName("Genres");
        t.send(new HitBuilders.ScreenViewBuilder().build());
        return rootView;
    }

    /**
     * Restores the cached genre list when returning from the back stack,
     * otherwise triggers a fresh fetch from the server.
     *
     * @param savedInstanceState if the fragment is being re-created from a previous saved state, this is the state.
     */
    @Override
    public void onActivityCreated(Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);
        movieList = new MovieList();
        listView = (AbsListView) getActivity().findViewById(R.id.genresList);
        if (save != null) {
            backState = save.getInt("backState");
            if (backState == 1) {
                // Reuse the previously fetched data instead of hitting the network again.
                genresList = save.getParcelableArrayList("listData");
                genresAdapter = new GenresAdapter(getActivity(), R.layout.genresrow, genresList);
            }
        }
        if (backState == 0) {
            updateList();
        } else {
            listView.setAdapter(genresAdapter);
        }
        getActivity().setTitle(getResources().getString(R.string.genresTitle));
        listView.setOnItemClickListener(this);
    }

    /**
     * Callback method to be invoked when an item in this AdapterView has been clicked.
     * Navigates to the movie list of the selected genre.
     *
     * @param parent The AdapterView where the click happened.
     * @param view The view within the AdapterView that was clicked (this will be a view provided by the adapter)
     * @param position The position of the view in the adapter.
     * @param id The row id of the item that was clicked.
     */
    @Override
    public void onItemClick(AdapterView<?> parent, View view, int position,
                            long id) {
        // If the same genre is clicked again, let MovieList reuse its cached data.
        if (movieList.getCurrentList().equals("genre/" + genresList.get(position).getId() + "/movies"))
            movieList.setBackState(1);
        else {
            movieList.setCurrentList("genre/" + genresList.get(position).getId() + "/movies");
            movieList.setBackState(0);
        }
        movieList.setTitle(genresList.get(position).getName());
        FragmentManager manager = getFragmentManager();
        FragmentTransaction transaction = manager.beginTransaction();
        Bundle args = new Bundle();
        args.putString("currentList", "genresList");
        movieList.setArguments(args);
        transaction.replace(R.id.frame_container, movieList);
        // add the current transaction to the back stack:
        transaction.addToBackStack("genresList");
        transaction.commit();
        backState = 1;
    }

    /**
     * This class handles the connection to our backend server.
     * If the connection is successful we set the list data.
     */
    class JSONAsyncTask extends AsyncTask<String, Void, Boolean> {

        @Override
        protected void onPreExecute() {
            super.onPreExecute();
            activity.showView(spinner);
        }

        /**
         * Fetches and parses the genre list JSON.
         *
         * @param urls urls[0] is the request URL.
         * @return true when the list was fetched and parsed, false on any failure.
         */
        @Override
        protected Boolean doInBackground(String... urls) {
            try {
                URL url = new URL(urls[0]);
                conn = (HttpURLConnection) url.openConnection();
                conn.setReadTimeout(10000 /* milliseconds */);
                conn.setConnectTimeout(10000 /* milliseconds */);
                conn.setRequestMethod("GET");
                conn.setDoInput(true);
                conn.connect();
                int status = conn.getResponseCode();
                if (status == 200) {
                    BufferedReader br = new BufferedReader(new InputStreamReader(conn.getInputStream()));
                    StringBuilder sb = new StringBuilder();
                    try {
                        String line;
                        while ((line = br.readLine()) != null) {
                            sb.append(line).append("\n");
                        }
                    } finally {
                        // Close the reader even if readLine throws (the original leaked it).
                        br.close();
                    }
                    JSONObject jsonData = new JSONObject(sb.toString());
                    JSONArray genresArray = jsonData.getJSONArray("genres");
                    for (int i = 0; i < genresArray.length(); i++) {
                        JSONObject object = genresArray.getJSONObject(i);
                        GenresModel genre = new GenresModel();
                        genre.setName(object.getString("name"));
                        genre.setId(object.getInt("id"));
                        genresList.add(genre);
                    }
                    return true;
                }
            } catch (IOException | JSONException e) {
                // Best-effort: failure is signaled to onPostExecute via the false return.
                // (The original also called conn.disconnect() here, which was redundant
                // with the finally block below.)
            } finally {
                if (conn != null)
                    conn.disconnect();
            }
            return false;
        }

        @Override
        protected void onPostExecute(Boolean result) {
            activity.hideView(spinner);
            if (!result) {
                if (getResources() != null) {
                    Toast.makeText(getActivity(), getResources().getString(R.string.noConnection), Toast.LENGTH_LONG).show();
                    // Force a re-fetch the next time this fragment becomes active.
                    backState = 0;
                }
            } else {
                genresAdapter.notifyDataSetChanged();
                backState = 1;
            }
        }
    }

    /**
     * Fired from the main activity. Makes a new request to the server.
     * Sets list, adapter, timeout.
     */
    public void updateList() {
        if (getActivity() != null) {
            listView = (AbsListView) getActivity().findViewById(R.id.genresList);
            genresList = new ArrayList<>();
            genresAdapter = new GenresAdapter(getActivity(), R.layout.genresrow, genresList);
            listView.setAdapter(genresAdapter);
            final JSONAsyncTask request = new JSONAsyncTask();
            new Thread(new Runnable() {
                public void run() {
                    try {
                        request.execute(MovieDB.url + "genre/movie/list?&api_key=" + MovieDB.key).get(10000, TimeUnit.MILLISECONDS);
                    } catch (TimeoutException | ExecutionException | InterruptedException e) {
                        request.cancel(true);
                        // we abort the http request, else it will cause problems and slow connection later
                        if (conn != null)
                            conn.disconnect();
                        // NOTE(review): hideView is invoked from this worker thread while the
                        // Toast below is posted to the UI thread — confirm hideView is safe
                        // to call off the main thread.
                        if (spinner != null)
                            activity.hideView(spinner);
                        if (getActivity() != null) {
                            getActivity().runOnUiThread(new Runnable() {
                                public void run() {
                                    Toast.makeText(getActivity(), getResources().getString(R.string.timeout), Toast.LENGTH_SHORT).show();
                                }
                            });
                        }
                        backState = 0;
                    }
                }
            }).start();
        }
    }

    /**
     * Called to ask the fragment to save its current dynamic state,
     * so it can later be reconstructed in a new instance of its process is restarted.
     *
     * @param outState Bundle in which to place your saved state.
     */
    @Override
    public void onSaveInstanceState(Bundle outState) {
        super.onSaveInstanceState(outState);
        // Used to avoid bug where we add item in the back stack
        // and if we change orientation twice the item from the back stack has null values
        if (save != null)
            outState.putBundle("save", save);
        else {
            Bundle send = new Bundle();
            send.putInt("backState", backState);
            if (backState == 1) {
                send.putParcelableArrayList("listData", genresList);
                // used to restore the scroll listener variables
                // Save scroll position
                if (listView != null) {
                    Parcelable listState = listView.onSaveInstanceState();
                    send.putParcelable("listViewScroll", listState);
                }
            }
            outState.putBundle("save", send);
        }
    }

    /**
     * Returns whether the genre list was fetched successfully (1) or still
     * needs to be fetched (0), so a back navigation can reuse cached data.
     *
     * @return the current back-stack state flag.
     */
    public int getBackState() {
        return backState;
    }

    /**
     * @return the MovieList fragment used for genre drill-down navigation.
     */
    public MovieList getMovieListView() {
        return movieList;
    }

    /**
     * Called when the view is destroyed; resources could be freed here
     * (e.g. clearing the adapter) — currently only delegates to super.
     */
    public void onDestroyView() {
        super.onDestroyView();
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.runtime.hashtable;
import org.apache.flink.annotation.VisibleForTesting;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.MemorySize;
import org.apache.flink.core.memory.MemorySegment;
import org.apache.flink.runtime.io.disk.iomanager.AbstractChannelReaderInputView;
import org.apache.flink.runtime.io.disk.iomanager.BlockChannelReader;
import org.apache.flink.runtime.io.disk.iomanager.FileIOChannel;
import org.apache.flink.runtime.io.disk.iomanager.HeaderlessChannelReaderInputView;
import org.apache.flink.runtime.io.disk.iomanager.IOManager;
import org.apache.flink.runtime.memory.MemoryAllocationException;
import org.apache.flink.runtime.memory.MemoryManager;
import org.apache.flink.table.api.config.ExecutionConfigOptions;
import org.apache.flink.table.runtime.compression.BlockCompressionFactory;
import org.apache.flink.table.runtime.util.FileChannelUtil;
import org.apache.flink.table.runtime.util.MemorySegmentPool;
import org.apache.flink.util.MathUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.atomic.AtomicBoolean;
import static org.apache.flink.core.memory.MemorySegmentFactory.allocateUnpooledSegment;
import static org.apache.flink.util.Preconditions.checkArgument;
/**
* Base table for {@link LongHybridHashTable} and {@link BinaryHashTable}.
*/
public abstract class BaseHybridHashTable implements MemorySegmentPool {
protected static final Logger LOG = LoggerFactory.getLogger(BaseHybridHashTable.class);
/**
* The maximum number of recursive partitionings that the join does before giving up.
*/
protected static final int MAX_RECURSION_DEPTH = 3;
/**
* The maximum number of partitions, which defines the spilling granularity. Each recursion,
* the data is divided maximally into that many partitions, which are processed in one chuck.
*/
protected static final int MAX_NUM_PARTITIONS = Byte.MAX_VALUE;
/**
* The minimum number of memory segments the hash join needs to be supplied with in order to
* work.
*/
private static final int MIN_NUM_MEMORY_SEGMENTS = 33;
protected final int initPartitionFanOut;
/**
* The owner to associate with the memory segment.
*/
private Object owner;
private final int avgRecordLen;
protected final long buildRowCount;
/**
* The total reserved number of memory segments available to the hash join.
*/
protected final int reservedNumBuffers;
/**
* The total max number of memory segments available to the hash join.
*/
// Hard cap on the total number of buffers (reserved + floating) this table may hold,
// derived from the preferred memory size.
private final int maxNumBuffers;
/**
 * record number of the allocated segments from the floating pool.
 */
protected int allocatedFloatingNum;
// Number of segments requested from the floating pool per allocation round.
private final int perRequestNumBuffers;
// Memory manager used to allocate and release pages for this table.
private final MemoryManager memManager;
/**
 * The free memory segments currently available to the hash join.
 */
public final ArrayList<MemorySegment> availableMemory;
/**
 * The I/O manager used to instantiate writers for the spilled partitions.
 */
protected final IOManager ioManager;
/**
 * The size of the segments used by the hash join buckets. All segments must be of equal size to
 * ease offset computations.
 */
protected final int segmentSize;
/**
 * The queue of buffers that can be used for write-behind. Any buffer that is written
 * asynchronously to disk is returned through this queue.
 */
protected final LinkedBlockingQueue<MemorySegment> buildSpillReturnBuffers;
// log2(segmentSize); valid because segmentSize is checked to be a power of two in the constructor.
public final int segmentSizeBits;
// segmentSize - 1; masks out the byte offset within a segment.
public final int segmentSizeMask;
/**
 * Flag indicating that the closing logic has been invoked (compare-and-set guards double close).
 */
protected AtomicBoolean closed = new AtomicBoolean();
/**
 * Try to make the buildSide rows distinct.
 */
public final boolean tryDistinctBuildRow;
/**
 * The recursion depth of the partition that is currently processed. The initial table
 * has a recursion depth of 0. Partitions spilled from a table that is built for a partition
 * with recursion depth <i>n</i> have a recursion depth of <i>n+1</i>.
 */
protected int currentRecursionDepth;
/**
 * The number of buffers in the build spill return buffer queue that are actually not write behind buffers,
 * but regular buffers that only have not yet returned. This is part of an optimization that the
 * spilling code needs not wait until the partition is completely spilled before proceeding.
 */
protected int buildSpillRetBufferNumbers;
/**
 * The reader for the spilled-file of the build partition that is currently read.
 */
protected HeaderlessChannelReaderInputView currentSpilledBuildSide;
/**
 * The reader for the spilled-file of the probe partition that is currently read.
 */
protected AbstractChannelReaderInputView currentSpilledProbeSide;
/**
 * The channel enumerator that is used while processing the current partition to create
 * channels for the spill partitions it requires.
 */
protected FileIOChannel.Enumerator currentEnumerator;
// Whether spill files are block-compressed, and with which codec factory / block size.
protected final boolean compressionEnable;
protected final BlockCompressionFactory compressionCodecFactory;
protected final int compressionBlockSize;
// Spill statistics (file count / bytes written); transient, not part of any persisted state.
protected transient long numSpillFiles;
protected transient long spillInBytes;
/**
 * Creates the base hybrid hash table and eagerly allocates the reserved memory pages.
 *
 * @param conf configuration, read for spill-compression settings.
 * @param owner memory owner passed to the memory manager for allocation/release.
 * @param memManager memory manager to allocate pages from.
 * @param reservedMemorySize memory reserved up front; must cover at least MIN_NUM_MEMORY_SEGMENTS pages.
 * @param preferredMemorySize upper bound the table may grow to (reserved + floating).
 * @param perRequestMemorySize amount of floating memory requested per allocation round.
 * @param ioManager I/O manager used for spill files.
 * @param avgRecordLen average build-side record length, used to estimate the partition fan-out.
 * @param buildRowCount estimated build-side row count, used to estimate the partition fan-out.
 * @param tryDistinctBuildRow whether build-side rows should be de-duplicated.
 */
public BaseHybridHashTable(
        Configuration conf,
        Object owner,
        MemoryManager memManager,
        long reservedMemorySize,
        long preferredMemorySize,
        long perRequestMemorySize,
        IOManager ioManager,
        int avgRecordLen,
        long buildRowCount,
        boolean tryDistinctBuildRow) {
    //TODO: read compression config from configuration
    this.compressionEnable = conf.getBoolean(ExecutionConfigOptions.TABLE_EXEC_SPILL_COMPRESSION_ENABLED);
    this.compressionCodecFactory = this.compressionEnable
            ? BlockCompressionFactory.createBlockCompressionFactory(BlockCompressionFactory.CompressionFactoryName.LZ4.toString())
            : null;
    this.compressionBlockSize = (int) MemorySize.parse(
            conf.getString(ExecutionConfigOptions.TABLE_EXEC_SPILL_COMPRESSION_BLOCK_SIZE)).getBytes();
    this.owner = owner;
    this.avgRecordLen = avgRecordLen;
    this.buildRowCount = buildRowCount;
    this.tryDistinctBuildRow = tryDistinctBuildRow;
    this.reservedNumBuffers = (int) (reservedMemorySize / memManager.getPageSize());
    // some sanity checks first
    checkArgument(reservedNumBuffers >= MIN_NUM_MEMORY_SEGMENTS);
    this.maxNumBuffers = (int) (preferredMemorySize / memManager.getPageSize());
    this.perRequestNumBuffers = (int) (perRequestMemorySize / memManager.getPageSize());
    this.availableMemory = new ArrayList<>(this.reservedNumBuffers);
    try {
        List<MemorySegment> allocates = memManager.allocatePages(owner, this.reservedNumBuffers);
        this.availableMemory.addAll(allocates);
        allocates.clear();
    } catch (MemoryAllocationException e) {
        LOG.error("Out of memory", e);
        throw new RuntimeException(e);
    }
    this.memManager = memManager;
    this.ioManager = ioManager;
    this.segmentSize = memManager.getPageSize();
    // segment size must be a power of two so bit-shift/mask addressing works
    checkArgument(MathUtils.isPowerOf2(segmentSize));
    // take away the write behind buffers
    this.buildSpillReturnBuffers = new LinkedBlockingQueue<>();
    this.segmentSizeBits = MathUtils.log2strict(segmentSize);
    this.segmentSizeMask = segmentSize - 1;
    // open builds the initial table by consuming the build-side input
    this.currentRecursionDepth = 0;
    // create the partitions
    this.initPartitionFanOut = Math.min(getPartitioningFanOutNoEstimates(), maxNumPartition());
    this.closed.set(false);
    // Fixed: previously mixed String.format("%d") with an SLF4J "{}" placeholder and passed
    // perRequestNumBuffers outside the format call; use SLF4J parameterized logging throughout.
    LOG.info("Initialize hash table with {} memory segments, each size [{}], the reserved memory {}" +
                    " MB, the max memory {} MB, per allocate {} segments from floating memory pool.",
            reservedNumBuffers, segmentSize, (long) reservedNumBuffers * segmentSize / 1024 / 1024,
            (long) maxNumBuffers * segmentSize / 1024 / 1024, perRequestNumBuffers);
}
/**
 * Upper bound for the partition count of a hash-table.
 * Bucket area need at-least one and data need at-least one.
 * In the initialization phase, we can use (totalNumBuffers - numWriteBehindBuffers) Segments.
 * However, in the buildTableFromSpilledPartition phase, only (totalNumBuffers - numWriteBehindBuffers - 2)
 * can be used because two Buffers are needed to read the data.
 */
protected int maxNumPartition() {
    final int reclaimableBuffers = availableMemory.size() + buildSpillRetBufferNumbers;
    return reclaimableBuffers / 2;
}
/**
 * Gets the number of partitions to be used for an initial hash-table: estimated from the
 * build-side data volume, clamped into [11, MAX_NUM_PARTITIONS], and rounded down to a prime.
 */
private int getPartitioningFanOutNoEstimates() {
    long estimatedBuildBytes = buildRowCount * avgRecordLen;
    int fanOut = (int) Math.min(estimatedBuildBytes / (10 * segmentSize), MAX_NUM_PARTITIONS);
    return Math.max(11, findSmallerPrime(fanOut));
}
/**
 * Let prime number be the numBuckets, to avoid partition hash and bucket hash congruences.
 * Walks downward from {@code num} and returns the first prime found; if {@code num <= 1}
 * (or no prime is found on the way down), the current value is returned as-is.
 */
private static int findSmallerPrime(int num) {
    int candidate = num;
    while (candidate > 1 && !isPrimeNumber(candidate)) {
        candidate--;
    }
    return candidate;
}
/**
 * Primality test by trial division: handles 2 and even numbers up front, then
 * checks odd divisors up to the square root of {@code num}.
 */
private static boolean isPrimeNumber(int num) {
    if (num < 2) {
        return false;
    }
    if (num == 2) {
        return true;
    }
    if (num % 2 == 0) {
        return false;
    }
    for (int divisor = 3; divisor <= Math.sqrt(num); divisor += 2) {
        if (num % divisor == 0) {
            return false;
        }
    }
    return true;
}
/**
 * Gets the next buffer to be used with the hash-table, either for an in-memory partition, or
 * for the table buckets. This method returns <tt>null</tt>, if no more buffer is available.
 * Spilling a partition may free new buffers then.
 *
 * <p>Order of sources: (1) the free list, (2) returned write-behind buffers,
 * (3) a fresh allocation from the floating memory pool (bounded by maxNumBuffers).
 *
 * @return The next buffer to be used by the hash-table, or null, if no buffer remains.
 */
public MemorySegment getNextBuffer() {
    // check if the list directly offers memory
    int s = this.availableMemory.size();
    if (s > 0) {
        return this.availableMemory.remove(s - 1);
    }
    // check if there are write behind buffers that actually are to be used for the hash table
    if (this.buildSpillRetBufferNumbers > 0) {
        // grab at least one, no matter what
        MemorySegment toReturn;
        try {
            toReturn = this.buildSpillReturnBuffers.take();
        } catch (InterruptedException iex) {
            // Fixed: restore the interrupt status and preserve the cause instead of
            // silently swallowing the InterruptedException.
            Thread.currentThread().interrupt();
            throw new RuntimeException("Hybrid Hash Join was interrupted while taking a buffer.", iex);
        }
        this.buildSpillRetBufferNumbers--;
        // grab as many more buffers as are available directly (non-blocking poll)
        MemorySegment currBuff;
        while (this.buildSpillRetBufferNumbers > 0 && (currBuff = this.buildSpillReturnBuffers.poll()) != null) {
            this.availableMemory.add(currBuff);
            this.buildSpillRetBufferNumbers--;
        }
        return toReturn;
    }
    if (reservedNumBuffers + allocatedFloatingNum >= maxNumBuffers) {
        //no more memory.
        return null;
    }
    int requestNum = Math.min(perRequestNumBuffers, maxNumBuffers - reservedNumBuffers -
            allocatedFloatingNum);
    //apply for much more memory.
    try {
        List<MemorySegment> allocates = memManager.allocatePages(owner, requestNum);
        this.availableMemory.addAll(allocates);
        allocatedFloatingNum += allocates.size();
        allocates.clear();
        LOG.info("{} allocate {} floating segments successfully!", owner, requestNum);
    } catch (MemoryAllocationException e) {
        LOG.warn("BinaryHashMap can't allocate {} floating pages, and now used {} pages",
                requestNum, reservedNumBuffers + allocatedFloatingNum, e);
        //can't allocate much more memory.
        return null;
    }
    if (this.availableMemory.size() > 0) {
        return this.availableMemory.remove(this.availableMemory.size() - 1);
    } else {
        return null;
    }
}
/**
 * Bulk memory acquisition.
 * NOTE: Failure to get memory will throw an exception.
 *
 * @param bufferSize number of segments to acquire.
 * @return an array of exactly {@code bufferSize} segments.
 * @throws RuntimeException if the table cannot supply enough buffers; any segments already
 *         acquired are returned to the free list first so they are not leaked.
 */
public MemorySegment[] getNextBuffers(int bufferSize) {
    MemorySegment[] memorySegments = new MemorySegment[bufferSize];
    for (int i = 0; i < bufferSize; i++) {
        MemorySegment nextBuffer = getNextBuffer();
        if (nextBuffer == null) {
            // Fixed: give back the segments acquired so far instead of dropping them,
            // otherwise a partial acquisition permanently leaks table memory.
            for (int j = 0; j < i; j++) {
                availableMemory.add(memorySegments[j]);
            }
            throw new RuntimeException("No enough buffers!");
        }
        memorySegments[i] = nextBuffer;
    }
    return memorySegments;
}
/**
 * Like {@link #getNextBuffer()}, but treats memory exhaustion as an internal error.
 */
protected MemorySegment getNotNullNextBuffer() {
    MemorySegment nextBuffer = getNextBuffer();
    if (nextBuffer != null) {
        return nextBuffer;
    }
    throw new RuntimeException("Bug in HybridHashJoin: No memory became available.");
}
/**
 * This is the method called by the partitions to request memory to serialize records.
 * It automatically spills partitions, if memory runs out.
 *
 * @return The next available memory segment.
 */
@Override
public MemorySegment nextSegment() {
    // fast path: memory is directly available
    MemorySegment segment = getNextBuffer();
    if (segment != null) {
        return segment;
    }
    // slow path: spill a partition and retry once
    try {
        spillPartition();
    } catch (IOException ioex) {
        String detail = ioex.getMessage() == null ? "." : ": " + ioex.getMessage();
        throw new RuntimeException("Error spilling Hash Join Partition" + detail, ioex);
    }
    MemorySegment freedSegment = getNextBuffer();
    if (freedSegment == null) {
        throw new RuntimeException("BUG in Hybrid Hash Join: Spilling did not free a buffer.");
    }
    return freedSegment;
}
/**
 * Returns the page size used by this table (the memory manager's segment size).
 */
@Override
public int pageSize() {
return segmentSize;
}
/**
 * Hands a batch of segments back to the table's free list; null entries are ignored.
 */
@Override
public void returnAll(List<MemorySegment> memory) {
    for (int i = 0; i < memory.size(); i++) {
        MemorySegment segment = memory.get(i);
        if (segment != null) {
            availableMemory.add(segment);
        }
    }
}
/**
 * Spills an in-memory partition to disk to free memory; called from {@link #nextSegment()}
 * when no buffer is available.
 * NOTE(review): the int return value's semantics are not visible here — presumably the
 * spilled partition's index or the number of freed buffers; confirm in the subclass.
 */
protected abstract int spillPartition() throws IOException;
/**
 * This method makes sure that at least a certain number of memory segments is in the list of
 * free segments.
 * Free memory can be in the list of free segments, or in the return-queue where segments used
 * to write behind are put. The number of segments that are in that return-queue, but are
 * actually reclaimable is tracked. This method makes sure at least a certain number of
 * buffers is reclaimed.
 *
 * @param minRequiredAvailable The minimum number of buffers that needs to be reclaimed.
 * @throws IllegalArgumentException if more buffers are requested than can ever be reclaimed.
 */
public void ensureNumBuffersReturned(final int minRequiredAvailable) {
    if (minRequiredAvailable > this.availableMemory.size() + this.buildSpillRetBufferNumbers) {
        throw new IllegalArgumentException("More buffers requested available than totally available.");
    }
    try {
        while (this.availableMemory.size() < minRequiredAvailable) {
            // blocks until an asynchronous spill writer hands its buffer back
            this.availableMemory.add(this.buildSpillReturnBuffers.take());
            this.buildSpillRetBufferNumbers--;
        }
    } catch (InterruptedException iex) {
        // Fixed: restore the interrupt status and keep the cause instead of discarding it.
        Thread.currentThread().interrupt();
        throw new RuntimeException("Hash Join was interrupted.", iex);
    }
}
/**
 * Closes the hash table. This effectively releases all internal structures and closes all
 * open files and removes them. The call to this method is valid both as a cleanup after the
 * complete inputs were properly processed, and as an cancellation call, which cleans up
 * all resources that are currently held by the hash join.
 */
public void close() {
    // make sure that we close only once
    if (!this.closed.compareAndSet(false, true)) {
        return;
    }
    // clear the current probe side channel, if there is one
    if (this.currentSpilledProbeSide != null) {
        try {
            this.currentSpilledProbeSide.getChannel().closeAndDelete();
        } catch (Throwable t) {
            LOG.warn("Could not close and delete the temp file for the current spilled partition probe side.", t);
        }
    }
    // clear the memory in the partitions
    clearPartitions();
    // return the write-behind buffers
    for (int i = 0; i < this.buildSpillRetBufferNumbers; i++) {
        try {
            this.availableMemory.add(this.buildSpillReturnBuffers.take());
        } catch (InterruptedException iex) {
            // Fixed: restore the interrupt status and preserve the cause.
            Thread.currentThread().interrupt();
            throw new RuntimeException("Hashtable closing was interrupted", iex);
        }
    }
    this.buildSpillRetBufferNumbers = 0;
}
/**
 * Releases the memory held by all partitions; invoked from {@link #close()} during cleanup.
 */
protected abstract void clearPartitions();
/**
 * Releases all memory back to the memory manager. Only legal after {@link #close()};
 * otherwise an {@link IllegalStateException} is thrown.
 */
public void free() {
    if (!this.closed.get()) {
        throw new IllegalStateException("Cannot release memory until BinaryHashTable is closed!");
    }
    memManager.release(availableMemory);
    allocatedFloatingNum = 0;
}
/**
 * Free the memory not used.
 */
public void freeCurrent() {
// NOTE(review): assumes MemoryManager.release removes released pages from the list, so the
// size delta below equals the number of pages actually given back — confirm against
// MemoryManager's contract. Also note allocatedFloatingNum may go negative if more pages
// than were floating-allocated are released here.
int beforeReleaseNum = availableMemory.size();
memManager.release(availableMemory);
allocatedFloatingNum -= (beforeReleaseNum - availableMemory.size());
}
/**
 * Exposes the internal free list for tests; callers must not mutate it.
 */
@VisibleForTesting
public List<MemorySegment> getFreedMemory() {
return this.availableMemory;
}
/**
 * Returns a single segment to the free list.
 */
public void free(MemorySegment segment) {
this.availableMemory.add(segment);
}
/**
 * Number of buffers still reclaimable: free list plus pending write-behind returns.
 */
public int remainBuffers() {
return availableMemory.size() + buildSpillRetBufferNumbers;
}
/**
 * Bytes currently in use: (reserved + floating - free) pages times the page size.
 */
public long getUsedMemoryInBytes() {
return (reservedNumBuffers + allocatedFloatingNum - availableMemory.size()) *
((long) memManager.getPageSize());
}
/**
 * Number of spill files written so far.
 */
public long getNumSpillFiles() {
return numSpillFiles;
}
/**
 * Total bytes spilled to disk so far.
 */
public long getSpillInBytes() {
return spillInBytes;
}
/**
 * Give up to one-sixth of the memory of the bucket area.
 */
public int maxInitBufferOfBucketArea(int partitions) {
    int bucketAreaBudget = (reservedNumBuffers - 2) / 6;
    return Math.max(1, bucketAreaBudget / partitions);
}
/**
 * Reads all blocks of a spilled file back into memory segments taken from the free list.
 *
 * @param id the channel id of the spill file.
 * @param blockCount the number of blocks to read.
 * @return the segments filled from the file.
 * @throws IOException if reading fails.
 */
protected List<MemorySegment> readAllBuffers(FileIOChannel.ID id, int blockCount) throws IOException {
    // we are guaranteed to stay in memory
    ensureNumBuffersReturned(blockCount);
    LinkedBlockingQueue<MemorySegment> retSegments = new LinkedBlockingQueue<>();
    BlockChannelReader<MemorySegment> reader = FileChannelUtil.createBlockChannelReader(
            ioManager, id, retSegments,
            compressionEnable, compressionCodecFactory, compressionBlockSize, segmentSize);
    // Fixed: close (and delete) the reader even if a readBlock call throws, so the
    // file handle and temp file are not leaked on failure.
    try {
        for (int i = 0; i < blockCount; i++) {
            reader.readBlock(availableMemory.remove(availableMemory.size() - 1));
        }
    } finally {
        reader.closeAndDelete();
    }
    final List<MemorySegment> buffers = new ArrayList<>();
    retSegments.drainTo(buffers);
    return buffers;
}
/**
 * Creates a headerless reader over a spilled file. The two read buffers are freshly
 * allocated unpooled segments, so no table memory is consumed by the view.
 *
 * @param id the channel id of the spill file.
 * @param blockCount the number of blocks in the file.
 * @param lastSegmentLimit presumably the number of valid bytes in the last block — inferred
 *        from the name; confirm against HeaderlessChannelReaderInputView.
 */
protected HeaderlessChannelReaderInputView createInputView(FileIOChannel.ID id, int blockCount, int lastSegmentLimit) throws IOException {
BlockChannelReader<MemorySegment> inReader = FileChannelUtil.createBlockChannelReader(
ioManager, id, new LinkedBlockingQueue<>(),
compressionEnable, compressionCodecFactory, compressionBlockSize, segmentSize);
return new HeaderlessChannelReaderInputView(inReader,
Arrays.asList(allocateUnpooledSegment(segmentSize), allocateUnpooledSegment(segmentSize)),
blockCount, lastSegmentLimit, false);
}
/**
 * The level parameter is needed so that we can have different hash functions when we
 * recursively apply the partitioning, so that the working set eventually fits into memory.
 * Rotates the hash by 11 bits per recursion level and maps the result into the
 * non-negative range.
 */
public static int hash(int hashCode, int level) {
    int rotated = Integer.rotateLeft(hashCode, level * 11);
    return rotated < 0 ? -(rotated + 1) : rotated;
}
}
| |
package com.compomics.util.experiment.identification.modification.mapping;
import com.compomics.util.experiment.biology.modifications.Modification;
import com.compomics.util.experiment.biology.modifications.ModificationProvider;
import com.compomics.util.experiment.biology.modifications.ModificationType;
import com.compomics.util.experiment.biology.proteins.Peptide;
import com.compomics.util.experiment.identification.matches.ModificationMatch;
import com.compomics.util.experiment.identification.modification.ModificationSiteMapping;
import com.compomics.util.experiment.io.identification.IdfileReader;
import com.compomics.util.parameters.identification.IdentificationParameters;
import com.compomics.util.parameters.identification.search.ModificationParameters;
import com.compomics.util.parameters.identification.search.SearchParameters;
import java.util.ArrayList;
import java.util.HashMap;
/**
 * Function attempting to map modification localization based on their type.
 *
 * <p>NOTE: the mapping mutates the peptide's {@code ModificationMatch} objects in place:
 * both the modification name and the site may be rewritten.
 *
 * @author Marc Vaudel
 * @author Harald Barsnes
 */
public class ModificationLocalizationMapper {
/**
 * The mass added per amino acid as part of the reference mass when
 * converting Dalton tolerances to ppm.
 */
public static final double MASS_PER_AA = 100.0;
/**
 * Makes an initial modification mapping based on the search engine results
 * and the compatibility to the searched modifications.
 *
 * <p>Processing order: (1) resolve an N-terminal modification reported at site 1,
 * (2) resolve a C-terminal modification reported at the last residue, (3) map the
 * remaining matches by mass within the fragment-ion tolerance, (4) attempt to
 * re-localize any still-unmapped matches via {@code ModificationSiteMapping.alignAll}.
 * Terminal modifications are stored at site 0 (N-term) and peptideLength + 1 (C-term).
 *
 * @param peptide The peptide where the modification was found.
 * @param expectedNames The expected modifications at each site.
 * @param modNames The possible names for every modification match.
 * @param identificationParameters The identification parameters.
 * @param idfileReader The identification file reader.
 * @param modificationProvider The modification provider to use.
 */
public static void modificationLocalization(
Peptide peptide,
HashMap<Integer, ArrayList<String>> expectedNames,
HashMap<ModificationMatch, ArrayList<String>> modNames,
IdentificationParameters identificationParameters,
IdfileReader idfileReader,
ModificationProvider modificationProvider
) {
SearchParameters searchParameters = identificationParameters.getSearchParameters();
ModificationParameters modificationParameters = searchParameters.getModificationParameters();
ModificationMatch[] modificationMatches = peptide.getVariableModifications();
int peptideLength = peptide.getSequence().length();
////////////////////////////////
// check nterm modifications
////////////////////////////////
ModificationMatch nTermModification = null;
for (ModificationMatch modMatch : modificationMatches) {
// reference mass of the modification as reported by the search engine
double refMass = ModificationMassMapper.getMass(
modMatch.getModification(),
idfileReader,
searchParameters,
modificationProvider
);
int modSite = modMatch.getSite();
if (modSite == 1) {
// site 1 may actually be an N-terminal modification, stored under key 0
ArrayList<String> expectedNamesAtSite = expectedNames.get(0);
if (expectedNamesAtSite != null) {
// keep only expected names whose mass matches within the fragment-ion tolerance
ArrayList<String> filteredNamesAtSite = new ArrayList<>(expectedNamesAtSite.size());
for (String modName : expectedNamesAtSite) {
Modification modification = modificationProvider.getModification(modName);
if (Math.abs(modification.getMass() - refMass)
< searchParameters.getFragmentIonAccuracyInDaltons(MASS_PER_AA * peptideLength)) {
filteredNamesAtSite.add(modName);
}
}
for (String modName : filteredNamesAtSite) {
Modification modification = modificationProvider.getModification(modName);
if (modification.getModificationType().isNTerm()) {
// accept the N-term name only if no non-terminal searched modification
// of the same mass could equally explain the match
boolean otherPossibleMod = false;
for (String tempName : modificationParameters.getAllNotFixedModifications()) {
if (!tempName.equals(modName)) {
Modification tempModification = modificationProvider.getModification(tempName);
// NOTE(review): exact double equality — assumes identical modifications
// share the exact same stored mass; confirm.
if (tempModification.getMass() == modification.getMass()
&& !tempModification.getModificationType().isNTerm()) {
otherPossibleMod = true;
break;
}
}
}
if (!otherPossibleMod) {
// relocate the match to the N-terminus (site 0)
nTermModification = modMatch;
modMatch.setModification(modName);
modMatch.setSite(0);
break;
}
}
}
if (nTermModification != null) {
break;
}
}
}
}
////////////////////////////////
// check cterm modifications
////////////////////////////////
ModificationMatch cTermModification = null;
for (ModificationMatch modMatch : peptide.getVariableModifications()) {
if (modMatch != nTermModification) {
double refMass = ModificationMassMapper.getMass(
modMatch.getModification(),
idfileReader,
searchParameters,
modificationProvider
);
int modSite = modMatch.getSite();
if (modSite == peptideLength) {
// the last residue may actually carry a C-terminal modification (key peptideLength + 1)
ArrayList<String> expectedNamesAtSite = expectedNames.get(peptideLength + 1);
if (expectedNamesAtSite != null) {
ArrayList<String> filteredNamesAtSite = new ArrayList<>(expectedNamesAtSite.size());
for (String modName : expectedNamesAtSite) {
Modification modification = modificationProvider.getModification(modName);
if (Math.abs(modification.getMass() - refMass)
< searchParameters.getFragmentIonAccuracyInDaltons(MASS_PER_AA * peptideLength)) {
filteredNamesAtSite.add(modName);
}
}
for (String modName : filteredNamesAtSite) {
Modification modification = modificationProvider.getModification(modName);
if (modification.getModificationType().isCTerm()) {
// accept only if no non-terminal modification of the same mass competes
boolean otherPossibleMod = false;
for (String tempName : modificationParameters.getAllNotFixedModifications()) {
if (!tempName.equals(modName)) {
Modification tempModification = modificationProvider.getModification(tempName);
if (tempModification.getMass() == modification.getMass()
&& !tempModification.getModificationType().isCTerm()) {
otherPossibleMod = true;
break;
}
}
}
if (!otherPossibleMod) {
// relocate the match to the C-terminus (site peptideLength + 1)
cTermModification = modMatch;
modMatch.setModification(modName);
modMatch.setSite(peptideLength + 1);
break;
}
}
}
if (cTermModification != null) {
break;
}
}
}
}
}
///////////////////////////////////////////////////////////////////
// Map the modifications according to search engine localization
///////////////////////////////////////////////////////////////////
// site to modification name map, including termini
HashMap<Integer, ArrayList<String>> siteToModMap = new HashMap<>(modificationMatches.length);
// site to modification match map, excluding termini
HashMap<Integer, ModificationMatch> siteToMatchMap = new HashMap<>(modificationMatches.length);
// modification match to site map, excluding termini
HashMap<ModificationMatch, Integer> matchToSiteMap = new HashMap<>(modificationMatches.length);
boolean allMapped = true;
for (ModificationMatch modMatch : modificationMatches) {
boolean mapped = false;
if (modMatch != nTermModification && modMatch != cTermModification) {
double refMass = ModificationMassMapper.getMass(
modMatch.getModification(),
idfileReader,
searchParameters,
modificationProvider
);
int modSite = modMatch.getSite();
boolean terminal = false;
ArrayList<String> expectedNamesAtSite = expectedNames.get(modSite);
if (expectedNamesAtSite != null) {
// candidates: mass-compatible names not already assigned to this site
ArrayList<String> filteredNamesAtSite = new ArrayList<>(expectedNamesAtSite.size());
ArrayList<String> modificationAtSite = siteToModMap.get(modSite);
for (String modName : expectedNamesAtSite) {
Modification modification = modificationProvider.getModification(modName);
if (Math.abs(modification.getMass() - refMass)
< searchParameters.getFragmentIonAccuracyInDaltons(MASS_PER_AA * peptideLength)
&& (modificationAtSite == null || !modificationAtSite.contains(modName))) {
filteredNamesAtSite.add(modName);
}
}
if (filteredNamesAtSite.size() == 1) {
// unambiguous candidate: assign directly (to a terminus slot if terminal)
String modName = filteredNamesAtSite.get(0);
Modification modification = modificationProvider.getModification(modName);
ModificationType modificationType = modification.getModificationType();
if (modificationType.isNTerm() && nTermModification == null) {
nTermModification = modMatch;
mapped = true;
} else if (modificationType.isCTerm() && cTermModification == null) {
cTermModification = modMatch;
mapped = true;
} else if (!modificationType.isNTerm() && !modificationType.isCTerm()) {
matchToSiteMap.put(modMatch, modSite);
siteToMatchMap.put(modSite, modMatch);
mapped = true;
}
if (mapped) {
modMatch.setModification(modName);
if (modificationAtSite == null) {
modificationAtSite = new ArrayList<>(1);
siteToModMap.put(modSite, modificationAtSite);
}
modificationAtSite.add(modName);
}
}
if (!mapped) {
// ambiguous or empty candidate list: fall back to the closest-mass candidate
if (filteredNamesAtSite.isEmpty()) {
filteredNamesAtSite = expectedNamesAtSite;
}
if (modSite == 1) {
// prefer an N-terminal interpretation when the site is the first residue
Double minDiff = null;
String bestPtmName = null;
for (String modName : filteredNamesAtSite) {
Modification modification = modificationProvider.getModification(modName);
if (modification.getModificationType().isNTerm() && nTermModification == null) {
double massError = Math.abs(refMass - modification.getMass());
if (massError <= searchParameters.getFragmentIonAccuracyInDaltons(MASS_PER_AA * peptideLength)
&& (minDiff == null || massError < minDiff)) {
bestPtmName = modName;
minDiff = massError;
}
}
}
if (bestPtmName != null) {
nTermModification = modMatch;
modMatch.setModification(bestPtmName);
terminal = true;
if (modificationAtSite == null) {
modificationAtSite = new ArrayList<>(1);
siteToModMap.put(modSite, modificationAtSite);
}
modificationAtSite.add(bestPtmName);
mapped = true;
}
} else if (modSite == peptideLength) {
// prefer a C-terminal interpretation when the site is the last residue
Double minDiff = null;
String bestModName = null;
for (String modName : filteredNamesAtSite) {
Modification modification = modificationProvider.getModification(modName);
if (modification.getModificationType().isCTerm() && cTermModification == null) {
double massError = Math.abs(refMass - modification.getMass());
if (massError <= searchParameters.getFragmentIonAccuracyInDaltons(MASS_PER_AA * peptideLength)
&& (minDiff == null || massError < minDiff)) {
bestModName = modName;
minDiff = massError;
}
}
}
if (bestModName != null) {
cTermModification = modMatch;
modMatch.setModification(bestModName);
terminal = true;
if (modificationAtSite == null) {
modificationAtSite = new ArrayList<>(1);
siteToModMap.put(modSite, modificationAtSite);
}
modificationAtSite.add(bestModName);
mapped = true;
}
}
if (!terminal) {
// non-terminal fallback: closest-mass name allowed for this match at a free site
Double minDiff = null;
String bestModName = null;
for (String modName : filteredNamesAtSite) {
Modification modification = modificationProvider.getModification(modName);
ModificationType modificationType = modification.getModificationType();
if (!modificationType.isCTerm()
&& !modificationType.isNTerm()
&& modNames.get(modMatch).contains(modName)
&& !siteToMatchMap.containsKey(modSite)) {
double massError = Math.abs(refMass - modification.getMass());
if (massError <= searchParameters.getFragmentIonAccuracyInDaltons(MASS_PER_AA * peptideLength)
&& (minDiff == null || massError < minDiff)) {
bestModName = modName;
minDiff = massError;
}
}
}
if (bestModName != null) {
modMatch.setModification(bestModName);
if (modificationAtSite == null) {
modificationAtSite = new ArrayList<>(1);
siteToModMap.put(modSite, modificationAtSite);
}
modificationAtSite.add(bestModName);
matchToSiteMap.put(modMatch, modSite);
siteToMatchMap.put(modSite, modMatch);
mapped = true;
}
}
}
}
}
if (!mapped) {
allMapped = false;
}
}
//////////////////////////////////////////////
// try to correct incompatible localizations
//////////////////////////////////////////////
if (!allMapped) {
// collect, for every unmapped match, the alternative sites it could move to
HashMap<Integer, ArrayList<Integer>> remap = new HashMap<>(0);
for (ModificationMatch modMatch : peptide.getVariableModifications()) {
if (modMatch != nTermModification
&& modMatch != cTermModification
&& !matchToSiteMap.containsKey(modMatch)) {
int modSite = modMatch.getSite();
for (int candidateSite : expectedNames.keySet()) {
if (!siteToMatchMap.containsKey(candidateSite)) {
for (String modName : expectedNames.get(candidateSite)) {
if (modNames.get(modMatch).contains(modName)) {
Modification modification = modificationProvider.getModification(modName);
ModificationType modificationType = modification.getModificationType();
// terminal candidates are only allowed if that terminus is still free
if ((!modificationType.isCTerm() || cTermModification == null)
&& (!modificationType.isNTerm() || nTermModification == null)) {
ArrayList<Integer> modSites = remap.get(modSite);
if (modSites == null) {
modSites = new ArrayList<>(2);
remap.put(modSite, modSites);
}
if (!modSites.contains(candidateSite)) {
modSites.add(candidateSite);
}
}
}
}
}
}
}
}
// solve the reported-site -> corrected-site assignment
HashMap<Integer, Integer> correctedIndexes = ModificationSiteMapping.alignAll(remap);
for (ModificationMatch modMatch : peptide.getVariableModifications()) {
if (modMatch != nTermModification
&& modMatch != cTermModification
&& !matchToSiteMap.containsKey(modMatch)) {
Integer modSite = correctedIndexes.get(modMatch.getSite());
if (modSite != null) {
if (expectedNames.containsKey(modSite)) {
for (String modName : expectedNames.get(modSite)) {
if (modNames.get(modMatch).contains(modName)) {
// move the match to the corrected site if that name is still free there
ArrayList<String> taken = siteToModMap.get(modSite);
if (taken == null || !taken.contains(modName)) {
matchToSiteMap.put(modMatch, modSite);
modMatch.setModification(modName);
modMatch.setSite(modSite);
if (taken == null) {
taken = new ArrayList<>(1);
siteToModMap.put(modSite, taken);
}
taken.add(modName);
break;
}
}
}
}
}
}
}
}
}
}
| |
package com.mopub.mobileads;
import android.app.Activity;
import android.content.Context;
import android.os.Handler;
import android.os.Looper;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import com.mopub.common.AdFormat;
import com.mopub.common.AdUrlGenerator;
import com.mopub.common.Constants;
import com.mopub.common.DataKeys;
import com.mopub.common.LocationService;
import com.mopub.common.MediationSettings;
import com.mopub.common.MoPub;
import com.mopub.common.MoPubReward;
import com.mopub.common.Preconditions;
import com.mopub.common.logging.MoPubLog;
import com.mopub.common.util.MoPubCollections;
import com.mopub.common.util.Reflection;
import com.mopub.network.AdRequest;
import com.mopub.network.AdResponse;
import com.mopub.network.MoPubNetworkError;
import com.mopub.network.Networking;
import com.mopub.network.TrackingRequest;
import com.mopub.volley.RequestQueue;
import com.mopub.volley.VolleyError;
import java.lang.ref.WeakReference;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
public class MoPubRewardedVideoManager {
// Process-wide singleton; created once by init() and never replaced afterwards.
private static MoPubRewardedVideoManager sInstance;
private static final int DEFAULT_LOAD_TIMEOUT = Constants.THIRTY_SECONDS_MILLIS;
// Posts listener callbacks on the main thread (constructed with the main looper).
@NonNull private final Handler mCallbackHandler;
// Weak reference so the manager does not leak the hosting Activity; refreshed via updateActivity().
@NonNull private WeakReference<Activity> mMainActivity;
// Application context, held strongly (safe — it is process-scoped).
@NonNull private final Context mContext;
@NonNull private final AdRequestStatusMapping mAdRequestStatus;
@NonNull private final RewardedVideoData mRewardedVideoData;
@Nullable private MoPubRewardedVideoListener mVideoListener;
// MediationSettings applied to every ad unit vs. per-ad-unit overrides keyed by ad unit id.
@NonNull private final Set<MediationSettings> mGlobalMediationSettings;
@NonNull private final Map<String, Set<MediationSettings>> mInstanceMediationSettings;
// Handler and per-ad-unit runnables — presumably used to time out slow custom-event loads
// (see DEFAULT_LOAD_TIMEOUT); confirm where mTimeoutMap entries are scheduled.
@NonNull private final Handler mCustomEventTimeoutHandler;
@NonNull private final Map<String, Runnable> mTimeoutMap;
/**
 * {@link AdRequest.Listener} that forwards load results for a single ad unit back to its
 * owning {@link MoPubRewardedVideoManager}.
 */
public static class RewardedVideoRequestListener implements AdRequest.Listener {
    public final String adUnitId;
    private final MoPubRewardedVideoManager manager;

    public RewardedVideoRequestListener(MoPubRewardedVideoManager videoManager, String adUnitId) {
        this.manager = videoManager;
        this.adUnitId = adUnitId;
    }

    /** Forwards a successful ad response to the manager. */
    @Override
    public void onSuccess(final AdResponse response) {
        manager.onAdSuccess(response, adUnitId);
    }

    /** Forwards a network/parse failure to the manager. */
    @Override
    public void onErrorResponse(final VolleyError volleyError) {
        manager.onAdError(volleyError, adUnitId);
    }
}
// Private: instances are only created through init(), which enforces the singleton.
private MoPubRewardedVideoManager(@NonNull Activity mainActivity, MediationSettings... mediationSettings) {
// hold the Activity weakly; keep only the application context strongly
mMainActivity = new WeakReference<Activity>(mainActivity);
mContext = mainActivity.getApplicationContext();
mRewardedVideoData = new RewardedVideoData();
// listener callbacks are delivered on the main looper
mCallbackHandler = new Handler(Looper.getMainLooper());
mGlobalMediationSettings = new HashSet<MediationSettings>();
// null entries in the varargs are silently dropped
MoPubCollections.addAllNonNull(mGlobalMediationSettings, mediationSettings);
mInstanceMediationSettings = new HashMap<String, Set<MediationSettings>>();
mCustomEventTimeoutHandler = new Handler();
mTimeoutMap = new HashMap<String, Runnable>();
mAdRequestStatus = new AdRequestStatusMapping();
}
/**
 * Initializes the rewarded video singleton. Only the first call has any effect;
 * subsequent calls are logged and ignored.
 */
public static synchronized void init(@NonNull Activity mainActivity, MediationSettings... mediationSettings) {
    if (sInstance != null) {
        MoPubLog.e("Tried to call initializeRewardedVideo more than once. Only the first " +
                "initialization call has any effect.");
        return;
    }
    sInstance = new MoPubRewardedVideoManager(mainActivity, mediationSettings);
}
/**
 * Refreshes the weakly-held Activity reference; no-op (with an error log) before init().
 */
public static void updateActivity(@NonNull Activity activity) {
    if (sInstance == null) {
        logErrorNotInitialized();
        return;
    }
    sInstance.mMainActivity = new WeakReference<Activity>(activity);
}
/**
 * Returns a global {@link MediationSettings} object of the type 'clazz', if one is registered.
 * This method will only return an object if its type is identical to 'clazz', not if it is a
 * subtype.
 *
 * @param clazz the exact Class of the {@link MediationSettings} instance to retrieve
 * @return an instance of Class<T> or null if none is registered.
 */
@Nullable
public static <T extends MediationSettings> T getGlobalMediationSettings(@NonNull final Class<T> clazz) {
    if (sInstance == null) {
        logErrorNotInitialized();
        return null;
    }
    for (final MediationSettings settings : sInstance.mGlobalMediationSettings) {
        // only an exact runtime-type match counts; subtypes are deliberately excluded
        if (clazz.equals(settings.getClass())) {
            return clazz.cast(settings);
        }
    }
    return null;
}
/**
 * Returns an instance {@link MediationSettings} object of the type 'clazz', if one is
 * registered. This method will only return an object if its type is identical to 'clazz', not
 * if it is a subtype.
 *
 * @param clazz the exact Class of the {@link MediationSettings} instance to retrieve
 * @param adUnitId String identifier used to obtain the appropriate instance MediationSettings
 * @return an instance of Class<T> or null if none is registered.
 */
@Nullable
public static <T extends MediationSettings> T getInstanceMediationSettings(
        @NonNull final Class<T> clazz, @NonNull final String adUnitId) {
    if (sInstance == null) {
        logErrorNotInitialized();
        return null;
    }
    final Set<MediationSettings> settingsForAdUnit =
            sInstance.mInstanceMediationSettings.get(adUnitId);
    if (settingsForAdUnit == null) {
        return null;
    }
    for (final MediationSettings settings : settingsForAdUnit) {
        // only an exact runtime-type match counts; subtypes are deliberately excluded
        if (clazz.equals(settings.getClass())) {
            return clazz.cast(settings);
        }
    }
    return null;
}
/**
 * Sets the {@link MoPubRewardedVideoListener} that will receive events from the
 * rewarded video system. Set this to null to stop receiving event callbacks.
 */
public static void setVideoListener(@Nullable MoPubRewardedVideoListener listener) {
    if (sInstance == null) {
        logErrorNotInitialized();
        return;
    }
    sInstance.mVideoListener = listener;
}
/**
 * Builds an AdRequest for the given adUnitId and adds it to the singleton RequestQueue. This
 * method will not make a new request if there is already a video loading for this adUnitId.
 *
 * @param adUnitId MoPub adUnitId String
 * @param mediationSettings Optional instance-level MediationSettings to associate with the
 * above adUnitId.
 */
public static void loadVideo(@NonNull String adUnitId, @Nullable final MediationSettings... mediationSettings) {
if (sInstance == null) {
logErrorNotInitialized();
return;
}
// If any instance MediationSettings have been specified, update the internal map.
// Note: This always clears the MediationSettings for the ad unit, whether or not any
// MediationSettings have been provided.
final Set<MediationSettings> newInstanceMediationSettings = new HashSet<MediationSettings>();
MoPubCollections.addAllNonNull(newInstanceMediationSettings, mediationSettings);
sInstance.mInstanceMediationSettings.put(adUnitId, newInstanceMediationSettings);
// build the ad URL (ad unit + optional location) and delegate to the private overload
final AdUrlGenerator urlGenerator = new WebViewAdUrlGenerator(sInstance.mContext, false);
final String adUrlString = urlGenerator.withAdUnitId(adUnitId)
.withLocation(
LocationService.getLastKnownLocation(
sInstance.mContext,
MoPub.getLocationPrecision(),
MoPub.getLocationAwareness()
)
)
.generateUrlString(Constants.HOST);
loadVideo(adUnitId, adUrlString);
}
private static void loadVideo(@NonNull String adUnitId, @NonNull String adUrlString) {
    if (sInstance == null) {
        logErrorNotInitialized();
        return;
    }
    // De-duplicate: at most one in-flight request per ad unit.
    if (sInstance.mAdRequestStatus.isLoading(adUnitId)) {
        MoPubLog.d(String.format(Locale.US,
                "Did not queue rewarded video request for ad unit %s. A request is already pending.",
                adUnitId));
        return;
    }
    // Issue MoPub request
    final AdRequest adRequest = new AdRequest(
            adUrlString,
            AdFormat.REWARDED_VIDEO,
            adUnitId,
            sInstance.mContext,
            new RewardedVideoRequestListener(sInstance, adUnitId)
    );
    Networking.getRequestQueue(sInstance.mContext).add(adRequest);
    sInstance.mAdRequestStatus.markLoading(adUnitId);
}
public static boolean hasVideo(@NonNull String adUnitId) {
    if (sInstance == null) {
        logErrorNotInitialized();
        return false;
    }
    return isPlayable(adUnitId, sInstance.mRewardedVideoData.getCustomEvent(adUnitId));
}
public static void showVideo(@NonNull String adUnitId) {
    if (sInstance == null) {
        logErrorNotInitialized();
        return;
    }
    final CustomEventRewardedVideo rewardedVideo =
            sInstance.mRewardedVideoData.getCustomEvent(adUnitId);
    if (!isPlayable(adUnitId, rewardedVideo)) {
        // Nothing playable for this ad unit; report the failure through the failover path.
        sInstance.failover(adUnitId, MoPubErrorCode.VIDEO_NOT_AVAILABLE);
        return;
    }
    sInstance.mAdRequestStatus.markPlayed(adUnitId);
    rewardedVideo.showVideo();
}
private static boolean isPlayable(String adUnitId, @Nullable CustomEventRewardedVideo customEvent) {
    if (sInstance == null) {
        return false;
    }
    if (!sInstance.mAdRequestStatus.canPlay(adUnitId)) {
        return false;
    }
    return customEvent != null && customEvent.hasVideoAvailable();
}
///// Ad Request / Response methods /////
// Handles a successful ad response: records tracking URLs, instantiates the custom
// event via reflection, arms a load timeout, and kicks off the custom event load.
private void onAdSuccess(AdResponse adResponse, String adUnitId) {
    // Record failover/impression/click URLs before anything can fail below.
    mAdRequestStatus.markLoaded(adUnitId,
            adResponse.getFailoverUrl(),
            adResponse.getImpressionTrackingUrl(),
            adResponse.getClickTrackingUrl());
    Integer timeoutMillis = adResponse.getAdTimeoutMillis();
    // Server may omit or send a non-positive timeout; fall back to the default.
    if (timeoutMillis == null || timeoutMillis <= 0) {
        timeoutMillis = DEFAULT_LOAD_TIMEOUT;
    }
    final String customEventClassName = adResponse.getCustomEventClassName();
    if (customEventClassName == null) {
        MoPubLog.e("Couldn't create custom event, class name was null.");
        failover(adUnitId, MoPubErrorCode.ADAPTER_CONFIGURATION_ERROR);
        return;
    }
    try {
        // Instantiate a custom event
        final CustomEventRewardedVideo customEvent =
                Reflection.instantiateClassWithEmptyConstructor(
                        customEventClassName,
                        CustomEventRewardedVideo.class);
        // Put important data into localExtras...
        final Map<String, Object> localExtras = new TreeMap<String, Object>();
        localExtras.put(DataKeys.AD_UNIT_ID_KEY, adUnitId);
        // Set up timeout calls. If the custom event does not report success or failure
        // within timeoutMillis, fail the load and invalidate the custom event.
        Runnable timeout = new Runnable() {
            @Override
            public void run() {
                MoPubLog.d("Custom Event failed to load rewarded video in a timely fashion.");
                onRewardedVideoLoadFailure(customEvent.getClass(), customEvent.getAdNetworkId(), MoPubErrorCode.NETWORK_TIMEOUT);
                customEvent.onInvalidate();
            }
        };
        mCustomEventTimeoutHandler.postDelayed(timeout, timeoutMillis);
        // Remember the timeout so it can be cancelled on load success/failure.
        mTimeoutMap.put(adUnitId, timeout);
        // Load custom event - need an activity reference!
        customEvent.loadCustomEvent(mMainActivity.get(), localExtras, adResponse.getServerExtras());
        final CustomEventRewardedVideo.CustomEventRewardedVideoListener listener =
                customEvent.getVideoListenerForSdk();
        final String adNetworkId = customEvent.getAdNetworkId();
        // Map the ad unit to the freshly-created custom event for later lookups.
        mRewardedVideoData.updateAdUnitCustomEventMapping(adUnitId, customEvent, listener, adNetworkId);
    } catch (Exception e) {
        // Reflection or custom-event setup failed; treat as an adapter configuration error.
        MoPubLog.e(String.format(Locale.US, "Couldn't create custom event with class name %s", customEventClassName));
        failover(adUnitId, MoPubErrorCode.ADAPTER_CONFIGURATION_ERROR);
    }
}
// Translates a network-layer error into a MoPubErrorCode and routes it through failover.
private void onAdError(@NonNull VolleyError volleyError, @NonNull String adUnitId) {
    MoPubErrorCode errorCode = MoPubErrorCode.INTERNAL_ERROR;
    if (volleyError instanceof MoPubNetworkError) {
        final MoPubNetworkError networkError = (MoPubNetworkError) volleyError;
        switch (networkError.getReason()) {
            case NO_FILL:
            case WARMING_UP:
                errorCode = MoPubErrorCode.NO_FILL;
                break;
            default:
                // BAD_BODY, BAD_HEADER_DATA and any other reason map to an internal error.
                errorCode = MoPubErrorCode.INTERNAL_ERROR;
                break;
        }
    }
    failover(adUnitId, errorCode);
}
// Marks the ad unit as failed, then either retries via the recorded failover URL
// or, when none is available, reports the failure to the registered listener.
private void failover(@NonNull final String adUnitId, @NonNull final MoPubErrorCode errorCode) {
    final String nextUrl = mAdRequestStatus.getFailoverUrl(adUnitId);
    mAdRequestStatus.markFail(adUnitId);
    if (nextUrl == null) {
        if (mVideoListener != null) {
            mVideoListener.onRewardedVideoLoadFailure(adUnitId, errorCode);
        }
        return;
    }
    loadVideo(adUnitId, nextUrl);
}
// Cancels any pending load-timeout runnable registered for the given ad unit.
private void cancelTimeouts(@NonNull String moPubId) {
    final Runnable pendingTimeout = mTimeoutMap.remove(moPubId);
    // Handler.removeCallbacks(null) would remove every callback, so guard against null.
    if (pendingTimeout != null) {
        mCustomEventTimeoutHandler.removeCallbacks(pendingTimeout);
    }
}
//////// Listener methods that should be called by third-party SDKs. //////////
/**
 * Notify the manager that a rewarded video loaded successfully.
 *
 * @param customEventClass - the Class of the third-party custom event object.
 * @param thirdPartyId - the ad id of the third party SDK. This may be an empty String if the
 *                     SDK does not use ad ids, zone ids, or a analogous concept.
 * @param <T> - a class that extends {@link CustomEventRewardedVideo}. Only rewarded video
 *           custom events should use these methods.
 */
public static <T extends CustomEventRewardedVideo>
void onRewardedVideoLoadSuccess(@NonNull final Class<T> customEventClass, @NonNull final String thirdPartyId) {
    final ForEachMoPubIdRunnable successRunnable =
            new ForEachMoPubIdRunnable(customEventClass, thirdPartyId) {
                @Override
                protected void forEach(@NonNull final String moPubId) {
                    // The load succeeded, so the pending timeout must not fire.
                    sInstance.cancelTimeouts(moPubId);
                    final MoPubRewardedVideoListener listener = sInstance.mVideoListener;
                    if (listener != null) {
                        listener.onRewardedVideoLoadSuccess(moPubId);
                    }
                }
            };
    postToInstance(successRunnable);
}
/**
 * Notify the manager that a rewarded video failed to load; cancels the pending
 * timeout for each matching MoPub ad unit and triggers the failover logic.
 */
public static <T extends CustomEventRewardedVideo>
void onRewardedVideoLoadFailure(@NonNull final Class<T> customEventClass, final String thirdPartyId, final MoPubErrorCode errorCode) {
    final ForEachMoPubIdRunnable failureRunnable =
            new ForEachMoPubIdRunnable(customEventClass, thirdPartyId) {
                @Override
                protected void forEach(@NonNull final String moPubId) {
                    sInstance.cancelTimeouts(moPubId);
                    sInstance.failover(moPubId, errorCode);
                }
            };
    postToInstance(failureRunnable);
}
/**
 * Notify the manager that video playback started; informs the listener and fires
 * the impression tracker once per matching MoPub ad unit.
 */
public static <T extends CustomEventRewardedVideo>
void onRewardedVideoStarted(@NonNull final Class<T> customEventClass, final String thirdPartyId) {
    final ForEachMoPubIdRunnable startedRunnable =
            new ForEachMoPubIdRunnable(customEventClass, thirdPartyId) {
                @Override
                protected void forEach(@NonNull final String moPubId) {
                    final MoPubRewardedVideoListener listener = sInstance.mVideoListener;
                    if (listener != null) {
                        listener.onRewardedVideoStarted(moPubId);
                    }
                    // Fire the impression tracker, then drop the URL so it is only fired once.
                    TrackingRequest.makeTrackingHttpRequest(
                            sInstance.mAdRequestStatus.getImpressionTrackerUrlString(moPubId),
                            sInstance.mContext);
                    sInstance.mAdRequestStatus.clearImpressionUrl(moPubId);
                }
            };
    postToInstance(startedRunnable);
}
/**
 * Notify the manager that video playback failed; forwards the error to the
 * registered listener for each matching MoPub ad unit.
 */
public static <T extends CustomEventRewardedVideo>
void onRewardedVideoPlaybackError(@NonNull final Class<T> customEventClass, final String thirdPartyId, final MoPubErrorCode errorCode) {
    final ForEachMoPubIdRunnable playbackErrorRunnable =
            new ForEachMoPubIdRunnable(customEventClass, thirdPartyId) {
                @Override
                protected void forEach(@NonNull final String moPubId) {
                    final MoPubRewardedVideoListener listener = sInstance.mVideoListener;
                    if (listener != null) {
                        listener.onRewardedVideoPlaybackError(moPubId, errorCode);
                    }
                }
            };
    postToInstance(playbackErrorRunnable);
}
/**
 * Notify the manager that the video was clicked; fires the click tracker once
 * per matching MoPub ad unit.
 */
public static <T extends CustomEventRewardedVideo>
void onRewardedVideoClicked(@NonNull final Class<T> customEventClass, final String thirdPartyId) {
    final ForEachMoPubIdRunnable clickedRunnable =
            new ForEachMoPubIdRunnable(customEventClass, thirdPartyId) {
                @Override
                protected void forEach(@NonNull final String moPubId) {
                    // Fire the click tracker, then drop the URL so it is only fired once.
                    TrackingRequest.makeTrackingHttpRequest(
                            sInstance.mAdRequestStatus.getClickTrackerUrlString(moPubId),
                            sInstance.mContext);
                    sInstance.mAdRequestStatus.clearClickUrl(moPubId);
                }
            };
    postToInstance(clickedRunnable);
}
/**
 * Notify the manager that the video was closed; informs the registered listener
 * for each matching MoPub ad unit.
 */
public static <T extends CustomEventRewardedVideo>
void onRewardedVideoClosed(@NonNull final Class<T> customEventClass, final String thirdPartyId) {
    final ForEachMoPubIdRunnable closedRunnable =
            new ForEachMoPubIdRunnable(customEventClass, thirdPartyId) {
                @Override
                protected void forEach(@NonNull final String moPubId) {
                    final MoPubRewardedVideoListener listener = sInstance.mVideoListener;
                    if (listener != null) {
                        listener.onRewardedVideoClosed(moPubId);
                    }
                }
            };
    postToInstance(closedRunnable);
}
/**
 * Notify the manager that the user should be rewarded. Unlike the other callbacks
 * in this class, the listener is invoked exactly once with the full set of MoPub
 * ad unit ids mapped to this ad network.
 */
public static <T extends CustomEventRewardedVideo>
void onRewardedVideoCompleted(@NonNull final Class<T> customEventClass, final String thirdPartyId, @NonNull final MoPubReward moPubReward) {
    postToInstance(new Runnable() {
        @Override
        public void run() {
            final Set<String> rewardedIds = new HashSet<String>(
                    sInstance.mRewardedVideoData.getMoPubIdsForAdNetwork(customEventClass, thirdPartyId));
            final MoPubRewardedVideoListener listener = sInstance.mVideoListener;
            if (listener != null) {
                listener.onRewardedVideoCompleted(rewardedIds, moPubReward);
            }
        }
    });
}
/**
 * Posts the runnable to the static instance's handler. Does nothing if sInstance is null.
 * Useful for ensuring that all event callbacks run on the main thread.
 * The {@link Runnable} can assume that sInstance is non-null.
 */
private static void postToInstance(@NonNull Runnable runnable) {
    if (sInstance == null) {
        return;
    }
    sInstance.mCallbackHandler.post(runnable);
}
// Logs the standard "not initialized" error emitted whenever the static API is
// used before MoPub.initializeRewardedVideo() has been called.
private static void logErrorNotInitialized() {
    MoPubLog.e("MoPub rewarded video was not initialized. You must call "
            + "MoPub.initializeRewardedVideo() before loading or attempting "
            + "to play video ads.");
}
/**
 * A runnable that calls forEach on each member of the rewarded video data passed to the runnable.
 */
private static abstract class ForEachMoPubIdRunnable implements Runnable {
    @NonNull private final Class<? extends CustomEventRewardedVideo> mCustomEventClass;
    @NonNull private final String mThirdPartyId;

    ForEachMoPubIdRunnable(@NonNull final Class<? extends CustomEventRewardedVideo> customEventClass,
            @NonNull final String thirdPartyId) {
        Preconditions.checkNotNull(customEventClass);
        Preconditions.checkNotNull(thirdPartyId);
        mCustomEventClass = customEventClass;
        mThirdPartyId = thirdPartyId;
    }

    /** Invoked once per MoPub ad unit id currently mapped to the ad network. */
    protected abstract void forEach(@NonNull final String moPubId);

    @Override
    public void run() {
        // Resolve the MoPub ad unit ids for this network at execution time,
        // then apply the subclass action to each of them.
        for (final String moPubId : sInstance.mRewardedVideoData
                .getMoPubIdsForAdNetwork(mCustomEventClass, mThirdPartyId)) {
            forEach(moPubId);
        }
    }
}
}
| |
/*
* Copyright (c) 2017, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.siddhi.core.stream.output.sink;
import io.siddhi.core.config.SiddhiAppContext;
import io.siddhi.core.exception.ConnectionUnavailableException;
import io.siddhi.core.exception.SiddhiAppCreationException;
import io.siddhi.core.stream.ServiceDeploymentInfo;
import io.siddhi.core.stream.StreamJunction;
import io.siddhi.core.stream.output.sink.distributed.DistributedTransport;
import io.siddhi.core.util.ExceptionUtil;
import io.siddhi.core.util.SiddhiConstants;
import io.siddhi.core.util.StringUtil;
import io.siddhi.core.util.config.ConfigReader;
import io.siddhi.core.util.parser.helper.QueryParserHelper;
import io.siddhi.core.util.snapshot.state.EmptyStateHolder;
import io.siddhi.core.util.snapshot.state.State;
import io.siddhi.core.util.snapshot.state.StateFactory;
import io.siddhi.core.util.snapshot.state.StateHolder;
import io.siddhi.core.util.statistics.LatencyTracker;
import io.siddhi.core.util.statistics.ThroughputTracker;
import io.siddhi.core.util.statistics.metrics.Level;
import io.siddhi.core.util.transport.BackoffRetryCounter;
import io.siddhi.core.util.transport.DynamicOptions;
import io.siddhi.core.util.transport.OptionHolder;
import io.siddhi.query.api.annotation.Element;
import io.siddhi.query.api.definition.StreamDefinition;
import org.apache.log4j.Logger;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
/**
 * This is a Sink type. Sinks let users publish events according to some type;
 * the type can be local, jms, ws, or any custom extension.
 *
 * @param <S> current state of the Sink
 */
public abstract class Sink<S extends State> implements SinkListener {
    private static final Logger LOG = Logger.getLogger(Sink.class);
    // True while a connection attempt is in progress (shared with subclasses).
    protected AtomicBoolean isTryingToConnect = new AtomicBoolean(false);
    private StreamDefinition streamDefinition;
    private String type;
    private SinkMapper mapper;
    private SinkHandler handler;
    // Non-null only when this sink participates in a distributed transport.
    private DistributedTransport.ConnectionCallback connectionCallback = null;
    private StreamJunction streamJunction;
    private SiddhiAppContext siddhiAppContext;
    private OnErrorAction onErrorAction;
    private BackoffRetryCounter backoffRetryCounter = new BackoffRetryCounter();
    private AtomicBoolean isConnected = new AtomicBoolean(false);
    private AtomicBoolean isShutdown = new AtomicBoolean(false);
    // Per-thread transport options for the event currently being published.
    private ThreadLocal<DynamicOptions> trpDynamicOptions;
    private ScheduledExecutorService scheduledExecutorService;
    private ThroughputTracker throughputTracker;
    private LatencyTracker mapperLatencyTracker;
    private StateHolder<S> stateHolder;
    private ServiceDeploymentInfo serviceDeploymentInfo;

    /**
     * Fully initializes the sink: on-error action, metrics trackers, state holder,
     * mapper, handler and service deployment info.
     */
    public final void init(StreamDefinition streamDefinition, String type, OptionHolder transportOptionHolder,
                           ConfigReader sinkConfigReader, SinkMapper sinkMapper, String mapType,
                           OptionHolder mapOptionHolder, SinkHandler sinkHandler, List<Element> payloadElementList,
                           ConfigReader mapperConfigReader, Map<String, String> deploymentProperties,
                           StreamJunction streamJunction, SiddhiAppContext siddhiAppContext) {
        this.streamDefinition = streamDefinition;
        this.type = type;
        this.streamJunction = streamJunction;
        this.siddhiAppContext = siddhiAppContext;
        // on.error defaults to LOG when the annotation element is absent.
        this.onErrorAction = OnErrorAction.valueOf(transportOptionHolder
                .getOrCreateOption(SiddhiConstants.ANNOTATION_ELEMENT_ON_ERROR, "LOG")
                .getValue().toUpperCase());
        if (siddhiAppContext.getStatisticsManager() != null) {
            this.throughputTracker = QueryParserHelper.createThroughputTracker(siddhiAppContext,
                    streamDefinition.getId(),
                    SiddhiConstants.METRIC_INFIX_SINKS, type);
            this.mapperLatencyTracker = QueryParserHelper.createLatencyTracker(siddhiAppContext,
                    streamDefinition.getId(),
                    SiddhiConstants.METRIC_INFIX_SINK_MAPPERS,
                    type + SiddhiConstants.METRIC_DELIMITER + mapType);
        }
        StateFactory<S> stateFactory = init(streamDefinition, transportOptionHolder, sinkConfigReader,
                siddhiAppContext);
        stateHolder = siddhiAppContext.generateStateHolder(streamDefinition.getId() + "-" +
                this.getClass().getName(), stateFactory);
        if (sinkMapper != null) {
            sinkMapper.init(streamDefinition, mapType, mapOptionHolder, payloadElementList, this,
                    mapperConfigReader, mapperLatencyTracker, transportOptionHolder, siddhiAppContext);
            this.mapper = sinkMapper;
        }
        if (sinkHandler != null) {
            sinkHandler.initSinkHandler(siddhiAppContext.getName(), streamDefinition,
                    new SinkHandlerCallback(sinkMapper), siddhiAppContext);
            this.handler = sinkHandler;
        }
        scheduledExecutorService = siddhiAppContext.getScheduledExecutorService();
        serviceDeploymentInfo = exposeServiceDeploymentInfo();
        if (serviceDeploymentInfo != null) {
            serviceDeploymentInfo.addDeploymentProperties(deploymentProperties);
        } else if (!deploymentProperties.isEmpty()) {
            // Deployment properties only make sense for sinks that expose a service.
            throw new SiddhiAppCreationException("Deployment properties '" + deploymentProperties +
                    "' are defined for sink '" + type + "' which does not expose a service");
        }
    }

    public abstract Class[] getSupportedInputEventClasses();

    /**
     * Initializes only the transport part of the sink (no mapper/handler); used by
     * distributed transports that manage mapping themselves.
     */
    public final void initOnlyTransport(StreamDefinition streamDefinition, OptionHolder transportOptionHolder,
                                        ConfigReader sinkConfigReader, String type,
                                        DistributedTransport.ConnectionCallback connectionCallback,
                                        Map<String, String> deploymentProperties, SiddhiAppContext siddhiAppContext) {
        this.type = type;
        this.streamDefinition = streamDefinition;
        this.connectionCallback = connectionCallback;
        this.siddhiAppContext = siddhiAppContext;
        init(streamDefinition, transportOptionHolder, sinkConfigReader, siddhiAppContext);
        scheduledExecutorService = siddhiAppContext.getScheduledExecutorService();
        serviceDeploymentInfo = exposeServiceDeploymentInfo();
        if (serviceDeploymentInfo != null) {
            serviceDeploymentInfo.addDeploymentProperties(deploymentProperties);
        } else if (!deploymentProperties.isEmpty()) {
            throw new SiddhiAppCreationException("Deployment properties '" + deploymentProperties +
                    "' are defined for sink '" + type + "' which does not expose a service");
        }
    }

    /**
     * Give information to the deployment about the service exposed by the sink.
     *
     * @return ServiceDeploymentInfo  Service related information to the deployment
     */
    protected abstract ServiceDeploymentInfo exposeServiceDeploymentInfo();

    /**
     * Supported dynamic options by the transport
     *
     * @return the list of supported dynamic option keys
     */
    public abstract String[] getSupportedDynamicOptions();

    /**
     * Will be called for initialing the {@link Sink}
     *
     * @param outputStreamDefinition containing stream definition bind to the {@link Sink}
     * @param optionHolder           Option holder containing static and dynamic options related to the {@link Sink}
     * @param sinkConfigReader       this hold the {@link Sink} extensions configuration reader.
     * @param siddhiAppContext       {@link SiddhiAppContext} of the parent siddhi app.
     */
    protected abstract StateFactory<S> init(StreamDefinition outputStreamDefinition, OptionHolder optionHolder,
                                            ConfigReader sinkConfigReader, SiddhiAppContext siddhiAppContext);

    /**
     * Publishes the payload via the transport, retrying on connection failure.
     * Retries immediately when this thread can take over reconnection; otherwise
     * delegates to the on-error handling.
     */
    @Override
    public final void publish(Object payload) {
        if (mapperLatencyTracker != null && Level.BASIC.compareTo(siddhiAppContext.getRootMetricsLevel()) <= 0) {
            mapperLatencyTracker.markOut();
        }
        DynamicOptions dynamicOptions = trpDynamicOptions.get();
        if (isConnected()) {
            S state = stateHolder.getState();
            try {
                publish(payload, dynamicOptions, state);
                if (throughputTracker != null && Level.BASIC.compareTo(siddhiAppContext.getRootMetricsLevel()) <= 0) {
                    throughputTracker.eventIn();
                }
            } catch (ConnectionUnavailableException e) {
                setConnected(false);
                if (connectionCallback != null) {
                    connectionCallback.connectionFailed();
                }
                LOG.error(ExceptionUtil.getMessageWithContext(e, siddhiAppContext) +
                        " Connection unavailable at Sink '" + type + "' at '" + streamDefinition.getId() +
                        "', will retry connection immediately.", e);
                connectWithRetry();
                publish(payload);
            } finally {
                stateHolder.returnState(state);
            }
        } else if (isTryingToConnect.get()) {
            // Another thread owns the reconnection; hand the event to error handling.
            onError(payload, dynamicOptions, new ConnectionUnavailableException("Connection unavailable at Sink '"
                    + type + "' at '" + streamDefinition.getId() + "'. Connection retrying is in progress " +
                    "from a different thread."));
        } else if (!isShutdown.get()) {
            connectWithRetry();
            publish(payload);
        }
    }

    /**
     * Sending events via output transport
     *
     * @param payload        payload of the event
     * @param dynamicOptions of the event constructing the payload
     * @param state          current state of the sink
     * @throws ConnectionUnavailableException throw when connections are unavailable.
     */
    public abstract void publish(Object payload, DynamicOptions dynamicOptions, S state)
            throws ConnectionUnavailableException;

    /**
     * Called to connect to the backend before events are published
     *
     * @throws ConnectionUnavailableException if it cannot connect to the backend
     */
    public abstract void connect() throws ConnectionUnavailableException;

    /**
     * Called after all publishing is done, or when ConnectionUnavailableException is thrown
     */
    public abstract void disconnect();

    /**
     * Called at the end to clean all the resources consumed
     */
    public abstract void destroy();

    public final String getType() {
        return type;
    }

    public final SinkMapper getMapper() {
        return mapper;
    }

    public final SinkHandler getHandler() {
        return handler;
    }

    /**
     * Attempts to connect; on ConnectionUnavailableException schedules another
     * attempt after a backoff interval, while RuntimeExceptions propagate.
     */
    public void connectWithRetry() {
        if (!isConnected.get()) {
            isTryingToConnect.set(true);
            try {
                connect();
                setConnected(true);
                isTryingToConnect.set(false);
                if (connectionCallback != null) {
                    connectionCallback.connectionEstablished();
                }
                backoffRetryCounter.reset();
            } catch (ConnectionUnavailableException e) {
                LOG.error(StringUtil.removeCRLFCharacters(ExceptionUtil.getMessageWithContext(e, siddhiAppContext) +
                        ", error while connecting at Sink '" + type + "' at '" + streamDefinition.getId() +
                        "', will retry in '" + backoffRetryCounter.getTimeInterval() + "'."), e);
                scheduledExecutorService.schedule(new Runnable() {
                    @Override
                    public void run() {
                        connectWithRetry();
                    }
                }, backoffRetryCounter.getTimeIntervalMillis(), TimeUnit.MILLISECONDS);
                backoffRetryCounter.increment();
            } catch (RuntimeException e) {
                LOG.error(StringUtil.removeCRLFCharacters(ExceptionUtil.getMessageWithContext(e, siddhiAppContext)) +
                        ", error while connecting at Sink '" + StringUtil.removeCRLFCharacters(type) + "' at '" +
                        StringUtil.removeCRLFCharacters(streamDefinition.getId()) + "'.", e);
                throw e;
            }
        }
    }

    /** Marks the sink shut down, releases the transport and notifies the callback. */
    public void shutdown() {
        isShutdown.set(true);
        disconnect();
        destroy();
        setConnected(false);
        isTryingToConnect.set(false);
        if (connectionCallback != null) {
            connectionCallback.connectionFailed();
        }
    }

    void setTrpDynamicOptions(ThreadLocal<DynamicOptions> trpDynamicOptions) {
        this.trpDynamicOptions = trpDynamicOptions;
    }

    public StreamDefinition getStreamDefinition() {
        return streamDefinition;
    }

    public boolean isConnected() {
        return isConnected.get();
    }

    public void setConnected(boolean connected) {
        isConnected.set(connected);
    }

    /**
     * @deprecated use {@link #onError(Object, DynamicOptions, Exception)}; without
     * DynamicOptions the WAIT action cannot replay the event and it is dropped.
     */
    @Deprecated
    void onError(Object payload, Exception e) {
        DynamicOptions dynamicOptions = trpDynamicOptions.get();
        if (dynamicOptions == null && onErrorAction == OnErrorAction.WAIT) {
            LOG.error("Error on '" + siddhiAppContext.getName() + "'. Dropping event at Sink '"
                    + type + "' at '" + streamDefinition.getId() + "' as its does not support 'WAIT' "
                    + "as it uses deprecated onError(Object payload, Exception e) method!, "
                    + "events dropped '" + payload + "'");
        } else {
            onError(payload, dynamicOptions, e);
        }
    }

    /**
     * Applies the configured on-error action: forward to the stream's error handler
     * (STREAM), block/retry until reconnected (WAIT), or log and drop (LOG).
     */
    public void onError(Object payload, DynamicOptions dynamicOptions, Exception e) {
        OnErrorAction errorAction = onErrorAction;
        if (e instanceof ConnectionUnavailableException) {
            setConnected(false);
        } else if (errorAction == OnErrorAction.WAIT) {
            // WAIT only handles connection failures; other errors drop the event.
            LOG.error("Error on '" + siddhiAppContext.getName() + "'. Dropping event at Sink '"
                    + type + "' at '" + streamDefinition.getId() + "' as on.error='wait' does not handle " +
                    "'" + e.getClass().getName() + "' error: '" + e.getMessage() + "', events dropped '" +
                    payload + "'", e);
            return;
        }
        try {
            switch (errorAction) {
                case STREAM:
                    streamJunction.handleError(dynamicOptions.getEvent(), e);
                    break;
                case WAIT:
                    LOG.error(StringUtil.removeCRLFCharacters(ExceptionUtil.getMessageWithContext(e, siddhiAppContext) +
                            ", error while connecting at Sink '" + type + "' at '" + streamDefinition.getId() +
                            "', will retry in '" + backoffRetryCounter.getTimeInterval() + "'."), e);
                    retryWait(backoffRetryCounter.getTimeIntervalMillis());
                    backoffRetryCounter.increment();
                    if (!isConnected.get()) {
                        isTryingToConnect.set(true);
                        try {
                            connect();
                            setConnected(true);
                            isTryingToConnect.set(false);
                            if (connectionCallback != null) {
                                connectionCallback.connectionEstablished();
                            }
                            backoffRetryCounter.reset();
                        } catch (Exception ex) {
                            // Still unreachable; recurse so the WAIT action retries again.
                            onError(payload, dynamicOptions, ex);
                        }
                    }
                    // Replay the original event with its transport options restored.
                    trpDynamicOptions.set(dynamicOptions);
                    try {
                        publish(payload);
                    } finally {
                        trpDynamicOptions.remove();
                    }
                    break;
                case LOG:
                default:
                    LOG.error("Error on '" + siddhiAppContext.getName() + "'. Dropping event at Sink '"
                            + type + "' at '" + streamDefinition.getId() + "' as its still trying to reconnect!, "
                            + "events dropped '" + payload + "'");
                    break;
            }
        } catch (Throwable t) {
            LOG.error("Error on '" + siddhiAppContext.getName() + "'. Dropping event at Sink '"
                    + type + "' at '" + streamDefinition.getId() + "' as there is an issue when " +
                    "handling the error: '" + t.getMessage() + "', events dropped '" + payload + "'", e);
        }
    }

    public List<ServiceDeploymentInfo> getServiceDeploymentInfoList() {
        if (serviceDeploymentInfo != null) {
            List<ServiceDeploymentInfo> list = new ArrayList<>(1);
            list.add(serviceDeploymentInfo);
            return list;
        } else {
            return new ArrayList<>(0);
        }
    }

    // Sleeps for the backoff interval before a WAIT-mode retry.
    private void retryWait(long waitTime) {
        try {
            Thread.sleep(waitTime);
        } catch (InterruptedException e) {
            // BUGFIX: restore the interrupt flag instead of swallowing it, so a
            // thread stuck in the WAIT retry loop remains cooperatively cancellable.
            Thread.currentThread().interrupt();
        }
    }

    /**
     * Different Type of On Error Actions
     */
    public enum OnErrorAction {
        LOG,
        WAIT,
        STREAM
    }

    public boolean isStateful() {
        return stateHolder != null && !(stateHolder instanceof EmptyStateHolder);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.drill.exec.physical.impl.unorderedreceiver;
import io.netty.buffer.ByteBuf;
import java.io.IOException;
import java.util.Iterator;
import org.apache.drill.common.expression.SchemaPath;
import org.apache.drill.exec.exception.OutOfMemoryException;
import org.apache.drill.exec.exception.SchemaChangeException;
import org.apache.drill.exec.ops.ExchangeFragmentContext;
import org.apache.drill.exec.ops.FragmentContext;
import org.apache.drill.exec.ops.MetricDef;
import org.apache.drill.exec.ops.OperatorContext;
import org.apache.drill.exec.ops.OperatorStats;
import org.apache.drill.exec.physical.MinorFragmentEndpoint;
import org.apache.drill.exec.physical.config.UnorderedReceiver;
import org.apache.drill.exec.proto.BitControl.FinishedReceiver;
import org.apache.drill.exec.proto.ExecProtos.FragmentHandle;
import org.apache.drill.exec.proto.GeneralRPCProtos.Ack;
import org.apache.drill.exec.proto.UserBitShared.RecordBatchDef;
import org.apache.drill.exec.record.BatchSchema;
import org.apache.drill.exec.record.CloseableRecordBatch;
import org.apache.drill.exec.record.RawFragmentBatch;
import org.apache.drill.exec.record.RawFragmentBatchProvider;
import org.apache.drill.exec.record.RecordBatchLoader;
import org.apache.drill.exec.record.TypedFieldId;
import org.apache.drill.exec.record.VectorContainer;
import org.apache.drill.exec.record.VectorWrapper;
import org.apache.drill.exec.record.WritableBatch;
import org.apache.drill.exec.record.selection.SelectionVector2;
import org.apache.drill.exec.record.selection.SelectionVector4;
import org.apache.drill.exec.rpc.RpcException;
import org.apache.drill.exec.rpc.RpcOutcomeListener;
import org.apache.drill.exec.testing.ControlsInjector;
import org.apache.drill.exec.testing.ControlsInjectorFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class UnorderedReceiverBatch implements CloseableRecordBatch {
private static final Logger logger = LoggerFactory.getLogger(UnorderedReceiverBatch.class);
private static final ControlsInjector injector = ControlsInjectorFactory.getInjector(UnorderedReceiverBatch.class);
// Loads incoming RawFragmentBatch bodies into value vectors for this operator.
private final RecordBatchLoader batchLoader;
// Supplies raw batches arriving from upstream senders.
private final RawFragmentBatchProvider fragProvider;
private final ExchangeFragmentContext context;
// Schema of the most recently loaded batch; null until the first batch arrives.
private BatchSchema schema;
private final OperatorStats stats;
// True until the first batch has been processed; used to skip empty control batches.
private boolean first = true;
private final UnorderedReceiver config;
private final OperatorContext oContext;
// Represents last outcome of next(). If an Exception is thrown
// during the method's execution a value IterOutcome.STOP will be assigned.
private IterOutcome lastOutcome;
/** Operator metrics reported by this receiver; the metric id is the enum ordinal. */
public enum Metric implements MetricDef {
    BYTES_RECEIVED,
    NUM_SENDERS;

    @Override
    public int metricId() {
        return ordinal();
    }
}
// Wires the receiver to its fragment context, creates the batch loader backed by
// this operator's allocator, and registers that allocator for incoming buffers.
public UnorderedReceiverBatch(ExchangeFragmentContext context,
        RawFragmentBatchProvider fragProvider, UnorderedReceiver config)
        throws OutOfMemoryException {
    this.fragProvider = fragProvider;
    this.context = context;
    // In normal case, batchLoader does not require an allocator. However, in
    // case of splitAndTransfer of a value vector,
    // we may need an allocator for the new offset vector. Therefore, here we
    // pass the context's allocator to batchLoader.
    oContext = context.newOperatorContext(config);
    this.batchLoader = new RecordBatchLoader(oContext.getAllocator());
    this.stats = oContext.getStats();
    this.stats.setLongStat(Metric.NUM_SENDERS, config.getNumSenders());
    this.config = config;
    // Register this operator's buffer allocator so that incoming buffers are
    // owned by this allocator
    context.getBuffers().getCollector(config.getOppositeMajorFragmentId())
            .setAllocator(oContext.getAllocator());
}
@Override
public FragmentContext getContext() {
    return context;
}

/** Schema of the last loaded batch; null before the first batch. */
@Override
public BatchSchema getSchema() {
    return schema;
}

@Override
public int getRecordCount() {
    return batchLoader.getRecordCount();
}

// Terminates this receiver; optionally tells upstream senders to stop sending first.
@Override
public void kill(boolean sendUpstream) {
    if (sendUpstream) {
        informSenders();
    }
    fragProvider.kill(context);
}

@Override
public Iterator<VectorWrapper<?>> iterator() {
    return batchLoader.iterator();
}

// This receiver produces no selection vectors.
@Override
public SelectionVector2 getSelectionVector2() {
    throw new UnsupportedOperationException();
}

@Override
public SelectionVector4 getSelectionVector4() {
    throw new UnsupportedOperationException();
}

@Override
public TypedFieldId getValueVectorId(SchemaPath path) {
    return batchLoader.getValueVectorId(path);
}

@Override
public VectorWrapper<?> getValueAccessorById(Class<?> clazz, int... ids) {
    return batchLoader.getValueAccessorById(clazz, ids);
}
// Fetches the next raw batch from the provider, honoring the "waiting-for-data"
// test injection point. Returns null when the wait is interrupted.
private RawFragmentBatch getNextBatch() throws IOException {
    try {
        injector.injectInterruptiblePause(context.getExecutionControls(), "waiting-for-data", logger);
        return fragProvider.getNext();
    } catch(InterruptedException e) {
        // Preserve evidence that the interruption occurred so that code higher up
        // on the call stack can learn of the
        // interruption and respond to it if it wants to.
        Thread.currentThread().interrupt();
        return null;
    }
}
// Pulls the next batch from upstream, loads it into value vectors, and reports
// the iteration outcome (NONE/STOP/OUT_OF_MEMORY/OK/OK_NEW_SCHEMA).
@Override
public IterOutcome next() {
    batchLoader.resetRecordCount();
    stats.startProcessing();
    try {
        RawFragmentBatch batch;
        try {
            stats.startWait();
            batch = getNextBatch();

            // skip over empty batches. we do this since these are basically control messages.
            while (batch != null && batch.getHeader().getDef().getRecordCount() == 0
                    && (!first || batch.getHeader().getDef().getFieldCount() == 0)) {
                batch = getNextBatch();
            }
        } finally {
            stats.stopWait();
        }

        first = false;

        // A null batch means the stream is exhausted (or the wait was interrupted).
        if (batch == null) {
            lastOutcome = IterOutcome.NONE;
            batchLoader.zero();
            if (!context.getExecutorState().shouldContinue()) {
                lastOutcome = IterOutcome.STOP;
            }
            return lastOutcome;
        }

        if (context.getAllocator().isOverLimit()) {
            lastOutcome = IterOutcome.OUT_OF_MEMORY;
            return lastOutcome;
        }

        RecordBatchDef rbd = batch.getHeader().getDef();
        boolean schemaChanged = batchLoader.load(rbd, batch.getBody());
        // TODO: Clean: DRILL-2933: That load(...) no longer throws
        // SchemaChangeException, so check/clean catch clause below.
        stats.addLongStat(Metric.BYTES_RECEIVED, batch.getByteCount());

        // The loader copied what it needs; release the raw buffer.
        batch.release();
        if (schemaChanged) {
            this.schema = batchLoader.getSchema();
            stats.batchReceived(0, rbd.getRecordCount(), true);
            lastOutcome = IterOutcome.OK_NEW_SCHEMA;
        } else {
            stats.batchReceived(0, rbd.getRecordCount(), false);
            lastOutcome = IterOutcome.OK;
        }
        return lastOutcome;
    } catch (SchemaChangeException | IOException ex) {
        context.getExecutorState().fail(ex);
        lastOutcome = IterOutcome.STOP;
        return lastOutcome;
    } catch (Exception e) {
        // Record the failed outcome before propagating the unexpected exception.
        lastOutcome = IterOutcome.STOP;
        throw e;
    } finally {
        stats.stopProcessing();
    }
}
@Override
public WritableBatch getWritableBatch() {
return batchLoader.getWritableBatch();
}
@Override
public void close() {
batchLoader.clear();
}
@Override
public VectorContainer getOutgoingContainer() {
throw new UnsupportedOperationException(
String.format("You should not call getOutgoingContainer() for class %s",
getClass().getCanonicalName()));
}
@Override
public VectorContainer getContainer() {
return batchLoader.getContainer();
}
private void informSenders() {
logger.info("Informing senders of request to terminate sending.");
FragmentHandle handlePrototype = FragmentHandle.newBuilder()
.setMajorFragmentId(config.getOppositeMajorFragmentId())
.setQueryId(context.getHandle().getQueryId())
.build();
for (MinorFragmentEndpoint providingEndpoint : config.getProvidingEndpoints()) {
FragmentHandle sender = FragmentHandle.newBuilder(handlePrototype)
.setMinorFragmentId(providingEndpoint.getId())
.build();
FinishedReceiver finishedReceiver = FinishedReceiver.newBuilder()
.setReceiver(context.getHandle())
.setSender(sender)
.build();
context.getController()
.getTunnel(providingEndpoint.getEndpoint())
.informReceiverFinished(new OutcomeListener(), finishedReceiver);
}
}
// TODO: Code duplication. MergingRecordBatch has the same implementation.
private class OutcomeListener implements RpcOutcomeListener<Ack> {
@Override
public void failed(RpcException ex) {
logger.warn("Failed to inform upstream that receiver is finished");
}
@Override
public void success(Ack value, ByteBuf buffer) {
// Do nothing
}
@Override
public void interrupted(InterruptedException e) {
if (context.getExecutorState().shouldContinue()) {
String errMsg = "Received an interrupt RPC outcome while sending ReceiverFinished message";
logger.error(errMsg, e);
context.getExecutorState().fail(new RpcException(errMsg, e));
}
}
}
@Override
public void dump() {
logger.error("UnorderedReceiverBatch[batchLoader={}, schema={}]", batchLoader, schema);
}
@Override
public boolean hasFailed() {
return lastOutcome == IterOutcome.STOP;
}
}
| |
/* AuToBIUtilsTest.java
Copyright 2011 Andrew Rosenberg
This file is part of the AuToBI prosodic analysis package.
AuToBI is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
AuToBI is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with AuToBI. If not, see <http://www.gnu.org/licenses/>.
*/
package edu.cuny.qc.speech.AuToBI.util;
import edu.cuny.qc.speech.AuToBI.AuToBI;
import edu.cuny.qc.speech.AuToBI.core.AuToBIException;
import edu.cuny.qc.speech.AuToBI.core.AuToBITask;
import edu.cuny.qc.speech.AuToBI.core.Word;
import edu.cuny.qc.speech.AuToBI.featureset.*;
import org.junit.Before;
import org.junit.Test;
import java.util.ArrayList;
import java.util.List;
import static org.junit.Assert.*;
/**
 * Test class for edu.cuny.qc.speech.AuToBI.util.AuToBIUtils
 */
public class AuToBIUtilsTest {
  // Root directory for test fixture files, supplied via the environment.
  private static final String TEST_DIR = System.getenv().get("AUTOBI_TEST_DIR");
  private AuToBI autobi;

  @Before
  public void setup() {
    autobi = new AuToBI();
    initializeAuToBITasksWithoutClassifiers(autobi);
  }

  /**
   * Registers the six standard AuToBI tasks on the given object without
   * attaching any trained classifiers.
   *
   * @param autobi the AuToBI object to configure
   */
  public void initializeAuToBITasksWithoutClassifiers(AuToBI autobi) {
    AuToBITask task = makeTask("nominal_PitchAccent", "hyp_pitch_accent_location");
    task.setFeatureSet(new PitchAccentDetectionFeatureSet());
    autobi.getTasks().put("pitch_accent_detection", task);

    task = makeTask("nominal_PitchAccentType", "hyp_pitch_accent_type");
    task.setFeatureSet(new PitchAccentClassificationFeatureSet());
    autobi.getTasks().put("pitch_accent_classification", task);

    task = makeTask("nominal_IntonationalPhraseBoundary", "hyp_IP_location");
    task.setFeatureSet(new IntonationalPhraseBoundaryDetectionFeatureSet());
    autobi.getTasks().put("intonational_phrase_boundary_detection", task);

    task = makeTask("nominal_IntermediatePhraseBoundary", "hyp_ip_location");
    task.setFeatureSet(new IntermediatePhraseBoundaryDetectionFeatureSet());
    autobi.getTasks().put("intermediate_phrase_boundary_detection", task);

    task = makeTask("nominal_PhraseAccent", "hyp_phrase_accent");
    task.setFeatureSet(new PhraseAccentClassificationFeatureSet());
    autobi.getTasks().put("phrase_accent_classification", task);

    task = makeTask("nominal_PhraseAccentBoundaryTone", "hyp_pabt");
    task.setFeatureSet(new PhraseAccentBoundaryToneClassificationFeatureSet());
    autobi.getTasks().put("phrase_accent_boundary_tone_classification", task);
  }

  /**
   * Builds a task with the given true/hypothesis features. The confidence and
   * distribution feature names follow the convention of appending "_conf" and
   * "_dist" to the hypothesis feature name; the feature set is left for the
   * caller to assign.
   */
  private static AuToBITask makeTask(String trueFeature, String hypFeature) {
    AuToBITask task = new AuToBITask();
    task.setTrueFeature(trueFeature);
    task.setHypFeature(hypFeature);
    task.setConfFeature(hypFeature + "_conf");
    task.setDistFeature(hypFeature + "_dist");
    return task;
  }

  /** Constructs the single word used by the hypothesis-merging tests. */
  private static Word makeWord() {
    return new Word(0.0, 0.1, "hello");
  }

  /**
   * Runs hypothesis merging over a one-word list, failing the test if an
   * AuToBIException is raised.
   */
  private void merge(Word w) {
    List<Word> words = new ArrayList<Word>();
    words.add(w);
    try {
      AuToBIUtils.mergeAuToBIHypotheses(autobi, words);
    } catch (AuToBIException e) {
      fail(e.getMessage());
    }
  }

  /** Parses a feature name, failing the test if the name is malformed. */
  private static List<String> parse(String name) {
    try {
      return AuToBIUtils.parseFeatureName(name);
    } catch (AuToBIException e) {
      fail(e.getMessage());
      return null; // Unreachable: fail() throws.
    }
  }

  @Test
  public void testLog() {
    AuToBIUtils.log("test");
  }

  @Test
  public void testError() {
    AuToBIUtils.error("test error");
  }

  @Test
  public void testInfo() {
    AuToBIUtils.info("test info");
  }

  @Test
  public void testGlobWithNullPattern() {
    // A null pattern yields an empty list rather than an error.
    List<String> files = AuToBIUtils.glob(null);
    assertEquals(0, files.size());
  }

  @Test
  public void testGlobWithHomeDirectory() {
    List<String> files = AuToBIUtils.glob("~/*");
    assertTrue(files.size() > 0);
  }

  @Test
  public void testGlobWithBaseDirectory() {
    List<String> files = AuToBIUtils.glob("/*");
    assertTrue(files.size() > 0);
  }

  @Test
  public void testGlobFromCurrentDirectory() {
    List<String> files = AuToBIUtils.glob("*");
    assertTrue(files.size() > 0);
  }

  @Test
  public void testGlobWithCurrentDirectory() {
    List<String> files = AuToBIUtils.glob("./*");
    assertTrue(files.size() > 0);
  }

  @Test
  public void testGlobWithHigherDirectory() {
    List<String> files = AuToBIUtils.glob("../*");
    assertTrue(files.size() > 0);
  }

  @Test
  public void testGlobSingleFileHasMultiple() {
    try {
      AuToBIUtils.globSingleFile(TEST_DIR + "/*");
      fail();
    } catch (AuToBIException e) {
      // Expected.
    }
  }

  @Test
  public void testGlobSingleFileHasMultipleWithComma() {
    try {
      AuToBIUtils.globSingleFile(TEST_DIR + "/*,test.file");
      fail();
    } catch (AuToBIException e) {
      // Expected.
    }
  }

  @Test
  public void testGlobSingleFile() {
    try {
      String file = AuToBIUtils.globSingleFile(TEST_DIR + "/sineWithNoise.wav");
      assertEquals(TEST_DIR + "/sineWithNoise.wav", file);
    } catch (AuToBIException e) {
      fail(e.getMessage());
    }
  }

  @Test
  public void testGlobSingleFileMatchesNone() {
    try {
      AuToBIUtils.globSingleFile(TEST_DIR + "/FALSE_FILENAME.wav");
      fail();
    } catch (AuToBIException e) {
      // Expected.
    }
  }

  @Test
  public void testJoin() {
    List<String> s = new ArrayList<String>();
    s.add("one");
    s.add("two");
    assertEquals("one,two", AuToBIUtils.join(s, ","));
  }

  @Test
  public void testEmptyJoin() {
    assertEquals("", AuToBIUtils.join(new ArrayList<String>(), ","));
  }

  @Test
  public void testMergeAuToBIHypothesesPitchAccentLocationACCENTED() {
    Word w = makeWord();
    w.setAttribute("hyp_pitch_accent_location", "ACCENTED");
    merge(w);
    assertEquals("ACCENTED", w.getAttribute("hyp_pitch_accent"));
  }

  @Test
  public void testMergeAuToBIHypothesesPitchAccentLocationDEACCENTED() {
    Word w = makeWord();
    w.setAttribute("hyp_pitch_accent_location", "DEACCENTED");
    merge(w);
    assertEquals("DEACCENTED", w.getAttribute("hyp_pitch_accent"));
  }

  @Test
  public void testMergeAuToBIHypothesesPitchAccentLocationDEACCENTEDWithType() {
    // A type hypothesis must not override a DEACCENTED location hypothesis.
    Word w = makeWord();
    w.setAttribute("hyp_pitch_accent_location", "DEACCENTED");
    w.setAttribute("hyp_pitch_accent_type", "L+H*");
    merge(w);
    assertEquals("DEACCENTED", w.getAttribute("hyp_pitch_accent"));
  }

  @Test
  public void testMergeAuToBIHypothesesPitchAccentLocationACCENTEDWithType() {
    // When the word is accented, the type hypothesis supplies the label.
    Word w = makeWord();
    w.setAttribute("hyp_pitch_accent_location", "ACCENTED");
    w.setAttribute("hyp_pitch_accent_type", "L+H*");
    merge(w);
    assertEquals("L+H*", w.getAttribute("hyp_pitch_accent"));
  }

  @Test
  public void testMergeAuToBIHypothesesPitchAccentLocationACCENTEDWithConf() {
    Word w = makeWord();
    w.setAttribute("hyp_pitch_accent_location", "ACCENTED");
    w.setAttribute("hyp_pitch_accent_location_conf", 0.7);
    merge(w);
    assertEquals("ACCENTED: 0.7", w.getAttribute("hyp_pitch_accent"));
  }

  @Test
  public void testMergeAuToBIHypothesesPitchAccentLocationDEACCENTEDWithConf() {
    Word w = makeWord();
    w.setAttribute("hyp_pitch_accent_location", "DEACCENTED");
    w.setAttribute("hyp_pitch_accent_location_conf", 0.7);
    merge(w);
    // Bit of a floating point error here (1.0 - 0.7).
    assertEquals("ACCENTED: 0.30000000000000004", w.getAttribute("hyp_pitch_accent"));
  }

  @Test
  public void testMergeAuToBIHypothesesIPLocation() {
    Word w = makeWord();
    w.setAttribute("hyp_IP_location", "INTONATIONAL_BOUNDARY");
    merge(w);
    assertEquals("INTONATIONAL_BOUNDARY", w.getAttribute("hyp_phrase_boundary"));
  }

  @Test
  public void testMergeAuToBIHypothesesIPLocationWithConf() {
    Word w = makeWord();
    w.setAttribute("hyp_IP_location", "INTONATIONAL_BOUNDARY");
    w.setAttribute("hyp_IP_location_conf", 0.7);
    merge(w);
    assertEquals("BOUNDARY: 0.7", w.getAttribute("hyp_phrase_boundary"));
  }

  @Test
  public void testMergeAuToBIHypothesesIPLocationFALSEWithConf() {
    Word w = makeWord();
    w.setAttribute("hyp_IP_location", "NONBOUNDARY");
    w.setAttribute("hyp_IP_location_conf", 0.7);
    merge(w);
    // Bit of a floating point error here (1.0 - 0.7).
    assertEquals("BOUNDARY: 0.30000000000000004", w.getAttribute("hyp_phrase_boundary"));
  }

  @Test
  public void testMergeAuToBIHypothesesIntermediateLocationNoIP() {
    Word w = makeWord();
    w.setAttribute("hyp_ip_location", "INTERMEDIATE_BOUNDARY");
    merge(w);
    assertEquals("INTERMEDIATE_BOUNDARY", w.getAttribute("hyp_phrase_boundary"));
  }

  @Test
  public void testMergeAuToBIHypothesesIntermediateLocationWithFALSEIP() {
    Word w = makeWord();
    w.setAttribute("hyp_IP_location", "NONBOUNDARY");
    w.setAttribute("hyp_ip_location", "INTERMEDIATE_BOUNDARY");
    merge(w);
    assertEquals("INTERMEDIATE_BOUNDARY", w.getAttribute("hyp_phrase_boundary"));
  }

  @Test
  public void testMergeAuToBIHypothesesIntermediateLocationWithTRUEIP() {
    // An intonational phrase boundary hypothesis dominates an intermediate one.
    Word w = makeWord();
    w.setAttribute("hyp_IP_location", "INTONATIONAL_BOUNDARY");
    w.setAttribute("hyp_ip_location", "INTERMEDIATE_BOUNDARY");
    merge(w);
    assertEquals("INTONATIONAL_BOUNDARY", w.getAttribute("hyp_phrase_boundary"));
  }

  @Test
  public void testMergeAuToBIHypothesesBoundaryToneWithTRUEIP() {
    Word w = makeWord();
    w.setAttribute("hyp_IP_location", "INTONATIONAL_BOUNDARY");
    w.setAttribute("hyp_pabt", "L-H%");
    merge(w);
    assertEquals("L-H%", w.getAttribute("hyp_phrase_boundary"));
  }

  @Test
  public void testMergeAuToBIHypothesesBoundaryToneWithFALSEIP() {
    Word w = makeWord();
    w.setAttribute("hyp_IP_location", "NONBOUNDARY");
    w.setAttribute("hyp_boundary_tone", "H%");
    merge(w);
    assertEquals("NONBOUNDARY", w.getAttribute("hyp_phrase_boundary"));
  }

  @Test
  public void testMergeAuToBIHypothesesBoundaryTone() {
    Word w = makeWord();
    w.setAttribute("hyp_pabt", "L-H%");
    merge(w);
    assertEquals("L-H%", w.getAttribute("hyp_phrase_boundary"));
  }

  @Test
  public void testMergeAuToBIHypothesesPhraseAccentWithTRUEInterP() {
    Word w = makeWord();
    w.setAttribute("hyp_ip_location", "INTERMEDIATE_BOUNDARY");
    w.setAttribute("hyp_phrase_accent", "L-");
    merge(w);
    assertEquals("L-", w.getAttribute("hyp_phrase_boundary"));
  }

  @Test
  public void testMergeAuToBIHypothesesPhraseAccentWithFALSEInterP() {
    Word w = makeWord();
    w.setAttribute("hyp_ip_location", "NONBOUNDARY");
    w.setAttribute("hyp_phrase_accent", "H-");
    merge(w);
    assertEquals("NONBOUNDARY", w.getAttribute("hyp_phrase_boundary"));
  }

  @Test
  public void testMergeAuToBIHypothesesPhraseAccent() {
    Word w = makeWord();
    w.setAttribute("hyp_phrase_accent", "L-");
    merge(w);
    assertEquals("L-", w.getAttribute("hyp_phrase_boundary"));
  }

  @Test
  public void testGetPitchAccentDetectionTask() {
    AuToBITask task = AuToBIUtils.getPitchAccentDetectionTask(null);
    assertEquals("nominal_PitchAccent", task.getTrueFeature());
    assertEquals("hyp_pitch_accent_location", task.getHypFeature());
    assertTrue(task.getFeatureSet() instanceof PitchAccentDetectionFeatureSet);
  }

  @Test
  public void testGetPitchAccentClassificationTask() {
    AuToBITask task = AuToBIUtils.getPitchAccentClassificationTask(null);
    assertEquals("nominal_PitchAccentType", task.getTrueFeature());
    assertEquals("hyp_pitch_accent_type", task.getHypFeature());
    assertTrue(task.getFeatureSet() instanceof PitchAccentClassificationFeatureSet);
  }

  @Test
  public void testGetIntonationalPhraseDetectionTask() {
    AuToBITask task = AuToBIUtils.getIntonationalPhraseDetectionTask(null);
    assertEquals("nominal_IntonationalPhraseBoundary", task.getTrueFeature());
    assertEquals("hyp_intonational_phrase_boundary", task.getHypFeature());
    assertTrue(task.getFeatureSet() instanceof IntonationalPhraseBoundaryDetectionFeatureSet);
  }

  @Test
  public void testGetIntermediatePhraseDetectionTask() {
    AuToBITask task = AuToBIUtils.getIntermediatePhraseDetectionTask(null);
    assertEquals("nominal_IntermediatePhraseBoundary", task.getTrueFeature());
    assertEquals("hyp_intermediate_phrase_boundary", task.getHypFeature());
    assertTrue(task.getFeatureSet() instanceof IntermediatePhraseBoundaryDetectionFeatureSet);
  }

  @Test
  public void testGetPhraseAccentClassificationTask() {
    AuToBITask task = AuToBIUtils.getPhraseAccentClassificationTask(null);
    assertEquals("nominal_PhraseAccent", task.getTrueFeature());
    assertEquals("hyp_phrase_accent", task.getHypFeature());
    assertTrue(task.getFeatureSet() instanceof PhraseAccentClassificationFeatureSet);
  }

  @Test
  public void testGetPABTClassificationTask() {
    AuToBITask task = AuToBIUtils.getPABTClassificationTask(null);
    assertEquals("nominal_PhraseAccentBoundaryTone", task.getTrueFeature());
    assertEquals("hyp_phrase_accent_boundary_tone", task.getHypFeature());
    assertTrue(task.getFeatureSet() instanceof PhraseAccentBoundaryToneClassificationFeatureSet);
  }

  @Test
  public void testParseFeatureNameWorksOnAtomicFeatures() {
    List<String> params = parse("f0");
    assertEquals(1, params.size());
    assertEquals("f0", params.get(0));
  }

  @Test
  public void testParseFeatureNameWorksOnOneParameterFeatures() {
    List<String> params = parse("test[f0]");
    assertEquals(2, params.size());
    assertEquals("test", params.get(0));
    assertEquals("f0", params.get(1));
  }

  @Test
  public void testParseFeatureNameWorksOnNestedParameterFeatures() {
    // Nested brackets stay attached to their enclosing parameter.
    List<String> params = parse("test[f0[booo]]");
    assertEquals(2, params.size());
    assertEquals("test", params.get(0));
    assertEquals("f0[booo]", params.get(1));
  }

  @Test
  public void testParseFeatureNameWorksOnMultipleParameterFeatures() {
    List<String> params = parse("test[f0,I,spectrum]");
    assertEquals(4, params.size());
    assertEquals("test", params.get(0));
    assertEquals("f0", params.get(1));
    assertEquals("I", params.get(2));
    assertEquals("spectrum", params.get(3));
  }

  @Test
  public void testParseFeatureNameWorksOnMultipleNestedParameterFeatures() {
    List<String> params = parse("test[f0[I,spectrum],I[f0,spectrum]]");
    assertEquals(3, params.size());
    assertEquals("test", params.get(0));
    assertEquals("f0[I,spectrum]", params.get(1));
    assertEquals("I[f0,spectrum]", params.get(2));
  }

  @Test
  public void testParseFeatureNameFailsOnMisMatchedBrackets() {
    try {
      AuToBIUtils.parseFeatureName("f0[test");
      fail();
    } catch (AuToBIException e) {
      // Expected.
    }
  }
}
| |
/**
* *****************************************************************************
*
* Copyright (c) 2004-2010 Oracle Corporation.
*
* All rights reserved. This program and the accompanying materials are made
* available under the terms of the Eclipse Public License v1.0 which
* accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
*
* Kohsuke Kawaguchi
*
*
******************************************************************************
*/
package hudson.model;
import hudson.RelativePath;
import hudson.XmlFile;
import hudson.BulkChange;
import hudson.PluginWrapper;
import hudson.Util;
import static hudson.Util.singleQuote;
import hudson.diagnosis.OldDataMonitor;
import hudson.model.listeners.SaveableListener;
import hudson.util.ReflectionUtils;
import hudson.util.ReflectionUtils.Parameter;
import hudson.views.ListViewColumn;
import net.sf.json.JSONArray;
import net.sf.json.JSONObject;
import org.kohsuke.stapler.*;
import org.springframework.util.StringUtils;
import org.jvnet.tiger_types.Types;
import org.apache.commons.io.IOUtils;
import javax.servlet.ServletException;
import javax.servlet.RequestDispatcher;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.HashMap;
import java.util.Locale;
import java.util.Arrays;
import java.util.Collections;
import java.util.concurrent.ConcurrentHashMap;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.lang.reflect.Type;
import java.lang.reflect.Field;
import java.lang.reflect.ParameterizedType;
import java.beans.Introspector;
import java.net.URL;
import static javax.servlet.http.HttpServletResponse.SC_NOT_FOUND;
/**
* Metadata about a configurable instance.
*
 * <p> {@link Descriptor} is an object that has metadata about a
 * {@link Describable} object, and also serves as a factory (in a way this
 * relationship is similar to the {@link Object}/{@link Class} relationship).
*
* A {@link Descriptor}/{@link Describable} combination is used throughout in
* Hudson to implement a configuration/extensibility mechanism.
*
* <p> Take the list view support as an example, which is implemented in
* {@link ListView} class. Whenever a new view is created, a new
* {@link ListView} instance is created with the configuration information. This
* instance gets serialized to XML, and this instance will be called to render
* the view page. This is the job of {@link Describable} — each instance
* represents a specific configuration of a view (what projects are in it,
* regular expression, etc.)
*
* <p> For Hudson to create such configured {@link ListView} instance, Hudson
* needs another object that captures the metadata of {@link ListView}, and that
* is what a {@link Descriptor} is for. {@link ListView} class has a singleton
* descriptor, and this descriptor helps render the configuration form, remember
* system-wide configuration, and works as a factory.
*
* <p> {@link Descriptor} also usually have its associated views.
*
*
* <h2>Persistence</h2> <p> {@link Descriptor} can persist data just by storing
* them in fields. However, it is the responsibility of the derived type to
* properly invoke {@link #save()} and {@link #load()}.
*
 * <h2>Reflection Enhancement</h2> {@link Descriptor} defines additions to the
* standard Java reflection and provides reflective information about its
* corresponding {@link Describable}. These are primarily used by tag libraries
* to keep the Jelly scripts concise.
*
* @author Kohsuke Kawaguchi
* @see Describable
*/
public abstract class Descriptor<T extends Describable<T>> implements Saveable {
    /**
     * Up to Hudson 1.61 this was used as the primary persistence mechanism.
     * Going forward Hudson simply persists all the non-transient fields of
     * {@link Descriptor}, just like others, so this is pointless.
     *
     * @deprecated since 2006-11-16
     */
    @Deprecated
    private transient Map<String, Object> properties;
    /**
     * The class being described by this descriptor.
     */
    public transient final Class<? extends T> clazz;
    /**
     * Cache of form-field validation URLs keyed by field name; populated
     * lazily by {@link #getCheckUrl(String)}.
     */
    private transient final Map<String, String> checkMethods = new ConcurrentHashMap<String, String>();
    /**
     * Lazily computed list of properties on {@link #clazz} and on the
     * descriptor itself.
     */
    private transient volatile Map<String, PropertyType> propertyTypes, globalPropertyTypes;
    /**
     * Help file redirect, keyed by the field name to the path.
     *
     * @see #getHelpFile(String)
     */
    private final Map<String, String> helpRedirect = new HashMap<String, String>();
/**
* Represents a readable property on {@link Describable}.
*/
public static final class PropertyType {
//TODO: review and check whether we can do it private
public final Class clazz;
//TODO: review and check whether we can do it private
public final Type type;
private volatile Class itemType;
public Class getClazz() {
return clazz;
}
public Type getType() {
return type;
}
PropertyType(Class clazz, Type type) {
this.clazz = clazz;
this.type = type;
}
PropertyType(Field f) {
this(f.getType(), f.getGenericType());
}
PropertyType(Method getter) {
this(getter.getReturnType(), getter.getGenericReturnType());
}
public Enum[] getEnumConstants() {
return (Enum[]) clazz.getEnumConstants();
}
/**
* If the property is a collection/array type, what is an item type?
*/
public Class getItemType() {
if (itemType == null) {
itemType = computeItemType();
}
return itemType;
}
private Class computeItemType() {
if (clazz.isArray()) {
return clazz.getComponentType();
}
if (Collection.class.isAssignableFrom(clazz)) {
Type col = Types.getBaseClass(type, Collection.class);
if (col instanceof ParameterizedType) {
return Types.erasure(Types.getTypeArgument(col, 0));
} else {
return Object.class;
}
}
return null;
}
/**
* Returns {@link Descriptor} whose 'clazz' is the same as
* {@link #getItemType() the item type}.
*/
public Descriptor getItemTypeDescriptor() {
return Hudson.getInstance().getDescriptor(getItemType());
}
public Descriptor getItemTypeDescriptorOrDie() {
return Hudson.getInstance().getDescriptorOrDie(getItemType());
}
/**
* Returns all the descriptors that produce types assignable to the item
* type.
*/
public List<? extends Descriptor> getApplicableDescriptors() {
return Hudson.getInstance().getDescriptorList(clazz);
}
}
    /**
     * Creates a descriptor for the given describable class.
     *
     * @param clazz the concrete {@link Describable} type this descriptor describes.
     */
    protected Descriptor(Class<? extends T> clazz) {
        this.clazz = clazz;
        // doing this turns out to be very error prone,
        // as field initializers in derived types will override values.
        // load();
    }
/**
* Infers the type of the corresponding {@link Describable} from the outer
* class. This version works when you follow the common convention, where a
* descriptor is written as the static nested class of the describable
* class.
*
* @since 1.278
*/
protected Descriptor() {
this.clazz = (Class<T>) getClass().getEnclosingClass();
if (clazz == null) {
throw new AssertionError(getClass() + " doesn't have an outer class. Use the constructor that takes the Class object explicitly.");
}
// detect an type error
Type bt = Types.getBaseClass(getClass(), Descriptor.class);
if (bt instanceof ParameterizedType) {
ParameterizedType pt = (ParameterizedType) bt;
// this 't' is the closest approximation of T of Descriptor<T>.
Class t = Types.erasure(pt.getActualTypeArguments()[0]);
if (!t.isAssignableFrom(clazz)) {
throw new AssertionError("Outer class " + clazz + " of " + getClass() + " is not assignable to " + t + ". Perhaps wrong outer class?");
}
}
// detect a type error. this Descriptor is supposed to be returned from getDescriptor(), so make sure its type match up.
// this prevents a bug like http://www.nabble.com/Creating-a-new-parameter-Type-%3A-Masked-Parameter-td24786554.html
try {
Method getd = clazz.getMethod("getDescriptor");
if (!getd.getReturnType().isAssignableFrom(getClass())) {
throw new AssertionError(getClass() + " must be assignable to " + getd.getReturnType());
}
} catch (NoSuchMethodException e) {
throw new AssertionError(getClass() + " is missing getDescriptor method.");
}
}
    /**
     * Human readable name of this kind of configurable object.
     *
     * @return a short name for this kind of object, shown to users in the UI.
     */
    public abstract String getDisplayName();
/**
* Uniquely identifies this {@link Descriptor} among all the other
* {@link Descriptor}s.
*
* <p> Historically {@link #clazz} is assumed to be unique, so this method
* uses that as the default, but if you are adding {@link Descriptor}s
* programmatically for the same type, you can change this to disambiguate
* them.
*
* @return Stick to valid Java identifier character, plus '.', which had to
* be allowed for historical reasons.
*
* @since 1.391
*/
public String getId() {
return clazz.getName();
}
/**
* Gets the URL that this Descriptor is bound to, relative to the nearest
* {@link DescriptorByNameOwner}. Since {@link Hudson} is a
* {@link DescriptorByNameOwner}, there's always one such ancestor to any
* request.
*/
public String getDescriptorUrl() {
return "descriptorByName/" + getId();
}
private String getCurrentDescriptorByNameUrl() {
StaplerRequest req = Stapler.getCurrentRequest();
Ancestor a = req.findAncestor(DescriptorByNameOwner.class);
return a.getUrl();
}
/**
* If the field "xyz" of a {@link Describable} has the corresponding
* "doCheckXyz" method, return the form-field validation string. Otherwise
* null. <p> This method is used to hook up the form validation method to
* the corresponding HTML input element.
*/
public String getCheckUrl(String fieldName) {
String method = checkMethods.get(fieldName);
if (method == null) {
method = calcCheckUrl(fieldName);
checkMethods.put(fieldName, method);
}
if (method.equals(NONE)) // == would do, but it makes IDE flag a warning
{
return null;
}
// put this under the right contextual umbrella.
// a is always non-null because we already have Hudson as the sentinel
return singleQuote(getCurrentDescriptorByNameUrl() + '/') + '+' + method;
}
private String calcCheckUrl(String fieldName) {
String capitalizedFieldName = StringUtils.capitalize(fieldName);
Method method = ReflectionUtils.getPublicMethodNamed(getClass(), "doCheck" + capitalizedFieldName);
if (method == null) {
return NONE;
}
return singleQuote(getDescriptorUrl() + "/check" + capitalizedFieldName) + buildParameterList(method, new StringBuilder()).append(".toString()");
}
/**
* Builds query parameter line by figuring out what should be submitted
*/
private StringBuilder buildParameterList(Method method, StringBuilder query) {
for (Parameter p : ReflectionUtils.getParameters(method)) {
QueryParameter qp = p.annotation(QueryParameter.class);
if (qp != null) {
String name = qp.value();
if (name.length() == 0) {
name = p.name();
}
if (name == null || name.length() == 0) {
continue; // unknown parameter name. we'll report the error when the form is submitted.
}
RelativePath rp = p.annotation(RelativePath.class);
if (rp != null) {
name = rp.value() + '/' + name;
}
if (query.length() == 0) {
query.append("+qs(this)");
}
if (name.equals("value")) {
// The special 'value' parameter binds to the the current field
query.append(".addThis()");
} else {
query.append(".nearBy('" + name + "')");
}
continue;
}
Method m = ReflectionUtils.getPublicMethodNamed(p.type(), "fromStapler");
if (m != null) {
buildParameterList(m, query);
}
}
return query;
}
/**
 * Computes the list of other form fields that the given field depends on,
 * via the {@code doFillXyzItems} method, and records it as the
 * 'fillDependsOn' attribute; also records the URL of that method as the
 * 'fillUrl' attribute.
 *
 * @throws IllegalStateException if this descriptor has no matching
 *         {@code doFillXyzItems} method.
 */
public void calcFillSettings(String field, Map<String, Object> attributes) {
    String capitalized = StringUtils.capitalize(field);
    String methodName = "doFill" + capitalized + "Items";
    Method filler = ReflectionUtils.getPublicMethodNamed(getClass(), methodName);
    if (filler == null) {
        throw new IllegalStateException(String.format("%s doesn't have the %s method for filling a drop-down list", getClass(), methodName));
    }
    // Figure out which sibling fields must be submitted with the request.
    List<String> depends = buildFillDependencies(filler, new ArrayList<String>());
    if (!depends.isEmpty()) {
        attributes.put("fillDependsOn", Util.join(depends, " "));
    }
    attributes.put("fillUrl", String.format("%s/%s/fill%sItems", getCurrentDescriptorByNameUrl(), getDescriptorUrl(), capitalized));
}
/**
 * Recursively collects the names of form fields that the given fill method
 * consumes, following {@code fromStapler} factory methods on parameter types.
 *
 * @param depends accumulator; returned for chaining.
 */
private List<String> buildFillDependencies(Method method, List<String> depends) {
    for (Parameter p : ReflectionUtils.getParameters(method)) {
        QueryParameter qp = p.annotation(QueryParameter.class);
        if (qp == null) {
            // Indirectly bound via a public fromStapler factory, if any.
            Method fromStapler = ReflectionUtils.getPublicMethodNamed(p.type(), "fromStapler");
            if (fromStapler != null) {
                buildFillDependencies(fromStapler, depends);
            }
            continue;
        }
        String name = qp.value().length() == 0 ? p.name() : qp.value();
        if (name == null || name.length() == 0) {
            continue; // unknown parameter name. we'll report the error when the form is submitted.
        }
        RelativePath rp = p.annotation(RelativePath.class);
        if (rp != null) {
            name = rp.value() + '/' + name;
        }
        depends.add(name);
    }
    return depends;
}
/**
 * Configures auto-completion for the given form field by setting the
 * 'autoCompleteUrl' attribute, when this descriptor declares a matching
 * {@code doAutoCompleteXyz} method. Does nothing otherwise.
 */
public void calcAutoCompleteSettings(String field, Map<String, Object> attributes) {
    String capitalized = StringUtils.capitalize(field);
    Method completer = ReflectionUtils.getPublicMethodNamed(getClass(), "doAutoComplete" + capitalized);
    if (completer == null) {
        return; // no auto-completion
    }
    attributes.put("autoCompleteUrl", String.format("%s/%s/autoComplete%s", getCurrentDescriptorByNameUrl(), getDescriptorUrl(), capitalized));
}
/**
 * Used by Jelly to abstract away the handling of the global.jelly vs
 * config.jelly databinding difference.
 */
public PropertyType getPropertyType(Object instance, String field) {
    // In global.jelly the bound instance is the descriptor itself.
    if (instance == this) {
        return getGlobalPropertyType(field);
    }
    return getPropertyType(field);
}
/**
 * Obtains the property type of the given field of {@link #clazz}.
 * The type table is computed lazily on first use and then cached.
 */
public PropertyType getPropertyType(String field) {
    Map<String, PropertyType> types = propertyTypes;
    if (types == null) {
        types = buildPropertyTypes(clazz);
        propertyTypes = types;
    }
    return types.get(field);
}
/**
 * Obtains the property type of the given field of this descriptor itself
 * (used for global configuration binding). Computed lazily and cached.
 */
public PropertyType getGlobalPropertyType(String field) {
    Map<String, PropertyType> types = globalPropertyTypes;
    if (types == null) {
        types = buildPropertyTypes(getClass());
        globalPropertyTypes = types;
    }
    return types.get(field);
}
/**
 * Given the class, lists up its {@link PropertyType}s from its public
 * fields and getters (any public method whose name starts with "get").
 */
private Map<String, PropertyType> buildPropertyTypes(Class<?> clazz) {
    Map<String, PropertyType> types = new HashMap<String, PropertyType>();
    for (Field f : clazz.getFields()) {
        types.put(f.getName(), new PropertyType(f));
    }
    for (Method m : clazz.getMethods()) {
        String methodName = m.getName();
        if (methodName.startsWith("get")) {
            // "getFooBar" -> property "fooBar"
            types.put(Introspector.decapitalize(methodName.substring(3)), new PropertyType(m));
        }
    }
    return types;
}
/**
 * Gets the class name nicely escaped to be usable as a key in the
 * structured form submission ('.' is replaced with '-').
 */
public final String getJsonSafeClassName() {
    return getId().replace('.', '-');
}
/**
 * @deprecated Implement {@link #newInstance(StaplerRequest, JSONObject)}
 * method instead. Deprecated as of 1.145.
 */
public T newInstance(StaplerRequest req) throws FormException {
    String message = getClass() + " should implement newInstance(StaplerRequest,JSONObject)";
    throw new UnsupportedOperationException(message);
}
/**
 * Creates a configured instance from the submitted form.
 *
 * <p> Hudson only invokes this method when the user wants an instance of
 * <tt>T</tt>. So there's no need to check that in the implementation.
 *
 * <p> Starting 1.206, the default implementation of this method does the
 * following:
 * <pre>
 * req.bindJSON(clazz,formData);
 * </pre> <p> ... which performs the databinding on the constructor of
 * {@link #clazz}.
 *
 * <p> For some types of {@link Describable}, such as
 * {@link ListViewColumn}, this method can be invoked with null request
 * object for historical reason. Such design is considered broken, but due
 * to the compatibility reasons we cannot fix it. Because of this, the
 * default implementation gracefully handles null request, but the contract
 * of the method still is "request is always non-null." Extension points
 * that need to define the "default instance" semantics should define a
 * descriptor subtype and add the no-arg newInstance method.
 *
 * @param req Always non-null (see note above.) This object includes
 * represents the entire submission.
 * @param formData The JSON object that captures the configuration data for
 * this {@link Descriptor}. See
 * http://wiki.hudson-ci.org/display/HUDSON/Structured+Form+Submission
 * Always non-null.
 *
 * @throws FormException Signals a problem in the submitted form.
 * @since 1.145
 */
public T newInstance(StaplerRequest req, JSONObject formData) throws FormException {
    try {
        // Reflectively look up newInstance(StaplerRequest) to see whether a
        // subclass overrode the deprecated single-argument variant.
        Method m = getClass().getMethod("newInstance", StaplerRequest.class);
        if (!Modifier.isAbstract(m.getDeclaringClass().getModifiers())) {
            // this class overrides newInstance(StaplerRequest).
            // maintain the backward compatible behavior
            return verifyNewInstance(newInstance(req));
        } else {
            if (req == null) {
                // yes, req is supposed to be always non-null, but see the note above
                return verifyNewInstance(clazz.newInstance());
            }
            // new behavior as of 1.206
            return verifyNewInstance(req.bindJSON(clazz, formData));
        }
    } catch (NoSuchMethodException e) {
        throw new AssertionError(e); // impossible
    } catch (InstantiationException e) {
        throw new Error("Failed to instantiate " + clazz + " from " + formData, e);
    } catch (IllegalAccessException e) {
        throw new Error("Failed to instantiate " + clazz + " from " + formData, e);
    } catch (RuntimeException e) {
        // Wrap to add the class/form context to the error message.
        throw new RuntimeException("Failed to instantiate " + clazz + " from " + formData, e);
    }
}
/**
 * Look out for a typical error a plugin developer makes. See
 * http://hudson.361315.n4.nabble.com/Help-Hint-needed-Post-build-action-doesn-t-stay-activated-td2308833.html
 * Warns (but does not fail) when the freshly created object reports a
 * descriptor other than this one.
 */
private T verifyNewInstance(T t) {
    boolean descriptorMismatch = t != null && t.getDescriptor() != this;
    if (descriptorMismatch) {
        // TODO: should this be a fatal error?
        LOGGER.warning("Father of " + t + " and its getDescriptor() points to two different instances. Probably malplaced @Extension. See http://hudson.361315.n4.nabble.com/Help-Hint-needed-Post-build-action-doesn-t-stay-activated-td2308833.html");
    }
    return t;
}
/**
 * Returns the resource path to the help screen HTML, if any.
 *
 * <p> Starting 1.282, this method uses "convention over configuration"
 * &#8212; you should just put the "help.html" (and its localized versions,
 * if any) in the same directory you put your Jelly view files, and this
 * method will automatically does the right thing.
 *
 * <p> This value is relative to the context root of Hudson, so normally the
 * values are something like <tt>"/plugin/emma/help.html"</tt> to refer to
 * static resource files in a plugin, or
 * <tt>"/publisher/EmmaPublisher/abc"</tt> to refer to Jelly script
 * <tt>abc.jelly</tt> or a method <tt>EmmaPublisher.doAbc()</tt>.
 *
 * @return null to indicate that there's no help.
 */
public String getHelpFile() {
    // null fieldName means the class-level (not per-field) help page.
    return getHelpFile(null);
}
/**
 * Returns the path to the help screen HTML for the given field.
 *
 * <p>
 * The help files are assumed to be at "help/FIELDNAME.html" with possible
 * locale variations. Resolved against {@link #clazz} and its ancestors.
 */
public String getHelpFile(final String fieldName) {
    return getHelpFile(clazz, fieldName);
}
/**
 * Returns the URL path to the help page for the given field of the given class,
 * or null if no help exists. Explicit redirects registered via
 * {@link #addHelpFileRedirect} take precedence; otherwise the class and its
 * ancestors are searched for a Jelly view or a static help resource.
 */
public String getHelpFile(Class<?> clazz, String fieldName) {
    // A registered redirect short-circuits the lookup entirely.
    String v = helpRedirect.get(fieldName);
    if (v != null) {
        return v;
    }
    // Walk up the class hierarchy so inherited help pages are found too.
    for (Class<?> c : getAncestors(clazz)) {
        String page = "/descriptor/" + getId() + "/help";
        String suffix;
        if (fieldName == null) {
            suffix = ""; // class-level help
        } else {
            page += '/' + fieldName;
            suffix = '-' + fieldName; // per-field help, e.g. "help-name.html"
        }
        try {
            // Jelly/Groovy view-based help page takes priority over static HTML.
            if (Stapler.getCurrentRequest().getView(c, "help" + suffix) != null) {
                return page;
            }
        } catch (IOException e) {
            throw new Error(e);
        }
        // Fall back to a static, possibly localized, help resource.
        if (getHelpStream(c, suffix) != null) {
            return page;
        }
    }
    return null;
}
/**
 * Lists the given class and all of its superclasses, most-derived first.
 * (Generified from a raw {@code Class} parameter; private, so callers
 * inside this class are unaffected.)
 *
 * @param clazz starting class; may not be null.
 * @return the inheritance chain ending at {@code java.lang.Object}.
 */
private Iterable<Class<?>> getAncestors(Class<?> clazz) {
    List<Class<?>> r = new ArrayList<Class<?>>();
    for (Class<?> c = clazz; c != null; c = c.getSuperclass()) {
        r.add(c);
    }
    return r;
}
/**
 * Opens the static help resource ("help&lt;suffix&gt;.html") for the given class,
 * trying progressively less specific locale variants
 * (lang_country_variant, lang_country, lang, then the default), mirroring
 * the standard resource-bundle search order.
 * Generified from a raw {@code Class} parameter (private method).
 *
 * @return an open stream the caller must close, or null if no resource exists
 *         (including when the class was loaded by the bootstrap classloader).
 */
private InputStream getHelpStream(Class<?> c, String suffix) {
    Locale locale = Stapler.getCurrentRequest().getLocale();
    String base = c.getName().replace('.', '/').replace('$', '/') + "/help" + suffix;
    ClassLoader cl = c.getClassLoader();
    if (cl == null) {
        return null; // bootstrap classloader; it cannot host help resources
    }
    // Most specific localization first, unlocalized default last.
    String[] candidates = {
        base + '_' + locale.getLanguage() + '_' + locale.getCountry() + '_' + locale.getVariant() + ".html",
        base + '_' + locale.getLanguage() + '_' + locale.getCountry() + ".html",
        base + '_' + locale.getLanguage() + ".html",
        base + ".html",
    };
    for (String candidate : candidates) {
        InputStream in = cl.getResourceAsStream(candidate);
        if (in != null) {
            return in;
        }
    }
    return null;
}
/**
 * Locates the static help resource ("help&lt;suffix&gt;.html") next to the given
 * class, preferring the most specific locale variant and falling back to the
 * unlocalized default.
 *
 * @return the resource URL, or null if none of the variants exists.
 */
private URL getStaticHelpUrl(Class<?> c, String suffix) {
    Locale locale = Stapler.getCurrentRequest().getLocale();
    String base = "help" + suffix;
    // Same search order as resource bundles: lang_country_variant, lang_country, lang, default.
    String[] candidates = {
        base + '_' + locale.getLanguage() + '_' + locale.getCountry() + '_' + locale.getVariant() + ".html",
        base + '_' + locale.getLanguage() + '_' + locale.getCountry() + ".html",
        base + '_' + locale.getLanguage() + ".html",
        base + ".html",
    };
    for (String candidate : candidates) {
        URL url = c.getResource(candidate);
        if (url != null) {
            return url;
        }
    }
    return null;
}
/**
 * Tells Hudson that the help file for the field 'fieldName' is defined in
 * the help file for the 'fieldNameToRedirectTo' in the 'owner' class.
 * The redirect target is resolved once, at registration time.
 *
 * @since 1.425
 */
protected void addHelpFileRedirect(String fieldName, Class<? extends Describable> owner, String fieldNameToRedirectTo) {
    helpRedirect.put(fieldName,
            Hudson.getInstance().getDescriptor(owner).getHelpFile(fieldNameToRedirectTo));
}
/**
 * Checks if the given object is created from this {@link Descriptor},
 * i.e. whether it is an instance of {@link #clazz}. Null yields false.
 */
public final boolean isInstance(T instance) {
    return clazz.isInstance(instance);
}
/**
 * Checks if the type represented by this descriptor is a subtype of the
 * given type (or the type itself).
 */
public final boolean isSubTypeOf(Class type) {
    return type.isAssignableFrom(clazz);
}
/**
 * @deprecated As of 1.239, use
 * {@link #configure(StaplerRequest, JSONObject)}.
 */
public boolean configure(StaplerRequest req) throws FormException {
    // Default no-op: reports success without touching any state.
    return true;
}
/**
 * Invoked when the global configuration page is submitted.
 *
 * Can be overriden to store descriptor-specific information.
 *
 * @param json The JSON object that captures the configuration data for this
 * {@link Descriptor}. See
 * http://wiki.hudson-ci.org/display/HUDSON/Structured+Form+Submission
 * @return false to keep the client in the same config page.
 */
public boolean configure(StaplerRequest req, JSONObject json) throws FormException {
    // compatibility: delegate to the deprecated single-argument overload
    return configure(req);
}
/** Returns the view page used for per-instance configuration of {@link #clazz}. */
public String getConfigPage() {
    return getViewPage(clazz, "config.jelly");
}
/** Returns the view page for global configuration, or null if the class defines none. */
public String getGlobalConfigPage() {
    return getViewPage(clazz, "global.jelly", null);
}
/**
 * Searches the class and its superclasses for the named view resource
 * (e.g. "config.jelly") and returns its classpath-rooted path, or
 * {@code defaultValue} when not found.
 *
 * <p>Fixes two latent NPEs in the original loop: a class loaded by the
 * bootstrap classloader has a null {@link ClassLoader}, and an interface's
 * {@code getSuperclass()} is null (the walk never reaches {@code Object.class}).
 */
private String getViewPage(Class<?> clazz, String pageName, String defaultValue) {
    for (Class<?> c = clazz; c != null && c != Object.class; c = c.getSuperclass()) {
        ClassLoader cl = c.getClassLoader();
        if (cl == null) {
            continue; // bootstrap-loaded class cannot host view resources
        }
        String name = c.getName().replace('.', '/').replace('$', '/') + "/" + pageName;
        if (cl.getResource(name) != null) {
            return '/' + name;
        }
    }
    return defaultValue;
}
/**
 * Resolves the named view page for the class, defaulting to the bare page
 * name when it cannot be found.
 */
protected final String getViewPage(Class<?> clazz, String pageName) {
    // We didn't find the configuration page.
    // Either this is non-fatal, in which case it doesn't matter what string we return so long as
    // it doesn't exist.
    // Or this error is fatal, in which case we want the developer to see what page he's missing.
    // so we put the page name.
    return getViewPage(clazz, pageName, pageName);
}
/**
 * Saves the configuration info to the disk.
 * No-op while a {@link BulkChange} covering this object is in progress
 * (the bulk change commits once at the end instead).
 * Write failures are logged, not propagated.
 */
public synchronized void save() {
    if (BulkChange.contains(this)) {
        return;
    }
    try {
        getConfigFile().write(this);
        // Notify listeners only after a successful write.
        SaveableListener.fireOnChange(this, getConfigFile());
    } catch (IOException e) {
        LOGGER.log(Level.WARNING, "Failed to save " + getConfigFile(), e);
    }
}
/**
 * Loads the data from the disk into this object.
 *
 * <p> The constructor of the derived class must call this method. (If we do
 * that in the base class, the derived class won't get a chance to set
 * default values.) Missing files are silently skipped; read failures are
 * logged, not propagated.
 */
public synchronized void load() {
    XmlFile file = getConfigFile();
    if (file.exists()) {
        try {
            file.unmarshal(this);
        } catch (IOException e) {
            LOGGER.log(Level.WARNING, "Failed to load " + file, e);
        }
    }
}
/** Returns the XML file ("&lt;id&gt;.xml" under the Hudson root dir) backing this descriptor. */
public XmlFile getConfigFile() {
    return new XmlFile(new File(Hudson.getInstance().getRootDir(), getId() + ".xml"));
}
/**
 * Serves <tt>help.html</tt> from the resource of {@link #clazz}.
 * The rest of the request path selects a per-field help page; the class
 * hierarchy is walked so inherited help is served too. Responds 404 when
 * no help exists.
 */
public void doHelp(StaplerRequest req, StaplerResponse rsp) throws IOException, ServletException {
    String path = req.getRestOfPath();
    // Reject directory-traversal attempts before using the path in lookups.
    if (path.contains("..")) {
        throw new ServletException("Illegal path: " + path);
    }
    path = path.replace('/', '-');
    PluginWrapper pw = getPlugin();
    if (pw != null) {
        // Advertise the owning plugin for diagnostics.
        rsp.setHeader("X-Plugin-Short-Name", pw.getShortName());
        rsp.setHeader("X-Plugin-Long-Name", pw.getLongName());
        rsp.setHeader("X-Plugin-From", pw.getUrl());
    }
    for (Class<?> c = clazz; c != null; c = c.getSuperclass()) {
        // A Jelly view takes precedence over a static HTML resource.
        RequestDispatcher rd = Stapler.getCurrentRequest().getView(c, "help" + path);
        if (rd != null) {// template based help page
            rd.forward(req, rsp);
            return;
        }
        InputStream in = getHelpStream(c, path);
        if (in != null) {
            // TODO: generalize macro expansion and perhaps even support JEXL
            rsp.setContentType("text/html;charset=UTF-8");
            //InputStream in = url.openStream();
            try {
                String literal = IOUtils.toString(in, "UTF-8");
                // Expand ${rootURL} so static help can link back into Hudson.
                rsp.getWriter().println(Util.replaceMacro(literal, Collections.singletonMap("rootURL", req.getContextPath())));
            } finally {
                IOUtils.closeQuietly(in);
            }
            return;
        }
    }
    rsp.sendError(SC_NOT_FOUND);
}
/** Returns the plugin that defines {@link #clazz}, or null for core classes. */
protected PluginWrapper getPlugin() {
    return Hudson.getInstance().getPluginManager().whichPlugin(clazz);
}
//
// static methods
//
// to work around warning when creating a generic array type
public static <T> T[] toArray(T... values) {
    // The varargs array itself is returned; callers must not assume a copy.
    return values;
}
/** Copies the given values into a new mutable list. */
public static <T> List<T> toList(T... values) {
    List<T> list = new ArrayList<T>(values.length);
    Collections.addAll(list, values);
    return list;
}
/**
 * Indexes the given describables by their descriptor, preserving iteration
 * order. A null input yields an empty map.
 */
public static <T extends Describable<T>> Map<Descriptor<T>, T> toMap(Iterable<T> describables) {
    Map<Descriptor<T>, T> m = new LinkedHashMap<Descriptor<T>, T>();
    if (describables != null) {
        for (T describable : describables) {
            m.put(describable.getDescriptor(), describable);
        }
    }
    return m;
}
/**
 * Used to build {@link Describable} instance list from &lt;f:hetero-list&gt;
 * tag.
 *
 * @param req Request that represents the form submission.
 * @param formData Structured form data that represents the contains data
 * for the list of describables.
 * @param key The JSON property name for 'formData' that represents the data
 * for the list of describables.
 * @param descriptors List of descriptors to create instances from.
 * @return Can be empty but never null.
 */
public static <T extends Describable<T>> List<T> newInstancesFromHeteroList(StaplerRequest req, JSONObject formData, String key,
        Collection<? extends Descriptor<T>> descriptors) throws FormException {
    Object listData = formData.get(key);
    return newInstancesFromHeteroList(req, listData, descriptors);
}
/**
 * Instantiates a list of {@link Describable}s from raw hetero-list form data.
 * Each entry's "kind" property selects the descriptor by class name.
 *
 * @return Can be empty but never null.
 * @throws FormException if an entry's "kind" matches none of the supplied
 *         descriptors (previously this surfaced as an uninformative
 *         NullPointerException from the null {@code find} result).
 */
public static <T extends Describable<T>> List<T> newInstancesFromHeteroList(StaplerRequest req, Object formData,
        Collection<? extends Descriptor<T>> descriptors) throws FormException {
    List<T> items = new ArrayList<T>();
    if (formData != null) {
        for (Object o : JSONArray.fromObject(formData)) {
            JSONObject jo = (JSONObject) o;
            String kind = jo.getString("kind");
            Descriptor<T> d = find(descriptors, kind);
            if (d == null) {
                throw new FormException("No descriptor found for kind: " + kind, "kind");
            }
            items.add(d.newInstance(req, jo));
        }
    }
    return items;
}
/**
 * Finds a descriptor from a collection by its fully-qualified class name;
 * returns null when no element matches.
 */
public static <T extends Descriptor> T find(Collection<? extends T> list, String className) {
    for (T candidate : list) {
        String name = candidate.getClass().getName();
        if (name.equals(className)) {
            return candidate;
        }
    }
    return null;
}
/** Finds a registered descriptor by its class name, searching the global extension list. */
public static Descriptor find(String className) {
    return find(Hudson.getInstance().getExtensionList(Descriptor.class), className);
}
/**
 * Signals an invalid value in a submitted form. Doubles as an
 * {@link HttpResponse} so it can render itself back to the client.
 */
public static final class FormException extends Exception implements HttpResponse {
    // Name of the offending form field, so the UI can point the user at it.
    private final String formField;
    public FormException(String message, String formField) {
        super(message);
        this.formField = formField;
    }
    public FormException(String message, Throwable cause, String formField) {
        super(message, cause);
        this.formField = formField;
    }
    public FormException(Throwable cause, String formField) {
        super(cause);
        this.formField = formField;
    }
    /**
     * Which form field contained an error?
     */
    public String getFormField() {
        return formField;
    }
    public void generateResponse(StaplerRequest req, StaplerResponse rsp, Object node) throws IOException, ServletException {
        // for now, we can't really use the field name that caused the problem.
        new Failure(getMessage()).generateResponse(req, rsp, node);
    }
}
// Class-wide logger; used by save/load failure reporting and verifyNewInstance warnings.
private static final Logger LOGGER = Logger.getLogger(Descriptor.class.getName());
/**
 * Used in {@link #checkMethods} to indicate that there's no check method.
 * (A NUL-character string cannot collide with any real URL fragment.)
 */
private static final String NONE = "\u0000";
/**
 * Deserialization hook: flags data written by old versions.
 * If the legacy 'properties' field is populated, reports it to
 * {@link OldDataMonitor} ("1.62" presumably identifies the last version
 * that wrote that field -- TODO confirm).
 */
private Object readResolve() {
    if (properties != null) {
        OldDataMonitor.report(this, "1.62");
    }
    return this;
}
}
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
package org.elasticsearch.xpack.autoscaling;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodeRole;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.util.set.Sets;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.core.Tuple;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.autoscaling.capacity.AutoscalingCapacity;
import org.elasticsearch.xpack.autoscaling.capacity.AutoscalingDeciderResult;
import org.elasticsearch.xpack.autoscaling.capacity.AutoscalingDeciderResults;
import org.elasticsearch.xpack.autoscaling.capacity.FixedAutoscalingDeciderService;
import org.elasticsearch.xpack.autoscaling.policy.AutoscalingPolicy;
import org.elasticsearch.xpack.autoscaling.policy.AutoscalingPolicyMetadata;
import java.util.BitSet;
import java.util.List;
import java.util.Map;
import java.util.SortedMap;
import java.util.SortedSet;
import java.util.TreeMap;
import java.util.TreeSet;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
/**
 * Base class for autoscaling unit tests: static factories that build randomized
 * autoscaling model objects (capacities, decider results, policies, metadata),
 * plus "mutate" helpers used by equality/serialization round-trip tests.
 * All helpers draw from the ESTestCase random source, so the order of random
 * calls is part of test reproducibility.
 */
public abstract class AutoscalingTestCase extends ESTestCase {
    // A decider result with a random (possibly null) capacity and a random fixed-decider reason.
    public static AutoscalingDeciderResult randomAutoscalingDeciderResult() {
        AutoscalingCapacity capacity = randomNullableAutoscalingCapacity();
        return randomAutoscalingDeciderResultWithCapacity(capacity);
    }
    protected static AutoscalingDeciderResult randomAutoscalingDeciderResultWithCapacity(AutoscalingCapacity capacity) {
        return new AutoscalingDeciderResult(
            capacity,
            new FixedAutoscalingDeciderService.FixedReason(randomNullableByteSizeValue(), randomNullableByteSizeValue(), randomInt(1000))
        );
    }
    // 1-10 decider results keyed "0".."9"; the merge function can never fire since keys are unique.
    public static AutoscalingDeciderResults randomAutoscalingDeciderResults() {
        final SortedMap<String, AutoscalingDeciderResult> results = IntStream.range(0, randomIntBetween(1, 10))
            .mapToObj(i -> Tuple.tuple(Integer.toString(i), randomAutoscalingDeciderResult()))
            .collect(Collectors.toMap(Tuple::v1, Tuple::v2, (a, b) -> { throw new IllegalStateException(); }, TreeMap::new));
        AutoscalingCapacity capacity = new AutoscalingCapacity(randomAutoscalingResources(), randomAutoscalingResources());
        return new AutoscalingDeciderResults(capacity, randomNodes(), results);
    }
    // Capacity whose node-level part (if present) never sets a resource the total leaves null.
    public static AutoscalingCapacity randomAutoscalingCapacity() {
        AutoscalingCapacity.AutoscalingResources total = randomNullValueAutoscalingResources();
        return new AutoscalingCapacity(
            total,
            randomBoolean() ? randomNullValueAutoscalingResources(total.storage() != null, total.memory() != null) : null
        );
    }
    protected static AutoscalingCapacity randomNullableAutoscalingCapacity() {
        return randomBoolean() ? randomAutoscalingCapacity() : null;
    }
    // Both storage and memory always populated.
    protected static AutoscalingCapacity.AutoscalingResources randomAutoscalingResources() {
        return new AutoscalingCapacity.AutoscalingResources(randomByteSizeValue(), randomByteSizeValue());
    }
    private static AutoscalingCapacity.AutoscalingResources randomNullValueAutoscalingResources() {
        return randomNullValueAutoscalingResources(true, true);
    }
    /**
     * Resources where each side may be null, but at least one of the allowed
     * sides is guaranteed to be set (the boolean algebra below ensures
     * addStorage || addMemory).
     */
    public static AutoscalingCapacity.AutoscalingResources randomNullValueAutoscalingResources(boolean allowStorage, boolean allowMemory) {
        assert allowMemory || allowStorage;
        boolean addStorage = (allowStorage && randomBoolean()) || allowMemory == false;
        boolean addMemory = (allowMemory && randomBoolean()) || addStorage == false;
        return new AutoscalingCapacity.AutoscalingResources(
            addStorage ? randomByteSizeValue() : null,
            addMemory ? randomByteSizeValue() : null
        );
    }
    // 1-10 discovery nodes sharing a random id prefix, ordered by the results comparator.
    public static SortedSet<DiscoveryNode> randomNodes() {
        String prefix = randomAlphaOfLength(5);
        return IntStream.range(0, randomIntBetween(1, 10))
            .mapToObj(
                i -> new DiscoveryNode(
                    prefix + i,
                    buildNewFakeTransportAddress(),
                    Map.of(),
                    randomRoles().stream().map(DiscoveryNodeRole::getRoleFromRoleName).collect(Collectors.toSet()),
                    Version.CURRENT
                )
            )
            .collect(Collectors.toCollection(() -> new TreeSet<>(AutoscalingDeciderResults.DISCOVERY_NODE_COMPARATOR)));
    }
    public static ByteSizeValue randomByteSizeValue() {
        // do not want to test any overflow.
        return new ByteSizeValue(randomLongBetween(0, Long.MAX_VALUE >> 16));
    }
    public static ByteSizeValue randomNullableByteSizeValue() {
        return randomBoolean() ? randomByteSizeValue() : null;
    }
    // Always a single "fixed" decider entry with random settings.
    public static SortedMap<String, Settings> randomAutoscalingDeciders() {
        return new TreeMap<>(
            List.of(randomFixedDecider()).stream().collect(Collectors.toMap(d -> FixedAutoscalingDeciderService.NAME, Function.identity()))
        );
    }
    // Any subset (possibly empty) of the fixed decider's storage/memory/nodes settings.
    public static Settings randomFixedDecider() {
        Settings.Builder configurationBuilder = Settings.builder();
        if (randomBoolean()) {
            configurationBuilder.put(FixedAutoscalingDeciderService.STORAGE.getKey(), randomByteSizeValue());
        }
        if (randomBoolean()) {
            configurationBuilder.put(FixedAutoscalingDeciderService.MEMORY.getKey(), randomByteSizeValue());
        }
        if (randomBoolean()) {
            configurationBuilder.put(FixedAutoscalingDeciderService.NODES.getKey(), randomIntBetween(1, 10));
        }
        return configurationBuilder.build();
    }
    public static AutoscalingPolicy randomAutoscalingPolicy() {
        return randomAutoscalingPolicyOfName(randomAlphaOfLength(8));
    }
    public static AutoscalingPolicy randomAutoscalingPolicyOfName(final String name) {
        return new AutoscalingPolicy(name, randomRoles(), randomAutoscalingDeciders());
    }
    /**
     * Produces a policy guaranteed to differ from {@code instance}: the 3-bit
     * choice (never zero) selects which of name/roles/deciders to change.
     */
    public static AutoscalingPolicy mutateAutoscalingPolicy(final AutoscalingPolicy instance) {
        String name = instance.name();
        SortedSet<String> roles = instance.roles();
        SortedMap<String, Settings> deciders = instance.deciders();
        BitSet choice = BitSet.valueOf(new long[] { randomIntBetween(1, 7) });
        if (choice.get(0)) {
            name = randomValueOtherThan(instance.name(), () -> randomAlphaOfLength(8));
        }
        if (choice.get(1)) {
            roles = mutateRoles(roles);
        }
        if (choice.get(2)) {
            deciders = mutateAutoscalingDeciders(deciders);
        }
        return new AutoscalingPolicy(name, roles, deciders);
    }
    protected static SortedSet<String> mutateRoles(SortedSet<String> roles) {
        return randomValueOtherThan(roles, AutoscalingTestCase::randomRoles);
    }
    public static SortedMap<String, Settings> mutateAutoscalingDeciders(final SortedMap<String, Settings> deciders) {
        if (deciders.size() == 0) {
            return randomAutoscalingDeciders();
        } else {
            // use a proper subset of the deciders
            return new TreeMap<>(
                randomSubsetOf(randomIntBetween(0, deciders.size() - 1), deciders.entrySet()).stream()
                    .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue))
            );
        }
    }
    public static AutoscalingMetadata randomAutoscalingMetadata() {
        return randomAutoscalingMetadataOfPolicyCount(randomIntBetween(0, 8));
    }
    public static AutoscalingMetadata randomAutoscalingMetadataOfPolicyCount(final int numberOfPolicies) {
        final SortedMap<String, AutoscalingPolicyMetadata> policies = new TreeMap<>();
        for (int i = 0; i < numberOfPolicies; i++) {
            final AutoscalingPolicy policy = randomAutoscalingPolicy();
            final AutoscalingPolicyMetadata policyMetadata = new AutoscalingPolicyMetadata(policy);
            policies.put(policy.name(), policyMetadata);
        }
        return new AutoscalingMetadata(policies);
    }
    // Any subset (possibly empty) of the known node role names.
    public static SortedSet<String> randomRoles() {
        return randomSubsetOf(DiscoveryNodeRole.roleNames()).stream().collect(Sets.toUnmodifiableSortedSet());
    }
    public static NamedWriteableRegistry getAutoscalingNamedWriteableRegistry() {
        return new NamedWriteableRegistry(new Autoscaling().getNamedWriteables());
    }
    public static NamedXContentRegistry getAutoscalingXContentRegistry() {
        return new NamedXContentRegistry(new Autoscaling().getNamedXContent());
    }
}
| |
/*
* Copyright (c) 2015-present, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*/
package com.facebook.drawee.drawable;
import java.util.Arrays;
import android.graphics.Canvas;
import android.graphics.drawable.Drawable;
import android.os.SystemClock;
import com.facebook.common.internal.Preconditions;
import com.facebook.common.internal.VisibleForTesting;
/**
* A drawable that fades to the specific layer.
*
* <p> Arbitrary number of layers is supported. 5 Different fade methods are supported.
* Once the transition starts we will animate layers in or out based on used fade method.
* fadeInLayer fades in specified layer to full opacity.
* fadeOutLayer fades out specified layer to zero opacity.
* fadeOutAllLayers fades out all layers to zero opacity.
* fadeToLayer fades in specified layer to full opacity, fades out all other layers to zero opacity.
* fadeUpToLayer fades in all layers up to specified layer to full opacity and
* fades out all other layers to zero opacity.
*
*/
public class FadeDrawable extends ArrayDrawable {
/**
* A transition is about to start.
*/
@VisibleForTesting
public static final int TRANSITION_STARTING = 0;
/**
* The transition has started and the animation is in progress.
*/
@VisibleForTesting
public static final int TRANSITION_RUNNING = 1;
/**
* No transition will be applied.
*/
@VisibleForTesting
public static final int TRANSITION_NONE = 2;
/**
* Layers.
*/
private final Drawable[] mLayers;
/**
* The current state.
*/
@VisibleForTesting int mTransitionState;
@VisibleForTesting int mDurationMs;
@VisibleForTesting long mStartTimeMs;
@VisibleForTesting int[] mStartAlphas;
@VisibleForTesting int[] mAlphas;
@VisibleForTesting int mAlpha;
/**
* Determines whether to fade-out a layer to zero opacity (false) or to fade-in to
* the full opacity (true)
*/
@VisibleForTesting boolean[] mIsLayerOn;
/**
* When in batch mode, drawable won't invalidate self until batch mode finishes.
*/
@VisibleForTesting int mPreventInvalidateCount;
/**
* Creates a new fade drawable.
* The first layer is displayed with full opacity whereas all other layers are invisible.
* @param layers layers to fade between
*/
public FadeDrawable(Drawable[] layers) {
super(layers);
Preconditions.checkState(layers.length >= 1, "At least one layer required!");
mLayers = layers;
mStartAlphas = new int[layers.length];
mAlphas = new int[layers.length];
mAlpha = 255;
mIsLayerOn = new boolean[layers.length];
mPreventInvalidateCount = 0;
resetInternal();
}
@Override
public void invalidateSelf() {
if (mPreventInvalidateCount == 0) {
super.invalidateSelf();
}
}
/**
* Begins the batch mode so that it doesn't invalidate self on every operation.
*/
public void beginBatchMode() {
mPreventInvalidateCount++;
}
/**
* Ends the batch mode and invalidates.
*/
public void endBatchMode() {
mPreventInvalidateCount--;
invalidateSelf();
}
/**
* Sets the duration of the current transition in milliseconds.
*/
public void setTransitionDuration(int durationMs) {
mDurationMs = durationMs;
// re-initialize transition if it's running
if (mTransitionState == TRANSITION_RUNNING) {
mTransitionState = TRANSITION_STARTING;
}
}
/**
* Gets the transition duration.
* @return transition duration in milliseconds.
*/
public int getTransitionDuration() {
return mDurationMs;
}
/**
* Resets internal state to the initial state.
*/
private void resetInternal() {
mTransitionState = TRANSITION_NONE;
Arrays.fill(mStartAlphas, 0);
mStartAlphas[0] = 255;
Arrays.fill(mAlphas, 0);
mAlphas[0] = 255;
Arrays.fill(mIsLayerOn, false);
mIsLayerOn[0] = true;
}
/**
* Resets to the initial state.
*/
public void reset() {
resetInternal();
invalidateSelf();
}
/**
* Starts fading in the specified layer.
* @param index the index of the layer to fade in.
*/
public void fadeInLayer(int index) {
mTransitionState = TRANSITION_STARTING;
mIsLayerOn[index] = true;
invalidateSelf();
}
/**
* Starts fading out the specified layer.
* @param index the index of the layer to fade out.
*/
public void fadeOutLayer(int index) {
mTransitionState = TRANSITION_STARTING;
mIsLayerOn[index] = false;
invalidateSelf();
}
/**
* Starts fading in all layers.
*/
public void fadeInAllLayers() {
mTransitionState = TRANSITION_STARTING;
Arrays.fill(mIsLayerOn, true);
invalidateSelf();
}
/**
* Starts fading out all layers.
*/
public void fadeOutAllLayers() {
mTransitionState = TRANSITION_STARTING;
Arrays.fill(mIsLayerOn, false);
invalidateSelf();
}
/**
* Starts fading to the specified layer.
* @param index the index of the layer to fade to
*/
public void fadeToLayer(int index) {
mTransitionState = TRANSITION_STARTING;
Arrays.fill(mIsLayerOn, false);
mIsLayerOn[index] = true;
invalidateSelf();
}
/**
* Starts fading up to the specified layer.
* <p>
* Layers up to the specified layer inclusive will fade in, other layers will fade out.
* @param index the index of the layer to fade up to.
*/
public void fadeUpToLayer(int index) {
mTransitionState = TRANSITION_STARTING;
Arrays.fill(mIsLayerOn, 0, index + 1, true);
Arrays.fill(mIsLayerOn, index + 1, mLayers.length, false);
invalidateSelf();
}
/**
* Finishes transition immediately.
*/
public void finishTransitionImmediately() {
mTransitionState = TRANSITION_NONE;
for (int i = 0; i < mLayers.length; i++) {
mAlphas[i] = mIsLayerOn[i] ? 255 : 0;
}
invalidateSelf();
}
/**
* Updates the current alphas based on the ratio of the elapsed time and duration.
* @param ratio
* @return whether the all layers have reached their target opacity
*/
private boolean updateAlphas(float ratio) {
boolean done = true;
for (int i = 0; i < mLayers.length; i++) {
int dir = mIsLayerOn[i] ? +1 : -1;
// determines alpha value and clamps it to [0, 255]
mAlphas[i] = (int) (mStartAlphas[i] + dir * 255 * ratio);
if (mAlphas[i] < 0) {
mAlphas[i] = 0;
}
if (mAlphas[i] > 255) {
mAlphas[i] = 255;
}
// determines whether the layer has reached its target opacity
if (mIsLayerOn[i] && mAlphas[i] < 255) {
done = false;
}
if (!mIsLayerOn[i] && mAlphas[i] > 0) {
done = false;
}
}
return done;
}
  /**
   * Draws all layers, first advancing the fade transition if one is running.
   * Each layer is drawn with its per-layer alpha scaled by the drawable-wide
   * alpha. While the transition is unfinished, schedules itself for another
   * draw pass via {@link #invalidateSelf()}.
   */
  @Override
  public void draw(Canvas canvas) {
    boolean done = true;
    float ratio;
    switch (mTransitionState) {
      case TRANSITION_STARTING:
        // initialize start alphas and start time
        System.arraycopy(mAlphas, 0, mStartAlphas, 0, mLayers.length);
        mStartTimeMs = getCurrentTimeMs();
        // if the duration is 0, update alphas to the target opacities immediately
        ratio = (mDurationMs == 0) ? 1.0f : 0.0f;
        // if all the layers have reached their target opacity, transition is done
        done = updateAlphas(ratio);
        mTransitionState = done ? TRANSITION_NONE : TRANSITION_RUNNING;
        break;
      case TRANSITION_RUNNING:
        Preconditions.checkState(mDurationMs > 0);
        // determine ratio based on the elapsed time
        ratio = (float) (getCurrentTimeMs() - mStartTimeMs) / mDurationMs;
        // if all the layers have reached their target opacity, transition is done
        done = updateAlphas(ratio);
        mTransitionState = done ? TRANSITION_NONE : TRANSITION_RUNNING;
        break;
      case TRANSITION_NONE:
        // there is no transition in progress and mAlphas should be left as is.
        done = true;
        break;
    }
    for (int i = 0; i < mLayers.length; i++) {
      // combine the per-layer alpha with the drawable-wide alpha
      drawDrawableWithAlpha(canvas, mLayers[i], mAlphas[i] * mAlpha / 255);
    }
    if (!done) {
      // transition still running: request another draw pass
      invalidateSelf();
    }
  }
private void drawDrawableWithAlpha(Canvas canvas, Drawable drawable, int alpha) {
if (alpha > 0) {
mPreventInvalidateCount++;
drawable.mutate().setAlpha(alpha);
mPreventInvalidateCount--;
drawable.draw(canvas);
}
}
@Override
public void setAlpha(int alpha) {
if (mAlpha != alpha) {
mAlpha = alpha;
invalidateSelf();
}
}
  /**
   * Returns the drawable-wide alpha last set via {@link #setAlpha(int)},
   * which is applied on top of the per-layer alphas when drawing.
   */
  public int getAlpha() {
    return mAlpha;
  }
  /**
   * Returns current time. Absolute reference is not important as only time deltas are used.
   * Extracting this to a separate method allows better testing (a test can
   * override it to supply a deterministic clock).
   * @return current time in milliseconds
   */
  protected long getCurrentTimeMs() {
    return SystemClock.uptimeMillis();
  }
  /**
   * Gets the transition state (STARTING, RUNNING, NONE).
   * Useful for testing purposes.
   * @return one of the TRANSITION_* state constants
   */
  @VisibleForTesting
  public int getTransitionState() {
    return mTransitionState;
  }
  /**
   * Returns whether the layer at the given index is currently fading (or has
   * faded) in, i.e. its target opacity is fully opaque.
   * @param index the index of the layer to query
   * @return true if the layer's target state is "on"
   */
  public boolean isLayerOn(int index) {
    return mIsLayerOn[index];
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.phoenix.expression.function;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.math.BigDecimal;
import java.math.RoundingMode;
import java.sql.SQLException;
import java.util.Collections;
import java.util.List;
import org.apache.hadoop.hbase.filter.CompareFilter;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.io.WritableUtils;
import org.apache.phoenix.compile.KeyPart;
import org.apache.phoenix.expression.Determinism;
import org.apache.phoenix.expression.Expression;
import org.apache.phoenix.expression.LiteralExpression;
import org.apache.phoenix.parse.FunctionParseNode.Argument;
import org.apache.phoenix.parse.FunctionParseNode.BuiltInFunction;
import org.apache.phoenix.parse.FunctionParseNode.FunctionClassType;
import org.apache.phoenix.query.KeyRange;
import org.apache.phoenix.schema.IllegalDataException;
import org.apache.phoenix.schema.PColumn;
import org.apache.phoenix.schema.PTable;
import org.apache.phoenix.schema.tuple.Tuple;
import org.apache.phoenix.schema.types.PDataType;
import org.apache.phoenix.schema.types.PDecimal;
import org.apache.phoenix.schema.types.PInteger;
import org.apache.phoenix.schema.types.PLong;
import org.apache.phoenix.schema.types.PVarchar;
import com.google.common.collect.Lists;
/**
*
* Class encapsulating the process for rounding off a column/literal of type
* {@link org.apache.phoenix.schema.types.PDecimal}
*
*
* @since 3.0.0
*/
@BuiltInFunction(name = RoundFunction.NAME,
        args = {
                @Argument(allowedTypes={PDecimal.class}),
                @Argument(allowedTypes={PVarchar.class, PInteger.class}, defaultValue = "null", isConstant=true),
                @Argument(allowedTypes={PInteger.class}, defaultValue="1", isConstant=true)
        },
        classType = FunctionClassType.DERIVED
)
public class RoundDecimalExpression extends ScalarFunction {

    /** Number of fractional digits to keep when rounding; defaults to 0. */
    private int scale;

    /**
     * Creates a {@link RoundDecimalExpression} that rounds {@code expr} with the given scale.
     * Expressions coercible to LONG are already integral, so they are returned unchanged.
     */
    public static Expression create(Expression expr, int scale) throws SQLException {
        if (expr.getDataType().isCoercibleTo(PLong.INSTANCE)) {
            return expr;
        }
        Expression scaleExpr = LiteralExpression.newConstant(scale, PInteger.INSTANCE, Determinism.ALWAYS);
        List<Expression> expressions = Lists.newArrayList(expr, scaleExpr);
        return new RoundDecimalExpression(expressions);
    }

    /**
     * Creates a {@link RoundDecimalExpression} with a default scale of 0 used for rounding.
     */
    public static Expression create(Expression expr) throws SQLException {
        return create(expr, 0);
    }

    /**
     * Creates a {@link RoundDecimalExpression} from the parsed argument list, supplying a
     * scale of 0 when the caller passed only the value expression.
     */
    public static Expression create(List<Expression> exprs) throws SQLException {
        Expression expr = exprs.get(0);
        if (expr.getDataType().isCoercibleTo(PLong.INSTANCE)) {
            return expr;
        }
        if (exprs.size() == 1) {
            Expression scaleExpr = LiteralExpression.newConstant(0, PInteger.INSTANCE, Determinism.ALWAYS);
            exprs = Lists.newArrayList(expr, scaleExpr);
        }
        return new RoundDecimalExpression(exprs);
    }

    /** No-arg constructor required for deserialization via {@link #readFields}. */
    public RoundDecimalExpression() {}

    /**
     * Constructs the expression from its children, validating that the constant scale
     * argument (children.get(1)) lies in [0, {@link PDataType#MAX_PRECISION}].
     * @throws IllegalDataException if the scale is non-null and out of range
     */
    public RoundDecimalExpression(List<Expression> children) {
        super(children);
        LiteralExpression scaleChild = (LiteralExpression)children.get(1);
        PDataType scaleType = scaleChild.getDataType();
        Object scaleValue = scaleChild.getValue();
        if(scaleValue != null) {
            if (scaleType.isCoercibleTo(PInteger.INSTANCE, scaleValue)) {
                int scale = (Integer) PInteger.INSTANCE.toObject(scaleValue, scaleType);
                // BUGFIX: also reject negative scales. The previous check only enforced the
                // upper bound, contradicting the error message below which promises the
                // scale is between 0 and MAX_PRECISION inclusive.
                if (scale >= 0 && scale <= PDataType.MAX_PRECISION) {
                    this.scale = scale;
                    return;
                }
            }
            throw new IllegalDataException("Invalid second argument for scale: " + scaleValue + ". The scale must be between 0 and " + PDataType.MAX_PRECISION + " inclusive.");
        }
    }

    /**
     * Evaluates the child expression and rounds the resulting decimal to this
     * expression's scale, writing the rounded bytes into {@code ptr}.
     * An empty (null) child value is passed through unchanged.
     */
    @Override
    public boolean evaluate(Tuple tuple, ImmutableBytesWritable ptr) {
        Expression childExpr = children.get(0);
        if(childExpr.evaluate(tuple, ptr)) {
            if (ptr.getLength()==0) {
                return true; // SQL NULL: leave ptr empty
            }
            BigDecimal value = (BigDecimal) PDecimal.INSTANCE.toObject(ptr, childExpr.getDataType(), childExpr.getSortOrder());
            BigDecimal scaledValue = value.setScale(scale, getRoundingMode());
            ptr.set(PDecimal.INSTANCE.toBytes(scaledValue));
            return true;
        }
        return false;
    }

    @Override
    public PDataType getDataType() {
        return PDecimal.INSTANCE;
    }

    /** Rounding mode used by this expression; subclasses (e.g. CEIL/FLOOR) override. */
    protected RoundingMode getRoundingMode() {
        return RoundingMode.HALF_UP;
    }

    /** @return the validated rounding scale (number of fractional digits kept) */
    protected final int getRoundingScale() {
        return scale;
    }

    @Override
    public void readFields(DataInput input) throws IOException {
        super.readFields(input);
        scale = WritableUtils.readVInt(input);
    }

    @Override
    public void write(DataOutput output) throws IOException {
        super.write(output);
        WritableUtils.writeVInt(output, scale);
    }

    @Override
    public String getName() {
        return RoundFunction.NAME;
    }

    @Override
    public OrderPreserving preservesOrder() {
        return OrderPreserving.YES;
    }

    @Override
    public int getKeyFormationTraversalIndex() {
        return 0;
    }

    /**
     * Builds a {@link KeyPart} that translates comparisons against the rounded value
     * into key ranges over the underlying (unrounded) decimal column, so the ROUND
     * expression can still drive row-key range scans.
     */
    @Override
    public KeyPart newKeyPart(final KeyPart childPart) {
        return new KeyPart() {
            private final List<Expression> extractNodes = Collections.<Expression>singletonList(RoundDecimalExpression.this);

            @Override
            public PColumn getColumn() {
                return childPart.getColumn();
            }

            @Override
            public List<Expression> getExtractNodes() {
                return extractNodes;
            }

            @Override
            public KeyRange getKeyRange(CompareFilter.CompareOp op, Expression rhs) {
                final BigDecimal rhsDecimal = (BigDecimal) PDecimal.INSTANCE.toObject(evaluateExpression(rhs));
                // equality requires an exact match. if rounding would cut off more precision
                // than needed for a match, it's impossible for there to be any matches
                if(op == CompareFilter.CompareOp.EQUAL && !hasEnoughPrecisionToProduce(rhsDecimal)) {
                    return KeyRange.EMPTY_RANGE;
                }
                // if the decimal needs to be rounded, round it such that the given
                // operator will still be valid
                BigDecimal roundedDecimal = roundAndPreserveOperator(rhsDecimal, op);
                // the range of big decimals that could be rounded to produce the rounded result
                // alternatively, the "rounding bucket" that this decimal falls into
                final KeyRange equalityRange = getInputRangeProducing(roundedDecimal);
                boolean lowerInclusive = equalityRange.isLowerInclusive();
                boolean upperInclusive = equalityRange.isUpperInclusive();
                byte[] lowerRange = KeyRange.UNBOUND;
                byte[] upperRange = KeyRange.UNBOUND;
                switch(op) {
                    case EQUAL:
                        return equalityRange;
                    case GREATER:
                        // from the equality range and up, NOT including the equality range
                        lowerRange = equalityRange.getUpperRange();
                        lowerInclusive = !equalityRange.isUpperInclusive();
                        break;
                    case GREATER_OR_EQUAL:
                        // from the equality range and up, including the equality range
                        lowerRange = equalityRange.getLowerRange();
                        break;
                    case LESS:
                        // from the equality range and down, NOT including the equality range
                        upperRange = equalityRange.getLowerRange();
                        upperInclusive = !equalityRange.isLowerInclusive();
                        break;
                    case LESS_OR_EQUAL:
                        // from the equality range and down, including the equality range
                        upperRange = equalityRange.getUpperRange();
                        break;
                    default:
                        throw new AssertionError("Invalid CompareOp: " + op);
                }
                return KeyRange.getKeyRange(lowerRange, lowerInclusive, upperRange, upperInclusive);
            }

            /**
             * Produces the given decimal rounded to this rounding expression's scale. If the
             * decimal requires more scale precision to produce than this expression has, as in
             * ROUND(?, 2) > 2.0098974, it ensures that the decimal is rounded such that the
             * given operator will still produce correct results.
             * @param decimal the decimal to round with this expression's scale
             * @param op the operator to preserve comparison with in the event of lost precision
             * @return the rounded decimal
             */
            private BigDecimal roundAndPreserveOperator(BigDecimal decimal, CompareFilter.CompareOp op) {
                final BigDecimal rounded = roundToScale(decimal);
                // if we lost information, make sure that the rounding didn't break the operator
                if(!hasEnoughPrecisionToProduce(decimal)) {
                    switch(op) {
                        case GREATER_OR_EQUAL:
                            // e.g. 'ROUND(dec, 2) >= 2.013' would be converted to
                            // 'ROUND(dec, 2) >= 2.01' but should be 'ROUND(dec, 2) >= 2.02'
                            if(decimal.compareTo(rounded) > 0) {
                                return stepNextInScale(rounded);
                            }
                            break;
                        case GREATER:
                            // e.g. 'ROUND(dec, 2) > 2.017' would be converted to
                            // 'ROUND(dec, 2) > 2.02' but should be 'ROUND(dec, 2) > 2.01'
                            if(decimal.compareTo(rounded) < 0) {
                                return stepPrevInScale(rounded);
                            }
                            break;
                        case LESS_OR_EQUAL:
                            // e.g. 'ROUND(dec, 2) <= 2.017' would be converted to
                            // 'ROUND(dec, 2) <= 2.02' but should be 'ROUND(dec, 2) <= 2.01'
                            if(decimal.compareTo(rounded) < 0) {
                                return stepPrevInScale(rounded);
                            }
                            break;
                        case LESS:
                            // e.g. 'ROUND(dec, 2) < 2.013' would be converted to
                            // 'ROUND(dec, 2) < 2.01' but should be 'ROUND(dec, 2) < 2.02'
                            if(decimal.compareTo(rounded) > 0) {
                                return stepNextInScale(rounded);
                            }
                            break;
                    }
                }
                // otherwise, rounding has not affected the operator, so return normally
                return rounded;
            }

            @Override
            public PTable getTable() {
                return childPart.getTable();
            }
        };
    }

    /**
     * Finds the Decimal KeyRange that will produce the given result when fed into this
     * rounding expression. For example, a ROUND expression with scale 2 will produce the
     * result "2.05" with any decimal in the range [2.045, 2.0545).
     * The result must be pre-rounded to within this rounding expression's scale.
     * @param result the result to find an input range for. Must be producable.
     * @return a KeyRange of DECIMAL keys that can be rounded by this expression to produce result
     * @throws IllegalArgumentException if the result has more scale than this expression can produce
     */
    protected KeyRange getInputRangeProducing(BigDecimal result) {
        if(!hasEnoughPrecisionToProduce(result)) {
            throw new IllegalArgumentException("Cannot produce input range for decimal " + result
                    + ", not enough precision with scale " + getRoundingScale());
        }
        byte[] lowerRange = PDecimal.INSTANCE.toBytes(halfStepPrevInScale(result));
        byte[] upperRange = PDecimal.INSTANCE.toBytes(halfStepNextInScale(result));
        // inclusiveness changes depending on sign
        // e.g. -0.5 rounds "up" to -1 even though it is the lower boundary
        boolean lowerInclusive = result.signum() > 0;
        boolean upperInclusive = result.signum() < 0;
        return KeyRange.getKeyRange(lowerRange, lowerInclusive, upperRange, upperInclusive);
    }

    /**
     * Determines whether this rounding expression's scale has enough precision to produce the
     * minimum precision for the input decimal. In other words, determines whether the given
     * decimal can be rounded to this scale without losing ordering information.
     * For example, an expression with a scale of 2 has enough precision to produce "2.3", "2.71"
     * and "2.100000", but does not have enough precision to produce "2.001"
     * @param result the decimal to round
     * @return true if the given decimal can be precisely matched by this rounding expression
     */
    protected final boolean hasEnoughPrecisionToProduce(BigDecimal result) {
        // use compareTo so that 2.0 and 2.00 are treated as "equal"
        return roundToScale(result).compareTo(result) == 0;
    }

    /**
     * Returns the given decimal rounded to this rounding expression's scale.
     * For example, with scale 2 the decimal "2.453" would be rounded to either 2.45 or
     * 2.46 depending on the rounding mode, while "2.38" and "2.7" would be unchanged.
     * @param decimal the decimal to round
     * @return the rounded result decimal
     */
    protected final BigDecimal roundToScale(BigDecimal decimal) {
        return decimal.setScale(getRoundingScale(), getRoundingMode());
    }

    /**
     * Produces a value half of a "step" back in this expression's rounding scale.
     * For example with a scale of 2, "2.5" would be stepped back to "2.495".
     */
    protected final BigDecimal halfStepPrevInScale(BigDecimal decimal) {
        BigDecimal step = BigDecimal.ONE.scaleByPowerOfTen(-getRoundingScale());
        BigDecimal halfStep = step.divide(BigDecimal.valueOf(2));
        return decimal.subtract(halfStep);
    }

    /**
     * Produces a value half of a "step" forward in this expression's rounding scale.
     * For example with a scale of 2, "2.5" would be stepped forward to "2.505".
     */
    protected final BigDecimal halfStepNextInScale(BigDecimal decimal) {
        BigDecimal step = BigDecimal.ONE.scaleByPowerOfTen(-getRoundingScale());
        BigDecimal halfStep = step.divide(BigDecimal.valueOf(2));
        return decimal.add(halfStep);
    }

    /**
     * Produces a value one "step" back in this expression's rounding scale.
     * For example with a scale of 2, "2.5" would be stepped back to "2.49".
     */
    protected final BigDecimal stepPrevInScale(BigDecimal decimal) {
        BigDecimal step = BigDecimal.ONE.scaleByPowerOfTen(-getRoundingScale());
        return decimal.subtract(step);
    }

    /**
     * Produces a value one "step" forward in this expression's rounding scale.
     * For example with a scale of 2, "2.5" would be stepped forward to "2.51".
     */
    protected final BigDecimal stepNextInScale(BigDecimal decimal) {
        BigDecimal step = BigDecimal.ONE.scaleByPowerOfTen(-getRoundingScale());
        return decimal.add(step);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.cql3.validation.entities;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import com.google.common.reflect.TypeToken;
import org.junit.Assert;
import org.junit.Test;
import com.datastax.driver.core.*;
import com.datastax.driver.core.exceptions.InvalidQueryException;
import org.apache.cassandra.cql3.CQLTester;
import org.apache.cassandra.cql3.QueryProcessor;
import org.apache.cassandra.cql3.UntypedResultSet;
import org.apache.cassandra.cql3.functions.FunctionName;
import org.apache.cassandra.cql3.functions.JavaBasedUDFunction;
import org.apache.cassandra.cql3.functions.UDFunction;
import org.apache.cassandra.db.marshal.CollectionType;
import org.apache.cassandra.exceptions.InvalidRequestException;
import org.apache.cassandra.schema.KeyspaceMetadata;
import org.apache.cassandra.schema.Schema;
import org.apache.cassandra.service.ClientState;
import org.apache.cassandra.transport.*;
import org.apache.cassandra.transport.ProtocolVersion;
import org.apache.cassandra.transport.messages.ResultMessage;
import org.apache.cassandra.utils.ByteBufferUtil;
public class UFTest extends CQLTester
{
    /** Verifies that driver TypeTokens are rendered as valid Java source type names. */
    @Test
    public void testJavaSourceName()
    {
        Assert.assertEquals("String", JavaBasedUDFunction.javaSourceName(TypeToken.of(String.class)));
        Assert.assertEquals("java.util.Map<Integer, String>", JavaBasedUDFunction.javaSourceName(TypeTokens.mapOf(Integer.class, String.class)));
        Assert.assertEquals("com.datastax.driver.core.UDTValue", JavaBasedUDFunction.javaSourceName(TypeToken.of(UDTValue.class)));
        Assert.assertEquals("java.util.Set<com.datastax.driver.core.UDTValue>", JavaBasedUDFunction.javaSourceName(TypeTokens.setOf(UDTValue.class)));
    }
    /**
     * DROP FUNCTION on a non-existent function (or in a non-existent keyspace) must
     * fail with a descriptive error, while DROP FUNCTION IF EXISTS must be a no-op.
     */
    @Test
    public void testNonExistingOnes() throws Throwable
    {
        // without IF EXISTS: all four variants must raise InvalidRequestException
        assertInvalidThrowMessage(String.format("Function '%s.func_does_not_exist' doesn't exist", KEYSPACE),
                                  InvalidRequestException.class,
                                  "DROP FUNCTION " + KEYSPACE + ".func_does_not_exist");
        assertInvalidThrowMessage(String.format("Function '%s.func_does_not_exist(int, text)' doesn't exist", KEYSPACE),
                                  InvalidRequestException.class,
                                  "DROP FUNCTION " + KEYSPACE + ".func_does_not_exist(int, text)");
        assertInvalidThrowMessage("Function 'keyspace_does_not_exist.func_does_not_exist' doesn't exist",
                                  InvalidRequestException.class,
                                  "DROP FUNCTION keyspace_does_not_exist.func_does_not_exist");
        assertInvalidThrowMessage("Function 'keyspace_does_not_exist.func_does_not_exist(int, text)' doesn't exist",
                                  InvalidRequestException.class,
                                  "DROP FUNCTION keyspace_does_not_exist.func_does_not_exist(int, text)");
        // with IF EXISTS: the same statements must succeed silently
        execute("DROP FUNCTION IF EXISTS " + KEYSPACE + ".func_does_not_exist");
        execute("DROP FUNCTION IF EXISTS " + KEYSPACE + ".func_does_not_exist(int,text)");
        execute("DROP FUNCTION IF EXISTS keyspace_does_not_exist.func_does_not_exist");
        execute("DROP FUNCTION IF EXISTS keyspace_does_not_exist.func_does_not_exist(int,text)");
    }
    /**
     * Each CREATE / CREATE OR REPLACE / DROP of a UDF must emit the matching
     * schema-change event (CREATED / UPDATED / DROPPED) with the function's
     * keyspace, name and argument types.
     */
    @Test
    public void testSchemaChange() throws Throwable
    {
        // fresh function -> CREATED event
        String f = createFunction(KEYSPACE,
                                  "double, double",
                                  "CREATE OR REPLACE FUNCTION %s(state double, val double) " +
                                  "RETURNS NULL ON NULL INPUT " +
                                  "RETURNS double " +
                                  "LANGUAGE javascript " +
                                  "AS '\"string\";';");
        assertLastSchemaChange(Event.SchemaChange.Change.CREATED, Event.SchemaChange.Target.FUNCTION,
                               KEYSPACE, parseFunctionName(f).name,
                               "double", "double");
        // new overload with different argument types -> CREATED event again
        createFunctionOverload(f,
                               "double, double",
                               "CREATE OR REPLACE FUNCTION %s(state int, val int) " +
                               "RETURNS NULL ON NULL INPUT " +
                               "RETURNS int " +
                               "LANGUAGE javascript " +
                               "AS '\"string\";';");
        assertLastSchemaChange(Event.SchemaChange.Change.CREATED, Event.SchemaChange.Target.FUNCTION,
                               KEYSPACE, parseFunctionName(f).name,
                               "int", "int");
        // replacing an existing overload's body -> UPDATED event
        schemaChange("CREATE OR REPLACE FUNCTION " + f + "(state int, val int) " +
                     "RETURNS NULL ON NULL INPUT " +
                     "RETURNS int " +
                     "LANGUAGE javascript " +
                     "AS '\"string1\";';");
        assertLastSchemaChange(Event.SchemaChange.Change.UPDATED, Event.SchemaChange.Target.FUNCTION,
                               KEYSPACE, parseFunctionName(f).name,
                               "int", "int");
        // dropping one overload -> DROPPED event for that signature only
        schemaChange("DROP FUNCTION " + f + "(double, double)");
        assertLastSchemaChange(Event.SchemaChange.Change.DROPPED, Event.SchemaChange.Target.FUNCTION,
                               KEYSPACE, parseFunctionName(f).name,
                               "double", "double");
    }
@Test
public void testFunctionDropOnKeyspaceDrop() throws Throwable
{
String fSin = createFunction(KEYSPACE_PER_TEST, "double",
"CREATE FUNCTION %s ( input double ) " +
"CALLED ON NULL INPUT " +
"RETURNS double " +
"LANGUAGE java " +
"AS 'return Double.valueOf(Math.sin(input.doubleValue()));'");
FunctionName fSinName = parseFunctionName(fSin);
Assert.assertEquals(1, Schema.instance.getFunctions(parseFunctionName(fSin)).size());
assertRows(execute("SELECT function_name, language FROM system_schema.functions WHERE keyspace_name=?", KEYSPACE_PER_TEST),
row(fSinName.name, "java"));
dropPerTestKeyspace();
assertRows(execute("SELECT function_name, language FROM system_schema.functions WHERE keyspace_name=?", KEYSPACE_PER_TEST));
Assert.assertEquals(0, Schema.instance.getFunctions(fSinName).size());
}
    /**
     * Dropping a UDF (directly or via dropping its keyspace) must evict only the
     * prepared statements that reference it from the QueryProcessor cache, leaving
     * unrelated prepared statements untouched.
     */
    @Test
    public void testFunctionDropPreparedStatement() throws Throwable
    {
        createTable("CREATE TABLE %s (key int PRIMARY KEY, d double)");
        String fSin = createFunction(KEYSPACE_PER_TEST, "double",
                                     "CREATE FUNCTION %s ( input double ) " +
                                     "CALLED ON NULL INPUT " +
                                     "RETURNS double " +
                                     "LANGUAGE java " +
                                     "AS 'return Double.valueOf(Math.sin(input.doubleValue()));'");
        FunctionName fSinName = parseFunctionName(fSin);
        Assert.assertEquals(1, Schema.instance.getFunctions(parseFunctionName(fSin)).size());
        // create a pairs of Select and Inserts. One statement in each pair uses the function so when we
        // drop it those statements should be removed from the cache in QueryProcessor. The other statements
        // should be unaffected.
        ResultMessage.Prepared preparedSelect1 = QueryProcessor.prepare(
                                                         String.format("SELECT key, %s(d) FROM %s.%s", fSin, KEYSPACE, currentTable()),
                                                         ClientState.forInternalCalls());
        ResultMessage.Prepared preparedSelect2 = QueryProcessor.prepare(
                                                         String.format("SELECT key FROM %s.%s", KEYSPACE, currentTable()),
                                                         ClientState.forInternalCalls());
        ResultMessage.Prepared preparedInsert1 = QueryProcessor.prepare(
                                                         String.format("INSERT INTO %s.%s (key, d) VALUES (?, %s(?))", KEYSPACE, currentTable(), fSin),
                                                         ClientState.forInternalCalls());
        ResultMessage.Prepared preparedInsert2 = QueryProcessor.prepare(
                                                         String.format("INSERT INTO %s.%s (key, d) VALUES (?, ?)", KEYSPACE, currentTable()),
                                                         ClientState.forInternalCalls());
        // all four statements start out cached
        Assert.assertNotNull(QueryProcessor.instance.getPrepared(preparedSelect1.statementId));
        Assert.assertNotNull(QueryProcessor.instance.getPrepared(preparedSelect2.statementId));
        Assert.assertNotNull(QueryProcessor.instance.getPrepared(preparedInsert1.statementId));
        Assert.assertNotNull(QueryProcessor.instance.getPrepared(preparedInsert2.statementId));
        execute("DROP FUNCTION " + fSin + "(double);");
        // the statements which use the dropped function should be removed from cache, with the others remaining
        Assert.assertNull(QueryProcessor.instance.getPrepared(preparedSelect1.statementId));
        Assert.assertNotNull(QueryProcessor.instance.getPrepared(preparedSelect2.statementId));
        Assert.assertNull(QueryProcessor.instance.getPrepared(preparedInsert1.statementId));
        Assert.assertNotNull(QueryProcessor.instance.getPrepared(preparedInsert2.statementId));
        // re-create the function and re-prepare the two statements that use it
        execute("CREATE FUNCTION " + fSin + " ( input double ) " +
                "RETURNS NULL ON NULL INPUT " +
                "RETURNS double " +
                "LANGUAGE java " +
                "AS 'return Double.valueOf(Math.sin(input));'");
        Assert.assertEquals(1, Schema.instance.getFunctions(fSinName).size());
        preparedSelect1= QueryProcessor.prepare(
                                         String.format("SELECT key, %s(d) FROM %s.%s", fSin, KEYSPACE, currentTable()),
                                         ClientState.forInternalCalls());
        preparedInsert1 = QueryProcessor.prepare(
                                         String.format("INSERT INTO %s.%s (key, d) VALUES (?, %s(?))", KEYSPACE, currentTable(), fSin),
                                         ClientState.forInternalCalls());
        Assert.assertNotNull(QueryProcessor.instance.getPrepared(preparedSelect1.statementId));
        Assert.assertNotNull(QueryProcessor.instance.getPrepared(preparedInsert1.statementId));
        dropPerTestKeyspace();
        // again, only the 2 statements referencing the function should be removed from cache
        // this time because the statements select from tables in KEYSPACE, only the function
        // is scoped to KEYSPACE_PER_TEST
        Assert.assertNull(QueryProcessor.instance.getPrepared(preparedSelect1.statementId));
        Assert.assertNotNull(QueryProcessor.instance.getPrepared(preparedSelect2.statementId));
        Assert.assertNull(QueryProcessor.instance.getPrepared(preparedInsert1.statementId));
        Assert.assertNotNull(QueryProcessor.instance.getPrepared(preparedInsert2.statementId));
    }
    /** Dropping a function directly must evict statements that use it via DelayedValue literals. */
    @Test
    public void testDropFunctionDropsPreparedStatementsWithDelayedValues() throws Throwable
    {
        // test that dropping a function removes stmts which use
        // it to provide a DelayedValue collection from the
        // cache in QueryProcessor
        checkDelayedValuesCorrectlyIdentifyFunctionsInUse(false);
    }
    /** Dropping the function's keyspace must evict statements that use it via DelayedValue literals. */
    @Test
    public void testDropKeyspaceContainingFunctionDropsPreparedStatementsWithDelayedValues() throws Throwable
    {
        // test that dropping a function removes stmts which use
        // it to provide a DelayedValue collection from the
        // cache in QueryProcessor
        checkDelayedValuesCorrectlyIdentifyFunctionsInUse(true);
    }
private ResultMessage.Prepared prepareStatementWithDelayedValue(CollectionType.Kind kind, String function)
{
String collectionType;
String literalArgs;
switch (kind)
{
case LIST:
collectionType = "list<double>";
literalArgs = String.format("[%s(0.0)]", function);
break;
case SET:
collectionType = "set<double>";
literalArgs = String.format("{%s(0.0)}", function);
break;
case MAP:
collectionType = "map<double, double>";
literalArgs = String.format("{%s(0.0):0.0}", function);
break;
default:
Assert.fail("Unsupported collection type " + kind);
collectionType = null;
literalArgs = null;
}
createTable("CREATE TABLE %s (" +
" key int PRIMARY KEY," +
" val " + collectionType + ')');
ResultMessage.Prepared prepared = QueryProcessor.prepare(
String.format("INSERT INTO %s.%s (key, val) VALUES (?, %s)",
KEYSPACE,
currentTable(),
literalArgs),
ClientState.forInternalCalls());
Assert.assertNotNull(QueryProcessor.instance.getPrepared(prepared.statementId));
return prepared;
}
private ResultMessage.Prepared prepareStatementWithDelayedValueTuple(String function)
{
createTable("CREATE TABLE %s (" +
" key int PRIMARY KEY," +
" val tuple<double> )");
ResultMessage.Prepared prepared = QueryProcessor.prepare(
String.format("INSERT INTO %s.%s (key, val) VALUES (?, (%s(0.0)))",
KEYSPACE,
currentTable(),
function),
ClientState.forInternalCalls());
Assert.assertNotNull(QueryProcessor.instance.getPrepared(prepared.statementId));
return prepared;
}
    /**
     * Shared driver for the DelayedValue eviction tests: prepares statements whose
     * collection/tuple literals reference a UDF, drops either the function itself
     * or its enclosing keyspace, and verifies that exactly those statements are
     * evicted from the QueryProcessor cache while a function-free control
     * statement survives.
     * @param dropKeyspace true to drop the keyspace containing the function,
     *                     false to drop the function directly
     */
    private void checkDelayedValuesCorrectlyIdentifyFunctionsInUse(boolean dropKeyspace) throws Throwable
    {
        // prepare a statement which doesn't use any function for a control
        createTable("CREATE TABLE %s (" +
                    " key int PRIMARY KEY," +
                    " val double)");
        ResultMessage.Prepared control = QueryProcessor.prepare(
                                                 String.format("INSERT INTO %s.%s (key, val) VALUES (?, ?)",
                                                               KEYSPACE,
                                                               currentTable()),
                                                 ClientState.forInternalCalls());
        Assert.assertNotNull(QueryProcessor.instance.getPrepared(control.statementId));
        // a function that we'll drop and verify that statements which use it to
        // provide a DelayedValue are removed from the cache in QueryProcessor
        String function = createFunction(KEYSPACE_PER_TEST, "double",
                                         "CREATE FUNCTION %s ( input double ) " +
                                         "CALLED ON NULL INPUT " +
                                         "RETURNS double " +
                                         "LANGUAGE javascript " +
                                         "AS 'input'");
        Assert.assertEquals(1, Schema.instance.getFunctions(parseFunctionName(function)).size());
        List<ResultMessage.Prepared> prepared = new ArrayList<>();
        // prepare statements which use the function to provide a DelayedValue
        prepared.add(prepareStatementWithDelayedValue(CollectionType.Kind.LIST, function));
        prepared.add(prepareStatementWithDelayedValue(CollectionType.Kind.SET, function));
        prepared.add(prepareStatementWithDelayedValue(CollectionType.Kind.MAP, function));
        prepared.add(prepareStatementWithDelayedValueTuple(function));
        // what to drop - the function is scoped to the per-test keyspace, but the prepared statements
        // select from the per-fixture keyspace. So if we drop the per-test keyspace, the function
        // should be removed along with the statements that reference it. The control statement should
        // remain present in the cache. Likewise, if we actually drop the function itself the control
        // statement should not be removed, but the others should be
        if (dropKeyspace)
            dropPerTestKeyspace();
        else
            execute("DROP FUNCTION " + function);
        Assert.assertNotNull(QueryProcessor.instance.getPrepared(control.statementId));
        for (ResultMessage.Prepared removed : prepared)
            Assert.assertNull(QueryProcessor.instance.getPrepared(removed.statementId));
    }
    /**
     * End-to-end lifecycle of a UDF: creation, duplicate-creation errors,
     * IF NOT EXISTS, execution, incompatible and compatible replacement,
     * same-name functions in other keyspaces, dropping, IF EXISTS, and the
     * protection of the system keyspace's native functions.
     */
    @Test
    public void testFunctionCreationAndDrop() throws Throwable
    {
        createTable("CREATE TABLE %s (key int PRIMARY KEY, d double)");
        execute("INSERT INTO %s(key, d) VALUES (?, ?)", 1, 1d);
        execute("INSERT INTO %s(key, d) VALUES (?, ?)", 2, 2d);
        execute("INSERT INTO %s(key, d) VALUES (?, ?)", 3, 3d);
        // simple creation
        String fSin = createFunction(KEYSPACE_PER_TEST, "double",
                                     "CREATE FUNCTION %s ( input double ) " +
                                     "CALLED ON NULL INPUT " +
                                     "RETURNS double " +
                                     "LANGUAGE java " +
                                     "AS 'return Math.sin(input);'");
        // check we can't recreate the same function
        assertInvalidMessage("already exists",
                             "CREATE FUNCTION " + fSin + " ( input double ) " +
                             "CALLED ON NULL INPUT " +
                             "RETURNS double " +
                             "LANGUAGE java AS 'return Double.valueOf(Math.sin(input.doubleValue()));'");
        // but that it doesn't comply with "IF NOT EXISTS"
        execute("CREATE FUNCTION IF NOT EXISTS " + fSin + " ( input double ) " +
                "CALLED ON NULL INPUT " +
                "RETURNS double " +
                "LANGUAGE java AS 'return Double.valueOf(Math.sin(input.doubleValue()));'");
        // Validate that it works as expected
        assertRows(execute("SELECT key, " + fSin + "(d) FROM %s"),
                   row(1, Math.sin(1d)),
                   row(2, Math.sin(2d)),
                   row(3, Math.sin(3d))
        );
        // Replace the method with incompatible return type
        assertInvalidMessage("the new return type text is not compatible with the return type double of existing function",
                             "CREATE OR REPLACE FUNCTION " + fSin + " ( input double ) " +
                             "CALLED ON NULL INPUT " +
                             "RETURNS text " +
                             "LANGUAGE java AS 'return \"42d\";'");
        // proper replacement
        execute("CREATE OR REPLACE FUNCTION " + fSin + " ( input double ) " +
                "CALLED ON NULL INPUT " +
                "RETURNS double " +
                "LANGUAGE java AS 'return Double.valueOf(42d);'");
        // Validate the method as been replaced
        assertRows(execute("SELECT key, " + fSin + "(d) FROM %s"),
                   row(1, 42.0),
                   row(2, 42.0),
                   row(3, 42.0)
        );
        // same function but other keyspace
        String fSin2 = createFunction(KEYSPACE, "double",
                                      "CREATE FUNCTION %s ( input double ) " +
                                      "RETURNS NULL ON NULL INPUT " +
                                      "RETURNS double " +
                                      "LANGUAGE java " +
                                      "AS 'return Math.sin(input);'");
        assertRows(execute("SELECT key, " + fSin2 + "(d) FROM %s"),
                   row(1, Math.sin(1d)),
                   row(2, Math.sin(2d)),
                   row(3, Math.sin(3d))
        );
        // Drop
        execute("DROP FUNCTION " + fSin);
        execute("DROP FUNCTION " + fSin2);
        // Drop unexisting function
        assertInvalidMessage(String.format("Function '%s' doesn't exist", fSin), "DROP FUNCTION " + fSin);
        // but don't complain with "IF EXISTS"
        execute("DROP FUNCTION IF EXISTS " + fSin);
        // can't drop native functions
        assertInvalidMessage("System keyspace 'system' is not user-modifiable", "DROP FUNCTION totimestamp");
        assertInvalidMessage("System keyspace 'system' is not user-modifiable", "DROP FUNCTION uuid");
        // sin() no longer exists
        assertInvalidMessage("Unknown function", "SELECT key, sin(d) FROM %s");
    }
    /**
     * A multi-argument Java UDF used in a WHERE clause must be executed with
     * bound parameters and its result compared against the stored value.
     */
    @Test
    public void testFunctionExecution() throws Throwable
    {
        createTable("CREATE TABLE %s (v text PRIMARY KEY)");
        execute("INSERT INTO %s(v) VALUES (?)", "aaa");
        String fRepeat = createFunction(KEYSPACE_PER_TEST, "text,int",
                                        "CREATE FUNCTION %s(v text, n int) " +
                                        "RETURNS NULL ON NULL INPUT " +
                                        "RETURNS text " +
                                        "LANGUAGE java " +
                                        "AS 'StringBuilder sb = new StringBuilder();\n" +
                                        "    for (int i = 0; i < n; i++)\n" +
                                        "        sb.append(v);\n" +
                                        "    return sb.toString();'");
        // repeat("a", 3) == "aaa" matches the stored row; repeat("a", 2) does not
        assertRows(execute("SELECT v FROM %s WHERE v=" + fRepeat + "(?, ?)", "a", 3), row("aaa"));
        assertEmpty(execute("SELECT v FROM %s WHERE v=" + fRepeat + "(?, ?)", "a", 2));
    }
/**
 * Verifies that a UDF result can be bound to a clustering column with a reversed
 * (DESC clustering order) type without error on INSERT.
 */
@Test
public void testFunctionExecutionWithReversedTypeAsOutput() throws Throwable
{
    createTable("CREATE TABLE %s (k int, v text, PRIMARY KEY(k, v)) WITH CLUSTERING ORDER BY (v DESC)");
    // UDF doubling its text argument; its return value targets the reversed column 'v'.
    String fRepeat = createFunction(KEYSPACE_PER_TEST, "text",
                                    "CREATE FUNCTION %s(v text) " +
                                    "RETURNS NULL ON NULL INPUT " +
                                    "RETURNS text " +
                                    "LANGUAGE java " +
                                    "AS 'return v + v;'");
    // The INSERT succeeding (no exception) is the assertion here.
    execute("INSERT INTO %s(k, v) VALUES (?, " + fRepeat + "(?))", 1, "a");
}
/**
 * Exercises UDF overload resolution: several overloads under one name, rejection of
 * duplicate signatures (text == varchar), ambiguity of unprepared calls, explicit
 * casts to pick an overload, and DROP FUNCTION behavior with multiple/single
 * overloads (CASSANDRA-7812).
 */
@Test
public void testFunctionOverloading() throws Throwable
{
    createTable("CREATE TABLE %s (k text PRIMARY KEY, v int)");
    execute("INSERT INTO %s(k, v) VALUES (?, ?)", "f2", 1);
    // Base overload: (varchar) -> "f1".
    String fOverload = createFunction(KEYSPACE_PER_TEST, "varchar",
                                      "CREATE FUNCTION %s ( input varchar ) " +
                                      "RETURNS NULL ON NULL INPUT " +
                                      "RETURNS text " +
                                      "LANGUAGE java " +
                                      "AS 'return \"f1\";'");
    // Additional overloads: (int) -> "f2", (text,text) -> "f3", (ascii) -> "f1".
    createFunctionOverload(fOverload,
                           "int",
                           "CREATE OR REPLACE FUNCTION %s(i int) " +
                           "RETURNS NULL ON NULL INPUT " +
                           "RETURNS text " +
                           "LANGUAGE java " +
                           "AS 'return \"f2\";'");
    createFunctionOverload(fOverload,
                           "text,text",
                           "CREATE OR REPLACE FUNCTION %s(v1 text, v2 text) " +
                           "RETURNS NULL ON NULL INPUT " +
                           "RETURNS text " +
                           "LANGUAGE java " +
                           "AS 'return \"f3\";'");
    createFunctionOverload(fOverload,
                           "ascii",
                           "CREATE OR REPLACE FUNCTION %s(v ascii) " +
                           "RETURNS NULL ON NULL INPUT " +
                           "RETURNS text " +
                           "LANGUAGE java " +
                           "AS 'return \"f1\";'");
    // text == varchar, so this should be considered as a duplicate
    assertInvalidMessage("already exists",
                         "CREATE FUNCTION " + fOverload + "(v varchar) " +
                         "RETURNS NULL ON NULL INPUT " +
                         "RETURNS text " +
                         "LANGUAGE java AS 'return \"f1\";'");
    // Each overload is selected by argument types.
    assertRows(execute("SELECT " + fOverload + "(k), " + fOverload + "(v), " + fOverload + "(k, k) FROM %s"),
               row("f1", "f2", "f3")
    );
    forcePreparedValues();
    // This shouldn't work if we use preparation since there is no way to know which overload to use
    assertInvalidMessage("Ambiguous call to function", "SELECT v FROM %s WHERE k = " + fOverload + "(?)", "foo");
    stopForcingPreparedValues();
    // but those should since we specifically cast
    assertEmpty(execute("SELECT v FROM %s WHERE k = " + fOverload + "((text)?)", "foo"));
    assertRows(execute("SELECT v FROM %s WHERE k = " + fOverload + "((int)?)", 3), row(1));
    assertEmpty(execute("SELECT v FROM %s WHERE k = " + fOverload + "((ascii)?)", "foo"));
    // And since varchar == text, this should work too
    assertEmpty(execute("SELECT v FROM %s WHERE k = " + fOverload + "((varchar)?)", "foo"));
    // no such functions exist...
    assertInvalidMessage(String.format("Function '%s(boolean)' doesn't exist", fOverload), "DROP FUNCTION " + fOverload + "(boolean)");
    assertInvalidMessage(String.format("Function '%s(bigint)' doesn't exist", fOverload), "DROP FUNCTION " + fOverload + "(bigint)");
    // 'overloaded' has multiple overloads - so it has to fail (CASSANDRA-7812)
    assertInvalidMessage("matches multiple function definitions", "DROP FUNCTION " + fOverload);
    // Dropping overloads one by one: dropped signatures must no longer resolve.
    execute("DROP FUNCTION " + fOverload + "(varchar)");
    assertInvalidMessage("none of its type signatures match", "SELECT v FROM %s WHERE k = " + fOverload + "((text)?)", "foo");
    execute("DROP FUNCTION " + fOverload + "(text, text)");
    assertInvalidMessage("none of its type signatures match", "SELECT v FROM %s WHERE k = " + fOverload + "((text)?,(text)?)", "foo", "bar");
    execute("DROP FUNCTION " + fOverload + "(ascii)");
    assertInvalidMessage("cannot be passed as argument 0 of function", "SELECT v FROM %s WHERE k = " + fOverload + "((ascii)?)", "foo");
    // single-int-overload must still work
    assertRows(execute("SELECT v FROM %s WHERE k = " + fOverload + "((int)?)", 3), row(1));
    // overloaded has just one overload now - so the following DROP FUNCTION is not ambiguous (CASSANDRA-7812)
    execute("DROP FUNCTION " + fOverload);
}
/**
 * Verifies CREATE OR REPLACE for a Java UDF: the original sin(x) implementation is
 * replaced by an identity function, and subsequent calls must use the new body.
 */
@Test
public void testCreateOrReplaceJavaFunction() throws Throwable
{
    createTable("CREATE TABLE %s (key int primary key, val double)");
    execute("INSERT INTO %s (key, val) VALUES (?, ?)", 1, 1d);
    execute("INSERT INTO %s (key, val) VALUES (?, ?)", 2, 2d);
    execute("INSERT INTO %s (key, val) VALUES (?, ?)", 3, 3d);
    // Initial body: Math.sin(input), with explicit null handling (CALLED ON NULL INPUT).
    String fName = createFunction(KEYSPACE_PER_TEST, "double",
                                  "CREATE FUNCTION %s( input double ) " +
                                  "CALLED ON NULL INPUT " +
                                  "RETURNS double " +
                                  "LANGUAGE java " +
                                  "AS '\n" +
                                  " // parameter val is of type java.lang.Double\n" +
                                  " /* return type is of type java.lang.Double */\n" +
                                  " if (input == null) {\n" +
                                  " return null;\n" +
                                  " }\n" +
                                  " return Math.sin( input );\n" +
                                  "';");
    // just check created function
    assertRows(execute("SELECT key, val, " + fName + "(val) FROM %s"),
               row(1, 1d, Math.sin(1d)),
               row(2, 2d, Math.sin(2d)),
               row(3, 3d, Math.sin(3d))
    );
    // Replace the body with the identity function.
    execute("CREATE OR REPLACE FUNCTION " + fName + "( input double ) " +
            "CALLED ON NULL INPUT " +
            "RETURNS double " +
            "LANGUAGE java\n" +
            "AS '\n" +
            " return input;\n" +
            "';");
    // check if replaced function returns correct result
    assertRows(execute("SELECT key, val, " + fName + "(val) FROM %s"),
               row(1, 1d, 1d),
               row(2, 2d, 2d),
               row(3, 3d, 3d)
    );
}
/**
 * Verifies keyspace scoping of UDFs: a function created in KEYSPACE_PER_TEST is not
 * visible unqualified from the current keyspace, but works fully qualified against a
 * table in its own keyspace.
 */
@Test
public void testFunctionInTargetKeyspace() throws Throwable
{
    createTable("CREATE TABLE %s (key int primary key, val double)");
    execute("CREATE TABLE " + KEYSPACE_PER_TEST + ".second_tab (key int primary key, val double)");
    String fName = createFunction(KEYSPACE_PER_TEST, "double",
                                  "CREATE OR REPLACE FUNCTION %s(val double) " +
                                  "RETURNS NULL ON NULL INPUT " +
                                  "RETURNS double " +
                                  "LANGUAGE JAVA " +
                                  "AS 'return Double.valueOf(val);';");
    execute("INSERT INTO %s (key, val) VALUES (?, ?)", 1, 1d);
    execute("INSERT INTO %s (key, val) VALUES (?, ?)", 2, 2d);
    execute("INSERT INTO %s (key, val) VALUES (?, ?)", 3, 3d);
    // Unqualified name (function lives in KEYSPACE_PER_TEST, query runs elsewhere) must fail.
    assertInvalidMessage("Unknown function",
                         "SELECT key, val, " + parseFunctionName(fName).name + "(val) FROM %s");
    execute("INSERT INTO " + KEYSPACE_PER_TEST + ".second_tab (key, val) VALUES (?, ?)", 1, 1d);
    execute("INSERT INTO " + KEYSPACE_PER_TEST + ".second_tab (key, val) VALUES (?, ?)", 2, 2d);
    execute("INSERT INTO " + KEYSPACE_PER_TEST + ".second_tab (key, val) VALUES (?, ?)", 3, 3d);
    // Fully qualified call in the function's own keyspace succeeds (identity function).
    assertRows(execute("SELECT key, val, " + fName + "(val) FROM " + KEYSPACE_PER_TEST + ".second_tab"),
               row(1, 1d, 1d),
               row(2, 2d, 2d),
               row(3, 3d, 3d)
    );
}
/**
 * Verifies that a user function does not shadow a native function of the same name:
 * unqualified now() still resolves to the system implementation, while the
 * user-defined function (which returns null) is invoked when fully qualified.
 * NOTE(review): the UDF name is generated by createFunction(); presumably the harness
 * names it to collide with a reserved/native name -- confirm against createFunction.
 */
@Test
public void testFunctionWithReservedName() throws Throwable
{
    execute("CREATE TABLE " + KEYSPACE_PER_TEST + ".second_tab (key int primary key, val double)");
    // Zero-argument UDF returning a (null) timestamp.
    String fName = createFunction(KEYSPACE_PER_TEST, "",
                                  "CREATE OR REPLACE FUNCTION %s() " +
                                  "RETURNS NULL ON NULL INPUT " +
                                  "RETURNS timestamp " +
                                  "LANGUAGE JAVA " +
                                  "AS 'return null;';");
    execute("INSERT INTO " + KEYSPACE_PER_TEST + ".second_tab (key, val) VALUES (?, ?)", 1, 1d);
    execute("INSERT INTO " + KEYSPACE_PER_TEST + ".second_tab (key, val) VALUES (?, ?)", 2, 2d);
    execute("INSERT INTO " + KEYSPACE_PER_TEST + ".second_tab (key, val) VALUES (?, ?)", 3, 3d);
    // ensure that system now() is executed
    UntypedResultSet rows = execute("SELECT key, val, now() FROM " + KEYSPACE_PER_TEST + ".second_tab");
    Assert.assertEquals(3, rows.size());
    UntypedResultSet.Row row = rows.iterator().next();
    // Native now() produces a real (non-null) timestamp.
    Date ts = row.getTimestamp(row.getColumns().get(2).name.toString());
    Assert.assertNotNull(ts);
    // ensure that KEYSPACE_PER_TEST's now() is executed
    rows = execute("SELECT key, val, " + fName + "() FROM " + KEYSPACE_PER_TEST + ".second_tab");
    Assert.assertEquals(3, rows.size());
    row = rows.iterator().next();
    // The UDF returns null, so the column must be absent in the result row.
    Assert.assertFalse(row.has(row.getColumns().get(2).name.toString()));
}
/**
 * Verifies that the 'system' keyspace is protected from UDF modification: creating or
 * dropping functions in 'system' (qualified or unqualified, since executeLocally()
 * defaults to 'system') must be rejected, while a same-named function in a user
 * keyspace is allowed.
 */
@Test
public void testFunctionInSystemKS() throws Throwable
{
    // Allowed: 'totimestamp' in a user keyspace, even though a native function of that name exists.
    execute("CREATE OR REPLACE FUNCTION " + KEYSPACE + ".totimestamp(val timeuuid) " +
            "RETURNS NULL ON NULL INPUT " +
            "RETURNS timestamp " +
            "LANGUAGE JAVA\n" +
            "AS 'return null;';");
    // Rejected: new function in 'system'.
    assertInvalidMessage("System keyspace 'system' is not user-modifiable",
                         "CREATE OR REPLACE FUNCTION system.jnft(val double) " +
                         "RETURNS NULL ON NULL INPUT " +
                         "RETURNS double " +
                         "LANGUAGE JAVA\n" +
                         "AS 'return null;';");
    // Rejected: replacing a native function in 'system'.
    assertInvalidMessage("System keyspace 'system' is not user-modifiable",
                         "CREATE OR REPLACE FUNCTION system.totimestamp(val timeuuid) " +
                         "RETURNS NULL ON NULL INPUT " +
                         "RETURNS timestamp " +
                         "LANGUAGE JAVA\n" +
                         "AS 'return null;';");
    assertInvalidMessage("System keyspace 'system' is not user-modifiable",
                         "DROP FUNCTION system.now");
    // KS for executeLocally() is system
    assertInvalidMessage("System keyspace 'system' is not user-modifiable",
                         "CREATE OR REPLACE FUNCTION jnft(val double) " +
                         "RETURNS NULL ON NULL INPUT " +
                         "RETURNS double " +
                         "LANGUAGE JAVA\n" +
                         "AS 'return null;';");
    assertInvalidMessage("System keyspace 'system' is not user-modifiable",
                         "CREATE OR REPLACE FUNCTION totimestamp(val timeuuid) " +
                         "RETURNS NULL ON NULL INPUT " +
                         "RETURNS timestamp " +
                         "LANGUAGE JAVA\n" +
                         "AS 'return null;';");
    assertInvalidMessage("System keyspace 'system' is not user-modifiable",
                         "DROP FUNCTION now");
}
/**
 * Verifies that creating a UDF in a keyspace that does not exist fails with a clear
 * "Keyspace ... doesn't exist" message.
 */
@Test
public void testFunctionNonExistingKeyspace() throws Throwable
{
    assertInvalidMessage("Keyspace 'this_ks_does_not_exist' doesn't exist",
                         "CREATE OR REPLACE FUNCTION this_ks_does_not_exist.jnft(val double) " +
                         "RETURNS NULL ON NULL INPUT " +
                         "RETURNS double " +
                         "LANGUAGE JAVA\n" +
                         "AS 'return null;';");
}
/**
 * Verifies that creating a UDF in a keyspace that has just been dropped fails the
 * same way as for a keyspace that never existed.
 */
@Test
public void testFunctionAfterOnDropKeyspace() throws Throwable
{
    // Drop the per-test keyspace first, then try to create a function in it.
    dropPerTestKeyspace();
    assertInvalidMessage("Keyspace '" + KEYSPACE_PER_TEST + "' doesn't exist",
                         "CREATE OR REPLACE FUNCTION " + KEYSPACE_PER_TEST + ".jnft(val double) " +
                         "RETURNS NULL ON NULL INPUT " +
                         "RETURNS double " +
                         "LANGUAGE JAVA\n" +
                         "AS 'return null;';");
}
/**
 * Verifies that a UDF cannot reference a user-defined type from another keyspace,
 * neither as its return type nor as an argument type (UDTs are keyspace-scoped).
 */
@Test
public void testWrongKeyspace() throws Throwable
{
    // UDT lives in KEYSPACE; the functions below are created in KEYSPACE_PER_TEST.
    String typeName = createType("CREATE TYPE %s (txt text, i int)");
    String type = KEYSPACE + '.' + typeName;
    // Cross-keyspace UDT as return type is rejected.
    assertInvalidMessage(String.format("Statement on keyspace %s cannot refer to a user type in keyspace %s; user types can only be used in the keyspace they are defined in",
                                       KEYSPACE_PER_TEST, KEYSPACE),
                         "CREATE FUNCTION " + KEYSPACE_PER_TEST + ".test_wrong_ks( val int ) " +
                         "CALLED ON NULL INPUT " +
                         "RETURNS " + type + " " +
                         "LANGUAGE java\n" +
                         "AS $$return val;$$;");
    // Cross-keyspace UDT as argument type is rejected too.
    assertInvalidMessage(String.format("Statement on keyspace %s cannot refer to a user type in keyspace %s; user types can only be used in the keyspace they are defined in",
                                       KEYSPACE_PER_TEST, KEYSPACE),
                         "CREATE FUNCTION " + KEYSPACE_PER_TEST + ".test_wrong_ks( val " + type + " ) " +
                         "CALLED ON NULL INPUT " +
                         "RETURNS int " +
                         "LANGUAGE java\n" +
                         "AS $$return val;$$;");
}
/**
 * Verifies DROP TYPE protection and prepared-statement invalidation: a UDT cannot be
 * dropped while referenced by a table or by a UDF; dropping the table invalidates the
 * prepared statement that used it, but the UDF (and the type it references) remain.
 */
@Test
public void testUserTypeDrop() throws Throwable
{
    String type = KEYSPACE + '.' + createType("CREATE TYPE %s (txt text, i int)");
    createTable("CREATE TABLE %s (key int primary key, udt frozen<" + type + ">)");
    // UDF taking the UDT as argument -> second reference to the type.
    String fName = createFunction(KEYSPACE, type,
                                  "CREATE FUNCTION %s( udt " + type + " ) " +
                                  "CALLED ON NULL INPUT " +
                                  "RETURNS int " +
                                  "LANGUAGE java " +
                                  "AS $$return " +
                                  " Integer.valueOf(udt.getInt(\"i\"));$$;");
    FunctionName fNameName = parseFunctionName(fName);
    Assert.assertEquals(1, Schema.instance.getFunctions(fNameName).size());
    // Prepare a statement that uses both the table and the UDF.
    ResultMessage.Prepared prepared = QueryProcessor.prepare(String.format("SELECT key, %s(udt) FROM %s.%s", fName, KEYSPACE, currentTable()),
                                                             ClientState.forInternalCalls());
    Assert.assertNotNull(QueryProcessor.instance.getPrepared(prepared.statementId));
    // UT still referenced by table
    assertInvalidMessage("Cannot drop user type", "DROP TYPE " + type);
    execute("DROP TABLE %s");
    // UT still referenced by UDF
    assertInvalidMessage("as it is still used by function", "DROP TYPE " + type);
    // Dropping the table must have evicted the prepared statement from the cache.
    Assert.assertNull(QueryProcessor.instance.getPrepared(prepared.statementId));
    // function stays
    Assert.assertEquals(1, Schema.instance.getFunctions(fNameName).size());
}
/**
 * Verifies that CREATE FUNCTION rejects a signature declaring two arguments with the
 * same name ('val' twice).
 */
@Test
public void testDuplicateArgNames() throws Throwable
{
    assertInvalidMessage("Duplicate argument names for given function",
                         "CREATE OR REPLACE FUNCTION " + KEYSPACE + ".scrinv(val double, val text) " +
                         "RETURNS NULL ON NULL INPUT " +
                         "RETURNS text " +
                         "LANGUAGE javascript\n" +
                         "AS '\"foo bar\";';");
}
/**
 * Verifies that CREATE OR REPLACE must keep a function's null-handling mode:
 * switching between CALLED ON NULL INPUT and RETURNS NULL ON NULL INPUT is invalid,
 * while replacing with the same mode succeeds.
 */
@Test
public void testReplaceAllowNulls() throws Throwable
{
    // One function per null-handling mode.
    String fNulls = createFunction(KEYSPACE,
                                   "int",
                                   "CREATE OR REPLACE FUNCTION %s(val int) " +
                                   "CALLED ON NULL INPUT " +
                                   "RETURNS text " +
                                   "LANGUAGE java\n" +
                                   "AS 'return \"foo bar\";';");
    String fNoNulls = createFunction(KEYSPACE,
                                     "int",
                                     "CREATE OR REPLACE FUNCTION %s(val int) " +
                                     "RETURNS NULL ON NULL INPUT " +
                                     "RETURNS text " +
                                     "LANGUAGE java\n" +
                                     "AS 'return \"foo bar\";';");
    // Replacing with the opposite null-handling mode is rejected.
    assertInvalid("CREATE OR REPLACE FUNCTION " + fNulls + "(val int) " +
                  "RETURNS NULL ON NULL INPUT " +
                  "RETURNS text " +
                  "LANGUAGE java\n" +
                  "AS 'return \"foo bar\";';");
    assertInvalid("CREATE OR REPLACE FUNCTION " + fNoNulls + "(val int) " +
                  "CALLED ON NULL INPUT " +
                  "RETURNS text " +
                  "LANGUAGE java\n" +
                  "AS 'return \"foo bar\";';");
    // Replacing with the same null-handling mode succeeds.
    execute("CREATE OR REPLACE FUNCTION " + fNulls + "(val int) " +
            "CALLED ON NULL INPUT " +
            "RETURNS text " +
            "LANGUAGE java\n" +
            "AS 'return \"foo bar\";';");
    execute("CREATE OR REPLACE FUNCTION " + fNoNulls + "(val int) " +
            "RETURNS NULL ON NULL INPUT " +
            "RETURNS text " +
            "LANGUAGE java\n" +
            "AS 'return \"foo bar\";';");
}
/**
 * Verifies behavior of a "broken" UDF (one that failed to load/compile): the schema
 * entry is swapped for a UDFunction.createBrokenFunction placeholder, and invoking it
 * must surface the stored InvalidRequestException ("foo bar is broken").
 */
@Test
public void testBrokenFunction() throws Throwable
{
    createTable("CREATE TABLE %s (key int primary key, dval double)");
    execute("INSERT INTO %s (key, dval) VALUES (?, ?)", 1, 1d);
    String fName = createFunction(KEYSPACE_PER_TEST, "double",
                                  "CREATE OR REPLACE FUNCTION %s(val double) " +
                                  "RETURNS NULL ON NULL INPUT " +
                                  "RETURNS double " +
                                  "LANGUAGE JAVA\n" +
                                  "AS 'throw new RuntimeException();';");
    // Replace the real function in the schema with a broken placeholder carrying a fixed error.
    KeyspaceMetadata ksm = Schema.instance.getKeyspaceMetadata(KEYSPACE_PER_TEST);
    UDFunction f = (UDFunction) ksm.functions.get(parseFunctionName(fName)).iterator().next();
    UDFunction broken = UDFunction.createBrokenFunction(f.name(),
                                                        f.argNames(),
                                                        f.argTypes(),
                                                        f.returnType(),
                                                        true,
                                                        "java",
                                                        f.body(),
                                                        new InvalidRequestException("foo bar is broken"));
    Schema.instance.load(ksm.withSwapped(ksm.functions.without(f.name(), f.argTypes()).with(broken)));
    // Invoking the broken function must fail with the placeholder's message.
    assertInvalidThrowMessage("foo bar is broken", InvalidRequestException.class,
                              "SELECT key, " + fName + "(dval) FROM %s");
}
/**
 * Verifies how a UDF runtime failure is reported over the native protocol: protocol
 * V4+ clients get FunctionExecutionException, pre-V4 clients get InvalidQueryException
 * (the dedicated error code did not exist before V4).
 */
@Test
public void testFunctionExecutionExceptionNet() throws Throwable
{
    createTable("CREATE TABLE %s (key int primary key, dval double)");
    execute("INSERT INTO %s (key, dval) VALUES (?, ?)", 1, 1d);
    // UDF that always throws at execution time.
    String fName = createFunction(KEYSPACE_PER_TEST, "double",
                                  "CREATE OR REPLACE FUNCTION %s(val double) " +
                                  "RETURNS NULL ON NULL INPUT " +
                                  "RETURNS double " +
                                  "LANGUAGE JAVA\n" +
                                  "AS 'throw new RuntimeException();'");
    for (ProtocolVersion version : PROTOCOL_VERSIONS)
    {
        try
        {
            assertRowsNet(version,
                          executeNet(version, "SELECT " + fName + "(dval) FROM %s WHERE key = 1"));
            // The query must never succeed.
            Assert.fail();
        }
        catch (com.datastax.driver.core.exceptions.FunctionExecutionException fee)
        {
            // Java driver neither throws FunctionExecutionException nor does it set the exception code correctly
            Assert.assertTrue(version.isGreaterOrEqualTo(ProtocolVersion.V4));
        }
        catch (InvalidQueryException e)
        {
            // Pre-V4 protocol reports the failure as a generic invalid query.
            Assert.assertTrue(version.isSmallerThan(ProtocolVersion.V4));
        }
    }
}
/**
 * Verifies UDF handling of empty (zero-length, but non-null) values for text, ascii,
 * blob and int columns, across both null-handling modes.
 * <p>
 * Function-name scheme (unchanged from the original test):
 * first letter  - argument/return type: S=string(text), B=blob, I=int;
 * second letter - body: R=returns the argument as-is, C=returns a constant ("", empty blob, 0);
 * third letter  - null handling: C=CALLED ON NULL INPUT, N=RETURNS NULL ON NULL INPUT.
 * <p>
 * Expected: empty text/ascii/blob values are real values and pass through unchanged;
 * an empty int value deserializes to null, so only the constant-returning
 * CALLED-ON-NULL function (fNameICC) produces a non-null result for it.
 */
@Test
public void testEmptyString() throws Throwable
{
    createTable("CREATE TABLE %s (key int primary key, sval text, aval ascii, bval blob, empty_int int)");
    // blobAsInt(0x) stores a zero-length int value (distinct from null at storage level).
    execute("INSERT INTO %s (key, sval, aval, bval, empty_int) VALUES (?, ?, ?, ?, blobAsInt(0x))", 1, "", "", ByteBuffer.allocate(0));

    String fNameSRC = createSimpleFn("text", "return val;", true);
    String fNameSCC = createSimpleFn("text", "return \"\";", true);
    String fNameSRN = createSimpleFn("text", "return val;", false);
    String fNameSCN = createSimpleFn("text", "return \"\";", false);
    String fNameBRC = createSimpleFn("blob", "return val;", true);
    String fNameBCC = createSimpleFn("blob", "return ByteBuffer.allocate(0);", true);
    String fNameBRN = createSimpleFn("blob", "return val;", false);
    String fNameBCN = createSimpleFn("blob", "return ByteBuffer.allocate(0);", false);
    String fNameIRC = createSimpleFn("int", "return val;", true);
    String fNameICC = createSimpleFn("int", "return 0;", true);
    String fNameIRN = createSimpleFn("int", "return val;", false);
    String fNameICN = createSimpleFn("int", "return 0;", false);

    // Empty strings are valid values: every text/ascii variant yields "".
    assertRows(execute("SELECT " + fNameSRC + "(sval) FROM %s"), row(""));
    assertRows(execute("SELECT " + fNameSRN + "(sval) FROM %s"), row(""));
    assertRows(execute("SELECT " + fNameSCC + "(sval) FROM %s"), row(""));
    assertRows(execute("SELECT " + fNameSCN + "(sval) FROM %s"), row(""));
    assertRows(execute("SELECT " + fNameSRC + "(aval) FROM %s"), row(""));
    assertRows(execute("SELECT " + fNameSRN + "(aval) FROM %s"), row(""));
    assertRows(execute("SELECT " + fNameSCC + "(aval) FROM %s"), row(""));
    assertRows(execute("SELECT " + fNameSCN + "(aval) FROM %s"), row(""));
    // Empty blobs likewise pass through / are produced unchanged.
    assertRows(execute("SELECT " + fNameBRC + "(bval) FROM %s"), row(ByteBufferUtil.EMPTY_BYTE_BUFFER));
    assertRows(execute("SELECT " + fNameBRN + "(bval) FROM %s"), row(ByteBufferUtil.EMPTY_BYTE_BUFFER));
    assertRows(execute("SELECT " + fNameBCC + "(bval) FROM %s"), row(ByteBufferUtil.EMPTY_BYTE_BUFFER));
    assertRows(execute("SELECT " + fNameBCN + "(bval) FROM %s"), row(ByteBufferUtil.EMPTY_BYTE_BUFFER));
    // An empty int deserializes to null: only CALLED-ON-NULL + constant body returns 0.
    assertRows(execute("SELECT " + fNameIRC + "(empty_int) FROM %s"), row(new Object[]{ null }));
    assertRows(execute("SELECT " + fNameIRN + "(empty_int) FROM %s"), row(new Object[]{ null }));
    assertRows(execute("SELECT " + fNameICC + "(empty_int) FROM %s"), row(0));
    assertRows(execute("SELECT " + fNameICN + "(empty_int) FROM %s"), row(new Object[]{ null }));
}

/**
 * Creates a single-argument Java UDF in KEYSPACE_PER_TEST whose argument and return
 * type are both {@code type}, with the given function body. The generated CQL is
 * byte-identical to the statements this test issued before the refactoring.
 *
 * @param type CQL type used for both the argument and the return value.
 * @param fnBody Java body of the function (e.g. {@code "return val;"}).
 * @param calledOnNull {@code true} for CALLED ON NULL INPUT, {@code false} for RETURNS NULL ON NULL INPUT.
 * @return the fully-qualified generated function name.
 */
private String createSimpleFn(String type, String fnBody, boolean calledOnNull) throws Throwable
{
    return createFunction(KEYSPACE_PER_TEST, type,
                          "CREATE OR REPLACE FUNCTION %s(val " + type + ") " +
                          (calledOnNull ? "CALLED ON NULL INPUT " : "RETURNS NULL ON NULL INPUT ") +
                          "RETURNS " + type + " " +
                          "LANGUAGE JAVA\n" +
                          "AS '" + fnBody + "'");
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.util;
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import org.apache.ignite.IgniteLogger;
import org.apache.ignite.internal.util.lang.GridAbsClosure;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.lang.IgniteInClosure;
import org.jetbrains.annotations.Nullable;
/**
* Wrapper class around the {@link Process} suited to run any Java class as separate java process.
* <p>
* This launcher supports simple interchange-with-a-process protocol to talk (in fact, listen) to process.
* For the moment the only message in protocol is run process PID. Class-to-run should print it's PID
* prefixed with {@code #PID_MSG_PREFIX} to tell the GridJavaProcess it's PID.
* <p>
* Protocol transport is any of (or both) <i>system.out</i> and/or <i>system.err</i>,
* so any protocol message should be printed in the class-to-run.
* <p>
* NOTE 1: For the moment inner class running is not supported.
* <p>
* NOTE 2: This util class should work fine on Linux, Mac OS and Windows.
*/
public final class GridJavaProcess {
/** Internal protocol message prefix saying that the next text in the outputted line is pid. */
public static final String PID_MSG_PREFIX = "my_pid_is:";
/** Logger */
private IgniteLogger log;
/** Wrapped system process. */
private Process proc;
/** Pid of wrapped process. Made as array to be changeable in nested static class. */
private volatile String pid = "-1";
/** system.out stream grabber for process in which user class is running. */
private ProcessStreamGrabber osGrabber;
/** system.err stream grabber for process in which user class is running. */
private ProcessStreamGrabber esGrabber;
/** Closure to be called when process termination is detected. */
private GridAbsClosure procKilledC;
/**
* Private constructor to promote factory method usage.
*/
private GridJavaProcess() {
// No-op
}
/**
* Executes main() method of the given class in a separate system process.
*
* @param cls Class with main() method to be run.
* @param params main() method parameters.
* @param printC Optional closure to be called each time wrapped process prints line to system.out or system.err.
* @param procKilledC Optional closure to be called when process termination is detected.
* @param log Log to use.
* @return Wrapper around {@link Process}
* @throws Exception If any problem occurred.
*/
public static GridJavaProcess exec(Class cls, String params, @Nullable IgniteLogger log,
@Nullable IgniteInClosure<String> printC, @Nullable GridAbsClosure procKilledC) throws Exception {
return exec(cls.getCanonicalName(), params, log, printC, procKilledC, null, null, null);
}
/**
* Executes main() method of the given class in a separate system process.
*
* @param cls Class with main() method to be run.
* @param params main() method parameters.
* @param printC Optional closure to be called each time wrapped process prints line to system.out or system.err.
* @param procKilledC Optional closure to be called when process termination is detected.
* @param log Log to use.
* @param jvmArgs JVM arguments to use.
* @param cp Additional classpath.
* @return Wrapper around {@link Process}
* @throws Exception If any problem occurred.
*/
public static GridJavaProcess exec(Class cls, String params, @Nullable IgniteLogger log,
@Nullable IgniteInClosure<String> printC, @Nullable GridAbsClosure procKilledC,
@Nullable Collection<String> jvmArgs, @Nullable String cp) throws Exception {
return exec(cls.getCanonicalName(), params, log, printC, procKilledC, null, jvmArgs, cp);
}
/**
* Executes main() method of the given class in a separate system process.
*
* @param clsName Class with main() method to be run.
* @param params main() method parameters.
* @param log Log to use.
* @param printC Optional closure to be called each time wrapped process prints line to system.out or system.err.
* @param procKilledC Optional closure to be called when process termination is detected.
* @param javaHome Java home location. The process will be started under given JVM.
* @param jvmArgs JVM arguments to use.
* @param cp Additional classpath.
* @return Wrapper around {@link Process}
* @throws Exception If any problem occurred.
*/
public static GridJavaProcess exec(String clsName, String params, @Nullable IgniteLogger log,
@Nullable IgniteInClosure<String> printC, @Nullable GridAbsClosure procKilledC,
@Nullable String javaHome, @Nullable Collection<String> jvmArgs, @Nullable String cp) throws Exception {
GridJavaProcess gjProc = new GridJavaProcess();
gjProc.log = log;
gjProc.procKilledC = procKilledC;
List<String> procParams = params == null || params.isEmpty() ?
Collections.<String>emptyList() : Arrays.asList(params.split(" "));
List<String> procCommands = new ArrayList<>();
String javaBin = (javaHome == null ? System.getProperty("java.home") : javaHome) +
File.separator + "bin" + File.separator + "java";
procCommands.add(javaBin);
procCommands.addAll(jvmArgs == null ? U.jvmArgs() : jvmArgs);
if (jvmArgs == null || (!jvmArgs.contains("-cp") && !jvmArgs.contains("-classpath"))) {
String classpath = System.getProperty("java.class.path");
String sfcp = System.getProperty("surefire.test.class.path");
if (sfcp != null)
classpath += System.getProperty("path.separator") + sfcp;
if (cp != null)
classpath += System.getProperty("path.separator") + cp;
procCommands.add("-cp");
procCommands.add(classpath);
}
procCommands.add(clsName);
procCommands.addAll(procParams);
ProcessBuilder builder = new ProcessBuilder(procCommands);
builder.redirectErrorStream(true);
Process proc = builder.start();
gjProc.osGrabber = gjProc.new ProcessStreamGrabber(proc.getInputStream(), printC);
gjProc.esGrabber = gjProc.new ProcessStreamGrabber(proc.getErrorStream(), printC);
gjProc.osGrabber.start();
gjProc.esGrabber.start();
gjProc.proc = proc;
return gjProc;
}
/**
* Kills the java process.
*
* @throws Exception If any problem occurred.
*/
public void kill() throws Exception {
Process killProc = U.isWindows() ?
Runtime.getRuntime().exec(new String[] {"taskkill", "/pid", pid, "/f", "/t"}) :
Runtime.getRuntime().exec(new String[] {"kill", "-9", pid});
killProc.waitFor();
int exitVal = killProc.exitValue();
if (exitVal != 0 && log.isInfoEnabled())
log.info(String.format("Abnormal exit value of %s for pid %s", exitVal, pid));
if (procKilledC != null)
procKilledC.apply();
U.interrupt(osGrabber);
U.interrupt(esGrabber);
U.join(osGrabber, log);
U.join(esGrabber, log);
}
/**
* Kills process using {@link Process#destroy()}.
*/
public void killProcess() {
proc.destroy();
if (procKilledC != null)
procKilledC.apply();
U.interrupt(osGrabber);
U.interrupt(esGrabber);
U.join(osGrabber, log);
U.join(esGrabber, log);
}
/**
* Returns pid of the java process.
* Wrapped java class should print it's PID to system.out or system.err to make wrapper know about it.
*
* @return Pid of the java process or -1 if pid is unknown.
*/
public int getPid() {
return Integer.valueOf(pid);
}
/**
* Exposes wrapped java Process.
*
* @return Wrapped java process.
*/
public Process getProcess() {
return proc;
}
/**
* Class which grabs sys.err and sys.out of the running process in separate thread
* and implements the interchange-with-a-process protocol.
*/
private class ProcessStreamGrabber extends Thread {
/** Stream to grab. */
private final InputStream streamToGrab;
/** Closure to be called when process termination is detected. */
private final IgniteInClosure<String> printC;
/**
* Creates the ProcessStreamGrabber bounded to the given Process.
*
* @param streamToGrab Stream to grab.
* @param printC Optional closure to be called each time wrapped process prints line to system.out or system.err.
*/
ProcessStreamGrabber(InputStream streamToGrab, @Nullable IgniteInClosure<String> printC) {
this.streamToGrab = streamToGrab;
this.printC = printC;
}
/**
* Starts the ProcessStreamGrabber.
*/
@Override public void run() {
try {
BufferedReader br = new BufferedReader(new InputStreamReader(streamToGrab));
String line;
while ((line = br.readLine()) != null && !isInterrupted()) {
if (line.startsWith(PID_MSG_PREFIX))
pid = line.substring(PID_MSG_PREFIX.length());
else
if (printC != null)
printC.apply(line);
}
}
catch (IOException e) {
U.error(log, "Caught IOException while grabbing stream", e);
try {
// Check if process is still alive.
proc.exitValue();
if (procKilledC != null)
procKilledC.apply();
}
catch (IllegalThreadStateException e1) {
if (!interrupted())
U.error(log, "Failed to get exit value from process.", e1);
}
}
}
/**
* Interrupts a thread and closes process streams.
*/
@Override public void interrupt() {
super.interrupt();
// Close all Process streams to free allocated resources, see http://kylecartmell.com/?p=9.
U.closeQuiet(proc.getErrorStream());
U.closeQuiet(proc.getInputStream());
U.closeQuiet(proc.getOutputStream());
}
}
}
| |
/*
* Copyright 2015 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package io.reactivex.netty.test.util;
import io.reactivex.netty.client.events.ClientEventListener;
import io.reactivex.netty.test.util.MockConnectionEventListener.Event;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.TimeUnit;
import static org.hamcrest.MatcherAssert.*;
import static org.hamcrest.Matchers.*;
public class MockClientEventListener extends ClientEventListener {
public enum ClientEvent {
ConnectStart, ConnectSuccess, ConnectFailed, ReleaseStart, ReleaseSuccess, ReleaseFailed, Eviction, Reuse,
AcquireStart, AcquireSuccess, AcquireFailed
}
private final List<ClientEvent> methodsCalled = new ArrayList<>();
private long duration;
private TimeUnit timeUnit;
private Throwable recievedError;
private final MockConnectionEventListener delegate = new MockConnectionEventListener();
@Override
public void onConnectStart() {
methodsCalled.add(ClientEvent.ConnectStart);
}
@Override
public void onConnectSuccess(long duration, TimeUnit timeUnit) {
this.duration = duration;
this.timeUnit = timeUnit;
methodsCalled.add(ClientEvent.ConnectSuccess);
}
@Override
public void onConnectFailed(long duration, TimeUnit timeUnit, Throwable recievedError) {
methodsCalled.add(ClientEvent.ConnectFailed);
this.duration = duration;
this.timeUnit = timeUnit;
this.recievedError = recievedError;
}
@Override
public void onPoolReleaseStart() {
methodsCalled.add(ClientEvent.ReleaseStart);
}
@Override
public void onPoolReleaseSuccess(long duration, TimeUnit timeUnit) {
methodsCalled.add(ClientEvent.ReleaseSuccess);
this.duration = duration;
this.timeUnit = timeUnit;
}
@Override
public void onPoolReleaseFailed(long duration, TimeUnit timeUnit, Throwable recievedError) {
methodsCalled.add(ClientEvent.ReleaseFailed);
this.duration = duration;
this.timeUnit = timeUnit;
this.recievedError = recievedError;
}
@Override
public void onPooledConnectionEviction() {
methodsCalled.add(ClientEvent.Eviction);
}
@Override
public void onPooledConnectionReuse() {
methodsCalled.add(ClientEvent.Reuse);
}
@Override
public void onPoolAcquireStart() {
methodsCalled.add(ClientEvent.AcquireStart);
}
@Override
public void onPoolAcquireSuccess(long duration, TimeUnit timeUnit) {
methodsCalled.add(ClientEvent.AcquireSuccess);
this.duration = duration;
this.timeUnit = timeUnit;
}
@Override
public void onPoolAcquireFailed(long duration, TimeUnit timeUnit, Throwable recievedError) {
this.duration = duration;
this.timeUnit = timeUnit;
this.recievedError = recievedError;
methodsCalled.add(ClientEvent.AcquireFailed);
}
// The remaining callbacks are not recorded by this listener; they are
// forwarded verbatim to the wrapped delegate listener.
@Override
public void onConnectionCloseFailed(long duration, TimeUnit timeUnit,
        Throwable recievedError) {
    delegate.onConnectionCloseFailed(duration, timeUnit, recievedError);
}
@Override
public void onConnectionCloseSuccess(long duration, TimeUnit timeUnit) {
    delegate.onConnectionCloseSuccess(duration, timeUnit);
}
@Override
public void onConnectionCloseStart() {
    delegate.onConnectionCloseStart();
}
@Override
public void onWriteFailed(long duration, TimeUnit timeUnit, Throwable throwable) {
    delegate.onWriteFailed(duration, timeUnit, throwable);
}
@Override
public void onWriteSuccess(long duration, TimeUnit timeUnit, long bytesWritten) {
    delegate.onWriteSuccess(duration, timeUnit, bytesWritten);
}
@Override
public void onWriteStart() {
    delegate.onWriteStart();
}
@Override
public void onFlushFailed(long duration, TimeUnit timeUnit, Throwable throwable) {
    delegate.onFlushFailed(duration, timeUnit, throwable);
}
@Override
public void onFlushSuccess(long duration, TimeUnit timeUnit) {
    delegate.onFlushSuccess(duration, timeUnit);
}
@Override
public void onFlushStart() {
    delegate.onFlushStart();
}
@Override
public void onByteRead(long bytesRead) {
    delegate.onByteRead(bytesRead);
}
@Override
public void onCustomEvent(Object event) {
    delegate.onCustomEvent(event);
}
@Override
public void onCustomEvent(Object event, long duration, TimeUnit timeUnit) {
    delegate.onCustomEvent(event, duration, timeUnit);
}
@Override
public void onCustomEvent(Object event, long duration, TimeUnit timeUnit, Throwable throwable) {
    delegate.onCustomEvent(event, duration, timeUnit, throwable);
}
@Override
public void onCustomEvent(Object event, Throwable throwable) {
    delegate.onCustomEvent(event, throwable);
}
@Override
public void onCompleted() {
    delegate.onCompleted();
}
/** Forwards the assertion to the wrapped delegate for its own recorded events. */
public void assertMethodsCalled(Event... events) {
    delegate.assertMethodsCalled(events);
}
/** Asserts that exactly the given client events were recorded, in order. */
public void assertMethodsCalled(ClientEvent... events) {
    assertThat("Unexpected methods called count.", methodsCalled, hasSize(events.length));
    assertThat("Unexpected methods called.", methodsCalled, contains(events));
}
// Duration reported by the most recent timed callback recorded here.
public long getDuration() {
    return duration;
}
// Time unit reported by the most recent timed callback recorded here.
public TimeUnit getTimeUnit() {
    return timeUnit;
}
// Error reported by the most recent failure callback recorded here.
public Throwable getRecievedError() {
    return recievedError;
}
}
| |
/*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.util.indexing.containers;
import com.intellij.util.indexing.ValueContainer;
import gnu.trove.TIntProcedure;
/**
 * A sorted set of positive ints backed by a single sorted array.
 *
 * Removal does not shift the array: a removed id stays in its slot but is
 * stored negated (a "tombstone"), so lookups stay binary-search based and
 * {@link #add(int)} can resurrect a tombstone in place. {@link #compact()}
 * squeezes tombstones out once they dominate. {@code mySetLength} counts used
 * slots including tombstones; {@code mySize} counts only live (positive) ids.
 */
public class SortedIdSet implements Cloneable, RandomAccessIntContainer {
  // Backing storage; live ids are positive, tombstones hold the negated id.
  private int[] mySet;
  // Number of used slots in mySet, including tombstones.
  private int mySetLength;
  // Number of live ids (mySetLength minus tombstone count).
  private int mySize;

  public SortedIdSet(final int initialCapacity) {
    // Capacities are expected to stay small; larger sets are promoted to
    // IdBitSet via ensureContainerCapacity().
    assert initialCapacity < Short.MAX_VALUE;
    mySet = new int[initialCapacity]; // todo slightly increase size
  }

  /** Wraps an existing sorted array containing {@code size} live ids (no tombstones). */
  public SortedIdSet(final int[] array, int size) {
    mySet = array;
    mySetLength = mySize = size;
  }

  public boolean isEmpty() {
    return mySize == 0;
  }

  public int size() {
    return mySize;
  }

  /**
   * Adds a positive id; returns false if it is already present (live).
   * Re-adding a tombstoned id flips its slot back to positive in place.
   */
  public boolean add(int value) {
    assert value > 0;
    int pos;
    if (mySetLength == 0 || (mySetLength > 0 && Math.abs(mySet[mySetLength -1]) < value)) {
      pos = -mySetLength-1; // most of the time during bulk indexing we add near the end
    }
    else {
      pos = binarySearch(mySet, 0, mySetLength, value);
    }
    if (pos >= 0) {
      if (mySet[pos] > 0) return false;
      // Encode the tombstone index as an "insertion point" so the decode
      // below (pos = -pos - 1) round-trips back to the same slot.
      pos = -pos - 1; // found removed
    }
    if (mySetLength == mySet.length) {
      // Grow: double while small, then +20% to limit waste.
      int nextArraySize = mySet.length < 1024 ? mySet.length << 1 : mySet.length + mySet.length / 5;
      int[] newSet = new int[nextArraySize];
      System.arraycopy(mySet, 0, newSet, 0, mySet.length);
      mySet = newSet;
    }
    pos = -pos - 1;
    boolean lengthIsIncreased = pos == mySetLength; // insert at end
    // Only shift when the target slot holds a different id; an equal abs value
    // means we are resurrecting a tombstone in place.
    if (!lengthIsIncreased && Math.abs(mySet[pos]) != value) { // todo we can shift until first removed
      System.arraycopy(mySet, pos, mySet, pos + 1, mySetLength - pos);
      lengthIsIncreased = true;
    }
    mySet[pos] = value;
    ++mySize;
    if (lengthIsIncreased) ++mySetLength;
    return true;
  }

  /**
   * Removes a positive id; returns false if absent or already tombstoned.
   * The slot is negated rather than shifted out (see class comment).
   */
  public boolean remove(int value) {
    assert value > 0;
    int pos = binarySearch(mySet, 0, mySetLength, value);
    if (pos < 0 || mySet[pos] < 0) return false;
    mySet[pos] = -value;
    //if (pos != mySetLength - 1) System.arraycopy(mySet, pos + 1, mySet, pos, mySetLength - pos - 1);
    --mySize;
    //--mySetLength;
    return true;
  }

  @Override
  public IntIdsIterator intIterator() {
    return new Iterator();
  }

  @Override
  public ValueContainer.IntPredicate intPredicate() {
    return new ValueContainer.IntPredicate() {
      @Override
      public boolean contains(int id) {
        return SortedIdSet.this.contains(id);
      }
    };
  }

  /** Ascending iterator over live ids; tombstones are skipped via findNext(). */
  private class Iterator implements IntIdsIterator {
    // Index of the next live slot, or -1 when exhausted.
    private int myCursor;

    Iterator() {
      myCursor = findNext(0);
    }

    @Override
    public boolean hasNext() {
      return myCursor != -1;
    }

    @Override
    public int next() {
      int result = get(myCursor);
      myCursor = findNext(myCursor + 1);
      return result;
    }

    @Override
    public int size() {
      return SortedIdSet.this.size();
    }

    @Override
    public boolean hasAscendingOrder() {
      return true;
    }

    @Override
    public IntIdsIterator createCopyInInitialState() {
      return new Iterator();
    }
  }

  /**
   * Standard binary search over [off, length), comparing absolute values so
   * tombstones keep participating in the ordering. Returns the index when
   * found (live or tombstoned), otherwise -(insertionPoint) - 1.
   */
  private static int binarySearch(int[] set, int off, int length, int key) {
    int low = off;
    int high = length - 1;
    while (low <= high) {
      int mid = (low + high) >>> 1;
      int midVal = Math.abs(set[mid]);
      if (midVal < key)
        low = mid + 1;
      else if (midVal > key)
        high = mid - 1;
      else
        return mid; // key found
    }
    return -(low + 1); // key not found.
  }

  /** Applies {@code procedure} to each live id in ascending order; stops early on false. */
  public void forEach(TIntProcedure procedure) {
    for(int i = 0; i < mySetLength; ++i) {
      int value = mySet[i];
      if (value > 0 && !procedure.execute(value)) break;
    }
  }

  public boolean contains(int value) {
    if(value <= 0) return false;
    int pos = binarySearch(mySet, 0, mySetLength, value);
    // A hit on a tombstoned (negative) slot still counts as absent.
    return pos >= 0 && mySet[pos] > 0;
  }

  @Override
  public Object clone() {
    try {
      SortedIdSet set = (SortedIdSet)super.clone();
      // Deep-copy the backing array so the clone is independent.
      set.mySet = mySet.clone();
      return set;
    }
    catch (CloneNotSupportedException e) {
      throw new RuntimeException(e);
    }
  }

  /**
   * Squeezes tombstones out of the array when more than half the used slots
   * are dead (and the array is non-trivially sized), shifting live ids left.
   */
  public void compact() {
    if(2 * mySize < mySetLength && mySetLength > 5) {
      int positivePosition = -1;
      for(int i = 0; i < mySetLength; ++i) {
        if (mySet[i] < 0) {
          // Skip a run of tombstones, then copy the next live id down.
          while(i < mySetLength && mySet[i] < 0) ++i;
          if (i == mySetLength) {
            break;
          } else {
            mySet[++positivePosition] = mySet[i];
          }
        } else {
          ++positivePosition;
          if (i != positivePosition) mySet[positivePosition] = mySet[i];
        }
      }
      // todo slightly decrease size
      // The short cast is safe: capacity is asserted < Short.MAX_VALUE.
      mySetLength = (short)(positivePosition + 1);
    }
  }

  /**
   * Ensures room for {@code count} more ids; may return a different container
   * (an IdBitSet) when the required size exceeds ChangeBufferingList.MAX_FILES.
   */
  public RandomAccessIntContainer ensureContainerCapacity(int count) {
    int newSize = mySetLength + count;
    if (newSize < mySet.length) return this;
    if (newSize > ChangeBufferingList.MAX_FILES) {
      return new IdBitSet(this, count);
    }
    newSize = ChangeBufferingList.calcNextArraySize(mySet.length, newSize);
    assert newSize < Short.MAX_VALUE;
    int[] newSet = new int[newSize]; // todo slightly increase size and compact
    System.arraycopy(mySet, 0, newSet, 0, mySetLength);
    mySet = newSet;
    return this;
  }

  /** Returns the index of the first live slot at or after {@code i}, or -1. */
  public int findNext(int i) {
    while(i < mySetLength) {
      if (mySet[i] > 0) return i;
      ++i;
    }
    return -1;
  }

  /** Returns the live id at {@code cursor}; the slot must not be a tombstone. */
  public int get(int cursor) {
    assert cursor < mySetLength;
    int value = mySet[cursor];
    assert value > 0;
    return value;
  }
}
| |
/*******************************************************************************
* Copyright 2013 Michael Marconi
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
******************************************************************************/
package oncue.backingstore;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import org.joda.time.DateTime;
import org.joda.time.Duration;
import org.json.simple.JSONValue;
import com.typesafe.config.Config;
import akka.actor.ActorSystem;
import akka.event.Logging;
import akka.event.LoggingAdapter;
import oncue.common.messages.Job;
import oncue.common.messages.Job.State;
import oncue.common.settings.Settings;
import redis.clients.jedis.Jedis;
import redis.clients.jedis.JedisPool;
import redis.clients.jedis.JedisPoolConfig;
import redis.clients.jedis.Protocol;
import redis.clients.jedis.Transaction;
import redis.clients.jedis.exceptions.JedisException;
public class RedisBackingStore extends AbstractBackingStore {
/**
* An AutoCloseable wrapper that manages fetching a connection from the redis connection pool
* and safely returning it when finished.
*
* This class proxies methods from Jedis for convenience.
*/
public static class RedisConnection implements AutoCloseable {
// A Redis connection pool
private static JedisPool redisPool;
private Jedis connection;
public RedisConnection() {
if (redisPool == null) {
redisPool = new JedisPool(new JedisPoolConfig(), host, port,
Protocol.DEFAULT_TIMEOUT, null);
}
this.connection = redisPool.getResource();
}
@Override
public void close() {
this.connection.close();
}
public Long incr(String key) {
return this.connection.incr(key);
}
public Transaction multi() {
return this.connection.multi();
}
public void lpush(String key, String value) {
this.connection.lpush(key, value);
}
public List<String> lrange(String key, int start, int end) {
return this.connection.lrange(key, start, end);
}
public String hget(String key, String field) {
return this.connection.hget(key, field);
}
public void hset(String key, String field, String value) {
this.connection.hset(key, field, value);
}
public void lrem(String key, int count, String value) {
this.connection.lrem(key, count, value);
}
public void del(String key) {
this.connection.del(key);
}
public Object rpoplpush(String srckey, String dstkey) {
return this.connection.rpoplpush(srckey, dstkey);
}
/**
* Flush the entire redis database. This should only be used in tests.
*/
public void flushDB() {
this.connection.flushDB();
}
public boolean exists(String key) {
return this.connection.exists(key);
}
public Long llen(String key) {
return this.connection.llen(key);
}
public List<String> brpop(int timeout, String key) {
return this.connection.brpop(timeout, key);
}
}
// Redis host config key
private static final String REDIS_HOST = "oncue.scheduler.backing-store.redis.host";
// Redis port config key
private static final String REDIS_PORT = "oncue.scheduler.backing-store.redis.port";
// Redis port
private static int port = Protocol.DEFAULT_PORT;
// The jobs that have completed successfully
public static final String COMPLETED_JOBS = "oncue:jobs:complete";
// The jobs that have failed
public static final String FAILED_JOBS = "oncue:jobs:failed";
// Redis host
private static String host = "localhost";
// The total count of persisted jobs
public static final String JOB_COUNT_KEY = "oncue:job_count";
// The time the job was enqueued
public static final String JOB_ENQUEUED_AT = "job_enqueued_at";
// The time the job was started
public static final String JOB_STARTED_AT = "job_started_at";
// The time the job was completed
public static final String JOB_COMPLETED_AT = "job_completed_at";
// The message associated with a failed job
public static final String JOB_ERROR_MESSAGE = "job_failure_message";
// The ID of a job
public static final String JOB_ID = "job_id";
// The key to a particular job
public static final String JOB_KEY = "oncue:jobs:%s";
// The job parameters
public static final String JOB_PARAMS = "job_params";
// The progress against a job
public static final String JOB_PROGRESS = "job_progress";
// The job state
public static final String JOB_STATE = "job_state";
// The worker type assigned to a job
public static final String JOB_WORKER_TYPE = "job_worker_type";
// The re-run status of a job
public static final String JOB_RERUN_STATUS = "job_rerun_status";
/*
* The queue of jobs that acts as an external interface; the scheduler component will watch this
* queue for new jobs
*/
public static final String NEW_JOBS = "oncue:jobs:new";
// The scheduled jobs dispatched by the scheduler component
public static final String SCHEDULED_JOBS = "oncue:jobs:scheduled";
// The unscheduled jobs held by the scheduler
public static final String UNSCHEDULED_JOBS = "oncue:jobs:unscheduled";
/**
* Create a new {@linkplain Job} and persist it in Redis
*
* @param workerType is the type of worker required to complete this job
*
* @param params is a map of job parameters
*
* @return a new {@linkplain Job}
*/
public static Job createJob(String workerType, Map<String, String> params) {
try (RedisConnection redis = new RedisConnection()) {
// Get the latest job ID
Long jobId = redis.incr(RedisBackingStore.JOB_COUNT_KEY);
// Create a new job
Job job = new Job(jobId, workerType);
if (params != null)
job.setParams(params);
// Now, persist the job and release the connection
persistJob(job, RedisBackingStore.NEW_JOBS, redis);
return job;
}
}
/**
* Construct a job from a given Job ID
*
* @param id is the id of the job
* @param redis is a connection to Redis
* @return a {@linkplain Job} that represents the job hash in Redis
*/
@SuppressWarnings("unchecked")
public static Job loadJob(long id, RedisConnection redis) {
String jobKey = String.format(JOB_KEY, id);
Job job;
try {
DateTime enqueuedAt = DateTime.parse(redis.hget(jobKey, JOB_ENQUEUED_AT));
DateTime startedAt = null;
String startedAtRaw = redis.hget(jobKey, JOB_STARTED_AT);
if (startedAtRaw != null)
startedAt = DateTime.parse(startedAtRaw);
DateTime completedAt = null;
String completedAtRaw = redis.hget(jobKey, JOB_COMPLETED_AT);
if (completedAtRaw != null)
completedAt = DateTime.parse(completedAtRaw);
String workerType = redis.hget(jobKey, JOB_WORKER_TYPE);
String state = redis.hget(jobKey, JOB_STATE);
String progress = redis.hget(jobKey, JOB_PROGRESS);
String params = redis.hget(jobKey, JOB_PARAMS);
String errorMessage = redis.hget(jobKey, JOB_ERROR_MESSAGE);
String rerunStatus = redis.hget(jobKey, JOB_RERUN_STATUS);
job = new Job(new Long(id), workerType);
job.setEnqueuedAt(enqueuedAt);
if (startedAt != null)
job.setStartedAt(startedAt);
if (completedAt != null)
job.setCompletedAt(completedAt);
job.setRerun(Boolean.parseBoolean(rerunStatus));
if (params != null)
job.setParams((Map<String, String>) JSONValue.parse(params));
if (state != null)
job.setState(State.valueOf(state.toUpperCase()));
if (progress != null)
job.setProgress(new Double(progress));
if (errorMessage != null)
job.setErrorMessage(errorMessage);
} catch (Exception e) {
throw new RuntimeException(
String.format("Could not load job with id %s from Redis", id), e);
}
return job;
}
/**
* Persist a job as a hash in Redis
*
* @param job is the {@linkplain Job} to persist
* @param queueName is the name of the queue to push the job onto
* @param redis is a connection to Redis
*/
public static void persistJob(Job job, String queueName, RedisConnection redis) {
// Persist the job in a transaction
try (Transaction transaction = redis.multi()) {
// Create a map describing the job
String jobKey = String.format(JOB_KEY, job.getId());
transaction.hset(jobKey, JOB_ENQUEUED_AT, job.getEnqueuedAt().toString());
if (job.getStartedAt() != null)
transaction.hset(jobKey, JOB_STARTED_AT, job.getStartedAt().toString());
if (job.getCompletedAt() != null)
transaction.hset(jobKey, JOB_COMPLETED_AT, job.getCompletedAt().toString());
transaction.hset(jobKey, JOB_WORKER_TYPE, job.getWorkerType());
transaction.hset(jobKey, JOB_RERUN_STATUS, Boolean.toString(job.isRerun()));
if (job.getParams() != null) {
Map<String, String> params = null;
switch (job.getState()) {
case COMPLETE:
case FAILED:
params = job.getParams(false);
break;
default:
params = job.getParams();
break;
}
transaction.hset(jobKey, JOB_PARAMS, JSONValue.toJSONString(params));
}
if (job.getState() != null)
transaction.hset(jobKey, JOB_STATE, job.getState().toString());
transaction.hset(jobKey, JOB_PROGRESS, String.valueOf(job.getProgress()));
if (job.getErrorMessage() != null)
transaction.hset(jobKey, JOB_ERROR_MESSAGE, job.getErrorMessage());
// Add the job to the specified queue
transaction.lpush(queueName, Long.toString(job.getId()));
// Exec the transaction
transaction.exec();
} catch (IOException e) {
// Jedis' Transaction.close() method does not actually throw IOException, it just says
// that it does. In fact it can only throw a JedisConnectionException, an instance of a
// RuntimeException. Let's wrap this in a JedisException anyway to be sure.
throw new JedisException(e);
}
}
// Logger
private LoggingAdapter log;
public RedisBackingStore(ActorSystem system, Settings settings) {
super(system, settings);
/*
* Override Redis hostname and port from configuration
*/
Config config = system.settings().config();
if (config.hasPath(REDIS_HOST)) {
host = config.getString(REDIS_HOST);
}
if (config.hasPath(REDIS_PORT)) {
port = config.getInt(REDIS_PORT);
}
log = Logging.getLogger(system, this);
log.info("Backing store expects Redis at: host={}, port={}", host, port);
}
@Override
public void addScheduledJobs(List<Job> jobs) {
try (RedisConnection redis = new RedisConnection()) {
for (Job job : jobs) {
redis.lpush(SCHEDULED_JOBS, Long.toString(job.getId()));
}
}
}
@Override
public void addUnscheduledJob(Job job) {
try (RedisConnection redis = new RedisConnection()) {
persistJob(job, UNSCHEDULED_JOBS, redis);
}
}
@Override
public List<Job> getCompletedJobs() {
List<Job> jobs = new ArrayList<>();
try (RedisConnection redis = new RedisConnection()) {
List<String> jobIDs = redis.lrange(COMPLETED_JOBS, 0, -1);
for (String jobID : jobIDs) {
jobs.add(loadJob(new Long(jobID), redis));
}
}
return jobs;
}
@Override
public List<Job> getFailedJobs() {
List<Job> jobs = new ArrayList<>();
try (RedisConnection redis = new RedisConnection()) {
List<String> jobIDs = redis.lrange(FAILED_JOBS, 0, -1);
for (String jobID : jobIDs) {
jobs.add(loadJob(new Long(jobID), redis));
}
}
return jobs;
}
@Override
public long getNextJobID() {
try (RedisConnection redis = new RedisConnection()) {
// Increment and return the latest job ID
return redis.incr(RedisBackingStore.JOB_COUNT_KEY);
}
}
@Override
public void persistJobFailure(Job job) {
try (RedisConnection redis = new RedisConnection()) {
persistJob(job, FAILED_JOBS, redis);
}
}
@Override
public void persistJobProgress(Job job) {
try (RedisConnection redis = new RedisConnection()) {
String jobKey = String.format(JOB_KEY, job.getId());
redis.hset(jobKey, JOB_PROGRESS, String.valueOf(job.getProgress()));
redis.hset(jobKey, JOB_STATE, job.getState().toString());
if (job.getStartedAt() != null)
redis.hset(jobKey, JOB_STARTED_AT, job.getStartedAt().toString());
if (job.getState() == Job.State.COMPLETE) {
if (job.getCompletedAt() != null)
redis.hset(jobKey, JOB_COMPLETED_AT, job.getCompletedAt().toString());
redis.lpush(COMPLETED_JOBS, Long.toString(job.getId()));
}
}
}
@Override
public void removeCompletedJobById(long jobId) {
try (RedisConnection redis = new RedisConnection()) {
redis.lrem(COMPLETED_JOBS, 0, Long.toString(jobId));
removeJobById(jobId, redis);
}
}
@Override
public void removeFailedJobById(long jobId) {
try (RedisConnection redis = new RedisConnection()) {
redis.lrem(FAILED_JOBS, 0, Long.toString(jobId));
removeJobById(jobId, redis);
}
}
@Override
public void removeScheduledJobById(long jobId) {
try (RedisConnection redis = new RedisConnection()) {
redis.lrem(SCHEDULED_JOBS, 0, Long.toString(jobId));
}
}
@Override
public void removeUnscheduledJobById(long jobId) {
try (RedisConnection redis = new RedisConnection()) {
redis.lrem(UNSCHEDULED_JOBS, 0, Long.toString(jobId));
}
}
public void removeJobById(long jobId, RedisConnection redis) {
redis.del(String.format(JOB_KEY, jobId));
}
/**
* When restoring the jobs queue, we need to look for all the jobs that were on the scheduler
* jobs queue in Redis, as well as the jobs that had been scheduled against agents, which we
* assume are dead.
*/
@Override
public List<Job> restoreJobs() {
List<Job> jobs = new ArrayList<>();
try (RedisConnection redis = new RedisConnection()) {
// Pop all scheduled jobs back onto the unscheduled jobs queue
while (redis.rpoplpush(SCHEDULED_JOBS, UNSCHEDULED_JOBS) != null) {
}
// Get all the unscheduled jobs
List<String> jobIDs = redis.lrange(UNSCHEDULED_JOBS, 0, -1);
for (String jobID : jobIDs) {
jobs.add(loadJob(new Long(jobID), redis));
}
}
return jobs;
}
@Override
public int cleanupJobs(boolean includeFailedJobs, Duration expirationAge) {
int cleanedJobsCount = 0;
for (Job completedJob : getCompletedJobs()) {
DateTime expirationThreshold = DateTime.now().minus(expirationAge.getMillis());
boolean isExpired = completedJob.getCompletedAt()
.isBefore(expirationThreshold.toInstant());
if (isExpired) {
removeCompletedJobById(completedJob.getId());
cleanedJobsCount++;
}
}
if (!includeFailedJobs) {
return cleanedJobsCount;
}
for (Job failedJob : getFailedJobs()) {
if (failedJob.getCompletedAt() == null) {
log.error(
"Found a failed job with no completion time. Setting completion time to now and defering to next clean up. ("
+ failedJob.toString() + ")");
failedJob.setCompletedAt(DateTime.now());
persistJobFailure(failedJob);
continue;
}
DateTime expirationThreshold = DateTime.now().minus(expirationAge.getMillis());
boolean isExpired = failedJob.getCompletedAt()
.isBefore(expirationThreshold.toInstant());
if (isExpired) {
removeFailedJobById(failedJob.getId());
cleanedJobsCount++;
}
}
return cleanedJobsCount;
}
}
| |
package org.openntf.conference.graph;
import org.openntf.conference.graph.Invite.InvitedTo;
import org.openntf.conference.graph.Invite.Invites;
import org.openntf.conference.graph.Presentation.PresentedBy;
import org.openntf.domino.graph2.annotations.AdjacencyUnique;
import org.openntf.domino.graph2.annotations.IncidenceUnique;
import org.openntf.domino.graph2.annotations.TypedProperty;
import org.openntf.domino.graph2.builtin.DEdgeFrame;
import org.openntf.domino.graph2.builtin.social.Socializer;
import com.tinkerpop.blueprints.Direction;
import com.tinkerpop.frames.InVertex;
import com.tinkerpop.frames.OutVertex;
import com.tinkerpop.frames.modules.typedgraph.TypeValue;
/**
 * Graph vertex frame for a conference attendee. Extends {@link Socializer}
 * and declares typed properties plus edges to events (planned / attending /
 * presenting), invites, and groups.
 */
@TypeValue("Attendee")
public interface Attendee extends Socializer {
    // Registration lifecycle states for an attendee.
    public static enum Status {
        REGISTERED, CANCELLED, PAID, CHECKEDIN, DEPARTED
    }

    /** Edge: attendee intends to attend an event; carries an outcome status. */
    @TypeValue(PlansToAttend.LABEL)
    public static interface PlansToAttend extends DEdgeFrame {
        public static final String LABEL = "PlansToAttend";

        // Outcome of the plan.
        public static enum Status {
            CANCELLED, FULFILLED, UNDETERMINED
        }

        @OutVertex
        public Attendee getAttendee();

        @InVertex
        public Event getEvent();

        @TypedProperty("Status")
        public Status getStatus();

        @TypedProperty("Status")
        public void setStatus(Status status);
    }

    /** Edge: attendee is attending an event. */
    @TypeValue(Attending.LABEL)
    public static interface Attending extends DEdgeFrame {
        public static final String LABEL = "Attending";

        @OutVertex
        public Attendee getAttendee();

        @InVertex
        public Event getEvent();
    }

    /** Edge: attendee belongs to a group. */
    @TypeValue(MemberOf.LABEL)
    public static interface MemberOf extends DEdgeFrame {
        public static final String LABEL = "MemberOf";

        @InVertex
        public Group getGroup();

        @OutVertex
        public Attendee getAttendee();
    }

    // --- Simple typed properties (stored on the vertex) ---------------------

    @TypedProperty("Firstname")
    public String getFirstName();

    @TypedProperty("Firstname")
    public void setFirstName(String firstName);

    @TypedProperty("Lastname")
    public String getLastName();

    @TypedProperty("Lastname")
    public void setLastName(String lastName);

    @TypedProperty("Email")
    public String getEmail();

    @TypedProperty("Email")
    public void setEmail(String email);

    @TypedProperty("Url")
    public String getUrl();

    @TypedProperty("Url")
    public void setUrl(String url);

    @TypedProperty("Twitter")
    public String getTwitterId();

    @TypedProperty("Twitter")
    public void setTwitterId(String twitterId);

    @TypedProperty("Facebook")
    public String getFacebookId();

    @TypedProperty("Facebook")
    public void setFacebookId(String facebookId);

    @TypedProperty("Phone")
    public String getPhone();

    @TypedProperty("Phone")
    public void setPhone(String phone);

    @TypedProperty("Country")
    public String getCountry();

    @TypedProperty("Country")
    public void setCountry(String country);

    @TypedProperty("Role")
    public String getRole();

    @TypedProperty("Role")
    public void setRole(String role);

    // Derived, read-only property computed from Firstname and Lastname.
    @TypedProperty(value = "Firstname + \" \" + Lastname", derived = true)
    public String getFullname();

    // --- PlansToAttend edges ------------------------------------------------

    @AdjacencyUnique(label = PlansToAttend.LABEL)
    public Iterable<Event> getPlansToAttendEvents();

    @AdjacencyUnique(label = PlansToAttend.LABEL)
    public PlansToAttend addPlansToAttend(Event event);

    @AdjacencyUnique(label = PlansToAttend.LABEL)
    public void removePlansToAttend(Event event);

    @IncidenceUnique(label = PlansToAttend.LABEL)
    public Iterable<PlansToAttend> getPlansToAttend();

    @IncidenceUnique(label = PlansToAttend.LABEL)
    public void removePlansToAttend(PlansToAttend plansToAttend);

    // --- Attending edges ----------------------------------------------------

    @AdjacencyUnique(label = Attending.LABEL)
    public Iterable<Event> getAttendingEvents();

    @AdjacencyUnique(label = Attending.LABEL)
    public Attending addAttending(Event event);

    @AdjacencyUnique(label = Attending.LABEL)
    public void removeAttending(Event event);

    @IncidenceUnique(label = Attending.LABEL)
    public Iterable<Attending> getAttendings();

    @IncidenceUnique(label = Attending.LABEL)
    public void removeAttending(Attending attending);

    // --- PresentedBy edges (events this attendee presents) ------------------

    @AdjacencyUnique(label = PresentedBy.LABEL)
    public Iterable<Event> getPresentingEvents();

    @AdjacencyUnique(label = PresentedBy.LABEL)
    public PresentedBy addPresentingEvent(Event event);

    @AdjacencyUnique(label = PresentedBy.LABEL)
    public void removePresentingEvent(Event event);

    @IncidenceUnique(label = PresentedBy.LABEL)
    public Iterable<PresentedBy> getPresentings();

    @IncidenceUnique(label = PresentedBy.LABEL)
    public void removePresenting(PresentedBy presentedBy);

    // --- Invite edges: received (IN direction) ------------------------------

    @AdjacencyUnique(label = InvitedTo.LABEL, direction = Direction.IN)
    public Iterable<Invite> getInvitedToInvites();

    @AdjacencyUnique(label = InvitedTo.LABEL, direction = Direction.IN)
    public Invites addInvitedTo(Invite invite);

    @AdjacencyUnique(label = InvitedTo.LABEL, direction = Direction.IN)
    public void removeInvitedTo(Invite invite);

    @IncidenceUnique(label = InvitedTo.LABEL, direction = Direction.IN)
    public Iterable<InvitedTo> getInvitedTos();

    @IncidenceUnique(label = InvitedTo.LABEL, direction = Direction.IN)
    public void removeInvitedTo(InvitedTo invitedTo);

    // --- Invite edges: sent (OUT direction) ---------------------------------

    @AdjacencyUnique(label = Invites.LABEL)
    public Iterable<Invite> getInvitations();

    @AdjacencyUnique(label = Invites.LABEL)
    public Invites addInvitation(Invite invite);

    @AdjacencyUnique(label = Invites.LABEL)
    public void removeInvitation(Invite invite);

    @IncidenceUnique(label = Invites.LABEL)
    public Invites getInvites();

    @IncidenceUnique(label = Invites.LABEL)
    public void removeInvites(Invites invites);

    // --- Group membership edges ---------------------------------------------

    @AdjacencyUnique(label = MemberOf.LABEL)
    public Iterable<Group> getMemberOfGroups();

    @AdjacencyUnique(label = MemberOf.LABEL)
    public MemberOf addMemberOfGroup(Group group);

    @AdjacencyUnique(label = MemberOf.LABEL)
    public void removeMemberOfGroup(Group group);

    @IncidenceUnique(label = MemberOf.LABEL)
    public Iterable<MemberOf> getMemberOfs();

    @IncidenceUnique(label = MemberOf.LABEL)
    public void removeMemberOf(MemberOf memberOf);
}
| |
/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Class FrameDebuggerTree
* @author Jeka
*/
package com.intellij.debugger.ui.impl;
import com.intellij.debugger.DebuggerBundle;
import com.intellij.debugger.DebuggerInvocationUtil;
import com.intellij.debugger.SourcePosition;
import com.intellij.debugger.engine.DebuggerUtils;
import com.intellij.debugger.engine.SuspendManager;
import com.intellij.debugger.engine.evaluation.EvaluateException;
import com.intellij.debugger.engine.evaluation.EvaluationContextImpl;
import com.intellij.debugger.engine.evaluation.TextWithImports;
import com.intellij.debugger.engine.evaluation.TextWithImportsImpl;
import com.intellij.debugger.impl.DebuggerContextImpl;
import com.intellij.debugger.impl.DebuggerSession;
import com.intellij.debugger.jdi.LocalVariableProxyImpl;
import com.intellij.debugger.jdi.StackFrameProxyImpl;
import com.intellij.debugger.jdi.ThreadReferenceProxyImpl;
import com.intellij.debugger.settings.ViewsGeneralSettings;
import com.intellij.debugger.ui.impl.watch.*;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.fileEditor.FileDocumentManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Computable;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.*;
import com.intellij.psi.tree.IElementType;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.util.text.CharArrayUtil;
import com.intellij.util.ui.tree.TreeModelAdapter;
import com.sun.jdi.ObjectCollectedException;
import javax.swing.event.TreeModelEvent;
import javax.swing.tree.TreeModel;
import javax.swing.tree.TreePath;
import java.util.*;
import java.util.concurrent.atomic.AtomicBoolean;
public class FrameDebuggerTree extends DebuggerTree {
private static final Logger LOG = Logger.getInstance("#com.intellij.debugger.ui.impl.FrameDebuggerTree");
private boolean myAnyNewLocals;
private boolean myAutoWatchMode = false;
/** Creates a frame-variables debugger tree for the given project. */
public FrameDebuggerTree(Project project) {
    super(project);
}
/** @return true when only referenced locals/expressions are shown (auto-watch mode). */
public boolean isAutoWatchMode() {
    return myAutoWatchMode;
}
/**
 * Switches auto-watch mode on/off and rebuilds the tree for the current
 * debugger context when the mode actually changed.
 */
public void setAutoVariablesMode(final boolean autoWatchMode) {
    final boolean changed = autoWatchMode != myAutoWatchMode;
    myAutoWatchMode = autoWatchMode;
    if (!changed) {
        return;
    }
    rebuild(getDebuggerContext());
}
/** Rebuilds the frame tree once the VM is paused; resets the new-locals flag first. */
protected void build(DebuggerContextImpl context) {
    myAnyNewLocals = false;
    buildWhenPaused(context, new RefreshFrameTreeCommand(context));
}
/**
 * Restores selection/expansion state after a rebuild. When new locals appeared
 * since the previous frame, selection and visibility are restricted to
 * local-variable nodes so the tree scrolls to the fresh locals.
 */
public void restoreNodeState(DebuggerTreeNodeImpl node) {
    if (myAnyNewLocals) {
        final NodeDescriptorImpl descriptor = node.getDescriptor();
        final boolean isLocalVar = descriptor instanceof LocalVariableDescriptorImpl;
        // Keep selection only on local-variable nodes.
        descriptor.myIsSelected &= isLocalVar;
        // override this setting so that tree will scroll to new locals
        descriptor.myIsVisible = isLocalVar && descriptor.myIsSelected;
        if (!descriptor.myIsVisible) {
            // Drop the remembered viewport so the stale position is not restored.
            descriptor.putUserData(VISIBLE_RECT, null);
        }
    }
    super.restoreNodeState(node);
    if (myAnyNewLocals && node.getDescriptor().myIsExpanded) {
        DebuggerTreeNodeImpl root = (DebuggerTreeNodeImpl)getMutableModel().getRoot();
        scrollToVisible(root);
    }
}
/**
 * Chooses the build command for a node: stack-frame nodes get the specialised
 * frame-variables command; everything else falls through to the superclass.
 */
protected BuildNodeCommand getBuildNodeCommand(final DebuggerTreeNodeImpl node) {
    final boolean isStackFrame = node.getDescriptor() instanceof StackFrameDescriptorImpl;
    return isStackFrame ? new BuildFrameTreeVariablesCommand(node) : super.getBuildNodeCommand(node);
}
/**
 * Builds the children of a stack-frame node. In auto-watch mode only the
 * locals referenced near the current source position are added (plus derived
 * watch expressions); otherwise all visible variables are added as usual.
 */
private class BuildFrameTreeVariablesCommand extends BuildStackFrameCommand {
    public BuildFrameTreeVariablesCommand(DebuggerTreeNodeImpl stackNode) {
        super(stackNode);
    }

    protected void buildVariables(final StackFrameDescriptorImpl stackDescriptor, final EvaluationContextImpl evaluationContext)
            throws EvaluateException {
        final SourcePosition sourcePosition = getDebuggerContext().getSourcePosition();
        if (sourcePosition == null) {
            return;
        }
        final Map<String, LocalVariableProxyImpl> visibleVariables = getVisibleVariables(stackDescriptor);
        // PSI access must happen inside a read action.
        final Pair<Set<String>, Set<TextWithImports>> usedVars =
            ApplicationManager.getApplication().runReadAction(new Computable<Pair<Set<String>, Set<TextWithImports>>>() {
                public Pair<Set<String>, Set<TextWithImports>> compute() {
                    return findReferencedVars(visibleVariables.keySet(), sourcePosition);
                }
            });
        // add locals
        if (myAutoWatchMode) {
            for (String var : usedVars.first) {
                final LocalVariableDescriptorImpl descriptor = myNodeManager.getLocalVariableDescriptor(stackDescriptor, visibleVariables.get(var));
                myChildren.add(myNodeManager.createNode(descriptor, evaluationContext));
            }
        }
        else {
            super.buildVariables(stackDescriptor, evaluationContext);
        }
        // add expressions
        // Evaluate watch expressions in a copy of the context with class
        // auto-loading disabled, so evaluation cannot trigger class loading.
        final EvaluationContextImpl evalContextCopy = evaluationContext.createEvaluationContext(evaluationContext.getThisObject());
        evalContextCopy.setAutoLoadClasses(false);
        for (TextWithImports text : usedVars.second) {
            myChildren.add(myNodeManager.createNode(myNodeManager.getWatchItemDescriptor(stackDescriptor, text, null), evalContextCopy));
        }
    }
}
// Collects the frame's visible locals into a name -> proxy map.
// Returns an empty map when no frame proxy is available.
private static Map<String, LocalVariableProxyImpl> getVisibleVariables(final StackFrameDescriptorImpl stackDescriptor) throws EvaluateException {
  final StackFrameProxyImpl frameProxy = stackDescriptor.getFrameProxy();
  if (frameProxy == null) {
    return Collections.emptyMap();
  }
  final Map<String, LocalVariableProxyImpl> byName = new HashMap<String, LocalVariableProxyImpl>();
  for (LocalVariableProxyImpl proxy : frameProxy.visibleVariables()) {
    byName.put(proxy.name(), proxy);
  }
  return byName;
}
// Returns true when the given document line is uninteresting for auto-watch:
// it is blank, or none of its PSI elements declares or references a variable.
// Used to widen the scanned line range past such lines.
private static boolean shouldSkipLine(final PsiFile file, Document doc, int line) {
final int start = doc.getLineStartOffset(line);
final int end = doc.getLineEndOffset(line);
// Skip leading whitespace; an all-whitespace line is skippable.
final int _start = CharArrayUtil.shiftForward(doc.getCharsSequence(), start, " \n\t");
if (_start >= end) {
return true;
}
// Range already examined, so parent elements are not re-visited.
TextRange alreadyChecked = null;
for (PsiElement elem = file.findElementAt(_start); elem != null && elem.getTextOffset() <= end && (alreadyChecked == null || !alreadyChecked .contains(elem.getTextRange())); elem = elem.getNextSibling()) {
// Walk up the PSI tree while the parent still starts on this line.
for (PsiElement _elem = elem; _elem.getTextOffset() >= _start; _elem = _elem.getParent()) {
alreadyChecked = _elem.getTextRange();
if (_elem instanceof PsiDeclarationStatement) {
final PsiElement[] declared = ((PsiDeclarationStatement)_elem).getDeclaredElements();
for (PsiElement declaredElement : declared) {
if (declaredElement instanceof PsiVariable) {
// The line declares a variable -- keep it.
return false;
}
}
}
if (_elem instanceof PsiJavaCodeReferenceElement) {
final PsiElement resolved = ((PsiJavaCodeReferenceElement)_elem).resolve();
if (resolved instanceof PsiVariable) {
// The line references a variable -- keep it.
return false;
}
}
}
}
return true;
}
// Determines which of the visible locals are referenced near the given
// source position (first of the pair), plus expressions worth showing as
// synthetic watches (second of the pair). Scans roughly from one line above
// to two lines below the current line, extending past lines that mention no
// variables, limited to the enclosing method.
private static Pair<Set<String>, Set<TextWithImports>> findReferencedVars(final Set<String> visibleVars, final SourcePosition position) {
final int line = position.getLine();
if (line < 0) {
return new Pair<Set<String>, Set<TextWithImports>>(Collections.<String>emptySet(), Collections.<TextWithImports>emptySet());
}
final PsiFile positionFile = position.getFile();
final VirtualFile vFile = positionFile.getVirtualFile();
final Document doc = vFile != null? FileDocumentManager.getInstance().getDocument(vFile) : null;
if (doc == null || doc.getLineCount() == 0 || line > (doc.getLineCount() - 1)) {
return new Pair<Set<String>, Set<TextWithImports>>(Collections.<String>emptySet(), Collections.<TextWithImports>emptySet());
}
// Line-number range of the enclosing method (or whole document).
final TextRange limit = calculateLimitRange(positionFile, doc, line);
int startLine = Math.max(limit.getStartOffset(), line - 1);
startLine = Math.min(startLine, limit.getEndOffset());
// Extend upwards past lines without variable references.
while (startLine > limit.getStartOffset() && shouldSkipLine(positionFile, doc, startLine)) {
startLine--;
}
final int startOffset = doc.getLineStartOffset(startLine);
int endLine = Math.min(line + 2, limit.getEndOffset());
// Extend downwards the same way.
while (endLine < limit.getEndOffset() && shouldSkipLine(positionFile, doc, endLine)) {
endLine++;
}
final int endOffset = doc.getLineEndOffset(endLine);
final TextRange lineRange = new TextRange(startOffset, endOffset);
if (!lineRange.isEmpty()) {
final int offset = CharArrayUtil.shiftForward(doc.getCharsSequence(), doc.getLineStartOffset(line), " \t");
PsiElement element = positionFile.findElementAt(offset);
if (element != null) {
// Prefer the enclosing method, then field, then class initializer as the
// root element from which variables are collected.
PsiMethod method = PsiTreeUtil.getNonStrictParentOfType(element, PsiMethod.class);
if (method != null) {
element = method;
}
else {
PsiField field = PsiTreeUtil.getNonStrictParentOfType(element, PsiField.class);
if (field != null) {
element = field;
}
else {
final PsiClassInitializer initializer = PsiTreeUtil.getNonStrictParentOfType(element, PsiClassInitializer.class);
if (initializer != null) {
element = initializer;
}
}
}
//noinspection unchecked
if (element instanceof PsiCompiledElement) {
// No source to analyze -- fall back to showing every visible local.
return new Pair<Set<String>, Set<TextWithImports>>(visibleVars, Collections.<TextWithImports>emptySet());
}
else {
final Set<String> vars = new HashSet<String>();
final Set<TextWithImports> expressions = new HashSet<TextWithImports>();
final PsiElementVisitor variablesCollector = new VariablesCollector(visibleVars, adjustRange(element, lineRange), expressions, vars);
element.accept(variablesCollector);
return new Pair<Set<String>, Set<TextWithImports>>(vars, expressions);
}
}
}
return new Pair<Set<String>, Set<TextWithImports>>(Collections.<String>emptySet(), Collections.<TextWithImports>emptySet());
}
// Limits the scanned region to the method enclosing the given line, returned
// as a range of LINE NUMBERS (not offsets); falls back to the whole document.
private static TextRange calculateLimitRange(final PsiFile file, final Document doc, final int line) {
  final int lineStart = doc.getLineStartOffset(line);
  if (lineStart > 0) {
    PsiElement candidate = file.findElementAt(lineStart);
    while (candidate != null) {
      if (candidate instanceof PsiMethod) {
        final TextRange methodRange = candidate.getTextRange();
        return new TextRange(doc.getLineNumber(methodRange.getStartOffset()),
                             doc.getLineNumber(methodRange.getEndOffset()));
      }
      candidate = candidate.getParent();
    }
  }
  return new TextRange(0, doc.getLineCount() - 1);
}
// Expands the given text range so that every expression statement that
// overlaps it is covered completely.
private static TextRange adjustRange(final PsiElement element, final TextRange originalRange) {
  final Ref<TextRange> adjusted = new Ref<TextRange>(originalRange);
  element.accept(new JavaRecursiveElementVisitor() {
    @Override public void visitExpressionStatement(final PsiExpressionStatement statement) {
      final TextRange statementRange = statement.getTextRange();
      if (originalRange.intersects(statementRange)) {
        // Grow the accumulated range to fully include this statement.
        final TextRange current = adjusted.get();
        adjusted.set(new TextRange(
            Math.min(current.getStartOffset(), statementRange.getStartOffset()),
            Math.max(current.getEndOffset(), statementRange.getEndOffset())));
      }
    }
  });
  return adjusted.get();
}
// Tree-refresh command: rebuilds the root node for the current stack frame
// and, once the new root is actually installed in the Swing model, either
// auto-scrolls to newly appeared locals or clears their "new" flags.
private class RefreshFrameTreeCommand extends RefreshDebuggerTreeCommand {
public RefreshFrameTreeCommand(DebuggerContextImpl context) {
super(context);
}
public void contextAction() throws Exception {
DebuggerTreeNodeImpl rootNode;
final DebuggerContextImpl debuggerContext = getDebuggerContext();
final ThreadReferenceProxyImpl currentThread = debuggerContext.getThreadProxy();
if (currentThread == null) {
return;
}
try {
StackFrameProxyImpl frame = debuggerContext.getFrameProxy();
if (frame != null) {
NodeManagerImpl nodeManager = getNodeFactory();
rootNode = nodeManager.createNode(nodeManager.getStackFrameDescriptor(null, frame), debuggerContext.createEvaluationContext());
}
else {
// No frame available: show a message node explaining why.
rootNode = getNodeFactory().getDefaultNode();
SuspendManager suspendManager = getSuspendContext().getDebugProcess().getSuspendManager();
try {
if (suspendManager.isSuspended(currentThread)) {
try {
if (currentThread.frameCount() == 0) {
rootNode.add(MessageDescriptor.THREAD_IS_EMPTY);
}
else {
rootNode.add(MessageDescriptor.DEBUG_INFO_UNAVAILABLE);
}
}
catch (EvaluateException e) {
rootNode.add(new MessageDescriptor(e.getMessage()));
}
}
else {
rootNode.add(MessageDescriptor.THREAD_IS_RUNNING);
}
}
catch (ObjectCollectedException e) {
// The thread object was garbage-collected in the debuggee.
rootNode.add(new MessageDescriptor(DebuggerBundle.message("label.thread.node.thread.collected", currentThread.name())));
}
}
}
catch (Exception ex) {
if (LOG.isDebugEnabled()) {
LOG.debug(ex);
}
rootNode = getNodeFactory().getDefaultNode();
rootNode.add(MessageDescriptor.DEBUG_INFO_UNAVAILABLE);
}
// Effectively-final copy for the anonymous classes below.
final DebuggerTreeNodeImpl rootNode1 = rootNode;
DebuggerInvocationUtil.swingInvokeLater(getProject(), new Runnable() {
public void run() {
getMutableModel().setRoot(rootNode1);
treeChanged();
final TreeModel model = getModel();
model.addTreeModelListener(new TreeModelAdapter() {
public void treeStructureChanged(TreeModelEvent e) {
final Object[] path = e.getPath();
if (path.length > 0 && path[path.length - 1] == rootNode1) {
// wait until rootNode1 (the root just set) becomes the root
model.removeTreeModelListener(this);
if (ViewsGeneralSettings.getInstance().AUTOSCROLL_TO_NEW_LOCALS) {
autoscrollToNewLocals(rootNode1);
}
else {
// should clear this flag, otherwise, if AUTOSCROLL_TO_NEW_LOCALS option turned
// to true during the debug process, all these variables will be considered 'new'
for (Enumeration children = rootNode1.rawChildren(); children.hasMoreElements();) {
final DebuggerTreeNodeImpl child = (DebuggerTreeNodeImpl)children.nextElement();
final NodeDescriptorImpl descriptor = child.getDescriptor();
if (descriptor instanceof LocalVariableDescriptorImpl) {
((LocalVariableDescriptorImpl)descriptor).setNewLocal(false);
}
}
}
}
}
});
}
// Selects locals marked as "new" (while stepping), sets myAnyNewLocals so
// restoreNodeState() scrolls them into view, and clears the new-local flags.
private void autoscrollToNewLocals(DebuggerTreeNodeImpl frameNode) {
final DebuggerSession debuggerSession = debuggerContext.getDebuggerSession();
final boolean isSteppingThrough = debuggerSession.isSteppingThrough(debuggerContext.getThreadProxy());
for (Enumeration e = frameNode.rawChildren(); e.hasMoreElements();) {
final DebuggerTreeNodeImpl child = (DebuggerTreeNodeImpl)e.nextElement();
final NodeDescriptorImpl descriptor = child.getDescriptor();
if (!(descriptor instanceof LocalVariableDescriptorImpl)) {
continue;
}
final LocalVariableDescriptorImpl localVariableDescriptor = (LocalVariableDescriptorImpl)descriptor;
if (isSteppingThrough && localVariableDescriptor.isNewLocal()) {
TreePath treePath = new TreePath(child.getPath());
addSelectionPath(treePath);
myAnyNewLocals = true;
descriptor.myIsSelected = true;
}
else {
removeSelectionPath(new TreePath(child.getPath()));
descriptor.myIsSelected = false;
}
localVariableDescriptor.setNewLocal(false);
}
}
});
}
}
// PSI visitor that collects, within a text range, the names of visible
// locals that are referenced (into myVars) and side-effect-free expressions
// worth showing as synthetic watches (into myExpressions): field references,
// simple getter calls and array accesses.
private static class VariablesCollector extends JavaRecursiveElementVisitor {
// Names of locals visible in the current stack frame.
private final Set<String> myVisibleLocals;
// Text range of interest; elements outside it are not descended into.
private final TextRange myLineRange;
// Output: collected watch expressions.
private final Set<TextWithImports> myExpressions;
// Output: collected referenced local-variable names.
private final Set<String> myVars;
public VariablesCollector(final Set<String> visibleLocals, final TextRange lineRange, final Set<TextWithImports> expressions, final Set<String> vars) {
myVisibleLocals = visibleLocals;
myLineRange = lineRange;
myExpressions = expressions;
myVars = vars;
}
@Override public void visitElement(final PsiElement element) {
// Only descend into elements overlapping the interesting range.
if (myLineRange.intersects(element.getTextRange())) {
super.visitElement(element);
}
}
@Override public void visitMethodCallExpression(final PsiMethodCallExpression expression) {
// Only resolvable, side-effect-free calls are safe to auto-evaluate.
final PsiMethod psiMethod = expression.resolveMethod();
if (psiMethod != null && !hasSideEffects(expression)) {
myExpressions.add(new TextWithImportsImpl(expression));
}
super.visitMethodCallExpression(expression);
}
@Override public void visitReferenceExpression(final PsiReferenceExpression reference) {
if (myLineRange.intersects(reference.getTextRange())) {
final PsiElement psiElement = reference.resolve();
if (psiElement instanceof PsiVariable) {
final PsiVariable var = (PsiVariable)psiElement;
if (var instanceof PsiField) {
// Fields become watch expressions (if evaluating them is safe).
if (!hasSideEffects(reference)) {
if (var instanceof PsiEnumConstant && reference.getQualifier() == null) {
// Qualify unqualified enum constants with their class name so the
// generated watch text evaluates outside the declaring scope.
final PsiClass enumClass = ((PsiEnumConstant)var).getContainingClass();
if (enumClass != null) {
final PsiExpression expression = JavaPsiFacade.getInstance(var.getProject()).getParserFacade().createExpressionFromText(enumClass.getName() + "." + var.getName(), var);
final PsiReference ref = expression.getReference();
if (ref != null) {
ref.bindToElement(var);
myExpressions.add(new TextWithImportsImpl(expression));
}
}
}
else {
myExpressions.add(new TextWithImportsImpl(reference));
}
}
}
else {
// A local or parameter: record it if visible in the frame.
if (myVisibleLocals.contains(var.getName())) {
myVars.add(var.getName());
}
}
}
}
super.visitReferenceExpression(reference);
}
@Override public void visitArrayAccessExpression(final PsiArrayAccessExpression expression) {
if (!hasSideEffects(expression)) {
myExpressions.add(new TextWithImportsImpl(expression));
}
super.visitArrayAccessExpression(expression);
}
@Override public void visitParameter(final PsiParameter parameter) {
processVariable(parameter);
super.visitParameter(parameter);
}
@Override public void visitLocalVariable(final PsiLocalVariable variable) {
processVariable(variable);
super.visitLocalVariable(variable);
}
// Records a declared variable when it is in range and visible in the frame.
private void processVariable(final PsiVariable variable) {
if (myLineRange.intersects(variable.getTextRange()) && myVisibleLocals.contains(variable.getName())) {
myVars.add(variable.getName());
}
}
@Override public void visitClass(final PsiClass aClass) {
// Do not step in to local and anonymous classes...
}
// Conservatively detects whether evaluating the element could change
// program state: assignments, postfix/prefix ++/--, calls to anything but
// simple getters, or references to locals not visible in the frame.
private boolean hasSideEffects(PsiElement element) {
final AtomicBoolean rv = new AtomicBoolean(false);
element.accept(new JavaRecursiveElementWalkingVisitor() {
@Override public void visitPostfixExpression(final PsiPostfixExpression expression) {
rv.set(true);
}
@Override public void visitReferenceExpression(final PsiReferenceExpression expression) {
final PsiElement psiElement = expression.resolve();
if (psiElement instanceof PsiLocalVariable) {
if (!myVisibleLocals.contains(((PsiLocalVariable)psiElement).getName())) {
rv.set(true);
}
}
else if (psiElement instanceof PsiMethod) {
final PsiMethod method = (PsiMethod)psiElement;
if (!DebuggerUtils.isSimpleGetter(method)) {
rv.set(true);
}
}
if (!rv.get()) {
super.visitReferenceExpression(expression);
}
}
@Override public void visitPrefixExpression(final PsiPrefixExpression expression) {
final IElementType op = expression.getOperationTokenType();
if (JavaTokenType.PLUSPLUS.equals(op) || JavaTokenType.MINUSMINUS.equals(op)) {
rv.set(true);
}
else {
super.visitPrefixExpression(expression);
}
}
@Override public void visitAssignmentExpression(final PsiAssignmentExpression expression) {
rv.set(true);
}
@Override public void visitCallExpression(final PsiCallExpression callExpression) {
final PsiMethod method = callExpression.resolveMethod();
if (method == null || !DebuggerUtils.isSimpleGetter(method)) {
rv.set(true);
}
else {
super.visitCallExpression(callExpression);
}
}
});
return rv.get();
}
}
}
| |
package cml.language.loader;
import cml.io.Console;
import cml.io.Directory;
import cml.io.ModuleManager;
import cml.io.SourceFile;
import cml.language.features.TempModule;
import cml.language.foundation.Diagnostic;
import cml.language.foundation.TempModel;
import cml.language.generated.Element;
import cml.language.generated.Import;
import cml.language.generated.Location;
import cml.language.generated.ModelElement;
import cml.language.grammar.CMLLexer;
import cml.language.grammar.CMLParser;
import cml.language.grammar.CMLParser.CompilationUnitContext;
import org.antlr.v4.runtime.ANTLRInputStream;
import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.tree.ParseTreeWalker;
import org.jetbrains.annotations.Nullable;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import static cml.language.functions.ModelVisitorFunctions.visitModel;
import static cml.language.functions.ModuleFunctions.createImportOfModule;
import static cml.language.functions.ModuleFunctions.importedModuleOf;
/**
 * Loads a CML model from the sources of a named module and its imports.
 */
public interface ModelLoader
{
/**
 * Parses, links and validates the module's sources into the given model.
 *
 * @param model      target model the loaded modules are added to
 * @param moduleName name of the top-level module to load
 * @return zero on success, or a non-zero failure code
 */
int loadModel(TempModel model, String moduleName);
/** Creates the default loader implementation. */
static ModelLoader create(Console console, ModuleManager moduleManager)
{
return new ModelLoaderImpl(console, moduleManager);
}
}
/**
 * Default {@link ModelLoader}: parses CML sources with ANTLR in two passes
 * (synthesize, then augment after imports are loaded), links functions and
 * lambda scopes, and validates the resulting model.
 */
class ModelLoaderImpl implements ModelLoader
{
    private static final String CML_BASE_MODULE = "cml_base";

    // Exit codes surfaced through loadModel().
    private static final int SUCCESS = 0;
    private static final int FAILURE__SOURCE_FILE_NOT_FOUND = 2;
    private static final int FAILURE__FAILED_LOADING_MODEL = 3;
    private static final int FAILURE__MODEL_VALIDATION = 4;

    private static final String NO_SOURCE_FILES_IN_MODULE = "no source files in module: %s";
    private static final String NO_SOURCE_DIR_IN_MODULE = "no source dir in module: %s";

    private final Console console;
    private final ModuleManager moduleManager;

    ModelLoaderImpl(final Console console, final ModuleManager moduleManager)
    {
        this.console = console;
        this.moduleManager = moduleManager;
    }

    /**
     * Loads the named module and its imports into the model, links functions
     * and lambda scopes, then validates the whole model.
     *
     * @return SUCCESS (0) or one of the FAILURE__* codes
     */
    @Override
    public int loadModel(TempModel model, String moduleName)
    {
        try
        {
            final int exitCode = loadModule(model, moduleName, null);
            if (exitCode == SUCCESS)
            {
                linkFunctions(model);
                linkLambdaScope(model);
                return validateModel(model);
            }
            return exitCode;
        }
        catch (final Throwable exception)
        {
            if (exception.getMessage() == null)
            {
                console.error("Unable to parse source files.");
            }
            else
            {
                console.error(exception.getMessage());
            }

            // A ModelLoadingException already carries a user-oriented message;
            // anything else is unexpected, so keep its stack trace for diagnosis.
            if (!(exception instanceof ModelLoadingException))
            {
                exception.printStackTrace(System.err);
            }

            return FAILURE__FAILED_LOADING_MODEL;
        }
    }

    /**
     * Parses all source files of one module, then recursively loads its
     * imports. Synthesis runs before imports are loaded; augmentation runs
     * after, so cross-module references can be resolved.
     *
     * @param _import the import that triggered this load, or null for the
     *                top-level module; a module already loaded through an
     *                earlier import (not the first import) is skipped
     */
    private int loadModule(TempModel model, String moduleName, @Nullable Import _import) throws IOException
    {
        TempModule module;
        if (_import == null)
        {
            module = TempModule.create(model, moduleName);
        }
        else if (_import.isFirstImport())
        {
            module = (TempModule) _import.getImportedModule();
        }
        else
        {
            // Already loaded via a previous import; nothing to do.
            return SUCCESS;
        }

        final Optional<Directory> sourceDir = moduleManager.findSourceDir(moduleName);
        if (sourceDir.isPresent())
        {
            final List<SourceFile> sourceFiles = moduleManager.findSourceFiles(moduleName);
            if (sourceFiles.isEmpty())
            {
                console.error(NO_SOURCE_FILES_IN_MODULE, moduleName);
                return FAILURE__SOURCE_FILE_NOT_FOUND;
            }

            // First pass: parse and synthesize each compilation unit.
            final List<CompilationUnitContext> compilationUnitContexts = new ArrayList<>();
            for (SourceFile sourceFile: sourceFiles)
            {
                final CompilationUnitContext compilationUnitContext = parse(sourceFile);
                synthesizeModule(module, compilationUnitContext);
                compilationUnitContexts.add(compilationUnitContext);
            }

            addBaseModule(module);

            // Load imported modules before augmenting this one.
            for (Import i: module.getImports())
            {
                int exitCode = loadModule(model, i.getName(), i);
                if (exitCode != SUCCESS)
                {
                    return exitCode;
                }
            }

            // Second pass: augment now that imported modules are available.
            for (CompilationUnitContext compilationUnitContext: compilationUnitContexts)
            {
                augmentModule(module, compilationUnitContext);
            }
        }
        else
        {
            console.info(NO_SOURCE_DIR_IN_MODULE, moduleName);
        }

        return SUCCESS;
    }

    // Ensures every module (except the base module itself) imports cml_base.
    private void addBaseModule(TempModule module)
    {
        if (!module.getName().equals(CML_BASE_MODULE) && !importedModuleOf(module, CML_BASE_MODULE).isPresent())
        {
            createImportOfModule(CML_BASE_MODULE, null, module);
        }
    }

    // First-pass tree walk: creates model elements from the parse tree.
    private void synthesizeModule(TempModule module, CompilationUnitContext compilationUnitContext)
    {
        final ParseTreeWalker walker = new ParseTreeWalker();
        final ModelSynthesizer modelSynthesizer = new ModelSynthesizer(module);
        walker.walk(modelSynthesizer, compilationUnitContext);
    }

    // Second-pass tree walk: fills in details that need imports resolved.
    private void augmentModule(TempModule module, CompilationUnitContext compilationUnitContext)
    {
        final ParseTreeWalker walker = new ParseTreeWalker();
        final ModelAugmenter modelAugmenter = new ModelAugmenter(module);
        walker.walk(modelAugmenter, compilationUnitContext);
    }

    private void linkFunctions(final TempModel model)
    {
        visitModel(model, new FunctionLinker());
    }

    private void linkLambdaScope(final TempModel model)
    {
        visitModel(model, new LambdaScopeLinker());
    }

    /**
     * Validates the model, printing each diagnostic with its location and
     * participants to the console.
     *
     * @return SUCCESS when there are no diagnostics, otherwise
     *         FAILURE__MODEL_VALIDATION
     */
    private int validateModel(TempModel model)
    {
        final ModelValidator modelValidator = new ModelValidator();

        visitModel(model, modelValidator);

        // Idiomatic emptiness check (was: getDiagnostics().size() == 0).
        if (modelValidator.getDiagnostics().isEmpty())
        {
            return SUCCESS;
        }
        else
        {
            for (Diagnostic diagnostic: modelValidator.getDiagnostics())
            {
                console.print(
                    "\nFailed validation: required %s: in %s",
                    diagnostic.getCode(),
                    diagnostic.getElement().getDiagnosticId());

                printLocation(diagnostic.getElement());

                if (diagnostic.getMessage().isPresent())
                {
                    console.println(diagnostic.getMessage().get());
                }

                for(Element element: diagnostic.getParticipants())
                {
                    console.print("- %s", element.getDiagnosticId());
                    printLocation(element);
                }
            }
            return FAILURE__MODEL_VALIDATION;
        }
    }

    // Prints " (line:column)" when the element has a location, then a newline.
    private void printLocation(Element element)
    {
        if (element instanceof ModelElement)
        {
            final ModelElement modelElement = (ModelElement) element;

            if (modelElement.getLocation().isPresent())
            {
                final Location location = modelElement.getLocation().get();
                console.print(" (%d:%d)", location.getLine(), location.getColumn());
            }
        }
        console.println("");
    }

    // Parses a single CML source file, routing syntax errors to the console.
    private CompilationUnitContext parse(SourceFile sourceFile) throws IOException
    {
        try (final FileInputStream fileInputStream = new FileInputStream(sourceFile.getPath()))
        {
            final ANTLRInputStream input = new ANTLRInputStream(fileInputStream);
            final CMLLexer lexer = new CMLLexer(input);
            final CommonTokenStream tokens = new CommonTokenStream(lexer);
            final CMLParser parser = new CMLParser(tokens);
            final SyntaxErrorListener syntaxErrorListener = new SyntaxErrorListener(console);

            // Replace the default console error listener with ours.
            parser.getErrorListeners().clear();
            parser.addErrorListener(syntaxErrorListener);

            return parser.compilationUnit();
        }
    }
}
| |
/*
* Copyright 2011 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.devtools.j2objc.util;
import com.google.common.base.CharMatcher;
import com.google.common.base.Joiner;
import com.google.common.base.Splitter;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.devtools.j2objc.J2ObjC;
import com.google.devtools.j2objc.Options;
import com.google.devtools.j2objc.types.NativeType;
import com.google.devtools.j2objc.types.PointerType;
import com.google.j2objc.annotations.ObjectiveCName;
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.logging.Logger;
import java.util.regex.Pattern;
import javax.lang.model.element.AnnotationMirror;
import javax.lang.model.element.ExecutableElement;
import javax.lang.model.element.PackageElement;
import javax.lang.model.element.TypeElement;
import javax.lang.model.element.VariableElement;
import javax.lang.model.type.ArrayType;
import javax.lang.model.type.TypeMirror;
/**
* Singleton service for type/method/variable name support.
*
* @author Tom Ball
*/
public class NameTable {
// Injected utilities.
private final TypeUtil typeUtil;
private final ElementUtil elementUtil;
private final CaptureInfo captureInfo;
// Explicit renames registered via setVariableName().
private final Map<VariableElement, String> variableNames = new HashMap<>();
// Memoized results of getMethodSelector(); fullNameCache presumably backs
// getFullName() (its uses are outside this chunk -- confirm).
private final Map<ExecutableElement, String> methodSelectorCache = new HashMap<>();
private final Map<TypeElement, String> fullNameCache = new HashMap<>();
// Well-known NSObject lifecycle selector names.
public static final String INIT_NAME = "init";
public static final String RETAIN_METHOD = "retain";
public static final String RELEASE_METHOD = "release";
public static final String DEALLOC_METHOD = "dealloc";
public static final String FINALIZE_METHOD = "finalize";
// The JDT compiler requires package-info files be named as "package-info",
// but that's an illegal type to generate.
public static final String PACKAGE_INFO_CLASS_NAME = "package-info";
private static final String PACKAGE_INFO_OBJC_NAME = "package_info";
// The self name in Java is reserved in Objective-C, but functionized methods
// actually want the first parameter to be self. This is an internal name,
// converted to self during generation.
public static final String SELF_NAME = "$$self$$";
public static final String ID_TYPE = "id";
private static final Logger logger = Logger.getLogger(NameTable.class.getName());
// Classpath resource listing identifiers that must not be emitted verbatim;
// entries are whitespace-separated, '#' lines are comments.
private static final String RESERVED_NAMES_FILE = "reserved_names.txt";
private static final Splitter RESERVED_NAMES_SPLITTER =
Splitter.on(CharMatcher.whitespace()).omitEmptyStrings();
/**
 * The list of predefined types, common primitive typedefs, constants and
 * variables. Loaded from a resource file.
 */
private static final ImmutableSet<String> reservedNames = loadReservedNames();
// Regex pattern for fully-qualified Java class or package names.
private static final String JAVA_CLASS_NAME_REGEX
= "(\\p{javaJavaIdentifierStart}\\p{javaJavaIdentifierPart}*\\.)*"
+ "\\p{javaJavaIdentifierStart}\\p{javaJavaIdentifierPart}*";
private static final Pattern JAVA_CLASS_NAME_PATTERN = Pattern.compile(JAVA_CLASS_NAME_REGEX);
/**
 * Loads the reserved-name list from the {@link #RESERVED_NAMES_FILE}
 * classpath resource. Lines starting with '#' are comments; remaining lines
 * are split on whitespace. A missing or unreadable resource is a packaging
 * error and is reported as an AssertionError.
 */
private static ImmutableSet<String> loadReservedNames() {
  try (InputStream stream = J2ObjC.class.getResourceAsStream(RESERVED_NAMES_FILE)) {
    if (stream == null) {
      // Fail fast with a clear message instead of an opaque NPE from the reader.
      throw new AssertionError("Failed to find resource: " + RESERVED_NAMES_FILE);
    }
    // Use the Charset constant: no charset-name lookup, no
    // UnsupportedEncodingException path.
    try (BufferedReader lines =
        new BufferedReader(new InputStreamReader(stream, StandardCharsets.UTF_8))) {
      ImmutableSet.Builder<String> builder = ImmutableSet.builder();
      String line;
      while ((line = lines.readLine()) != null) {
        if (line.startsWith("#")) {
          continue;
        }
        builder.addAll(RESERVED_NAMES_SPLITTER.split(line));
      }
      return builder.build();
    }
  } catch (IOException e) {
    throw new AssertionError(e);
  }
}
// Parameter names that collide with ObjC keywords; maybeRenameVar() appends
// "Arg" to these when they are used as parameter names.
private static final ImmutableSet<String> badParameterNames = ImmutableSet.of(
// Objective-C type qualifier keywords.
"in", "out", "inout", "oneway", "bycopy", "byref");
/**
 * List of NSObject message names. Java methods with one of these names are
 * renamed to avoid unintentional overriding. Message names with trailing
 * colons are not included since they can't be overridden. For example,
 * "public boolean isEqual(Object o)" would be translated as
 * "- (BOOL)isEqualWithObject:(NSObject *)o", not NSObject's "isEqual:".
 */
private static final ImmutableSet<String> nsObjectMessages = ImmutableSet.of(
"alloc", "attributeKeys", "autoContentAccessingProxy", "autorelease",
"classCode", "classDescription", "classForArchiver",
"classForKeyedArchiver", "classFallbacksForKeyedArchiver",
"classForPortCoder", "className", "copy", "dealloc", "description",
"hash", "init", "initialize", "isProxy", "load", "mutableCopy", "new",
"release", "retain", "retainCount", "scriptingProperties", "self",
"superclass", "toManyRelationshipKeys", "toOneRelationshipKeys",
"version");
/**
 * Map of package names to their specified prefixes. Multiple packages
 * can share a prefix; for example, the com.google.common packages in
 * Guava could share a "GG" (Google Guava) or simply "Guava" prefix.
 */
private final PackagePrefixes prefixMap;
// User-specified class and method renames from the mappings files.
private final ImmutableMap<String, String> classMappings;
private final ImmutableMap<String, String> methodMappings;
/**
 * Creates a name table.
 *
 * @param typeUtil    type utilities; also supplies the element utilities
 * @param captureInfo implicit-parameter info used for constructor selectors
 * @param options     translation options supplying prefix and mapping tables
 */
public NameTable(TypeUtil typeUtil, CaptureInfo captureInfo, Options options) {
this.typeUtil = typeUtil;
this.elementUtil = typeUtil.elementUtil();
this.captureInfo = captureInfo;
prefixMap = options.getPackagePrefixes();
classMappings = options.getMappings().getClassMappings();
methodMappings = options.getMappings().getMethodMappings();
}
// Records (or overrides) the rename chosen for a variable. Overriding an
// existing, different rename is logged to help debug name-collision issues.
public void setVariableName(VariableElement var, String name) {
  String existing = variableNames.get(var);
  if (existing != null && !existing.equals(name)) {
    logger.fine(String.format("Changing previous rename for variable: %s. Was: %s, now: %s",
        var.toString(), existing, name));
  }
  variableNames.put(var, name);
}
/**
 * Gets the variable name without any qualifying class name or other prefix
 * or suffix attached.
 */
public String getVariableBaseName(VariableElement var) {
// Globals are allowed to keep reserved names (allowReservedName=true).
return getVarBaseName(var, ElementUtil.isGlobalVar(var));
}
/**
 * Gets the name of the accessor method for a static variable.
 */
public String getStaticAccessorName(VariableElement var) {
// Accessor names may never keep a reserved name, so renaming always applies.
return getVarBaseName(var, false);
}
// Resolves a variable's base name: an explicit rename recorded via
// setVariableName() wins; otherwise the declared name, optionally adjusted
// away from reserved/bad-parameter names unless reserved names are allowed.
private String getVarBaseName(VariableElement var, boolean allowReservedName) {
  String chosen = variableNames.get(var);
  if (chosen == null) {
    chosen = ElementUtil.getName(var);
    if (!allowReservedName) {
      chosen = maybeRenameVar(var, chosen);
      // The internal $$self$$ marker becomes the real "self" here.
      if (chosen.equals(SELF_NAME)) {
        chosen = "self";
      }
    }
  }
  return chosen;
}
// Adjusts a variable name away from collisions: reserved names get an
// underscore suffix; parameter names matching ObjC type qualifier keywords
// get an "Arg" suffix.
private static String maybeRenameVar(VariableElement var, String name) {
  if (isReservedName(name)) {
    return name + '_';
  }
  if (ElementUtil.isParameter(var) && badParameterNames.contains(name)) {
    return name + "Arg";
  }
  return name;
}
/**
 * Gets the variable or parameter name that should be used in a doc-comment.
 * This may be wrong if a variable is renamed by a translation phase, but will
 * handle all the reserved and bad parameter renamings correctly.
 */
public static String getDocCommentVariableName(VariableElement var) {
// Static: deliberately ignores instance-level renames in variableNames.
return maybeRenameVar(var, ElementUtil.getName(var));
}
/**
 * Gets the non-qualified variable name, with underscore suffix.
 * Only instance fields (not globals) receive the trailing underscore.
 */
public String getVariableShortName(VariableElement var) {
  final String baseName = getVariableBaseName(var);
  final boolean instanceField = var.getKind().isField() && !ElementUtil.isGlobalVar(var);
  return instanceField ? baseName + '_' : baseName;
}
/**
 * Gets the name of the variable as it is declared in ObjC, fully qualified.
 * Globals are prefixed with their declaring class; enum constants use the
 * JreEnum macro.
 */
public String getVariableQualifiedName(VariableElement var) {
  final String shortName = getVariableShortName(var);
  if (!ElementUtil.isGlobalVar(var)) {
    return shortName;
  }
  final String className = getFullName(ElementUtil.getDeclaringClass(var));
  if (ElementUtil.isEnumConstant(var)) {
    // Enums are declared in an array, so we use a macro to shorten the
    // array access expression.
    return "JreEnum(" + className + ", " + shortName + ")";
  }
  return className + '_' + shortName;
}
/**
 * Returns the name of an annotation property variable, extracted from its accessor element.
 */
public static String getAnnotationPropertyName(ExecutableElement element) {
// Annotation properties are represented by their accessor methods.
return getMethodName(element);
}
/**
 * Capitalize the first letter of a string. Empty strings are returned
 * unchanged.
 */
public static String capitalize(String s) {
  if (s.isEmpty()) {
    return s;
  }
  return Character.toUpperCase(s.charAt(0)) + s.substring(1);
}
/**
 * Given a period-separated name, return as a camel-cased type name. For
 * example, java.util.logging.Level is returned as JavaUtilLoggingLevel.
 */
public static String camelCaseQualifiedName(String fqn) {
  final StringBuilder result = new StringBuilder();
  // Capitalize each dot-separated segment and concatenate.
  for (String segment : fqn.split("\\.")) {
    result.append(capitalize(segment));
  }
  return result.toString();
}
/**
 * Given a path, return as a camel-cased name. Used, for example, in header guards.
 */
public static String camelCasePath(String fqn) {
  // The platform separator must be quoted: on Windows it is a regex
  // metacharacter ('\').
  final String separator = Pattern.quote(File.separator);
  final StringBuilder buffer = new StringBuilder();
  for (String piece : fqn.split(separator)) {
    buffer.append(capitalize(piece));
  }
  return buffer.toString();
}
// Matches selectors falling into an ObjC "method family" (new/copy/alloc/
// init/mutableCopy, optionally preceded by underscores). Such methods
// presumably need an objc_method_family(none) attribute emitted -- the
// emission site is outside this chunk; confirm there.
private static final Pattern FAMILY_METHOD_REGEX =
Pattern.compile("^[_]*(new|copy|alloc|init|mutableCopy).*");
public static boolean needsObjcMethodFamilyNoneAttribute(String name) {
return FAMILY_METHOD_REGEX.matcher(name).matches();
}
// Computes the type keyword used for a parameter when building a selector,
// e.g. a primitive name, a full ObjC class name, or "id" for plain objects;
// arrays append "Array" plus the dimension count when greater than one.
private String getParameterTypeKeyword(TypeMirror type) {
// Strip array dimensions, remembering how many there were.
int arrayDimensions = 0;
while (TypeUtil.isArray(type)) {
type = ((ArrayType) type).getComponentType();
arrayDimensions++;
}
String name;
if (type.getKind().isPrimitive()) {
name = TypeUtil.getName(type);
} else {
// For type variables, use the first bound for the parameter keyword.
List<? extends TypeMirror> bounds = typeUtil.getUpperBounds(type);
TypeElement elem = bounds.isEmpty()
? TypeUtil.NS_OBJECT : typeUtil.getObjcClass(bounds.get(0));
assert elem != null;
if (arrayDimensions == 0 && elem.equals(TypeUtil.NS_OBJECT)) {
// Special case: Non-array object types become "id".
return ID_TYPE;
}
name = getFullName(elem);
}
if (arrayDimensions > 0) {
name += "Array";
if (arrayDimensions > 1) {
name += arrayDimensions;
}
}
return name;
}
// Builds the "withType" selector segment for a single parameter type.
private String parameterKeyword(TypeMirror type) {
return "with" + capitalize(getParameterTypeKeyword(type));
}
// A valid ObjC selector is either a bare identifier or one or more
// colon-terminated segments.
private static final Pattern SELECTOR_VALIDATOR = Pattern.compile("\\w+|(\\w+:)+");
// Reports an invalid user-specified selector via ErrorUtil (does not throw).
private static void validateMethodSelector(String selector) {
if (!SELECTOR_VALIDATOR.matcher(selector).matches()) {
ErrorUtil.error("Invalid method selector: " + selector);
}
}
// Derives the default ObjC name of a method: constructors all map to "init";
// other reserved names get a double-underscore suffix to avoid collisions.
private static String getMethodName(ExecutableElement method) {
  if (ElementUtil.isConstructor(method)) {
    return "init";
  }
  final String name = ElementUtil.getName(method);
  return isReservedName(name) ? name + "__" : name;
}
  /**
   * Appends one parameter keyword plus the delimiter to sb. The first keyword is
   * capitalized so it reads as part of the method name. Always returns false,
   * which becomes the caller's new "first" flag.
   */
  private boolean appendParamKeyword(
      StringBuilder sb, TypeMirror paramType, char delim, boolean first) {
    String keyword = parameterKeyword(paramType);
    if (first) {
      keyword = capitalize(keyword);
    }
    sb.append(keyword).append(delim);
    return false;
  }

  /**
   * Appends a keyword for each parameter to the given base name, separated by
   * delim. For constructors, implicit parameters supplied by captureInfo
   * (presumably captured outer-scope values — confirm against CaptureInfo)
   * surround the declared parameter list.
   */
  private String addParamNames(ExecutableElement method, String name, char delim) {
    StringBuilder sb = new StringBuilder(name);
    boolean first = true;
    TypeElement declaringClass = ElementUtil.getDeclaringClass(method);
    if (ElementUtil.isConstructor(method)) {
      for (VariableElement param : captureInfo.getImplicitPrefixParams(declaringClass)) {
        first = appendParamKeyword(sb, param.asType(), delim, first);
      }
    }
    for (VariableElement param : method.getParameters()) {
      first = appendParamKeyword(sb, param.asType(), delim, first);
    }
    if (ElementUtil.isConstructor(method)) {
      for (VariableElement param : captureInfo.getImplicitPostfixParams(declaringClass)) {
        first = appendParamKeyword(sb, param.asType(), delim, first);
      }
    }
    return sb.toString();
  }
  /** Returns the Objective-C selector for a method, caching the result. */
  public String getMethodSelector(ExecutableElement method) {
    String selector = methodSelectorCache.get(method);
    if (selector != null) {
      return selector;
    }
    selector = getMethodSelectorInner(method);
    methodSelectorCache.put(method, selector);
    return selector;
  }

  /**
   * Computes a method's selector: an explicit selector wins; otherwise an
   * instance method defers to the method it originally overrides, then any
   * rename (mapping file or annotation) or the default method name is
   * expanded with parameter keywords.
   */
  private String getMethodSelectorInner(ExecutableElement method) {
    String selector = ElementUtil.getSelector(method);
    if (selector != null) {
      return selector;
    }
    if (ElementUtil.isInstanceMethod(method)) {
      // Overriding methods must share the selector of the method they override.
      method = getOriginalMethod(method);
    }
    selector = getRenamedMethodName(method);
    return selectorForMethodName(method, selector != null ? selector : getMethodName(method));
  }

  /**
   * Returns the renamed name for a method from the mapping file or its
   * ObjectiveCName annotation, or null if it has not been renamed.
   */
  private String getRenamedMethodName(ExecutableElement method) {
    String selector = methodMappings.get(Mappings.getMethodKey(method, typeUtil));
    if (selector != null) {
      validateMethodSelector(selector);
      return selector;
    }
    // Annotation values are validated by getMethodNameFromAnnotation itself.
    selector = getMethodNameFromAnnotation(method);
    if (selector != null) {
      return selector;
    }
    return null;
  }
public String selectorForMethodName(ExecutableElement method, String name) {
if (name.contains(":")) {
return name;
}
return addParamNames(method, name, ':');
}
  /**
   * Returns a "Type_method" function name for static methods, such as from
   * enum types. A combination of classname plus modified selector is
   * guaranteed to be unique within the app.
   */
  public String getFullFunctionName(ExecutableElement method) {
    return getFullName(ElementUtil.getDeclaringClass(method)) + '_' + getFunctionName(method);
  }

  /**
   * Returns the name of the allocating constructor that returns a retained
   * object. The name will take the form of "new_TypeName_ConstructorName".
   */
  public String getAllocatingConstructorName(ExecutableElement method) {
    return "new_" + getFullFunctionName(method);
  }

  /**
   * Returns the name of the allocating constructor that returns a released
   * object. The name will take the form of "create_TypeName_ConstructorName".
   */
  public String getReleasingConstructorName(ExecutableElement method) {
    return "create_" + getFullFunctionName(method);
  }
/**
* Returns an appropriate name to use for this method as a function. This name
* is guaranteed to be unique within the declaring class, if no methods in the
* class have a renaming. The returned name should be given an appropriate
* prefix to avoid collisions with methods from other classes.
*/
public String getFunctionName(ExecutableElement method) {
String name = ElementUtil.getSelector(method);
if (name == null) {
name = getRenamedMethodName(method);
}
if (name != null) {
return name.replaceAll(":", "_");
} else {
return addParamNames(method, getMethodName(method), '_');
}
}
  /**
   * Returns the selector specified by a method's ObjectiveCName annotation
   * after validating it, or null if the method is not annotated.
   */
  public static String getMethodNameFromAnnotation(ExecutableElement method) {
    AnnotationMirror annotation = ElementUtil.getAnnotation(method, ObjectiveCName.class);
    if (annotation != null) {
      String value = (String) ElementUtil.getAnnotationValue(annotation, "value");
      validateMethodSelector(value);
      return value;
    }
    return null;
  }

  /** Convenience overload that starts the search at the method's declaring class. */
  private ExecutableElement getOriginalMethod(ExecutableElement method) {
    TypeElement declaringClass = ElementUtil.getDeclaringClass(method);
    return getOriginalMethod(method, declaringClass, declaringClass);
  }
  /**
   * Finds the original method element to use for generating a selector. The method returned is the
   * first method found in the hierarchy while traversing in order of declared inheritance that
   * doesn't override a method from a supertype. (ie. it is the first leaf node found in the tree of
   * overriding methods)
   */
  private ExecutableElement getOriginalMethod(
      ExecutableElement topMethod, TypeElement declaringClass, TypeElement currentType) {
    if (currentType == null) {
      return null;
    }
    // TODO(tball): simplify to ElementUtil.getSuperclass() when javac update is complete.
    TypeElement superclass = currentType.getKind().isInterface()
        ? typeUtil.getJavaObject() : ElementUtil.getSuperclass(currentType);
    // Recurse up the superclass chain first, then the declared interfaces, so
    // the most distant ancestor declaring the method wins.
    ExecutableElement original = getOriginalMethod(topMethod, declaringClass, superclass);
    if (original != null) {
      return original;
    }
    for (TypeMirror supertype : currentType.getInterfaces()) {
      original = getOriginalMethod(topMethod, declaringClass, TypeUtil.asTypeElement(supertype));
      if (original != null) {
        return original;
      }
    }
    // Reaching the declaring class without a supertype hit means topMethod
    // itself is the leaf.
    if (declaringClass == currentType) {
      return topMethod;
    }
    for (ExecutableElement candidate : ElementUtil.getMethods(currentType)) {
      if (ElementUtil.isInstanceMethod(candidate)
          && elementUtil.overrides(topMethod, candidate, declaringClass)) {
        return candidate;
      }
    }
    return null;
  }
/**
* Converts a Java type to an equivalent Objective-C type, returning "id" for an object type.
*/
public static String getPrimitiveObjCType(TypeMirror type) {
return TypeUtil.isVoid(type) ? "void"
: type.getKind().isPrimitive() ? "j" + TypeUtil.getName(type) : "id";
}
  /**
   * Convert a Java type to an equivalent Objective-C type with type variables
   * resolved to their bounds.
   */
  public String getObjCType(TypeMirror type) {
    return getObjcTypeInner(type, null);
  }

  /** Like {@link #getObjCType(TypeMirror)}, but applies the variable's type qualifiers. */
  public String getObjCType(VariableElement var) {
    return getObjcTypeInner(var.asType(), ElementUtil.getTypeQualifiers(var));
  }

  /**
   * Convert a Java type into the equivalent JNI type.
   */
  public String getJniType(TypeMirror type) {
    if (TypeUtil.isPrimitiveOrVoid(type)) {
      return getPrimitiveObjCType(type);
    } else if (TypeUtil.isArray(type)) {
      return "jarray";
    } else if (typeUtil.isString(type)) {
      return "jstring";
    } else if (typeUtil.isClassType(type)) {
      return "jclass";
    }
    // Any other reference type maps to the generic JNI object handle.
    return "jobject";
  }
  /**
   * Builds the Objective-C type string for a Java type, applying any C type
   * qualifiers. For pointer types, qualifier text before a '*' belongs to the
   * pointee and text after it to the pointer itself.
   */
  private String getObjcTypeInner(TypeMirror type, String qualifiers) {
    String objcType;
    if (type instanceof NativeType) {
      objcType = ((NativeType) type).getName();
    } else if (type instanceof PointerType) {
      String pointeeQualifiers = null;
      if (qualifiers != null) {
        int idx = qualifiers.indexOf('*');
        if (idx != -1) {
          // Split the qualifier string around the '*': the left side qualifies
          // the pointee, the right side qualifies this pointer level.
          pointeeQualifiers = qualifiers.substring(0, idx);
          qualifiers = qualifiers.substring(idx + 1);
        }
      }
      objcType = getObjcTypeInner(((PointerType) type).getPointeeType(), pointeeQualifiers);
      // Join consecutive stars without a space ("int **"), otherwise "Foo *".
      objcType = objcType.endsWith("*") ? objcType + "*" : objcType + " *";
    } else if (TypeUtil.isPrimitiveOrVoid(type)) {
      objcType = getPrimitiveObjCType(type);
    } else {
      objcType = constructObjcTypeFromBounds(type);
    }
    if (qualifiers != null) {
      qualifiers = qualifiers.trim();
      if (!qualifiers.isEmpty()) {
        objcType += " " + qualifiers;
      }
    }
    return objcType;
  }
  /**
   * Renders a reference type from its Objective-C upper bounds: at most one
   * class bound (or "id" when there is none) plus a protocol list built from
   * the interface bounds, e.g. "id<JavaLangRunnable>".
   */
  private String constructObjcTypeFromBounds(TypeMirror type) {
    String classType = null;
    List<String> interfaces = new ArrayList<>();
    for (TypeElement bound : typeUtil.getObjcUpperBounds(type)) {
      if (bound.getKind().isInterface()) {
        interfaces.add(getFullName(bound));
      } else {
        assert classType == null : "Cannot have multiple class bounds";
        classType = getFullName(bound);
      }
    }
    String protocols = interfaces.isEmpty() ? "" : "<" + Joiner.on(", ").join(interfaces) + ">";
    // "id" is already a pointer, so only class types get the trailing " *".
    return classType == null ? ID_TYPE + protocols : classType + protocols + " *";
  }
public static String getNativeEnumName(String typeName) {
return typeName + "_Enum";
}
  /**
   * Return the full name of a type, including its package. For outer types,
   * is the type's full name; for example, java.lang.Object's full name is
   * "JavaLangObject". For inner classes, the full name is their outer class'
   * name plus the inner class name; for example, java.util.ArrayList.ListItr's
   * name is "JavaUtilArrayList_ListItr".
   */
  public String getFullName(TypeElement element) {
    // Normalize to the Objective-C class before consulting the cache so all
    // aliases of a type share a single entry.
    element = typeUtil.getObjcClass(element);
    String fullName = fullNameCache.get(element);
    if (fullName == null) {
      fullName = getFullNameImpl(element);
      fullNameCache.put(element, fullName);
    }
    return fullName;
  }
  /**
   * Computes a type's full name, uncached. Resolution order: package-info
   * special case, ObjectiveCName annotation, declaring-class prefix for nested
   * types, mapping-file entry, then camel-cased package prefix plus type name.
   */
  private String getFullNameImpl(TypeElement element) {
    // Avoid package prefix renaming for package-info types, and use a valid ObjC name that doesn't
    // have a dash character.
    if (ElementUtil.isPackageInfo(element)) {
      return camelCaseQualifiedName(ElementUtil.getName(ElementUtil.getPackage(element)))
          + PACKAGE_INFO_OBJC_NAME;
    }
    // Use ObjectiveCName annotation, if it exists.
    AnnotationMirror annotation = ElementUtil.getAnnotation(element, ObjectiveCName.class);
    if (annotation != null) {
      return (String) ElementUtil.getAnnotationValue(annotation, "value");
    }
    // Nested types are scoped by their declaring class, e.g. "Outer_Inner".
    TypeElement outerClass = ElementUtil.getDeclaringClass(element);
    if (outerClass != null) {
      return getFullName(outerClass) + '_' + getTypeSubName(element);
    }
    // Use mapping file entry, if it exists.
    String mappedName = classMappings.get(ElementUtil.getQualifiedName(element));
    if (mappedName != null) {
      return mappedName;
    }
    // Use camel-cased package+class name.
    return getPrefix(ElementUtil.getPackage(element)) + getTypeSubName(element);
  }
  /**
   * Returns the last component of a type's name: lambdas use their synthetic
   * name, local types use the tail of their binary name (retaining the numeric
   * disambiguator, e.g. "1Local"), and everything else is the simple name with
   * '$' replaced by '_'.
   */
  private String getTypeSubName(TypeElement element) {
    if (ElementUtil.isLambda(element)) {
      return ElementUtil.getName(element);
    } else if (ElementUtil.isLocal(element)) {
      String binaryName = elementUtil.getBinaryName(element);
      // Anonymous classes have no simple name, so start at the end of the
      // binary name; otherwise start at the simple name's last occurrence.
      int innerClassIndex = ElementUtil.isAnonymous(element)
          ? binaryName.length() : binaryName.lastIndexOf(ElementUtil.getName(element));
      // Scan back to just past the enclosing '$' so any numeric prefix
      // (e.g. the "1" in "Outer$1Local") is kept in the result.
      while (innerClassIndex > 0 && binaryName.charAt(innerClassIndex - 1) != '$') {
        --innerClassIndex;
      }
      return binaryName.substring(innerClassIndex);
    }
    return ElementUtil.getName(element).replace('$', '_');
  }
  /** Returns true if the name clashes with a reserved name or an NSObject message. */
  private static boolean isReservedName(String name) {
    return reservedNames.contains(name) || nsObjectMessages.contains(name);
  }

  /** Returns the Objective-C name prefix configured for the given package. */
  private String getPrefix(PackageElement packageElement) {
    return prefixMap.getPrefix(packageElement);
  }

  /** Ignores the ObjectiveCName annotation. */
  private String getDefaultObjectiveCName(TypeElement element) {
    // Camel-case the binary name, then make nested-class separators valid ObjC.
    String binaryName = elementUtil.getBinaryName(element);
    return camelCaseQualifiedName(binaryName).replace('$', '_');
  }
  /**
   * Returns a J2OBJC_NAME_MAPPING entry describing how this type was renamed,
   * or empty when the default Objective-C name was used. Explicit renames
   * (annotation or mapping file) map the type's binary name; a package rename
   * is emitted once per generation unit via a public top-level class.
   */
  public Optional<String> getNameMapping(TypeElement typeElement, String typeName) {
    final String mappingFormat = "J2OBJC_NAME_MAPPING(%s, \"%s\", \"%s\")\n";
    // No mapping is needed if the default Objective-C name was not modified.
    if (typeName.equals(getDefaultObjectiveCName(typeElement))) {
      return Optional.empty();
    }
    // Return a class mapping only if there is an explicit rename.
    AnnotationMirror annotation = ElementUtil.getAnnotation(typeElement, ObjectiveCName.class);
    String mappedName = classMappings.get(ElementUtil.getQualifiedName(typeElement));
    if (annotation != null || mappedName != null) {
      return Optional.of(
          String.format(mappingFormat, typeName, elementUtil.getBinaryName(typeElement), typeName));
    }
    // Otherwise, there was a package rename. Because only one package mapping is needed per
    // generation unit, it is safe to generate it together with a public class.
    if (ElementUtil.isTopLevel(typeElement) && ElementUtil.isPublic(typeElement)) {
      PackageElement packageElement = ElementUtil.getPackage(typeElement);
      String packageName = packageElement.getQualifiedName().toString();
      String mappedPackageName = getPrefix(packageElement);
      return Optional.of(
          String.format(mappingFormat, typeName, packageName, mappedPackageName));
    }
    return Optional.empty();
  }
  /**
   * Verifies that a fully-qualified class name is lexically correct. This method does
   * not check whether the class actually exists, however. It also will return true for
   * valid package names, since they cannot be distinguished except by parsing context.
   */
  public static boolean isValidClassName(String className) {
    // Purely lexical: the whole string must match the class-name grammar.
    return JAVA_CLASS_NAME_PATTERN.matcher(className).matches();
  }
}
| |
// Copyright (C) 2008 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.server.project;
import com.google.common.base.Charsets;
import com.google.common.base.Function;
import com.google.common.base.Predicate;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.io.Files;
import com.google.gerrit.common.data.AccessSection;
import com.google.gerrit.common.data.GroupReference;
import com.google.gerrit.common.data.LabelType;
import com.google.gerrit.common.data.LabelTypes;
import com.google.gerrit.common.data.Permission;
import com.google.gerrit.common.data.PermissionRule;
import com.google.gerrit.reviewdb.client.AccountGroup;
import com.google.gerrit.reviewdb.client.Project;
import com.google.gerrit.reviewdb.client.Project.InheritableBoolean;
import com.google.gerrit.rules.PrologEnvironment;
import com.google.gerrit.rules.RulesCache;
import com.google.gerrit.server.CurrentUser;
import com.google.gerrit.server.account.CapabilityCollection;
import com.google.gerrit.server.account.GroupMembership;
import com.google.gerrit.server.config.AllProjectsName;
import com.google.gerrit.server.config.SitePaths;
import com.google.gerrit.server.git.GitRepositoryManager;
import com.google.gerrit.server.git.ProjectConfig;
import com.google.inject.Inject;
import com.google.inject.assistedinject.Assisted;
import com.googlecode.prolog_cafe.compiler.CompileException;
import com.googlecode.prolog_cafe.lang.PrologMachineCopy;
import org.eclipse.jgit.lib.Ref;
import org.eclipse.jgit.lib.Repository;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
 * Cached information on a project.
 *
 * <p>Instances are created per loaded {@link ProjectConfig} revision and held by
 * {@link ProjectCache}. Lazily computed members ({@code rulesMachine},
 * {@code localAccessSections}, {@code theme}) are volatile; concurrent
 * recomputation is possible but benign since the results are deterministic.
 */
public class ProjectState {
  private static final Logger log =
      LoggerFactory.getLogger(ProjectState.class);

  /** Assisted-injection factory; only the config varies per instance. */
  public interface Factory {
    ProjectState create(ProjectConfig config);
  }

  private final boolean isAllProjects;
  private final SitePaths sitePaths;
  private final AllProjectsName allProjectsName;
  private final ProjectCache projectCache;
  private final ProjectControl.AssistedFactory projectControlFactory;
  private final PrologEnvironment.Factory envFactory;
  private final GitRepositoryManager gitMgr;
  private final RulesCache rulesCache;
  private final List<CommentLinkInfo> commentLinks;

  private final ProjectConfig config;

  // Group UUIDs granted Permission.OWNER on AccessSection.ALL in this
  // project's own config (not inherited).
  private final Set<AccountGroup.UUID> localOwners;

  /** Prolog rule state. */
  private volatile PrologMachineCopy rulesMachine;

  /** Last system time the configuration's revision was examined. */
  private volatile long lastCheckTime;

  /** Local access sections, wrapped in SectionMatchers for faster evaluation. */
  private volatile List<SectionMatcher> localAccessSections;

  /** Theme information loaded from site_path/themes. */
  private volatile ThemeInfo theme;

  /** If this is all projects, the capabilities used by the server. */
  private final CapabilityCollection capabilities;

  @Inject
  public ProjectState(
      final SitePaths sitePaths,
      final ProjectCache projectCache,
      final AllProjectsName allProjectsName,
      final ProjectControl.AssistedFactory projectControlFactory,
      final PrologEnvironment.Factory envFactory,
      final GitRepositoryManager gitMgr,
      final RulesCache rulesCache,
      final List<CommentLinkInfo> commentLinks,
      @Assisted final ProjectConfig config) {
    this.sitePaths = sitePaths;
    this.projectCache = projectCache;
    this.isAllProjects = config.getProject().getNameKey().equals(allProjectsName);
    this.allProjectsName = allProjectsName;
    this.projectControlFactory = projectControlFactory;
    this.envFactory = envFactory;
    this.gitMgr = gitMgr;
    this.rulesCache = rulesCache;
    this.commentLinks = commentLinks;
    this.config = config;
    // Global capabilities only exist on the All-Projects config.
    this.capabilities = isAllProjects
        ? new CapabilityCollection(config.getAccessSection(AccessSection.GLOBAL_CAPABILITIES))
        : null;
    // Precompute the locally-declared owner groups from the "refs/*" section.
    if (isAllProjects && !Permission.canBeOnAllProjects(AccessSection.ALL, Permission.OWNER)) {
      localOwners = Collections.emptySet();
    } else {
      HashSet<AccountGroup.UUID> groups = new HashSet<AccountGroup.UUID>();
      AccessSection all = config.getAccessSection(AccessSection.ALL);
      if (all != null) {
        Permission owner = all.getPermission(Permission.OWNER);
        if (owner != null) {
          for (PermissionRule rule : owner.getRules()) {
            GroupReference ref = rule.getGroup();
            if (ref.getUUID() != null) {
              groups.add(ref.getUUID());
            }
          }
        }
      }
      localOwners = Collections.unmodifiableSet(groups);
    }
  }

  /**
   * Returns true if the cached configuration should be reloaded. A positive
   * generation number lets all callers within one generation share a single
   * revision check; a non-positive generation forces the check every time.
   */
  boolean needsRefresh(long generation) {
    if (generation <= 0) {
      return isRevisionOutOfDate();
    }
    if (lastCheckTime != generation) {
      lastCheckTime = generation;
      return isRevisionOutOfDate();
    }
    return false;
  }

  /**
   * Compares the cached config revision against the repository's current
   * refs/meta/config. Any failure to open or read the repository is treated
   * as out-of-date so the cache reloads rather than serving stale data.
   */
  private boolean isRevisionOutOfDate() {
    try {
      Repository git = gitMgr.openRepository(getProject().getNameKey());
      try {
        Ref ref = git.getRef(GitRepositoryManager.REF_CONFIG);
        if (ref == null || ref.getObjectId() == null) {
          return true;
        }
        return !ref.getObjectId().equals(config.getRevision());
      } finally {
        git.close();
      }
    } catch (IOException gone) {
      return true;
    }
  }

  /**
   * @return cached computation of all global capabilities. This should only be
   *         invoked on the state from {@link ProjectCache#getAllProjects()}.
   *         Null on any other project.
   */
  public CapabilityCollection getCapabilityCollection() {
    return capabilities;
  }

  /** @return Construct a new PrologEnvironment for the calling thread. */
  public PrologEnvironment newPrologEnvironment() throws CompileException {
    // Lazily load the rules machine; a concurrent duplicate load is harmless.
    PrologMachineCopy pmc = rulesMachine;
    if (pmc == null) {
      pmc = rulesCache.loadMachine(
          getProject().getNameKey(),
          config.getRulesId());
      rulesMachine = pmc;
    }
    return envFactory.create(pmc);
  }

  /**
   * Like {@link #newPrologEnvironment()} but instead of reading the rules.pl
   * read the provided input stream.
   *
   * @param name a name of the input stream. Could be any name.
   * @param in InputStream to read prolog rules from
   * @throws CompileException
   */
  public PrologEnvironment newPrologEnvironment(String name, InputStream in)
      throws CompileException {
    PrologMachineCopy pmc = rulesCache.loadMachine(name, in);
    return envFactory.create(pmc);
  }

  public Project getProject() {
    return config.getProject();
  }

  public ProjectConfig getConfig() {
    return config;
  }

  /** Get the sections that pertain only to this project. */
  List<SectionMatcher> getLocalAccessSections() {
    // Lazily computed; a concurrent duplicate computation is harmless.
    List<SectionMatcher> sm = localAccessSections;
    if (sm == null) {
      Collection<AccessSection> fromConfig = config.getAccessSections();
      sm = new ArrayList<SectionMatcher>(fromConfig.size());
      for (AccessSection section : fromConfig) {
        if (isAllProjects) {
          // On All-Projects, drop permissions that may not be set there.
          List<Permission> copy =
              Lists.newArrayListWithCapacity(section.getPermissions().size());
          for (Permission p : section.getPermissions()) {
            if (Permission.canBeOnAllProjects(section.getName(), p.getName())) {
              copy.add(p);
            }
          }
          section = new AccessSection(section.getName());
          section.setPermissions(copy);
        }
        SectionMatcher matcher = SectionMatcher.wrap(getProject().getNameKey(),
            section);
        if (matcher != null) {
          sm.add(matcher);
        }
      }
      localAccessSections = sm;
    }
    return sm;
  }

  /**
   * Obtain all local and inherited sections. This collection is looked up
   * dynamically and is not cached. Callers should try to cache this result
   * per-request as much as possible.
   */
  List<SectionMatcher> getAllSections() {
    if (isAllProjects) {
      return getLocalAccessSections();
    }
    List<SectionMatcher> all = Lists.newArrayList();
    for (ProjectState s : tree()) {
      all.addAll(s.getLocalAccessSections());
    }
    return all;
  }

  /**
   * @return all {@link AccountGroup}'s to which the owner privilege for
   *         'refs/*' is assigned for this project (the local owners), if there
   *         are no local owners the local owners of the nearest parent project
   *         that has local owners are returned
   */
  public Set<AccountGroup.UUID> getOwners() {
    for (ProjectState p : tree()) {
      if (!p.localOwners.isEmpty()) {
        return p.localOwners;
      }
    }
    return Collections.emptySet();
  }

  /**
   * @return true if any of the groups listed in {@code groups} was declared to
   *         be an owner of this project, or one of its parent projects..
   */
  boolean isOwner(final GroupMembership groups) {
    return Iterables.any(tree(), new Predicate<ProjectState>() {
      @Override
      public boolean apply(ProjectState in) {
        return groups.containsAnyOf(in.localOwners);
      }
    });
  }

  /** Returns the access control view of this project for the given user. */
  public ProjectControl controlFor(final CurrentUser user) {
    return projectControlFactory.create(user, this);
  }

  /**
   * @return an iterable that walks through this project and then the parents of
   *         this project. Starts from this project and progresses up the
   *         hierarchy to All-Projects.
   */
  public Iterable<ProjectState> tree() {
    return new Iterable<ProjectState>() {
      @Override
      public Iterator<ProjectState> iterator() {
        return new ProjectHierarchyIterator(
            projectCache, allProjectsName,
            ProjectState.this);
      }
    };
  }

  /**
   * @return an iterable that walks in-order from All-Projects through the
   *         project hierarchy to this project.
   */
  public Iterable<ProjectState> treeInOrder() {
    List<ProjectState> projects = Lists.newArrayList(tree());
    Collections.reverse(projects);
    return projects;
  }

  /**
   * @return an iterable that walks through the parents of this project. Starts
   *         from the immediate parent of this project and progresses up the
   *         hierarchy to All-Projects.
   */
  public Iterable<ProjectState> parents() {
    return Iterables.skip(tree(), 1);
  }

  public boolean isAllProjects() {
    return isAllProjects;
  }

  public boolean isUseContributorAgreements() {
    return getInheritableBoolean(new Function<Project, InheritableBoolean>() {
      @Override
      public InheritableBoolean apply(Project input) {
        return input.getUseContributorAgreements();
      }
    });
  }

  public boolean isUseContentMerge() {
    return getInheritableBoolean(new Function<Project, InheritableBoolean>() {
      @Override
      public InheritableBoolean apply(Project input) {
        return input.getUseContentMerge();
      }
    });
  }

  public boolean isUseSignedOffBy() {
    return getInheritableBoolean(new Function<Project, InheritableBoolean>() {
      @Override
      public InheritableBoolean apply(Project input) {
        return input.getUseSignedOffBy();
      }
    });
  }

  public boolean isRequireChangeID() {
    return getInheritableBoolean(new Function<Project, InheritableBoolean>() {
      @Override
      public InheritableBoolean apply(Project input) {
        return input.getRequireChangeID();
      }
    });
  }

  /**
   * Collects label types over the hierarchy from All-Projects down. A child's
   * definition replaces the parent's only if the parent allows overriding;
   * label types with no defined values are dropped from the result.
   */
  public LabelTypes getLabelTypes() {
    Map<String, LabelType> types = Maps.newLinkedHashMap();
    for (ProjectState s : treeInOrder()) {
      for (LabelType type : s.getConfig().getLabelSections().values()) {
        String lower = type.getName().toLowerCase();
        LabelType old = types.get(lower);
        if (old == null || old.canOverride()) {
          types.put(lower, type);
        }
      }
    }
    List<LabelType> all = Lists.newArrayListWithCapacity(types.size());
    for (LabelType type : types.values()) {
      if (!type.getValues().isEmpty()) {
        all.add(type);
      }
    }
    return new LabelTypes(Collections.unmodifiableList(all));
  }

  /**
   * Collects comment links: site-wide links first, then per-project links from
   * All-Projects down, keyed case-insensitively by name. Override-only links
   * without an existing base definition are ignored.
   */
  public List<CommentLinkInfo> getCommentLinks() {
    Map<String, CommentLinkInfo> cls = Maps.newLinkedHashMap();
    for (CommentLinkInfo cl : commentLinks) {
      cls.put(cl.name.toLowerCase(), cl);
    }
    for (ProjectState s : treeInOrder()) {
      for (CommentLinkInfo cl : s.getConfig().getCommentLinkSections()) {
        String name = cl.name.toLowerCase();
        if (cl.isOverrideOnly()) {
          CommentLinkInfo parent = cls.get(name);
          if (parent == null) {
            continue; // Ignore invalid overrides.
          }
          cls.put(name, cl.inherit(parent));
        } else {
          cls.put(name, cl);
        }
      }
    }
    return ImmutableList.copyOf(cls.values());
  }

  /**
   * Returns this project's theme, falling back to the nearest parent theme
   * when the project inherits. Uses double-checked locking on the volatile
   * {@code theme} field so the site directory is read at most once.
   */
  public ThemeInfo getTheme() {
    ThemeInfo theme = this.theme;
    if (theme == null) {
      synchronized (this) {
        theme = this.theme;
        if (theme == null) {
          theme = loadTheme();
          this.theme = theme;
        }
      }
    }
    if (theme == ThemeInfo.INHERIT) {
      ProjectState parent = Iterables.getFirst(parents(), null);
      return parent != null ? parent.getTheme() : null;
    }
    return theme;
  }

  /**
   * Reads site_path/themes/<project> from disk. A missing directory means
   * "inherit"; a non-directory or read error is logged and also inherits.
   */
  private ThemeInfo loadTheme() {
    String name = getConfig().getProject().getName();
    File dir = new File(sitePaths.themes_dir, name);
    if (!dir.exists()) {
      return ThemeInfo.INHERIT;
    } else if (!dir.isDirectory()) {
      log.warn("Bad theme for {}: not a directory", name);
      return ThemeInfo.INHERIT;
    }
    try {
      return new ThemeInfo(readFile(new File(dir, SitePaths.CSS_FILENAME)),
          readFile(new File(dir, SitePaths.HEADER_FILENAME)),
          readFile(new File(dir, SitePaths.FOOTER_FILENAME)));
    } catch (IOException e) {
      log.error("Error reading theme for " + name, e);
      return ThemeInfo.INHERIT;
    }
  }

  /** Returns the file's UTF-8 contents, or null if the file does not exist. */
  private String readFile(File f) throws IOException {
    return f.exists() ? Files.toString(f, Charsets.UTF_8) : null;
  }

  /**
   * Resolves an inheritable boolean by walking up the hierarchy: the first
   * project that answers TRUE or FALSE wins; INHERIT continues upward, and
   * the default is false if no project gives an explicit answer.
   */
  private boolean getInheritableBoolean(Function<Project, InheritableBoolean> func) {
    for (ProjectState s : tree()) {
      switch (func.apply(s.getProject())) {
        case TRUE:
          return true;
        case FALSE:
          return false;
        case INHERIT:
        default:
          continue;
      }
    }
    return false;
  }
}
| |
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.app.plugin.core.diff;
import static org.junit.Assert.*;
import java.awt.Window;
import org.junit.*;
import docking.ActionContext;
import docking.action.DockingActionIf;
import docking.action.ToggleDockingAction;
import docking.util.image.ToolIconURL;
import ghidra.app.plugin.core.codebrowser.CodeBrowserPlugin;
import ghidra.app.plugin.core.progmgr.ProgramManagerPlugin;
import ghidra.framework.main.FrontEndPlugin;
import ghidra.framework.plugintool.PluginTool;
import ghidra.framework.project.tool.GhidraTool;
import ghidra.program.database.ProgramDB;
import ghidra.test.ClassicSampleX86ProgramBuilder;
import ghidra.test.TestEnv;
/**
 * Verifies that default Diff apply settings saved in one tool session are
 * restored when the same tool is saved and launched again. A dedicated tool
 * ("MyDiffTestTool") is created so it can be closed, saved, and relaunched
 * within the test without disturbing shared test tools.
 */
public class DiffSaveSettingsTest extends DiffApplyTestAdapter {

	@Override
	@Before
	public void setUp() throws Exception {
		// Only the front-end (project) tool is opened here; the test creates and
		// launches its own Diff tool so it can be closed and re-opened mid-test.
		fixupGUI();
		env = new TestEnv();
		frontEndTool = env.showFrontEndTool();
		frontEndPlugin = getPlugin(frontEndTool, FrontEndPlugin.class);
	}

	/** Re-launches the previously saved "MyDiffTestTool" and re-binds plugin/panel fields. */
	private void launchTool() throws Exception {
		// Launch our own tool for the Diff so that we can close it and handle "Save Tool?".
		runSwing(() -> tool =
			(PluginTool) frontEndTool.getProject().getToolServices().launchTool("MyDiffTestTool",
				null));
		cb = getPlugin(tool, CodeBrowserPlugin.class);
		diffPlugin = getPlugin(tool, ProgramDiffPlugin.class);
		diffListingPanel = diffPlugin.getListingPanel();
		fp1 = cb.getFieldPanel();
		fp2 = diffListingPanel.getFieldPanel();
		openClosePgm2 = (ToggleDockingAction) getAction(diffPlugin, "Open/Close Program View");
	}

	/** Creates a brand-new "MyDiffTestTool", adds the needed plugins, and binds fields. */
	private void showNewTool() throws Exception {
		// Create our own tool for the Diff so that we can close it and handle "Save Tool?".
		runSwing(() -> {
			tool = new GhidraTool(frontEndTool.getProject(), "MyDiffTestTool");
			tool.setIconURL(new ToolIconURL("preferences-system.png"));
			tool.setVisible(true);
		});
		tool.addPlugin(ProgramManagerPlugin.class.getName());
		setUpCodeBrowserTool(tool);
		diffListingPanel = diffPlugin.getListingPanel();
		fp1 = cb.getFieldPanel();
		fp2 = diffListingPanel.getFieldPanel();
		openClosePgm2 = (ToggleDockingAction) getAction(diffPlugin, "Open/Close Program View");
	}

	@Override
	@After
	public void tearDown() {
		// Dismiss a stray program chooser before disposing the environment.
		Window win = getWindow("Select Other Program");
		if (win != null) {
			//This window should not be up, so cancel it.
			pressButton(win, "Cancel");
		}
		closeOurTool();
		env.dispose();
	}

	/** Closes the test's private tool, tolerating a tool that is already gone. */
	void closeOurTool() {
		if (tool == null) {
			return;
		}
		DockingActionIf closeToolAction = getToolAction(tool, "Close Tool");
		if (closeToolAction == null) {
			return;
		}
		performAction(closeToolAction, false);
		// getToolFrame() throws once the tool has been disposed; use that as the
		// signal that the close completed.
		try {
			tool.getToolFrame();
		}
		catch (RuntimeException e1) {
			tool = null;
			return; // The tool is closed.
		}
		tool = null;
	}

	/**
	 * Changes every Diff apply setting away from its default, saves the settings
	 * as the new defaults, and verifies they survive (1) closing and re-opening
	 * the programs and (2) saving, closing, and relaunching the tool itself.
	 */
	@Test
	public void testSaveDiffApplySettings() throws Exception {
		// String p3Name = "notepadSetup1ForDiffTest";
		// String p4Name = "notepadSetup2ForDiffTest";
		ClassicSampleX86ProgramBuilder builder = new ClassicSampleX86ProgramBuilder();
		ProgramDB p3 = builder.getProgram();
		ProgramDB p4 = builder.getProgram();
		showNewTool();
		openProgram(p3);
		openDiff(p4);
		showApplySettings();
		// Verify the shipped defaults before changing anything.
		isReplace(programContextApplyCB);
		isReplace(byteApplyCB);
		isReplace(codeUnitApplyCB);
		isReplace(refApplyCB);
		isMerge(plateCommentApplyCB);
		isMerge(preCommentApplyCB);
		isMerge(eolCommentApplyCB);
		isMerge(repeatableCommentApplyCB);
		isMerge(postCommentApplyCB);
		isMergeSetPrimary(labelApplyCB);
		isReplace(functionApplyCB);
		isReplace(bookmarkApplyCB);
		isReplace(propertiesApplyCB);
		// Change the apply settings.
		ignore(programContextApplyCB);
		ignore(byteApplyCB);
		ignore(codeUnitApplyCB);
		ignore(refApplyCB);
		replace(plateCommentApplyCB);
		replace(preCommentApplyCB);
		replace(eolCommentApplyCB);
		replace(repeatableCommentApplyCB);
		replace(postCommentApplyCB);
		merge(labelApplyCB);
		ignore(functionApplyCB);
		ignore(bookmarkApplyCB);
		ignore(propertiesApplyCB);
		// Save the settings.
		DockingActionIf saveApplySettingsAction =
			getAction(diffPlugin, "Save Default Diff Apply Settings");
		assertNotNull(saveApplySettingsAction);
		performAction(saveApplySettingsAction, true);
		// Check the settings.
		isIgnore(programContextApplyCB);
		isIgnore(byteApplyCB);
		isIgnore(codeUnitApplyCB);
		isIgnore(refApplyCB);
		isReplace(plateCommentApplyCB);
		isReplace(preCommentApplyCB);
		isReplace(eolCommentApplyCB);
		isReplace(repeatableCommentApplyCB);
		isReplace(postCommentApplyCB);
		isMerge(labelApplyCB);
		isIgnore(functionApplyCB);
		isIgnore(bookmarkApplyCB);
		isIgnore(propertiesApplyCB);
		// Close all programs and re-open a Diff; the saved settings should stick.
		ProgramManagerPlugin pm = getPlugin(tool, ProgramManagerPlugin.class);
		DockingActionIf closeAllProgramAction = getAction(pm, "Close All");
		assertNotNull(closeAllProgramAction);
		ActionContext defaultContext = tool.getDefaultToolContext();
		performAction(closeAllProgramAction, defaultContext, true);
		openProgram(p3);
		openDiff(p4);
		showApplySettings();
		// Check the settings.
		isIgnore(programContextApplyCB);
		isIgnore(byteApplyCB);
		isIgnore(codeUnitApplyCB);
		isIgnore(refApplyCB);
		isReplace(plateCommentApplyCB);
		isReplace(preCommentApplyCB);
		isReplace(eolCommentApplyCB);
		isReplace(repeatableCommentApplyCB);
		isReplace(postCommentApplyCB);
		isMerge(labelApplyCB);
		isIgnore(functionApplyCB);
		isIgnore(bookmarkApplyCB);
		isIgnore(propertiesApplyCB);
		// Close the tool, saving it, then relaunch and verify the settings again.
		DockingActionIf closeToolAction = getToolAction(tool, "Close Tool");
		performAction(closeToolAction, false);
		// Save Tool? (Save)
		Window dialog = waitForWindow("Save Tool?");
		assertNotNull("Couldn't find 'Save Tool?' dialog.", dialog);
		pressButtonByText(dialog, "Save");
		launchTool();
		// Open another Diff.
		openProgram(p3);
		openDiff(p4);
		showApplySettings();
		// Check the settings.
		isIgnore(programContextApplyCB);
		isIgnore(byteApplyCB);
		isIgnore(codeUnitApplyCB);
		isIgnore(refApplyCB);
		isReplace(plateCommentApplyCB);
		isReplace(preCommentApplyCB);
		isReplace(eolCommentApplyCB);
		isReplace(repeatableCommentApplyCB);
		isReplace(postCommentApplyCB);
		isMerge(labelApplyCB);
		isIgnore(functionApplyCB);
		isIgnore(bookmarkApplyCB);
		isIgnore(propertiesApplyCB);
		closeOurTool();
	}
}
| |
/*
* Copyright 2000-2012 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.codeInsight.template.impl;
import com.intellij.codeInsight.CodeInsightBundle;
import com.intellij.codeInsight.daemon.DaemonCodeAnalyzer;
import com.intellij.codeInsight.template.EverywhereContextType;
import com.intellij.codeInsight.template.TemplateContextType;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ModalityState;
import com.intellij.openapi.command.CommandProcessor;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.EditorFactory;
import com.intellij.openapi.editor.event.DocumentAdapter;
import com.intellij.openapi.editor.event.DocumentEvent;
import com.intellij.openapi.editor.ex.EditorEx;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.popup.JBPopup;
import com.intellij.openapi.ui.popup.JBPopupAdapter;
import com.intellij.openapi.ui.popup.JBPopupFactory;
import com.intellij.openapi.ui.popup.LightweightWindowEvent;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.wm.IdeFocusManager;
import com.intellij.psi.PsiDocumentManager;
import com.intellij.psi.PsiFile;
import com.intellij.ui.*;
import com.intellij.ui.awt.RelativePoint;
import com.intellij.util.Function;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.containers.MultiMap;
import com.intellij.util.ui.GridBag;
import com.intellij.util.ui.PlatformColors;
import com.intellij.util.ui.UIUtil;
import com.intellij.util.ui.tree.TreeUtil;
import com.intellij.util.ui.update.Activatable;
import com.intellij.util.ui.update.UiNotifyConnector;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import javax.swing.tree.DefaultMutableTreeNode;
import javax.swing.tree.DefaultTreeModel;
import javax.swing.tree.TreePath;
import java.awt.*;
import java.awt.event.*;
import java.util.*;
import java.util.List;
/**
 * Swing panel for editing a single live template: abbreviation, description, template text
 * (shown in an embedded IDE editor), expansion shortcut, per-template options and the set of
 * applicable context types.
 * <p>
 * Edits are pushed into the backing {@link TemplateImpl} eagerly via document/action listeners;
 * {@link #resetUi()} re-reads state from the template into the controls. The {@code myOptions}
 * and {@code myContext} maps are shared with the caller and mutated in place.
 */
public class LiveTemplateSettingsEditor extends JPanel {
private final TemplateImpl myTemplate;
private final JTextField myKeyField;
private final JTextField myDescription;
private final Editor myTemplateEditor;
private JComboBox myExpandByCombo;
private final String myDefaultShortcutItem;
private JCheckBox myCbReformat;
private JButton myEditVariablesButton;
private static final String SPACE = CodeInsightBundle.message("template.shortcut.space");
private static final String TAB = CodeInsightBundle.message("template.shortcut.tab");
private static final String ENTER = CodeInsightBundle.message("template.shortcut.enter");
// Live option/context state shared with the caller; checkbox handlers write into these maps directly.
private final Map<TemplateOptionalProcessor, Boolean> myOptions;
private final Map<TemplateContextType, Boolean> myContext;
// Lightweight popup with the context checkbox tree; null/cancelled when hidden.
private JBPopup myContextPopup;
// Last user-chosen size of the context popup, restored on reopen.
private Dimension myLastSize;
/**
 * @param template        template being edited; mutated directly as the UI changes
 * @param defaultShortcut human-readable name of the default expansion shortcut
 * @param options         mutable option-to-enabled map, shared with (and mutated for) the caller
 * @param context         mutable context-type-to-enabled map, shared with (and mutated for) the caller
 * @param nodeChanged     callback fired when the key or description changes (e.g. to refresh a tree node)
 * @param allowNoContext  whether an empty context set is considered valid (affects label color/text)
 */
public LiveTemplateSettingsEditor(TemplateImpl template,
final String defaultShortcut,
Map<TemplateOptionalProcessor, Boolean> options,
Map<TemplateContextType, Boolean> context, final Runnable nodeChanged, boolean allowNoContext) {
super(new BorderLayout());
myOptions = options;
myContext = context;
myTemplate = template;
myDefaultShortcutItem = CodeInsightBundle.message("dialog.edit.template.shortcut.default", defaultShortcut);
myKeyField=new JTextField();
myDescription=new JTextField();
myTemplateEditor = TemplateEditorUtil.createEditor(false, myTemplate.getString(), context);
// NOTE(review): the id is cleared as soon as the template is opened for editing —
// presumably so an edited copy is no longer tied to its original id; confirm.
myTemplate.setId(null);
createComponents(allowNoContext);
// Keep the template's key/description in sync with the text fields on every keystroke.
myKeyField.getDocument().addDocumentListener(new com.intellij.ui.DocumentAdapter() {
@Override
protected void textChanged(javax.swing.event.DocumentEvent e) {
myTemplate.setKey(myKeyField.getText().trim());
nodeChanged.run();
}
});
myDescription.getDocument().addDocumentListener(new com.intellij.ui.DocumentAdapter() {
@Override
protected void textChanged(javax.swing.event.DocumentEvent e) {
myTemplate.setDescription(myDescription.getText().trim());
nodeChanged.run();
}
});
// Close the context popup whenever this panel is hidden.
new UiNotifyConnector(this, new Activatable.Adapter() {
@Override
public void hideNotify() {
disposeContextPopup();
}
});
}
/** @return the template backing this editor (mutated in place by the UI). */
public TemplateImpl getTemplate() {
return myTemplate;
}
/**
 * Releases the embedded editor and re-enables daemon highlighting for its file.
 * NOTE(review): highlighting is presumably disabled elsewhere while the template
 * text is being edited — confirm against TemplateEditorUtil.
 */
public void dispose() {
final Project project = myTemplateEditor.getProject();
if (project != null) {
final PsiFile psiFile = PsiDocumentManager.getInstance(project).getPsiFile(myTemplateEditor.getDocument());
if (psiFile != null) {
DaemonCodeAnalyzer.getInstance(project).setHighlightingEnabled(psiFile, true);
}
}
EditorFactory.getInstance().releaseEditor(myTemplateEditor);
}
/**
 * Builds the main layout: template-text editor, "Edit variables" button,
 * options panel and the short context summary row.
 */
private void createComponents(boolean allowNoContexts) {
JPanel panel = new JPanel(new GridBagLayout());
GridBag gb = new GridBag().setDefaultInsets(4, 4, 4, 4).setDefaultWeightY(1).setDefaultFill(GridBagConstraints.BOTH);
JPanel editorPanel = new JPanel(new BorderLayout(4, 4));
editorPanel.setPreferredSize(new Dimension(250, 100));
editorPanel.setMinimumSize(editorPanel.getPreferredSize());
editorPanel.add(myTemplateEditor.getComponent(), BorderLayout.CENTER);
JLabel templateTextLabel = new JLabel(CodeInsightBundle.message("dialog.edit.template.template.text.title"));
templateTextLabel.setLabelFor(myTemplateEditor.getContentComponent());
editorPanel.add(templateTextLabel, BorderLayout.NORTH);
panel.add(editorPanel, gb.nextLine().next().weighty(1).weightx(1).coverColumn(2));
myEditVariablesButton = new JButton(CodeInsightBundle.message("dialog.edit.template.button.edit.variables"));
myEditVariablesButton.setDefaultCapable(false);
myEditVariablesButton.setMaximumSize(myEditVariablesButton.getPreferredSize());
panel.add(myEditVariablesButton, gb.next().weighty(0));
panel.add(createTemplateOptionsPanel(), gb.nextLine().next().next().coverColumn(2).weighty(1));
panel.add(createShortContextPanel(allowNoContexts), gb.nextLine().next().weighty(0).fillCellNone().anchor(GridBagConstraints.WEST));
// Every edit of the template text re-parses variables and re-validates the button state.
myTemplateEditor.getDocument().addDocumentListener(
new DocumentAdapter() {
@Override
public void documentChanged(DocumentEvent e) {
validateEditVariablesButton();
myTemplate.setString(myTemplateEditor.getDocument().getText());
applyVariables(updateVariablesByTemplateText());
}
}
);
myEditVariablesButton.addActionListener(
new ActionListener(){
@Override
public void actionPerformed(ActionEvent e) {
editVariables();
}
}
);
add(createNorthPanel(), BorderLayout.NORTH);
add(panel, BorderLayout.CENTER);
}
/** Replaces the template's variable list with {@code variables} and re-parses its segments. */
private void applyVariables(final List<Variable> variables) {
myTemplate.removeAllParsed();
for (Variable variable : variables) {
myTemplate.addVariable(variable.getName(), variable.getExpressionString(), variable.getDefaultValueString(),
variable.isAlwaysStopAt());
}
myTemplate.parseSegments();
}
/** Builds the top row: abbreviation and description text fields with labels. */
@Nullable
private JComponent createNorthPanel() {
JPanel panel = new JPanel(new GridBagLayout());
GridBag gb = new GridBag().setDefaultInsets(4, 4, 4, 4).setDefaultWeightY(1).setDefaultFill(GridBagConstraints.BOTH);
JLabel keyPrompt = new JLabel(CodeInsightBundle.message("dialog.edit.template.label.abbreviation"));
keyPrompt.setLabelFor(myKeyField);
panel.add(keyPrompt, gb.nextLine().next());
panel.add(myKeyField, gb.next().weightx(1));
JLabel descriptionPrompt = new JLabel(CodeInsightBundle.message("dialog.edit.template.label.description"));
descriptionPrompt.setLabelFor(myDescription);
panel.add(descriptionPrompt, gb.next());
panel.add(myDescription, gb.next().weightx(3));
return panel;
}
/**
 * Builds the "Options" group: "Expand with" combo, "Reformat" checkbox, and one
 * checkbox per visible {@link TemplateOptionalProcessor}. Selections write straight
 * through to {@code myTemplate}/{@code myOptions}.
 */
private JPanel createTemplateOptionsPanel() {
JPanel panel = new JPanel();
panel.setBorder(IdeBorderFactory.createTitledBorder(CodeInsightBundle.message("dialog.edit.template.options.title"),
true));
panel.setLayout(new GridBagLayout());
GridBagConstraints gbConstraints = new GridBagConstraints();
gbConstraints.fill = GridBagConstraints.BOTH;
gbConstraints.weighty = 0;
gbConstraints.weightx = 0;
gbConstraints.gridy = 0;
JLabel expandWithLabel = new JLabel(CodeInsightBundle.message("dialog.edit.template.label.expand.with"));
panel.add(expandWithLabel, gbConstraints);
gbConstraints.gridx = 1;
gbConstraints.insets = new Insets(0, 4, 0, 0);
myExpandByCombo = new JComboBox(new Object[]{myDefaultShortcutItem, SPACE, TAB, ENTER});
myExpandByCombo.addItemListener(new ItemListener() {
@Override
public void itemStateChanged(ItemEvent e) {
Object selectedItem = myExpandByCombo.getSelectedItem();
if(myDefaultShortcutItem.equals(selectedItem)) {
myTemplate.setShortcutChar(TemplateSettings.DEFAULT_CHAR);
}
else if(TAB.equals(selectedItem)) {
myTemplate.setShortcutChar(TemplateSettings.TAB_CHAR);
}
else if(ENTER.equals(selectedItem)) {
myTemplate.setShortcutChar(TemplateSettings.ENTER_CHAR);
}
else {
// Anything else (the SPACE item) maps to the space shortcut.
myTemplate.setShortcutChar(TemplateSettings.SPACE_CHAR);
}
}
});
expandWithLabel.setLabelFor(myExpandByCombo);
panel.add(myExpandByCombo, gbConstraints);
gbConstraints.weightx = 1;
gbConstraints.gridx = 2;
// Filler component absorbing the remaining horizontal space.
panel.add(new JPanel(), gbConstraints);
gbConstraints.gridx = 0;
gbConstraints.gridy++;
gbConstraints.gridwidth = 3;
myCbReformat = new JCheckBox(CodeInsightBundle.message("dialog.edit.template.checkbox.reformat.according.to.style"));
panel.add(myCbReformat, gbConstraints);
for (final TemplateOptionalProcessor processor: myOptions.keySet()) {
if (!processor.isVisible(myTemplate)) continue;
gbConstraints.gridy++;
final JCheckBox cb = new JCheckBox(processor.getOptionName());
panel.add(cb, gbConstraints);
cb.setSelected(myOptions.get(processor).booleanValue());
cb.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
myOptions.put(processor, cb.isSelected());
}
});
}
gbConstraints.weighty = 1;
gbConstraints.gridy++;
// Filler component absorbing the remaining vertical space.
panel.add(new JPanel(), gbConstraints);
return panel;
}
/** @return the context types currently enabled for this template (order follows {@code myContext}). */
private List<TemplateContextType> getApplicableContexts() {
ArrayList<TemplateContextType> result = new ArrayList<TemplateContextType>();
for (TemplateContextType type : myContext.keySet()) {
if (myContext.get(type).booleanValue()) {
result.add(type);
}
}
return result;
}
/**
 * Builds the one-line context summary ("Applicable in ...") plus a link-styled
 * "Change"/"Define" label that opens the context checkbox-tree popup.
 */
private JPanel createShortContextPanel(final boolean allowNoContexts) {
JPanel panel = new JPanel(new BorderLayout());
final JLabel ctxLabel = new JLabel();
final JLabel change = new JLabel();
change.setForeground(PlatformColors.BLUE);
change.setCursor(Cursor.getPredefinedCursor(Cursor.HAND_CURSOR));
panel.add(ctxLabel, BorderLayout.CENTER);
panel.add(change, BorderLayout.EAST);
final Runnable updateLabel = new Runnable() {
@Override
public void run() {
// Groups contexts by their base-context prefix: "Base: a, b; OtherBase: c".
StringBuilder sb = new StringBuilder();
String oldPrefix = "";
for (TemplateContextType type : getApplicableContexts()) {
final TemplateContextType base = type.getBaseContextType();
String ownName = UIUtil.removeMnemonic(type.getPresentableName());
String prefix = "";
if (base != null && !(base instanceof EverywhereContextType)) {
prefix = UIUtil.removeMnemonic(base.getPresentableName()) + ": ";
ownName = StringUtil.decapitalize(ownName);
}
if (type instanceof EverywhereContextType) {
ownName = "Other";
}
if (sb.length() > 0) {
sb.append(oldPrefix.equals(prefix) ? ", " : "; ");
}
if (!oldPrefix.equals(prefix)) {
sb.append(prefix);
oldPrefix = prefix;
}
sb.append(ownName);
}
final boolean noContexts = sb.length() == 0;
ctxLabel.setText((noContexts ? "No applicable contexts" + (allowNoContexts ? "" : " yet") : "Applicable in " + sb.toString()) + ". ");
ctxLabel.setForeground(noContexts ? allowNoContexts ? JBColor.GRAY : JBColor.RED : UIUtil.getLabelForeground());
change.setText(noContexts ? "Define" : "Change");
}
};
new ClickListener() {
@Override
public boolean onClick(MouseEvent e, int clickCount) {
// A second click while the popup is showing just closes it.
if (disposeContextPopup()) return false;
final JPanel content = createPopupContextPanel(updateLabel);
Dimension prefSize = content.getPreferredSize();
// Restore the user's last manual resize if it was larger than the preferred size.
if (myLastSize != null && (myLastSize.width > prefSize.width || myLastSize.height > prefSize.height)) {
content.setPreferredSize(new Dimension(Math.max(prefSize.width, myLastSize.width), Math.max(prefSize.height, myLastSize.height)));
}
myContextPopup = JBPopupFactory.getInstance().createComponentPopupBuilder(content, null).setResizable(true).createPopup();
myContextPopup.show(new RelativePoint(change, new Point(change.getWidth() , -content.getPreferredSize().height - 10)));
myContextPopup.addListener(new JBPopupAdapter() {
@Override
public void onClosed(LightweightWindowEvent event) {
myLastSize = content.getSize();
}
});
return true;
}
}.installOn(change);
updateLabel.run();
return panel;
}
/** Cancels the context popup if visible. @return true if a popup was actually closed. */
private boolean disposeContextPopup() {
if (myContextPopup != null && myContextPopup.isVisible()) {
myContextPopup.cancel();
myContextPopup = null;
return true;
}
return false;
}
/**
 * Builds the popup content: a checkbox tree of all context types, grouped under their
 * base context types. Checking a node updates {@code myContext}, the expand combo,
 * the editor highlighter, and runs {@code onChange} (the summary-label refresher).
 */
private JPanel createPopupContextPanel(final Runnable onChange) {
JPanel panel = new JPanel(new BorderLayout());
// base context -> children; LinkedHashMap keeps the registration order stable.
MultiMap<TemplateContextType, TemplateContextType> hierarchy = new MultiMap<TemplateContextType, TemplateContextType>() {
@Override
protected Map<TemplateContextType, Collection<TemplateContextType>> createMap() {
return new LinkedHashMap<TemplateContextType, Collection<TemplateContextType>>();
}
};
for (TemplateContextType type : myContext.keySet()) {
hierarchy.putValue(type.getBaseContextType(), type);
}
// User objects are Pair<TemplateContextType, String>; a null first element marks a pure group node.
final CheckedTreeNode root = new CheckedTreeNode(Pair.create(null, "Hi"));
final CheckboxTree checkboxTree = new CheckboxTree(new CheckboxTree.CheckboxTreeCellRenderer() {
@Override
public void customizeRenderer(JTree tree, Object value, boolean selected, boolean expanded, boolean leaf, int row, boolean hasFocus) {
final Object o = ((DefaultMutableTreeNode)value).getUserObject();
if (o instanceof Pair) {
getTextRenderer().append((String)((Pair)o).second);
}
}
}, root) {
@Override
protected void onNodeStateChanged(CheckedTreeNode node) {
final TemplateContextType type = (TemplateContextType)((Pair)node.getUserObject()).first;
if (type != null) {
myContext.put(type, node.isChecked());
}
myExpandByCombo.setEnabled(isExpandableFromEditor());
updateHighlighter();
onChange.run();
}
};
// Top-level contexts are those keyed under a null base type.
for (TemplateContextType type : hierarchy.get(null)) {
addContextNode(hierarchy, root, type);
}
((DefaultTreeModel)checkboxTree.getModel()).nodeStructureChanged(root);
// Expand the parents of every checked node so enabled contexts are visible.
TreeUtil.traverse(root, new TreeUtil.Traverse() {
@Override
public boolean accept(Object _node) {
final CheckedTreeNode node = (CheckedTreeNode)_node;
if (node.isChecked()) {
checkboxTree.expandPath(new TreePath(node.getPath()).getParentPath());
}
return true;
}
});
panel.add(ScrollPaneFactory.createScrollPane(checkboxTree));
final Dimension size = checkboxTree.getPreferredSize();
panel.setPreferredSize(new Dimension(size.width + 30, Math.min(size.height + 10, 500)));
return panel;
}
/**
 * Recursively adds {@code type} (and its children) under {@code parent}.
 * Leaves carry their context type; a parent with children gets an extra
 * "Other" leaf representing the parent context itself.
 */
private void addContextNode(MultiMap<TemplateContextType, TemplateContextType> hierarchy,
CheckedTreeNode parent,
TemplateContextType type) {
final Collection<TemplateContextType> children = hierarchy.get(type);
final String name = UIUtil.removeMnemonic(type.getPresentableName());
final CheckedTreeNode node = new CheckedTreeNode(Pair.create(children.isEmpty() ? type : null, name));
parent.add(node);
if (children.isEmpty()) {
node.setChecked(myContext.get(type));
}
else {
for (TemplateContextType child : children) {
addContextNode(hierarchy, node, child);
}
final CheckedTreeNode other = new CheckedTreeNode(Pair.create(type, "Other"));
other.setChecked(myContext.get(type));
node.add(other);
}
}
/**
 * @return true if the shortcut combo should be enabled: either some applicable context
 * is expandable from the editor, or there are no applicable contexts at all.
 */
private boolean isExpandableFromEditor() {
boolean hasNonExpandable = false;
for (TemplateContextType type : getApplicableContexts()) {
if (type.isExpandableFromEditor()) {
return true;
}
hasNonExpandable = true;
}
return !hasNonExpandable;
}
/**
 * Re-highlights the template editor for the first applicable context,
 * or just repaints it plainly when no context applies.
 */
private void updateHighlighter() {
List<TemplateContextType> applicableContexts = getApplicableContexts();
if (!applicableContexts.isEmpty()) {
TemplateContext contextByType = new TemplateContext();
contextByType.setEnabled(applicableContexts.get(0), true);
TemplateEditorUtil.setHighlighter(myTemplateEditor, contextByType);
return;
}
((EditorEx) myTemplateEditor).repaint(0, myTemplateEditor.getDocument().getTextLength());
}
/** Enables the "Edit variables" button only when the template text contains variables. */
private void validateEditVariablesButton() {
myEditVariablesButton.setEnabled(!parseVariables(myTemplateEditor.getDocument().getCharsSequence()).isEmpty());
}
/**
 * Pushes the template's current state into all controls: key, description,
 * shortcut selection, template text (via a write action), reformat flag,
 * combo enablement and editor highlighting.
 */
void resetUi() {
myKeyField.setText(myTemplate.getKey());
myDescription.setText(myTemplate.getDescription());
if(myTemplate.getShortcutChar() == TemplateSettings.DEFAULT_CHAR) {
myExpandByCombo.setSelectedItem(myDefaultShortcutItem);
}
else if(myTemplate.getShortcutChar() == TemplateSettings.TAB_CHAR) {
myExpandByCombo.setSelectedItem(TAB);
}
else if(myTemplate.getShortcutChar() == TemplateSettings.ENTER_CHAR) {
myExpandByCombo.setSelectedItem(ENTER);
}
else {
myExpandByCombo.setSelectedItem(SPACE);
}
// Document mutation must happen inside a command + write action.
CommandProcessor.getInstance().executeCommand(
null, new Runnable() {
@Override
public void run() {
ApplicationManager.getApplication().runWriteAction(new Runnable() {
@Override
public void run() {
final Document document = myTemplateEditor.getDocument();
document.replaceString(0, document.getTextLength(), myTemplate.getString());
}
});
}
},
"",
null
);
myCbReformat.setSelected(myTemplate.isToReformat());
// NOTE(review): a listener is added on every resetUi() call — repeated resets stack
// duplicate (idempotent) listeners; confirm resetUi is only invoked once per editor.
myCbReformat.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
myTemplate.setToReformat(myCbReformat.isSelected());
}
});
myExpandByCombo.setEnabled(isExpandableFromEditor());
updateHighlighter();
validateEditVariablesButton();
}
/** Opens the variable-editing dialog and, if confirmed, applies the result to the template. */
private void editVariables() {
ArrayList<Variable> newVariables = updateVariablesByTemplateText();
EditVariableDialog editVariableDialog = new EditVariableDialog(myTemplateEditor, myEditVariablesButton, newVariables, getApplicableContexts());
editVariableDialog.show();
if (editVariableDialog.isOK()) {
applyVariables(newVariables);
}
}
/**
 * Re-parses variables from the current template text, reusing the existing
 * {@link Variable} definitions (expression, default, stop-at flag) for names
 * that are still present so user-entered settings survive text edits.
 */
private ArrayList<Variable> updateVariablesByTemplateText() {
List<Variable> oldVariables = getCurrentVariables();
Set<String> oldVariableNames = ContainerUtil.map2Set(oldVariables, new Function<Variable, String>() {
@Override
public String fun(Variable variable) {
return variable.getName();
}
});
ArrayList<Variable> parsedVariables = parseVariables(myTemplateEditor.getDocument().getCharsSequence());
Map<String,String> newVariableNames = new HashMap<String, String>();
for (Object parsedVariable : parsedVariables) {
Variable newVariable = (Variable)parsedVariable;
String name = newVariable.getName();
newVariableNames.put(name, name);
}
// Walk old and new lists in parallel; NOTE(review): matching is positional, so the
// i-th surviving parsed variable is paired with the next old variable whose name
// still occurs anywhere in the new text — confirm this is the intended pairing.
int oldVariableNumber = 0;
for(int i = 0; i < parsedVariables.size(); i++){
Variable variable = parsedVariables.get(i);
if(oldVariableNames.contains(variable.getName())) {
Variable oldVariable = null;
for(;oldVariableNumber<oldVariables.size(); oldVariableNumber++) {
oldVariable = oldVariables.get(oldVariableNumber);
if(newVariableNames.get(oldVariable.getName()) != null) {
break;
}
oldVariable = null;
}
oldVariableNumber++;
if(oldVariable != null) {
parsedVariables.set(i, oldVariable);
}
}
}
return parsedVariables;
}
/** @return a snapshot of the template's current variables as mutable {@link Variable} objects. */
private List<Variable> getCurrentVariables() {
List<Variable> myVariables = new ArrayList<Variable>();
for(int i = 0; i < myTemplate.getVariableCount(); i++) {
myVariables.add(new Variable(myTemplate.getVariableNameAt(i),
myTemplate.getExpressionStringAt(i),
myTemplate.getDefaultValueStringAt(i),
myTemplate.isAlwaysStopAt(i)));
}
return myVariables;
}
public JTextField getKeyField() {
return myKeyField;
}
/** Selects and focuses the abbreviation field. */
public void focusKey() {
myKeyField.selectAll();
//todo[peter,kirillk] without these invokeLaters this requestFocus conflicts with com.intellij.openapi.ui.impl.DialogWrapperPeerImpl.MyDialog.MyWindowListener.windowOpened()
IdeFocusManager.findInstanceByComponent(myKeyField).requestFocus(myKeyField, true);
final ModalityState modalityState = ModalityState.stateForComponent(myKeyField);
ApplicationManager.getApplication().invokeLater(new Runnable() {
@Override
public void run() {
ApplicationManager.getApplication().invokeLater(new Runnable() {
@Override
public void run() {
ApplicationManager.getApplication().invokeLater(new Runnable() {
@Override
public void run() {
IdeFocusManager.findInstanceByComponent(myKeyField).requestFocus(myKeyField, true);
}
}, modalityState);
}
}, modalityState);
}
}, modalityState);
}
/** Parses template variables out of {@code text}, excluding the internal predefined ones. */
private static ArrayList<Variable> parseVariables(CharSequence text) {
ArrayList<Variable> variables = new ArrayList<Variable>();
TemplateImplUtil.parseVariables(text, variables, TemplateImpl.INTERNAL_VARS_SET);
return variables;
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.bucket;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.index.mapper.ip.IpFieldMapper;
import org.elasticsearch.script.Script;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
import org.elasticsearch.search.aggregations.bucket.range.Range;
import org.elasticsearch.search.aggregations.bucket.range.Range.Bucket;
import org.elasticsearch.search.aggregations.metrics.max.Max;
import org.elasticsearch.search.aggregations.metrics.sum.Sum;
import org.elasticsearch.test.ElasticsearchIntegrationTest;
import org.hamcrest.Matchers;
import org.junit.Test;
import java.util.ArrayList;
import java.util.List;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram;
import static org.elasticsearch.search.aggregations.AggregationBuilders.ipRange;
import static org.elasticsearch.search.aggregations.AggregationBuilders.max;
import static org.elasticsearch.search.aggregations.AggregationBuilders.sum;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.core.IsNull.notNullValue;
import static org.hamcrest.core.IsNull.nullValue;
/**
*
*/
@ElasticsearchIntegrationTest.SuiteScopeTest
public class IPv4RangeTests extends ElasticsearchIntegrationTest {
/**
 * Seeds the suite-scope fixtures once for all tests in this class:
 * <ul>
 *   <li>"idx": 255 docs, doc i carrying ip 10.0.0.i, a two-element "ips" array
 *       [10.0.0.i, 10.0.0.(i+1)], and "value" of 1/2/3 by i-range;</li>
 *   <li>"idx_unmapped": an empty index with no ip mapping;</li>
 *   <li>"empty_bucket_idx": two docs sharing the ip 10.0.0.5.</li>
 * </ul>
 */
@Override
public void setupSuiteScopeCluster() throws Exception {
    {
        assertAcked(prepareCreate("idx")
                .addMapping("type", "ip", "type=ip", "ips", "type=ip"));
        // TODO randomize the size? TODO randomize the values in the docs?
        IndexRequestBuilder[] docs = new IndexRequestBuilder[255];
        for (int doc = 0; doc < docs.length; doc++) {
            int value = doc < 100 ? 1 : doc < 200 ? 2 : 3; // 100 1's, 100 2's, and 55 3's
            docs[doc] = client().prepareIndex("idx", "type").setSource(jsonBuilder()
                    .startObject()
                    .field("ip", "10.0.0." + doc)
                    .startArray("ips").value("10.0.0." + doc).value("10.0.0." + (doc + 1)).endArray()
                    .field("value", value)
                    .endObject());
        }
        indexRandom(true, docs);
        createIndex("idx_unmapped");
    }
    {
        assertAcked(prepareCreate("empty_bucket_idx").addMapping("type", "value", "type=integer", "ip", "type=ip"));
        List<IndexRequestBuilder> docs = new ArrayList<>();
        for (int doc = 0; doc < 2; doc++) {
            docs.add(client().prepareIndex("empty_bucket_idx", "type", "" + doc).setSource(jsonBuilder()
                    .startObject()
                    .field("value", doc * 2)
                    .field("ip", "10.0.0.5")
                    .endObject()));
        }
        indexRandom(true, docs.toArray(new IndexRequestBuilder[docs.size()]));
    }
    ensureSearchable();
}
/**
 * ip_range over the single-valued "ip" field with an unbounded-below bucket,
 * a bounded middle bucket and an unbounded-above bucket. The fixture indexes
 * 10.0.0.0 .. 10.0.0.254, so doc counts are 100 / 100 / 55 ("to" is exclusive).
 * Unbounded endpoints surface as +/- infinity with a null *AsString form.
 */
@Test
public void singleValueField() throws Exception {
    SearchResponse response = client().prepareSearch("idx")
            .addAggregation(ipRange("range")
                    .field("ip")
                    .addUnboundedTo("10.0.0.100")
                    .addRange("10.0.0.100", "10.0.0.200")
                    .addUnboundedFrom("10.0.0.200"))
            .execute().actionGet();
    assertSearchResponse(response);
    Range range = response.getAggregations().get("range");
    assertThat(range, notNullValue());
    assertThat(range.getName(), equalTo("range"));
    List<? extends Bucket> buckets = range.getBuckets();
    assertThat(buckets.size(), equalTo(3));
    // Redundant duplicated casts ("(String) (String)", "((Number) ((Number) ...))")
    // removed throughout; single casts match the sibling tests below.
    Range.Bucket bucket = buckets.get(0);
    assertThat(bucket, notNullValue());
    assertThat((String) bucket.getKey(), equalTo("*-10.0.0.100"));
    assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
    assertThat(bucket.getFromAsString(), nullValue());
    assertThat(bucket.getToAsString(), equalTo("10.0.0.100"));
    assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
    assertThat(bucket.getDocCount(), equalTo(100l));
    bucket = buckets.get(1);
    assertThat(bucket, notNullValue());
    assertThat((String) bucket.getKey(), equalTo("10.0.0.100-10.0.0.200"));
    assertThat(bucket.getFromAsString(), equalTo("10.0.0.100"));
    assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
    assertThat(bucket.getToAsString(), equalTo("10.0.0.200"));
    assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
    assertThat(bucket.getDocCount(), equalTo(100l));
    bucket = buckets.get(2);
    assertThat(bucket, notNullValue());
    assertThat((String) bucket.getKey(), equalTo("10.0.0.200-*"));
    assertThat(bucket.getFromAsString(), equalTo("10.0.0.200"));
    assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
    assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
    assertThat(bucket.getToAsString(), nullValue());
    assertThat(bucket.getDocCount(), equalTo(55l));
}
/**
 * ip_range using CIDR mask ranges: 10.0.0.0/25 and 10.0.0.128/25. The bucket key
 * is the mask itself; "to" is exclusive, so the 128 low addresses land in the
 * first bucket and the remaining 127 fixture docs in the second.
 */
@Test
public void singleValueField_WithMaskRange() throws Exception {
    SearchResponse searchResponse = client().prepareSearch("idx")
            .addAggregation(ipRange("range")
                    .field("ip")
                    .addMaskRange("10.0.0.0/25")
                    .addMaskRange("10.0.0.128/25"))
            .execute().actionGet();
    assertSearchResponse(searchResponse);
    Range ipRangeAgg = searchResponse.getAggregations().get("range");
    assertThat(ipRangeAgg, notNullValue());
    assertThat(ipRangeAgg.getName(), equalTo("range"));
    List<? extends Bucket> rangeBuckets = ipRangeAgg.getBuckets();
    assertThat(rangeBuckets.size(), equalTo(2));
    Range.Bucket rangeBucket = rangeBuckets.get(0);
    assertThat(rangeBucket, notNullValue());
    assertThat((String) rangeBucket.getKey(), equalTo("10.0.0.0/25"));
    assertThat(((Number) rangeBucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.0")));
    assertThat(rangeBucket.getFromAsString(), equalTo("10.0.0.0"));
    assertThat(((Number) rangeBucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.128")));
    assertThat(rangeBucket.getToAsString(), equalTo("10.0.0.128"));
    assertThat(rangeBucket.getDocCount(), equalTo(128l));
    rangeBucket = rangeBuckets.get(1);
    assertThat(rangeBucket, notNullValue());
    assertThat((String) rangeBucket.getKey(), equalTo("10.0.0.128/25"));
    assertThat((long) ((Number) rangeBucket.getFrom()).doubleValue(), equalTo(IpFieldMapper.ipToLong("10.0.0.128")));
    assertThat(rangeBucket.getFromAsString(), equalTo("10.0.0.128"));
    assertThat((long) ((Number) rangeBucket.getTo()).doubleValue(), equalTo(IpFieldMapper.ipToLong("10.0.1.0"))); // range is exclusive on the to side
    assertThat(rangeBucket.getToAsString(), equalTo("10.0.1.0"));
    assertThat(rangeBucket.getDocCount(), equalTo(127l)); // include 10.0.0.128
}
/**
 * Same three ranges as {@link #singleValueField()} but with explicit custom keys
 * ("r1"/"r2"/"r3"); verifies the custom key replaces the auto-generated one while
 * the bounds, string forms and doc counts are unchanged.
 */
@Test
public void singleValueField_WithCustomKey() throws Exception {
    SearchResponse searchResponse = client().prepareSearch("idx")
            .addAggregation(ipRange("range")
                    .field("ip")
                    .addUnboundedTo("r1", "10.0.0.100")
                    .addRange("r2", "10.0.0.100", "10.0.0.200")
                    .addUnboundedFrom("r3", "10.0.0.200"))
            .execute().actionGet();
    assertSearchResponse(searchResponse);
    Range ipRangeAgg = searchResponse.getAggregations().get("range");
    assertThat(ipRangeAgg, notNullValue());
    assertThat(ipRangeAgg.getName(), equalTo("range"));
    List<? extends Bucket> rangeBuckets = ipRangeAgg.getBuckets();
    assertThat(rangeBuckets.size(), equalTo(3));
    Range.Bucket rangeBucket = rangeBuckets.get(0);
    assertThat(rangeBucket, notNullValue());
    assertThat((String) rangeBucket.getKey(), equalTo("r1"));
    assertThat(((Number) rangeBucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
    assertThat(rangeBucket.getFromAsString(), nullValue());
    assertThat(rangeBucket.getToAsString(), equalTo("10.0.0.100"));
    assertThat(((Number) rangeBucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
    assertThat(rangeBucket.getDocCount(), equalTo(100l));
    rangeBucket = rangeBuckets.get(1);
    assertThat(rangeBucket, notNullValue());
    assertThat((String) rangeBucket.getKey(), equalTo("r2"));
    assertThat(rangeBucket.getFromAsString(), equalTo("10.0.0.100"));
    assertThat(((Number) rangeBucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
    assertThat(rangeBucket.getToAsString(), equalTo("10.0.0.200"));
    assertThat(((Number) rangeBucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
    assertThat(rangeBucket.getDocCount(), equalTo(100l));
    rangeBucket = rangeBuckets.get(2);
    assertThat(rangeBucket, notNullValue());
    assertThat((String) rangeBucket.getKey(), equalTo("r3"));
    assertThat(rangeBucket.getFromAsString(), equalTo("10.0.0.200"));
    assertThat(((Number) rangeBucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
    assertThat(((Number) rangeBucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
    assertThat(rangeBucket.getToAsString(), nullValue());
    assertThat(rangeBucket.getDocCount(), equalTo(55l));
}
/**
 * Adds a sum("value") sub-aggregation under the three ip_range buckets and checks
 * both the per-bucket Sum result and the flattened property arrays ("_key",
 * "_count", "sum.value"). Fixture values: 100 ones, 100 twos, 55 threes, so the
 * expected sums are 100, 200 and 165 (= 55 * 3).
 */
@Test
public void singleValuedField_WithSubAggregation() throws Exception {
    SearchResponse searchResponse = client().prepareSearch("idx")
            .addAggregation(ipRange("range")
                    .field("ip")
                    .addUnboundedTo("10.0.0.100")
                    .addRange("10.0.0.100", "10.0.0.200")
                    .addUnboundedFrom("10.0.0.200")
                    .subAggregation(sum("sum").field("value")))
            .execute().actionGet();
    assertSearchResponse(searchResponse);
    Range ipRangeAgg = searchResponse.getAggregations().get("range");
    assertThat(ipRangeAgg, notNullValue());
    assertThat(ipRangeAgg.getName(), equalTo("range"));
    List<? extends Bucket> rangeBuckets = ipRangeAgg.getBuckets();
    assertThat(rangeBuckets.size(), equalTo(3));
    Object[] keyProps = (Object[]) ipRangeAgg.getProperty("_key");
    Object[] docCountProps = (Object[]) ipRangeAgg.getProperty("_count");
    Object[] sumProps = (Object[]) ipRangeAgg.getProperty("sum.value");
    Range.Bucket rangeBucket = rangeBuckets.get(0);
    assertThat(rangeBucket, notNullValue());
    assertThat((String) rangeBucket.getKey(), equalTo("*-10.0.0.100"));
    assertThat(((Number) rangeBucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
    assertThat(rangeBucket.getFromAsString(), nullValue());
    assertThat(rangeBucket.getToAsString(), equalTo("10.0.0.100"));
    assertThat(((Number) rangeBucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
    assertThat(rangeBucket.getDocCount(), equalTo(100l));
    Sum sumAgg = rangeBucket.getAggregations().get("sum");
    assertThat(sumAgg, notNullValue());
    assertThat(sumAgg.getValue(), equalTo((double) 100));
    assertThat((String) keyProps[0], equalTo("*-10.0.0.100"));
    assertThat((long) docCountProps[0], equalTo(100l));
    assertThat((double) sumProps[0], equalTo((double) 100));
    rangeBucket = rangeBuckets.get(1);
    assertThat(rangeBucket, notNullValue());
    assertThat((String) rangeBucket.getKey(), equalTo("10.0.0.100-10.0.0.200"));
    assertThat(rangeBucket.getFromAsString(), equalTo("10.0.0.100"));
    assertThat(((Number) rangeBucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
    assertThat(rangeBucket.getToAsString(), equalTo("10.0.0.200"));
    assertThat(((Number) rangeBucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
    assertThat(rangeBucket.getDocCount(), equalTo(100l));
    sumAgg = rangeBucket.getAggregations().get("sum");
    assertThat(sumAgg, notNullValue());
    assertThat(sumAgg.getValue(), equalTo((double) 200));
    assertThat((String) keyProps[1], equalTo("10.0.0.100-10.0.0.200"));
    assertThat((long) docCountProps[1], equalTo(100l));
    assertThat((double) sumProps[1], equalTo((double) 200));
    rangeBucket = rangeBuckets.get(2);
    assertThat(rangeBucket, notNullValue());
    assertThat((String) rangeBucket.getKey(), equalTo("10.0.0.200-*"));
    assertThat(rangeBucket.getFromAsString(), equalTo("10.0.0.200"));
    assertThat(((Number) rangeBucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
    assertThat(((Number) rangeBucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
    assertThat(rangeBucket.getToAsString(), nullValue());
    assertThat(rangeBucket.getDocCount(), equalTo(55l));
    sumAgg = rangeBucket.getAggregations().get("sum");
    assertThat(sumAgg, notNullValue());
    assertThat(sumAgg.getValue(), equalTo((double) (55 * 3)));
    assertThat((String) keyProps[2], equalTo("10.0.0.200-*"));
    assertThat((long) docCountProps[2], equalTo(55l));
    assertThat((double) sumProps[2], equalTo((double) (55 * 3)));
}
@Test
public void singleValuedField_WithSubAggregation_Inherited() throws Exception {
    // Single-valued "ip" field with an inherited max sub-aggregation: the max in
    // each range bucket must be the highest ip value falling inside that bucket.
    SearchResponse response = client().prepareSearch("idx")
            .addAggregation(ipRange("range")
                    .field("ip")
                    .addUnboundedTo("10.0.0.100")
                    .addRange("10.0.0.100", "10.0.0.200")
                    .addUnboundedFrom("10.0.0.200")
                    .subAggregation(max("max")))
            .execute().actionGet();

    assertSearchResponse(response);

    Range range = response.getAggregations().get("range");
    assertThat(range, notNullValue());
    assertThat(range.getName(), equalTo("range"));
    List<? extends Bucket> buckets = range.getBuckets();
    assertThat(buckets.size(), equalTo(3)); // reuse captured list instead of calling getBuckets() again

    // bucket [*, 10.0.0.100)
    Range.Bucket bucket = buckets.get(0);
    assertThat(bucket, notNullValue());
    assertThat((String) bucket.getKey(), equalTo("*-10.0.0.100"));
    assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
    assertThat(bucket.getFromAsString(), nullValue());
    assertThat(bucket.getToAsString(), equalTo("10.0.0.100"));
    assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
    assertThat(bucket.getDocCount(), equalTo(100L)); // uppercase 'L' suffix: lowercase 'l' reads as '1'
    Max max = bucket.getAggregations().get("max");
    assertThat(max, notNullValue());
    assertThat(max.getValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.99")));

    // bucket [10.0.0.100, 10.0.0.200)
    bucket = buckets.get(1);
    assertThat(bucket, notNullValue());
    assertThat((String) bucket.getKey(), equalTo("10.0.0.100-10.0.0.200"));
    assertThat(bucket.getFromAsString(), equalTo("10.0.0.100"));
    assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
    assertThat(bucket.getToAsString(), equalTo("10.0.0.200"));
    assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
    assertThat(bucket.getDocCount(), equalTo(100L));
    max = bucket.getAggregations().get("max");
    assertThat(max, notNullValue());
    assertThat(max.getValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.199")));

    // bucket [10.0.0.200, *)
    bucket = buckets.get(2);
    assertThat(bucket, notNullValue());
    assertThat((String) bucket.getKey(), equalTo("10.0.0.200-*"));
    assertThat(bucket.getFromAsString(), equalTo("10.0.0.200"));
    assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
    assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
    assertThat(bucket.getToAsString(), nullValue());
    assertThat(bucket.getDocCount(), equalTo(55L));
    max = bucket.getAggregations().get("max");
    assertThat(max, notNullValue());
    assertThat(max.getValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.254")));
}
@Test
public void singleValuedField_WithValueScript() throws Exception {
    // A no-op "_value" value script on the single-valued "ip" field must yield
    // exactly the same bucketing as aggregating on the field directly.
    SearchResponse response = client()
            .prepareSearch("idx")
            .addAggregation(
                    ipRange("range").field("ip").script(new Script("_value")).addUnboundedTo("10.0.0.100")
                            .addRange("10.0.0.100", "10.0.0.200").addUnboundedFrom("10.0.0.200")).execute().actionGet();

    assertSearchResponse(response);

    Range range = response.getAggregations().get("range");
    assertThat(range, notNullValue());
    assertThat(range.getName(), equalTo("range"));
    List<? extends Bucket> buckets = range.getBuckets();
    assertThat(buckets.size(), equalTo(3)); // reuse captured list instead of calling getBuckets() again

    // bucket [*, 10.0.0.100)
    Range.Bucket bucket = buckets.get(0);
    assertThat(bucket, notNullValue());
    assertThat((String) bucket.getKey(), equalTo("*-10.0.0.100"));
    assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
    assertThat(bucket.getFromAsString(), nullValue());
    assertThat(bucket.getToAsString(), equalTo("10.0.0.100"));
    assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
    assertThat(bucket.getDocCount(), equalTo(100L)); // uppercase 'L' suffix: lowercase 'l' reads as '1'

    // bucket [10.0.0.100, 10.0.0.200)
    bucket = buckets.get(1);
    assertThat(bucket, notNullValue());
    assertThat((String) bucket.getKey(), equalTo("10.0.0.100-10.0.0.200"));
    assertThat(bucket.getFromAsString(), equalTo("10.0.0.100"));
    assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
    assertThat(bucket.getToAsString(), equalTo("10.0.0.200"));
    assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
    assertThat(bucket.getDocCount(), equalTo(100L));

    // bucket [10.0.0.200, *)
    bucket = buckets.get(2);
    assertThat(bucket, notNullValue());
    assertThat((String) bucket.getKey(), equalTo("10.0.0.200-*"));
    assertThat(bucket.getFromAsString(), equalTo("10.0.0.200"));
    assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
    assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
    assertThat(bucket.getToAsString(), nullValue());
    assertThat(bucket.getDocCount(), equalTo(55L));
}
/*
[0, 1]
[1, 2]
[2, 3]
...
[99, 100]
[100, 101]
[101, 102]
...
[199, 200]
[200, 201]
[201, 202]
...
[254, 255]
[255, 256]
*/
@Test
public void multiValuedField() throws Exception {
    // Multi-valued "ips" field (each doc holds [i, i+1], see comment above):
    // boundary-straddling docs are counted in both adjacent buckets, hence 100/101/56.
    SearchResponse response = client().prepareSearch("idx")
            .addAggregation(ipRange("range")
                    .field("ips")
                    .addUnboundedTo("10.0.0.100")
                    .addRange("10.0.0.100", "10.0.0.200")
                    .addUnboundedFrom("10.0.0.200"))
            .execute().actionGet();

    assertSearchResponse(response);

    Range range = response.getAggregations().get("range");
    assertThat(range, notNullValue());
    assertThat(range.getName(), equalTo("range"));
    List<? extends Bucket> buckets = range.getBuckets();
    assertThat(buckets.size(), equalTo(3)); // reuse captured list instead of calling getBuckets() again

    // bucket [*, 10.0.0.100)
    Range.Bucket bucket = buckets.get(0);
    assertThat(bucket, notNullValue());
    assertThat((String) bucket.getKey(), equalTo("*-10.0.0.100"));
    assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
    assertThat(bucket.getFromAsString(), nullValue());
    assertThat(bucket.getToAsString(), equalTo("10.0.0.100"));
    assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
    assertThat(bucket.getDocCount(), equalTo(100L)); // uppercase 'L' suffix: lowercase 'l' reads as '1'

    // bucket [10.0.0.100, 10.0.0.200) — one extra doc straddles the lower boundary
    bucket = buckets.get(1);
    assertThat(bucket, notNullValue());
    assertThat((String) bucket.getKey(), equalTo("10.0.0.100-10.0.0.200"));
    assertThat(bucket.getFromAsString(), equalTo("10.0.0.100"));
    assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
    assertThat(bucket.getToAsString(), equalTo("10.0.0.200"));
    assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
    assertThat(bucket.getDocCount(), equalTo(101L));

    // bucket [10.0.0.200, *)
    bucket = buckets.get(2);
    assertThat(bucket, notNullValue());
    assertThat((String) bucket.getKey(), equalTo("10.0.0.200-*"));
    assertThat(bucket.getFromAsString(), equalTo("10.0.0.200"));
    assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
    assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
    assertThat(bucket.getToAsString(), nullValue());
    assertThat(bucket.getDocCount(), equalTo(56L));
}
@Test
public void multiValuedField_WithValueScript() throws Exception {
    // A no-op "_value" value script on the multi-valued "ips" field must yield
    // the same 100/101/56 doc counts as aggregating on the field directly.
    SearchResponse response = client()
            .prepareSearch("idx")
            .addAggregation(
                    ipRange("range").field("ips").script(new Script("_value")).addUnboundedTo("10.0.0.100")
                            .addRange("10.0.0.100", "10.0.0.200").addUnboundedFrom("10.0.0.200")).execute().actionGet();

    assertSearchResponse(response);

    Range range = response.getAggregations().get("range");
    assertThat(range, notNullValue());
    assertThat(range.getName(), equalTo("range"));
    List<? extends Bucket> buckets = range.getBuckets();
    assertThat(buckets.size(), equalTo(3)); // reuse captured list instead of calling getBuckets() again

    // bucket [*, 10.0.0.100)
    Range.Bucket bucket = buckets.get(0);
    assertThat(bucket, notNullValue());
    assertThat((String) bucket.getKey(), equalTo("*-10.0.0.100"));
    assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
    assertThat(bucket.getFromAsString(), nullValue());
    assertThat(bucket.getToAsString(), equalTo("10.0.0.100"));
    assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
    assertThat(bucket.getDocCount(), equalTo(100L)); // uppercase 'L' suffix: lowercase 'l' reads as '1'

    // bucket [10.0.0.100, 10.0.0.200)
    bucket = buckets.get(1);
    assertThat(bucket, notNullValue());
    assertThat((String) bucket.getKey(), equalTo("10.0.0.100-10.0.0.200"));
    assertThat(bucket.getFromAsString(), equalTo("10.0.0.100"));
    assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
    assertThat(bucket.getToAsString(), equalTo("10.0.0.200"));
    assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
    assertThat(bucket.getDocCount(), equalTo(101L));

    // bucket [10.0.0.200, *)
    bucket = buckets.get(2);
    assertThat(bucket, notNullValue());
    assertThat((String) bucket.getKey(), equalTo("10.0.0.200-*"));
    assertThat(bucket.getFromAsString(), equalTo("10.0.0.200"));
    assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
    assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
    assertThat(bucket.getToAsString(), nullValue());
    assertThat(bucket.getDocCount(), equalTo(56L));
}
@Test
public void multiValuedField_WithValueScript_WithInheritedSubAggregator() throws Exception {
    // Multi-valued "ips" field through a no-op value script, with an inherited
    // max sub-aggregation per bucket. The max is the largest ip value seen by
    // any doc counted in that bucket.
    SearchResponse response = client()
            .prepareSearch("idx")
            .addAggregation(
                    ipRange("range").field("ips").script(new Script("_value")).addUnboundedTo("10.0.0.100")
                            .addRange("10.0.0.100", "10.0.0.200").addUnboundedFrom("10.0.0.200").subAggregation(max("max"))).execute()
            .actionGet();

    assertSearchResponse(response);

    Range range = response.getAggregations().get("range");
    assertThat(range, notNullValue());
    assertThat(range.getName(), equalTo("range"));
    List<? extends Bucket> buckets = range.getBuckets();
    assertThat(buckets.size(), equalTo(3)); // reuse captured list instead of calling getBuckets() again

    // bucket [*, 10.0.0.100)
    Range.Bucket bucket = buckets.get(0);
    assertThat(bucket, notNullValue());
    assertThat((String) bucket.getKey(), equalTo("*-10.0.0.100"));
    assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
    assertThat(bucket.getFromAsString(), nullValue());
    assertThat(bucket.getToAsString(), equalTo("10.0.0.100"));
    assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
    assertThat(bucket.getDocCount(), equalTo(100L)); // uppercase 'L' suffix: lowercase 'l' reads as '1'
    Max max = bucket.getAggregations().get("max");
    assertThat(max, notNullValue()); // bare static import, consistent with the rest of the method
    assertThat((long) max.getValue(), equalTo(IpFieldMapper.ipToLong("10.0.0.100")));

    // bucket [10.0.0.100, 10.0.0.200)
    bucket = buckets.get(1);
    assertThat(bucket, notNullValue());
    assertThat((String) bucket.getKey(), equalTo("10.0.0.100-10.0.0.200"));
    assertThat(bucket.getFromAsString(), equalTo("10.0.0.100"));
    assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
    assertThat(bucket.getToAsString(), equalTo("10.0.0.200"));
    assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
    assertThat(bucket.getDocCount(), equalTo(101L));
    max = bucket.getAggregations().get("max");
    assertThat(max, notNullValue());
    assertThat((long) max.getValue(), equalTo(IpFieldMapper.ipToLong("10.0.0.200")));

    // bucket [10.0.0.200, *)
    bucket = buckets.get(2);
    assertThat(bucket, notNullValue());
    assertThat((String) bucket.getKey(), equalTo("10.0.0.200-*"));
    assertThat(bucket.getFromAsString(), equalTo("10.0.0.200"));
    assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
    assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
    assertThat(bucket.getToAsString(), nullValue());
    assertThat(bucket.getDocCount(), equalTo(56L));
    max = bucket.getAggregations().get("max");
    assertThat(max, notNullValue());
    assertThat((long) max.getValue(), equalTo(IpFieldMapper.ipToLong("10.0.0.255")));
}
@Test
public void script_SingleValue() throws Exception {
    // Script-only aggregation reading the single-valued "ip" field via
    // doc['ip'].value; bucketing must match the plain field aggregation.
    SearchResponse response = client()
            .prepareSearch("idx")
            .addAggregation(
                    ipRange("range").script(new Script("doc['ip'].value")).addUnboundedTo("10.0.0.100")
                            .addRange("10.0.0.100", "10.0.0.200").addUnboundedFrom("10.0.0.200")).execute().actionGet();

    assertSearchResponse(response);

    Range range = response.getAggregations().get("range");
    assertThat(range, notNullValue());
    assertThat(range.getName(), equalTo("range"));
    List<? extends Bucket> buckets = range.getBuckets();
    assertThat(buckets.size(), equalTo(3)); // reuse captured list instead of calling getBuckets() again

    // bucket [*, 10.0.0.100)
    Range.Bucket bucket = buckets.get(0);
    assertThat(bucket, notNullValue());
    assertThat((String) bucket.getKey(), equalTo("*-10.0.0.100"));
    assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
    assertThat(bucket.getFromAsString(), nullValue());
    assertThat(bucket.getToAsString(), equalTo("10.0.0.100"));
    assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
    assertThat(bucket.getDocCount(), equalTo(100L)); // uppercase 'L' suffix: lowercase 'l' reads as '1'

    // bucket [10.0.0.100, 10.0.0.200)
    bucket = buckets.get(1);
    assertThat(bucket, notNullValue());
    assertThat((String) bucket.getKey(), equalTo("10.0.0.100-10.0.0.200"));
    assertThat(bucket.getFromAsString(), equalTo("10.0.0.100"));
    assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
    assertThat(bucket.getToAsString(), equalTo("10.0.0.200"));
    assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
    assertThat(bucket.getDocCount(), equalTo(100L));

    // bucket [10.0.0.200, *)
    bucket = buckets.get(2);
    assertThat(bucket, notNullValue());
    assertThat((String) bucket.getKey(), equalTo("10.0.0.200-*"));
    assertThat(bucket.getFromAsString(), equalTo("10.0.0.200"));
    assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
    assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
    assertThat(bucket.getToAsString(), nullValue());
    assertThat(bucket.getDocCount(), equalTo(55L));
}
@Test
public void script_SingleValue_WithSubAggregator_Inherited() throws Exception {
    // Script-only single-valued aggregation with an inherited max sub-aggregation:
    // max per bucket is the highest ip that falls inside that bucket.
    SearchResponse response = client()
            .prepareSearch("idx")
            .addAggregation(
                    ipRange("range").script(new Script("doc['ip'].value")).addUnboundedTo("10.0.0.100")
                            .addRange("10.0.0.100", "10.0.0.200").addUnboundedFrom("10.0.0.200").subAggregation(max("max"))).execute()
            .actionGet();

    assertSearchResponse(response);

    Range range = response.getAggregations().get("range");
    assertThat(range, notNullValue());
    assertThat(range.getName(), equalTo("range"));
    List<? extends Bucket> buckets = range.getBuckets();
    assertThat(buckets.size(), equalTo(3)); // reuse captured list instead of calling getBuckets() again

    // bucket [*, 10.0.0.100)
    Range.Bucket bucket = buckets.get(0);
    assertThat(bucket, notNullValue());
    assertThat((String) bucket.getKey(), equalTo("*-10.0.0.100"));
    assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
    assertThat(bucket.getFromAsString(), nullValue());
    assertThat(bucket.getToAsString(), equalTo("10.0.0.100"));
    assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
    assertThat(bucket.getDocCount(), equalTo(100L)); // uppercase 'L' suffix: lowercase 'l' reads as '1'
    Max max = bucket.getAggregations().get("max");
    assertThat(max, notNullValue());
    assertThat(max.getValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.99")));

    // bucket [10.0.0.100, 10.0.0.200)
    bucket = buckets.get(1);
    assertThat(bucket, notNullValue());
    assertThat((String) bucket.getKey(), equalTo("10.0.0.100-10.0.0.200"));
    assertThat(bucket.getFromAsString(), equalTo("10.0.0.100"));
    assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
    assertThat(bucket.getToAsString(), equalTo("10.0.0.200"));
    assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
    assertThat(bucket.getDocCount(), equalTo(100L));
    max = bucket.getAggregations().get("max");
    assertThat(max, notNullValue());
    assertThat(max.getValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.199")));

    // bucket [10.0.0.200, *)
    bucket = buckets.get(2);
    assertThat(bucket, notNullValue());
    assertThat((String) bucket.getKey(), equalTo("10.0.0.200-*"));
    assertThat(bucket.getFromAsString(), equalTo("10.0.0.200"));
    assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
    assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
    assertThat(bucket.getToAsString(), nullValue());
    assertThat(bucket.getDocCount(), equalTo(55L));
    max = bucket.getAggregations().get("max");
    assertThat(max, notNullValue());
    assertThat(max.getValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.254")));
}
@Test
public void script_MultiValued() throws Exception {
    // Script-only aggregation reading the multi-valued "ips" field via
    // doc['ips'].values; boundary-straddling docs give counts 100/101/56.
    SearchResponse response = client()
            .prepareSearch("idx")
            .addAggregation(
                    ipRange("range").script(new Script("doc['ips'].values")).addUnboundedTo("10.0.0.100")
                            .addRange("10.0.0.100", "10.0.0.200").addUnboundedFrom("10.0.0.200")).execute().actionGet();

    assertSearchResponse(response);

    Range range = response.getAggregations().get("range");
    assertThat(range, notNullValue());
    assertThat(range.getName(), equalTo("range"));
    List<? extends Bucket> buckets = range.getBuckets();
    assertThat(buckets.size(), equalTo(3)); // reuse captured list instead of calling getBuckets() again

    // bucket [*, 10.0.0.100)
    Range.Bucket bucket = buckets.get(0);
    assertThat(bucket, notNullValue());
    assertThat((String) bucket.getKey(), equalTo("*-10.0.0.100"));
    assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
    assertThat(bucket.getFromAsString(), nullValue());
    assertThat(bucket.getToAsString(), equalTo("10.0.0.100"));
    assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
    assertThat(bucket.getDocCount(), equalTo(100L)); // uppercase 'L' suffix: lowercase 'l' reads as '1'

    // bucket [10.0.0.100, 10.0.0.200)
    bucket = buckets.get(1);
    assertThat(bucket, notNullValue());
    assertThat((String) bucket.getKey(), equalTo("10.0.0.100-10.0.0.200"));
    assertThat(bucket.getFromAsString(), equalTo("10.0.0.100"));
    assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
    assertThat(bucket.getToAsString(), equalTo("10.0.0.200"));
    assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
    assertThat(bucket.getDocCount(), equalTo(101L));

    // bucket [10.0.0.200, *)
    bucket = buckets.get(2);
    assertThat(bucket, notNullValue());
    assertThat((String) bucket.getKey(), equalTo("10.0.0.200-*"));
    assertThat(bucket.getFromAsString(), equalTo("10.0.0.200"));
    assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
    assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
    assertThat(bucket.getToAsString(), nullValue());
    assertThat(bucket.getDocCount(), equalTo(56L));
}
@Test
public void script_MultiValued_WithAggregatorInherited() throws Exception {
    // Script-only multi-valued aggregation with an inherited max sub-aggregation:
    // max per bucket is the highest ip value any counted doc contributes.
    SearchResponse response = client()
            .prepareSearch("idx")
            .addAggregation(
                    ipRange("range").script(new Script("doc['ips'].values")).addUnboundedTo("10.0.0.100")
                            .addRange("10.0.0.100", "10.0.0.200").addUnboundedFrom("10.0.0.200").subAggregation(max("max"))).execute()
            .actionGet();

    assertSearchResponse(response);

    Range range = response.getAggregations().get("range");
    assertThat(range, notNullValue());
    assertThat(range.getName(), equalTo("range"));
    List<? extends Bucket> buckets = range.getBuckets();
    assertThat(buckets.size(), equalTo(3)); // reuse captured list instead of calling getBuckets() again

    // bucket [*, 10.0.0.100)
    Range.Bucket bucket = buckets.get(0);
    assertThat(bucket, notNullValue());
    assertThat((String) bucket.getKey(), equalTo("*-10.0.0.100"));
    assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
    assertThat(bucket.getFromAsString(), nullValue());
    assertThat(bucket.getToAsString(), equalTo("10.0.0.100"));
    assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
    assertThat(bucket.getDocCount(), equalTo(100L)); // uppercase 'L' suffix: lowercase 'l' reads as '1'
    Max max = bucket.getAggregations().get("max");
    assertThat(max, notNullValue()); // bare static import, consistent with the rest of the method
    assertThat((long) max.getValue(), equalTo(IpFieldMapper.ipToLong("10.0.0.100")));

    // bucket [10.0.0.100, 10.0.0.200)
    bucket = buckets.get(1);
    assertThat(bucket, notNullValue());
    assertThat((String) bucket.getKey(), equalTo("10.0.0.100-10.0.0.200"));
    assertThat(bucket.getFromAsString(), equalTo("10.0.0.100"));
    assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
    assertThat(bucket.getToAsString(), equalTo("10.0.0.200"));
    assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
    assertThat(bucket.getDocCount(), equalTo(101L));
    max = bucket.getAggregations().get("max");
    assertThat(max, notNullValue());
    assertThat((long) max.getValue(), equalTo(IpFieldMapper.ipToLong("10.0.0.200")));

    // bucket [10.0.0.200, *)
    bucket = buckets.get(2);
    assertThat(bucket, notNullValue());
    assertThat((String) bucket.getKey(), equalTo("10.0.0.200-*"));
    assertThat(bucket.getFromAsString(), equalTo("10.0.0.200"));
    assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
    assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
    assertThat(bucket.getToAsString(), nullValue());
    assertThat(bucket.getDocCount(), equalTo(56L));
    max = bucket.getAggregations().get("max");
    assertThat(max, notNullValue());
    assertThat((long) max.getValue(), equalTo(IpFieldMapper.ipToLong("10.0.0.255")));
}
@Test
public void unmapped() throws Exception {
    // Aggregating on an index where "ip" is unmapped: all three buckets must
    // still be returned, each with a zero doc count.
    SearchResponse response = client().prepareSearch("idx_unmapped")
            .addAggregation(ipRange("range")
                    .field("ip")
                    .addUnboundedTo("10.0.0.100")
                    .addRange("10.0.0.100", "10.0.0.200")
                    .addUnboundedFrom("10.0.0.200"))
            .execute().actionGet();

    assertSearchResponse(response);

    Range range = response.getAggregations().get("range");
    assertThat(range, notNullValue());
    assertThat(range.getName(), equalTo("range"));
    List<? extends Bucket> buckets = range.getBuckets();
    assertThat(buckets.size(), equalTo(3)); // reuse captured list instead of calling getBuckets() again

    // bucket [*, 10.0.0.100)
    Range.Bucket bucket = buckets.get(0);
    assertThat(bucket, notNullValue());
    assertThat((String) bucket.getKey(), equalTo("*-10.0.0.100"));
    assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
    assertThat(bucket.getFromAsString(), nullValue());
    assertThat(bucket.getToAsString(), equalTo("10.0.0.100"));
    assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
    assertThat(bucket.getDocCount(), equalTo(0L)); // uppercase 'L' suffix: lowercase 'l' reads as '1'

    // bucket [10.0.0.100, 10.0.0.200)
    bucket = buckets.get(1);
    assertThat(bucket, notNullValue());
    assertThat((String) bucket.getKey(), equalTo("10.0.0.100-10.0.0.200"));
    assertThat(bucket.getFromAsString(), equalTo("10.0.0.100"));
    assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
    assertThat(bucket.getToAsString(), equalTo("10.0.0.200"));
    assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
    assertThat(bucket.getDocCount(), equalTo(0L));

    // bucket [10.0.0.200, *)
    bucket = buckets.get(2);
    assertThat(bucket, notNullValue());
    assertThat((String) bucket.getKey(), equalTo("10.0.0.200-*"));
    assertThat(bucket.getFromAsString(), equalTo("10.0.0.200"));
    assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
    assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
    assertThat(bucket.getToAsString(), nullValue());
    assertThat(bucket.getDocCount(), equalTo(0L));
}
@Test
public void partiallyUnmapped() throws Exception {
    // Searching both a mapped and an unmapped index: counts must come solely
    // from the mapped "idx" index (100/100/55), with the unmapped shards ignored.
    SearchResponse response = client().prepareSearch("idx", "idx_unmapped")
            .addAggregation(ipRange("range")
                    .field("ip")
                    .addUnboundedTo("10.0.0.100")
                    .addRange("10.0.0.100", "10.0.0.200")
                    .addUnboundedFrom("10.0.0.200"))
            .execute().actionGet();

    assertSearchResponse(response);

    Range range = response.getAggregations().get("range");
    assertThat(range, notNullValue());
    assertThat(range.getName(), equalTo("range"));
    List<? extends Bucket> buckets = range.getBuckets();
    assertThat(buckets.size(), equalTo(3)); // reuse captured list instead of calling getBuckets() again

    // bucket [*, 10.0.0.100)
    Range.Bucket bucket = buckets.get(0);
    assertThat(bucket, notNullValue());
    assertThat((String) bucket.getKey(), equalTo("*-10.0.0.100"));
    assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
    assertThat(bucket.getFromAsString(), nullValue());
    assertThat(bucket.getToAsString(), equalTo("10.0.0.100"));
    assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
    assertThat(bucket.getDocCount(), equalTo(100L)); // uppercase 'L' suffix: lowercase 'l' reads as '1'

    // bucket [10.0.0.100, 10.0.0.200)
    bucket = buckets.get(1);
    assertThat(bucket, notNullValue());
    assertThat((String) bucket.getKey(), equalTo("10.0.0.100-10.0.0.200"));
    assertThat(bucket.getFromAsString(), equalTo("10.0.0.100"));
    assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
    assertThat(bucket.getToAsString(), equalTo("10.0.0.200"));
    assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
    assertThat(bucket.getDocCount(), equalTo(100L));

    // bucket [10.0.0.200, *)
    bucket = buckets.get(2);
    assertThat(bucket, notNullValue());
    assertThat((String) bucket.getKey(), equalTo("10.0.0.200-*"));
    assertThat(bucket.getFromAsString(), equalTo("10.0.0.200"));
    assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
    assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
    assertThat(bucket.getToAsString(), nullValue());
    assertThat(bucket.getDocCount(), equalTo(55L));
}
@Test
public void emptyAggregation() throws Exception {
    // An ip_range nested under a histogram bucket that matched no documents:
    // the range and its single "r1" bucket must still exist with doc count 0.
    SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx")
            .setQuery(matchAllQuery())
            .addAggregation(histogram("histo").field("value").interval(1L).minDocCount(0)
                    .subAggregation(ipRange("ip_range").field("ip").addRange("r1", "10.0.0.1", "10.0.0.10")))
            .execute().actionGet();

    assertThat(searchResponse.getHits().getTotalHits(), equalTo(2L)); // uppercase 'L' suffix: lowercase 'l' reads as '1'
    Histogram histo = searchResponse.getAggregations().get("histo");
    assertThat(histo, notNullValue()); // bare static import, consistent with sibling tests
    Histogram.Bucket bucket = histo.getBuckets().get(1); // histogram bucket for value 1, which holds no docs
    assertThat(bucket, notNullValue());

    Range range = bucket.getAggregations().get("ip_range");
    // TODO: use diamond once JI-9019884 is fixed
    List<Range.Bucket> buckets = new ArrayList<Range.Bucket>(range.getBuckets());
    assertThat(range, notNullValue());
    assertThat(range.getName(), equalTo("ip_range"));
    assertThat(buckets.size(), is(1));
    assertThat((String) buckets.get(0).getKey(), equalTo("r1"));
    assertThat(buckets.get(0).getFromAsString(), equalTo("10.0.0.1"));
    assertThat(buckets.get(0).getToAsString(), equalTo("10.0.0.10"));
    assertThat(buckets.get(0).getDocCount(), equalTo(0L));
}
/*
* TODO Remove in 2.0
*/
@Test
public void singleValuedField_WithValueScriptOldScriptAPI() throws Exception {
    // Legacy (pre-Script-object) string script API on the single-valued "ip"
    // field; must behave identically to singleValuedField_WithValueScript.
    SearchResponse response = client()
            .prepareSearch("idx")
            .addAggregation(
                    ipRange("range").field("ip").script("_value").addUnboundedTo("10.0.0.100").addRange("10.0.0.100", "10.0.0.200")
                            .addUnboundedFrom("10.0.0.200")).execute().actionGet();

    assertSearchResponse(response);

    Range range = response.getAggregations().get("range");
    assertThat(range, notNullValue());
    assertThat(range.getName(), equalTo("range"));
    List<? extends Bucket> buckets = range.getBuckets();
    assertThat(buckets.size(), equalTo(3)); // reuse captured list instead of calling getBuckets() again

    // bucket [*, 10.0.0.100)
    Range.Bucket bucket = buckets.get(0);
    assertThat(bucket, notNullValue());
    assertThat((String) bucket.getKey(), equalTo("*-10.0.0.100"));
    assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
    assertThat(bucket.getFromAsString(), nullValue());
    assertThat(bucket.getToAsString(), equalTo("10.0.0.100"));
    assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
    assertThat(bucket.getDocCount(), equalTo(100L)); // uppercase 'L' suffix: lowercase 'l' reads as '1'

    // bucket [10.0.0.100, 10.0.0.200)
    bucket = buckets.get(1);
    assertThat(bucket, notNullValue());
    assertThat((String) bucket.getKey(), equalTo("10.0.0.100-10.0.0.200"));
    assertThat(bucket.getFromAsString(), equalTo("10.0.0.100"));
    assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
    assertThat(bucket.getToAsString(), equalTo("10.0.0.200"));
    assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
    assertThat(bucket.getDocCount(), equalTo(100L));

    // bucket [10.0.0.200, *)
    bucket = buckets.get(2);
    assertThat(bucket, notNullValue());
    assertThat((String) bucket.getKey(), equalTo("10.0.0.200-*"));
    assertThat(bucket.getFromAsString(), equalTo("10.0.0.200"));
    assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
    assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
    assertThat(bucket.getToAsString(), nullValue());
    assertThat(bucket.getDocCount(), equalTo(55L));
}
/*
* TODO Remove in 2.0
*/
@Test
public void multiValuedField_WithValueScriptOldScriptAPI() throws Exception {
    // Legacy (pre-Script-object) string script API on the multi-valued "ips"
    // field; must behave identically to multiValuedField_WithValueScript.
    SearchResponse response = client()
            .prepareSearch("idx")
            .addAggregation(
                    ipRange("range").field("ips").script("_value").addUnboundedTo("10.0.0.100").addRange("10.0.0.100", "10.0.0.200")
                            .addUnboundedFrom("10.0.0.200")).execute().actionGet();

    assertSearchResponse(response);

    Range range = response.getAggregations().get("range");
    assertThat(range, notNullValue());
    assertThat(range.getName(), equalTo("range"));
    List<? extends Bucket> buckets = range.getBuckets();
    assertThat(buckets.size(), equalTo(3)); // reuse captured list instead of calling getBuckets() again

    // bucket [*, 10.0.0.100)
    Range.Bucket bucket = buckets.get(0);
    assertThat(bucket, notNullValue());
    assertThat((String) bucket.getKey(), equalTo("*-10.0.0.100"));
    assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
    assertThat(bucket.getFromAsString(), nullValue());
    assertThat(bucket.getToAsString(), equalTo("10.0.0.100"));
    assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
    assertThat(bucket.getDocCount(), equalTo(100L)); // uppercase 'L' suffix: lowercase 'l' reads as '1'

    // bucket [10.0.0.100, 10.0.0.200) — one extra doc straddles the lower boundary
    bucket = buckets.get(1);
    assertThat(bucket, notNullValue());
    assertThat((String) bucket.getKey(), equalTo("10.0.0.100-10.0.0.200"));
    assertThat(bucket.getFromAsString(), equalTo("10.0.0.100"));
    assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
    assertThat(bucket.getToAsString(), equalTo("10.0.0.200"));
    assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
    assertThat(bucket.getDocCount(), equalTo(101L));

    // bucket [10.0.0.200, *)
    bucket = buckets.get(2);
    assertThat(bucket, notNullValue());
    assertThat((String) bucket.getKey(), equalTo("10.0.0.200-*"));
    assertThat(bucket.getFromAsString(), equalTo("10.0.0.200"));
    assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
    assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
    assertThat(bucket.getToAsString(), nullValue());
    assertThat(bucket.getDocCount(), equalTo(56L));
}
/*
* TODO Remove in 2.0
*/
@Test
public void multiValuedField_WithValueScript_WithInheritedSubAggregatorOldScriptAPI() throws Exception {
SearchResponse response = client()
.prepareSearch("idx")
.addAggregation(
ipRange("range").field("ips").script("_value").addUnboundedTo("10.0.0.100").addRange("10.0.0.100", "10.0.0.200")
.addUnboundedFrom("10.0.0.200").subAggregation(max("max"))).execute().actionGet();
assertSearchResponse(response);
Range range = response.getAggregations().get("range");
assertThat(range, notNullValue());
assertThat(range.getName(), equalTo("range"));
List<? extends Bucket> buckets = range.getBuckets();
assertThat(range.getBuckets().size(), equalTo(3));
Range.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("*-10.0.0.100"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
assertThat(bucket.getFromAsString(), nullValue());
assertThat(bucket.getToAsString(), equalTo("10.0.0.100"));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
assertThat(bucket.getDocCount(), equalTo(100l));
Max max = bucket.getAggregations().get("max");
assertThat(max, Matchers.notNullValue());
assertThat((long) max.getValue(), equalTo(IpFieldMapper.ipToLong("10.0.0.100")));
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("10.0.0.100-10.0.0.200"));
assertThat(bucket.getFromAsString(), equalTo("10.0.0.100"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
assertThat(bucket.getToAsString(), equalTo("10.0.0.200"));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
assertThat(bucket.getDocCount(), equalTo(101l));
max = bucket.getAggregations().get("max");
assertThat(max, Matchers.notNullValue());
assertThat((long) max.getValue(), equalTo(IpFieldMapper.ipToLong("10.0.0.200")));
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("10.0.0.200-*"));
assertThat(bucket.getFromAsString(), equalTo("10.0.0.200"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat(bucket.getToAsString(), nullValue());
assertThat(bucket.getDocCount(), equalTo(56l));
max = bucket.getAggregations().get("max");
assertThat(max, Matchers.notNullValue());
assertThat((long) max.getValue(), equalTo(IpFieldMapper.ipToLong("10.0.0.255")));
}
/*
* TODO Remove in 2.0
*/
@Test
public void script_SingleValueOldScriptAPI() throws Exception {
SearchResponse response = client()
.prepareSearch("idx")
.addAggregation(
ipRange("range").script("doc['ip'].value").addUnboundedTo("10.0.0.100").addRange("10.0.0.100", "10.0.0.200")
.addUnboundedFrom("10.0.0.200")).execute().actionGet();
assertSearchResponse(response);
Range range = response.getAggregations().get("range");
assertThat(range, notNullValue());
assertThat(range.getName(), equalTo("range"));
List<? extends Bucket> buckets = range.getBuckets();
assertThat(range.getBuckets().size(), equalTo(3));
Range.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("*-10.0.0.100"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
assertThat(bucket.getFromAsString(), nullValue());
assertThat(bucket.getToAsString(), equalTo("10.0.0.100"));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
assertThat(bucket.getDocCount(), equalTo(100l));
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("10.0.0.100-10.0.0.200"));
assertThat(bucket.getFromAsString(), equalTo("10.0.0.100"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
assertThat(bucket.getToAsString(), equalTo("10.0.0.200"));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
assertThat(bucket.getDocCount(), equalTo(100l));
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("10.0.0.200-*"));
assertThat(bucket.getFromAsString(), equalTo("10.0.0.200"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat(bucket.getToAsString(), nullValue());
assertThat(bucket.getDocCount(), equalTo(55l));
}
/*
* TODO Remove in 2.0
*/
@Test
public void script_SingleValue_WithSubAggregator_InheritedOldScriptAPI() throws Exception {
SearchResponse response = client()
.prepareSearch("idx")
.addAggregation(
ipRange("range").script("doc['ip'].value").addUnboundedTo("10.0.0.100").addRange("10.0.0.100", "10.0.0.200")
.addUnboundedFrom("10.0.0.200").subAggregation(max("max"))).execute().actionGet();
assertSearchResponse(response);
Range range = response.getAggregations().get("range");
assertThat(range, notNullValue());
assertThat(range.getName(), equalTo("range"));
List<? extends Bucket> buckets = range.getBuckets();
assertThat(range.getBuckets().size(), equalTo(3));
Range.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("*-10.0.0.100"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
assertThat(bucket.getFromAsString(), nullValue());
assertThat(bucket.getToAsString(), equalTo("10.0.0.100"));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
assertThat(bucket.getDocCount(), equalTo(100l));
Max max = bucket.getAggregations().get("max");
assertThat(max, notNullValue());
assertThat(max.getValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.99")));
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("10.0.0.100-10.0.0.200"));
assertThat(bucket.getFromAsString(), equalTo("10.0.0.100"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
assertThat(bucket.getToAsString(), equalTo("10.0.0.200"));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
assertThat(bucket.getDocCount(), equalTo(100l));
max = bucket.getAggregations().get("max");
assertThat(max, notNullValue());
assertThat(max.getValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.199")));
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("10.0.0.200-*"));
assertThat(bucket.getFromAsString(), equalTo("10.0.0.200"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat(bucket.getToAsString(), nullValue());
assertThat(bucket.getDocCount(), equalTo(55l));
max = bucket.getAggregations().get("max");
assertThat(max, notNullValue());
assertThat(max.getValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.254")));
}
/*
* TODO Remove in 2.0
*/
@Test
public void script_MultiValuedOldScriptAPI() throws Exception {
SearchResponse response = client()
.prepareSearch("idx")
.addAggregation(
ipRange("range").script("doc['ips'].values").addUnboundedTo("10.0.0.100").addRange("10.0.0.100", "10.0.0.200")
.addUnboundedFrom("10.0.0.200")).execute().actionGet();
assertSearchResponse(response);
Range range = response.getAggregations().get("range");
assertThat(range, notNullValue());
assertThat(range.getName(), equalTo("range"));
List<? extends Bucket> buckets = range.getBuckets();
assertThat(range.getBuckets().size(), equalTo(3));
Range.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("*-10.0.0.100"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
assertThat(bucket.getFromAsString(), nullValue());
assertThat(bucket.getToAsString(), equalTo("10.0.0.100"));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
assertThat(bucket.getDocCount(), equalTo(100l));
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("10.0.0.100-10.0.0.200"));
assertThat(bucket.getFromAsString(), equalTo("10.0.0.100"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
assertThat(bucket.getToAsString(), equalTo("10.0.0.200"));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
assertThat(bucket.getDocCount(), equalTo(101l));
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("10.0.0.200-*"));
assertThat(bucket.getFromAsString(), equalTo("10.0.0.200"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat(bucket.getToAsString(), nullValue());
assertThat(bucket.getDocCount(), equalTo(56l));
}
/*
* TODO Remove in 2.0
*/
@Test
public void script_MultiValued_WithAggregatorInheritedOldScriptAPI() throws Exception {
SearchResponse response = client()
.prepareSearch("idx")
.addAggregation(
ipRange("range").script("doc['ips'].values").addUnboundedTo("10.0.0.100").addRange("10.0.0.100", "10.0.0.200")
.addUnboundedFrom("10.0.0.200").subAggregation(max("max"))).execute().actionGet();
assertSearchResponse(response);
Range range = response.getAggregations().get("range");
assertThat(range, notNullValue());
assertThat(range.getName(), equalTo("range"));
List<? extends Bucket> buckets = range.getBuckets();
assertThat(range.getBuckets().size(), equalTo(3));
Range.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("*-10.0.0.100"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
assertThat(bucket.getFromAsString(), nullValue());
assertThat(bucket.getToAsString(), equalTo("10.0.0.100"));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
assertThat(bucket.getDocCount(), equalTo(100l));
Max max = bucket.getAggregations().get("max");
assertThat(max, Matchers.notNullValue());
assertThat((long) max.getValue(), equalTo(IpFieldMapper.ipToLong("10.0.0.100")));
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("10.0.0.100-10.0.0.200"));
assertThat(bucket.getFromAsString(), equalTo("10.0.0.100"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
assertThat(bucket.getToAsString(), equalTo("10.0.0.200"));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
assertThat(bucket.getDocCount(), equalTo(101l));
max = bucket.getAggregations().get("max");
assertThat(max, Matchers.notNullValue());
assertThat((long) max.getValue(), equalTo(IpFieldMapper.ipToLong("10.0.0.200")));
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("10.0.0.200-*"));
assertThat(bucket.getFromAsString(), equalTo("10.0.0.200"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat(bucket.getToAsString(), nullValue());
assertThat(bucket.getDocCount(), equalTo(56l));
max = bucket.getAggregations().get("max");
assertThat(max, Matchers.notNullValue());
assertThat((long) max.getValue(), equalTo(IpFieldMapper.ipToLong("10.0.0.255")));
}
}
| |
package org.springframework.security.taglibs.authz;
import java.io.IOException;
import java.util.*;
import javax.servlet.ServletContext;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.jsp.JspException;
import javax.servlet.jsp.PageContext;
import javax.servlet.jsp.tagext.Tag;
import org.springframework.expression.BeanResolver;
import org.springframework.expression.ConstructorResolver;
import org.springframework.expression.EvaluationContext;
import org.springframework.expression.MethodResolver;
import org.springframework.expression.OperatorOverloader;
import org.springframework.expression.PropertyAccessor;
import org.springframework.expression.TypeComparator;
import org.springframework.expression.TypeConverter;
import org.springframework.expression.TypeLocator;
import org.springframework.expression.TypedValue;
import org.springframework.security.access.expression.SecurityExpressionHandler;
import org.springframework.security.taglibs.TagLibConfig;
import org.springframework.security.web.FilterInvocation;
/**
* A JSP {@link Tag} implementation of {@link AbstractAuthorizeTag}.
*
* @author Rossen Stoyanchev
* @see AbstractAuthorizeTag
* @since 3.1.0
*/
public class JspAuthorizeTag extends AbstractAuthorizeTag implements Tag {

    private Tag parent;

    protected PageContext pageContext;

    protected String id;

    // Name of the page-scoped attribute to expose the authorization decision under
    // (optional tag attribute).
    private String var;

    // Result of the last authorize() call; read again in doEndTag().
    private boolean authorized;

    /**
     * Invokes the base class {@link AbstractAuthorizeTag#authorize()} method to decide if
     * the body of the tag should be skipped or not.
     *
     * <p>When UI security is disabled, the prefix marker is written before an otherwise
     * hidden body. If {@code var} is set, the decision is also exposed as a page-scoped
     * attribute.
     *
     * @return {@link Tag#SKIP_BODY} or {@link Tag#EVAL_BODY_INCLUDE}
     * @throws JspException wrapping any {@link IOException} from the JSP writer
     */
    public int doStartTag() throws JspException {
        try {
            authorized = super.authorize();
            if (!authorized && TagLibConfig.isUiSecurityDisabled()) {
                pageContext.getOut().write(TagLibConfig.getSecuredUiPrefix());
            }
            if (var != null) {
                pageContext.setAttribute(var, authorized, PageContext.PAGE_SCOPE);
            }
            return TagLibConfig.evalOrSkip(authorized);
        }
        catch (IOException e) {
            throw new JspException(e);
        }
    }

    /**
     * Wraps the standard evaluation context so that SpEL variable lookups fall back to
     * {@link PageContext#findAttribute(String)}.
     */
    @Override
    protected EvaluationContext createExpressionEvaluationContext(
            SecurityExpressionHandler<FilterInvocation> handler) {
        return new PageContextVariableLookupEvaluationContext(
                super.createExpressionEvaluationContext(handler));
    }

    /**
     * Default processing of the end tag returning EVAL_PAGE. Writes the "secured UI"
     * suffix marker when the body was rendered despite a negative decision.
     *
     * @return EVAL_PAGE
     * @see Tag#doEndTag()
     */
    public int doEndTag() throws JspException {
        try {
            if (!authorized && TagLibConfig.isUiSecurityDisabled()) {
                pageContext.getOut().write(TagLibConfig.getSecuredUiSuffix());
            }
        }
        catch (IOException e) {
            throw new JspException(e);
        }
        return EVAL_PAGE;
    }

    public String getId() {
        return id;
    }

    public void setId(String id) {
        this.id = id;
    }

    public Tag getParent() {
        return parent;
    }

    public void setParent(Tag parent) {
        this.parent = parent;
    }

    public String getVar() {
        return var;
    }

    public void setVar(String var) {
        this.var = var;
    }

    /**
     * Releases all state held by this (potentially pooled and reused) tag handler, as
     * required by the {@link Tag#release()} contract. Fix: previously only {@code parent}
     * and {@code id} were cleared, so a recycled handler retained the previous request's
     * {@link PageContext} and {@code var} attribute.
     */
    public void release() {
        parent = null;
        id = null;
        var = null;
        authorized = false;
        pageContext = null;
    }

    public void setPageContext(PageContext pageContext) {
        this.pageContext = pageContext;
    }

    @Override
    protected ServletRequest getRequest() {
        return pageContext.getRequest();
    }

    @Override
    protected ServletResponse getResponse() {
        return pageContext.getResponse();
    }

    @Override
    protected ServletContext getServletContext() {
        return pageContext.getServletContext();
    }

    /**
     * An {@link EvaluationContext} decorator: every operation is forwarded to the
     * delegate, except {@link #lookupVariable(String)}, which additionally searches the
     * JSP page context when the delegate has no value for the name.
     */
    private final class PageContextVariableLookupEvaluationContext implements
            EvaluationContext {

        private final EvaluationContext delegate;

        private PageContextVariableLookupEvaluationContext(EvaluationContext delegate) {
            this.delegate = delegate;
        }

        public TypedValue getRootObject() {
            return delegate.getRootObject();
        }

        public List<ConstructorResolver> getConstructorResolvers() {
            return delegate.getConstructorResolvers();
        }

        public List<MethodResolver> getMethodResolvers() {
            return delegate.getMethodResolvers();
        }

        public List<PropertyAccessor> getPropertyAccessors() {
            return delegate.getPropertyAccessors();
        }

        public TypeLocator getTypeLocator() {
            return delegate.getTypeLocator();
        }

        public TypeConverter getTypeConverter() {
            return delegate.getTypeConverter();
        }

        public TypeComparator getTypeComparator() {
            return delegate.getTypeComparator();
        }

        public OperatorOverloader getOperatorOverloader() {
            return delegate.getOperatorOverloader();
        }

        public BeanResolver getBeanResolver() {
            return delegate.getBeanResolver();
        }

        public void setVariable(String name, Object value) {
            delegate.setVariable(name, value);
        }

        /**
         * Looks up the variable in the delegate first; falls back to
         * {@link PageContext#findAttribute(String)} (page/request/session/application
         * scopes) when the delegate returns {@code null}.
         */
        public Object lookupVariable(String name) {
            Object result = delegate.lookupVariable(name);
            if (result == null) {
                result = pageContext.findAttribute(name);
            }
            return result;
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.oak.security.user;
import com.google.common.collect.Iterables;
import org.apache.jackrabbit.api.security.user.Authorizable;
import org.apache.jackrabbit.api.security.user.User;
import org.apache.jackrabbit.api.security.user.UserManager;
import org.apache.jackrabbit.oak.AbstractSecurityTest;
import org.apache.jackrabbit.oak.InitialContent;
import org.apache.jackrabbit.oak.Oak;
import org.apache.jackrabbit.oak.api.ContentRepository;
import org.apache.jackrabbit.oak.api.ContentSession;
import org.apache.jackrabbit.oak.api.Root;
import org.apache.jackrabbit.oak.api.Tree;
import org.apache.jackrabbit.oak.commons.PathUtils;
import org.apache.jackrabbit.oak.namepath.NamePathMapper;
import org.apache.jackrabbit.oak.plugins.index.IndexConstants;
import org.apache.jackrabbit.oak.plugins.index.property.PropertyIndexEditorProvider;
import org.apache.jackrabbit.oak.plugins.index.property.PropertyIndexProvider;
import org.apache.jackrabbit.oak.plugins.nodetype.TypeEditorProvider;
import org.apache.jackrabbit.oak.plugins.tree.TreeUtil;
import org.apache.jackrabbit.oak.security.internal.SecurityProviderBuilder;
import org.apache.jackrabbit.oak.spi.security.ConfigurationParameters;
import org.apache.jackrabbit.oak.spi.security.SecurityProvider;
import org.apache.jackrabbit.oak.spi.security.authentication.SystemSubject;
import org.apache.jackrabbit.oak.spi.security.principal.AdminPrincipal;
import org.apache.jackrabbit.oak.spi.security.user.UserConfiguration;
import org.apache.jackrabbit.oak.spi.security.user.UserConstants;
import org.apache.jackrabbit.oak.spi.security.user.util.UserUtil;
import org.apache.jackrabbit.oak.spi.state.NodeBuilder;
import org.junit.Before;
import org.junit.Test;
import javax.jcr.GuestCredentials;
import javax.jcr.SimpleCredentials;
import javax.security.auth.Subject;
import javax.security.auth.login.LoginException;
import java.security.PrivilegedExceptionAction;
import java.util.HashMap;
import java.util.Map;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.verify;
/**
* @since OAK 1.0
*/
public class UserInitializerTest extends AbstractSecurityTest {

    private UserManager userMgr;

    // Parameters of the user configuration under test (admin/anonymous ids, etc.).
    private ConfigurationParameters config;

    @Override
    @Before
    public void before() throws Exception {
        super.before();
        userMgr = getUserManager(root);
        config = getUserConfiguration().getParameters();
    }

    // The default-initialized repository must contain both built-in users.
    @Test
    public void testBuildInUserExist() throws Exception {
        assertNotNull(userMgr.getAuthorizable(UserUtil.getAdminId(config)));
        assertNotNull(userMgr.getAuthorizable(UserUtil.getAnonymousId(config)));
    }

    @Test
    public void testAdminUser() throws Exception {
        Authorizable a = userMgr.getAuthorizable(UserUtil.getAdminId(config));
        assertFalse(a.isGroup());

        User admin = (User) a;
        // The admin user carries AdminPrincipal semantics and is tree-based.
        assertTrue(admin.isAdmin());
        assertTrue(admin.getPrincipal() instanceof AdminPrincipal);
        assertTrue(admin.getPrincipal() instanceof TreeBasedPrincipal);
        assertEquals(admin.getID(), admin.getPrincipal().getName());
    }

    @Test
    public void testAnonymous() throws Exception {
        Authorizable a = userMgr.getAuthorizable(UserUtil.getAnonymousId(config));
        assertFalse(a.isGroup());

        User anonymous = (User) a;
        // Anonymous must NOT have admin semantics, but is still tree-based.
        assertFalse(anonymous.isAdmin());
        assertFalse(anonymous.getPrincipal() instanceof AdminPrincipal);
        assertTrue(anonymous.getPrincipal() instanceof TreeBasedPrincipal);
        assertEquals(anonymous.getID(), anonymous.getPrincipal().getName());
    }

    // Both built-in users must be backed by existing trees in the content repository.
    @Test
    public void testUserContent() throws Exception {
        Authorizable a = userMgr.getAuthorizable(UserUtil.getAdminId(config));
        assertTrue(root.getTree(a.getPath()).exists());

        a = userMgr.getAuthorizable(UserUtil.getAnonymousId(config));
        assertTrue(root.getTree(a.getPath()).exists());
    }

    // Verifies the property index definitions created for user management lookups.
    @Test
    public void testUserIndexDefinitions() {
        Tree oakIndex = root.getTree('/' + IndexConstants.INDEX_DEFINITIONS_NAME);
        assertTrue(oakIndex.exists());

        // rep:authorizableId: unique index.
        Tree id = oakIndex.getChild("authorizableId");
        assertIndexDefinition(id, UserConstants.REP_AUTHORIZABLE_ID, true);

        // rep:principalName: unique index, restricted to rep:Authorizable nodes.
        Tree princName = oakIndex.getChild("principalName");
        assertIndexDefinition(princName, UserConstants.REP_PRINCIPAL_NAME, true);
        Iterable<String> declaringNtNames = TreeUtil.getStrings(princName, IndexConstants.DECLARING_NODE_TYPES);
        assertArrayEquals(
                new String[]{UserConstants.NT_REP_AUTHORIZABLE},
                Iterables.toArray(declaringNtNames, String.class));

        // rep:members: non-unique index, restricted to rep:MemberReferences nodes.
        Tree repMembers = oakIndex.getChild("repMembers");
        assertIndexDefinition(repMembers, UserConstants.REP_MEMBERS, false);
        declaringNtNames = TreeUtil.getStrings(repMembers, IndexConstants.DECLARING_NODE_TYPES);
        assertArrayEquals(
                new String[]{UserConstants.NT_REP_MEMBER_REFERENCES},
                Iterables.toArray(declaringNtNames, String.class));
    }

    // Asserts that the given index definition tree exists, has the expected uniqueness
    // flag and indexes exactly the given property.
    private static void assertIndexDefinition(Tree tree, String propName, boolean isUnique) {
        assertTrue(tree.exists());
        assertEquals(isUnique, TreeUtil.getBoolean(tree, IndexConstants.UNIQUE_PROPERTY_NAME));
        assertArrayEquals(
                propName, new String[]{propName},
                Iterables.toArray(TreeUtil.getStrings(tree, IndexConstants.PROPERTY_NAMES), String.class));
    }

    /**
     * @since OAK 1.0 The configuration defines if the password of the
     * admin user is being set.
     */
    @Test
    public void testAdminConfiguration() throws Exception {
        // Build a fresh repository with PARAM_OMIT_ADMIN_PW so the admin user is
        // created without a password.
        Map<String,Object> userParams = new HashMap<>();
        userParams.put(UserConstants.PARAM_ADMIN_ID, "admin");
        userParams.put(UserConstants.PARAM_OMIT_ADMIN_PW, true);
        ConfigurationParameters params = ConfigurationParameters.of(UserConfiguration.NAME, ConfigurationParameters.of(userParams));
        SecurityProvider sp = SecurityProviderBuilder.newBuilder().with(params).build();
        final ContentRepository repo = new Oak().with(new InitialContent())
                .with(new PropertyIndexEditorProvider())
                .with(new PropertyIndexProvider())
                .with(new TypeEditorProvider())
                .with(sp)
                .createContentRepository();
        // Inspect the admin node from a privileged (system) session.
        try (ContentSession cs = Subject.doAs(SystemSubject.INSTANCE, (PrivilegedExceptionAction<ContentSession>) () -> repo.login(null, null))) {
            Root root = cs.getLatestRoot();
            UserConfiguration uc = sp.getConfiguration(UserConfiguration.class);
            UserManager umgr = uc.getUserManager(root, NamePathMapper.DEFAULT);
            Authorizable adminUser = umgr.getAuthorizable("admin");
            assertNotNull(adminUser);

            Tree adminTree = root.getTree(adminUser.getPath());
            assertTrue(adminTree.exists());
            // No rep:password property since PARAM_OMIT_ADMIN_PW was set.
            assertNull(adminTree.getProperty(UserConstants.REP_PASSWORD));
        }
        // login as admin should fail
        try (ContentSession adminSession = repo.login(new SimpleCredentials("admin", new char[0]), null)) {
            fail();
        } catch (LoginException e) {
            //success
        }
    }

    /**
     * @since OAK 1.0 The anonymous user is optional.
     */
    @Test
    public void testAnonymousConfiguration() throws Exception {
        // An empty PARAM_ANONYMOUS_ID disables creation of the anonymous user.
        Map<String,Object> userParams = new HashMap<>();
        userParams.put(UserConstants.PARAM_ANONYMOUS_ID, "");
        ConfigurationParameters params = ConfigurationParameters.of(UserConfiguration.NAME, ConfigurationParameters.of(userParams));
        SecurityProvider sp = SecurityProviderBuilder.newBuilder().with(params).build();
        final ContentRepository repo = new Oak().with(new InitialContent())
                .with(new PropertyIndexEditorProvider())
                .with(new PropertyIndexProvider())
                .with(new TypeEditorProvider())
                .with(sp)
                .createContentRepository();
        try (ContentSession cs = Subject.doAs(SystemSubject.INSTANCE, (PrivilegedExceptionAction<ContentSession>) () -> repo.login(null, null))) {
            Root root = cs.getLatestRoot();
            UserConfiguration uc = sp.getConfiguration(UserConfiguration.class);
            UserManager umgr = uc.getUserManager(root, NamePathMapper.DEFAULT);
            Authorizable anonymous = umgr.getAuthorizable(UserConstants.DEFAULT_ANONYMOUS_ID);
            assertNull(anonymous);
        }
        // guest (anonymous) login must fail, since no anonymous user was created
        try (ContentSession anonymousSession = repo.login(new GuestCredentials(), null)) {
            fail();
        } catch (LoginException e) {
            //success
        }
    }

    // Re-running the initializer on an already-initialized repository must be a no-op:
    // no child nodes may be created on the root builder.
    @Test
    public void testSecondInit() {
        NodeBuilder builder = spy(getTreeProvider().asNodeState(root.getTree(PathUtils.ROOT_PATH)).builder());
        UserInitializer ui = new UserInitializer(getSecurityProvider());
        ui.initialize(builder, adminSession.getWorkspaceName());
        verify(builder, never()).child(anyString());
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.calcite.sql.parser;
import org.apache.calcite.avatica.util.Casing;
import org.apache.calcite.runtime.CalciteContextException;
import org.apache.calcite.sql.SqlCall;
import org.apache.calcite.sql.SqlFunctionCategory;
import org.apache.calcite.sql.SqlIdentifier;
import org.apache.calcite.sql.SqlLiteral;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.SqlNodeList;
import org.apache.calcite.sql.SqlOperator;
import org.apache.calcite.sql.SqlUnresolvedFunction;
import org.apache.calcite.sql.validate.SqlConformance;
import org.apache.calcite.util.Glossary;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import java.io.Reader;
import java.io.StringReader;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;
/**
* Abstract base for parsers generated from CommonParser.jj.
*/
public abstract class SqlAbstractParserImpl {
//~ Static fields/initializers ---------------------------------------------
private static final ImmutableSet<String> SQL_92_RESERVED_WORD_SET =
ImmutableSet.of(
"ABSOLUTE",
"ACTION",
"ADD",
"ALL",
"ALLOCATE",
"ALTER",
"AND",
"ANY",
"ARE",
"AS",
"ASC",
"ASSERTION",
"AT",
"AUTHORIZATION",
"AVG",
"BEGIN",
"BETWEEN",
"BIT",
"BIT_LENGTH",
"BOTH",
"BY",
"CASCADE",
"CASCADED",
"CASE",
"CAST",
"CATALOG",
"CHAR",
"CHARACTER",
"CHARACTER_LENGTH",
"CHAR_LENGTH",
"CHECK",
"CLOSE",
"COALESCE",
"COLLATE",
"COLLATION",
"COLUMN",
"COMMIT",
"CONNECT",
"CONNECTION",
"CONSTRAINT",
"CONSTRAINTS",
"CONTINUE",
"CONVERT",
"CORRESPONDING",
"COUNT",
"CREATE",
"CROSS",
"CURRENT",
"CURRENT_DATE",
"CURRENT_TIME",
"CURRENT_TIMESTAMP",
"CURRENT_USER",
"CURSOR",
"DATE",
"DAY",
"DEALLOCATE",
"DEC",
"DECIMAL",
"DECLARE",
"DEFAULT",
"DEFERRABLE",
"DEFERRED",
"DELETE",
"DESC",
"DESCRIBE",
"DESCRIPTOR",
"DIAGNOSTICS",
"DISCONNECT",
"DISTINCT",
"DOMAIN",
"DOUBLE",
"DROP",
"ELSE",
"END",
"END-EXEC",
"ESCAPE",
"EXCEPT",
"EXCEPTION",
"EXEC",
"EXECUTE",
"EXISTS",
"EXTERNAL",
"EXTRACT",
"FALSE",
"FETCH",
"FIRST",
"FLOAT",
"FOR",
"FOREIGN",
"FOUND",
"FROM",
"FULL",
"GET",
"GLOBAL",
"GO",
"GOTO",
"GRANT",
"GROUP",
"HAVING",
"HOUR",
"IDENTITY",
"IMMEDIATE",
"IN",
"INDICATOR",
"INITIALLY",
"INNER",
"INADD",
"INSENSITIVE",
"INSERT",
"INT",
"INTEGER",
"INTERSECT",
"INTERVAL",
"INTO",
"IS",
"ISOLATION",
"JOIN",
"KEY",
"LANGUAGE",
"LAST",
"LEADING",
"LEFT",
"LEVEL",
"LIKE",
"LOCAL",
"LOWER",
"MATCH",
"MAX",
"MIN",
"MINUTE",
"MODULE",
"MONTH",
"NAMES",
"NATIONAL",
"NATURAL",
"NCHAR",
"NEXT",
"NO",
"NOT",
"NULL",
"NULLIF",
"NUMERIC",
"OCTET_LENGTH",
"OF",
"ON",
"ONLY",
"OPEN",
"OPTION",
"OR",
"ORDER",
"OUTER",
"OUTADD",
"OVERLAPS",
"PAD",
"PARTIAL",
"POSITION",
"PRECISION",
"PREPARE",
"PRESERVE",
"PRIMARY",
"PRIOR",
"PRIVILEGES",
"PROCEDURE",
"PUBLIC",
"READ",
"REAL",
"REFERENCES",
"RELATIVE",
"RESTRICT",
"REVOKE",
"RIGHT",
"ROLLBACK",
"ROWS",
"SCHEMA",
"SCROLL",
"SECOND",
"SECTION",
"SELECT",
"SESSION",
"SESSION_USER",
"SET",
"SIZE",
"SMALLINT",
"SOME",
"SPACE",
"SQL",
"SQLCODE",
"SQLERROR",
"SQLSTATE",
"SUBSTRING",
"SUM",
"SYSTEM_USER",
"TABLE",
"TEMPORARY",
"THEN",
"TIME",
"TIMESTAMP",
"TIMEZONE_HOUR",
"TIMEZONE_MINUTE",
"TO",
"TRAILING",
"TRANSACTION",
"TRANSLATE",
"TRANSLATION",
"TRIM",
"TRUE",
"UNION",
"UNIQUE",
"UNKNOWN",
"UPDATE",
"UPPER",
"USAGE",
"USER",
"USING",
"VALUE",
"VALUES",
"VARCHAR",
"VARYING",
"VIEW",
"WHEN",
"WHENEVER",
"WHERE",
"WITH",
"WORK",
"WRITE",
"YEAR",
"ZONE");
//~ Enums ------------------------------------------------------------------
/**
* Type-safe enum for context of acceptable expressions.
*/
  /**
   * Type-safe enum for context of acceptable expressions.
   *
   * <p>Used by the generated parser to restrict which expression forms are legal
   * at a given grammar position.
   */
  protected enum ExprContext {
    /**
     * Accept any kind of expression in this context.
     */
    ACCEPT_ALL,

    /**
     * Accept any kind of expression in this context, with the exception of
     * CURSOR constructors.
     */
    ACCEPT_NONCURSOR,

    /**
     * Accept only query expressions in this context.
     */
    ACCEPT_QUERY,

    /**
     * Accept only non-query expressions in this context.
     */
    ACCEPT_NON_QUERY,

    /**
     * Accept only parenthesized queries or non-query expressions in this
     * context.
     */
    ACCEPT_SUB_QUERY,

    /**
     * Accept only CURSOR constructors, parenthesized queries, or non-query
     * expressions in this context.
     */
    ACCEPT_CURSOR;

    /** Legacy alias for {@link #ACCEPT_SUB_QUERY}; kept for source compatibility. */
    @Deprecated // to be removed before 2.0
    public static final ExprContext ACCEPT_SUBQUERY = ACCEPT_SUB_QUERY;

    /** Legacy alias for {@link #ACCEPT_NON_QUERY}; kept for source compatibility. */
    @Deprecated // to be removed before 2.0
    public static final ExprContext ACCEPT_NONQUERY = ACCEPT_NON_QUERY;
  }
//~ Instance fields --------------------------------------------------------
  // Number of dynamic parameters ("?") encountered so far during parsing.
  protected int nDynamicParams;

  // The SQL text being parsed, as supplied to the parser (assigned externally by
  // the generated subclass -- may be null until set; confirm against callers).
  protected String originalSql;

  // Non-fatal problems accumulated while parsing.
  protected final List<CalciteContextException> warnings = new ArrayList<>();
//~ Methods ----------------------------------------------------------------
  /**
   * Returns immutable set of all reserved words defined by SQL-92.
   *
   * <p>The returned set is shared and must not be modified.
   *
   * @see Glossary#SQL92 SQL-92 Section 5.2
   */
  public static Set<String> getSql92ReservedWords() {
    return SQL_92_RESERVED_WORD_SET;
  }
/**
 * Creates a call.
 *
 * @param funName           Name of function
 * @param pos               Position in source code
 * @param funcType          Type of function
 * @param functionQualifier Qualifier
 * @param operands          Operands to call
 * @return Call
 */
protected SqlCall createCall(
    SqlIdentifier funName,
    SqlParserPos pos,
    SqlFunctionCategory funcType,
    SqlLiteral functionQualifier,
    Iterable<? extends SqlNode> operands) {
  // Materialize the operands once, then delegate to the array-based overload.
  final SqlNode[] operandArray = Iterables.toArray(operands, SqlNode.class);
  return createCall(funName, pos, funcType, functionQualifier, operandArray);
}
/**
 * Creates a call.
 *
 * @param funName           Name of function
 * @param pos               Position in source code
 * @param funcType          Type of function
 * @param functionQualifier Qualifier
 * @param operands          Operands to call
 * @return Call
 */
protected SqlCall createCall(
    SqlIdentifier funName,
    SqlParserPos pos,
    SqlFunctionCategory funcType,
    SqlLiteral functionQualifier,
    SqlNode[] operands) {
  // The concrete operator cannot be known at parse time.  Build an
  // unresolved placeholder; validation later resolves it into a real
  // function reference.
  final SqlUnresolvedFunction placeholder =
      new SqlUnresolvedFunction(funName, null, null, null, null, funcType);
  return placeholder.createCall(functionQualifier, pos, operands);
}
/**
 * Returns metadata about this parser: keywords, etc.
 */
public abstract Metadata getMetadata();

/**
 * Removes or transforms misleading information from a parse exception or
 * error, and converts to {@link SqlParseException}.
 *
 * @param ex dirty excn
 * @return clean excn
 */
public abstract SqlParseException normalizeException(Throwable ex);

/**
 * Returns the current position in the input.
 */
protected abstract SqlParserPos getPos() throws Exception;

/**
 * Reinitializes parser with new input.
 *
 * @param reader provides new input
 */
// CHECKSTYLE: IGNORE 1
public abstract void ReInit(Reader reader);

/**
 * Parses a SQL expression ending with EOF and constructs a
 * parse tree.
 *
 * @return constructed parse tree.
 */
public abstract SqlNode parseSqlExpressionEof() throws Exception;

/**
 * Parses a SQL statement ending with EOF and constructs a
 * parse tree.
 *
 * @return constructed parse tree.
 */
public abstract SqlNode parseSqlStmtEof() throws Exception;

/**
 * Parses a list of SQL statements separated by semicolon and constructs a
 * parse tree. The semicolon is required between statements, but is
 * optional at the end.
 *
 * @return constructed list of SQL statements.
 */
public abstract SqlNodeList parseSqlStmtList() throws Exception;

/**
 * Sets the tab stop size.
 *
 * @param tabSize Tab stop size
 */
public abstract void setTabSize(int tabSize);

/**
 * Sets the casing policy for quoted identifiers.
 *
 * @param quotedCasing Casing to set.
 */
public abstract void setQuotedCasing(Casing quotedCasing);

/**
 * Sets the casing policy for unquoted identifiers.
 *
 * @param unquotedCasing Casing to set.
 */
public abstract void setUnquotedCasing(Casing unquotedCasing);

/**
 * Sets the maximum length for sql identifier.
 *
 * @param identifierMaxLength Maximum identifier length
 */
public abstract void setIdentifierMaxLength(int identifierMaxLength);

/**
 * Sets the SQL language conformance level.
 *
 * @param conformance Conformance level to apply while parsing
 */
public abstract void setConformance(SqlConformance conformance);

/**
 * Sets the SQL text that is being parsed.
 *
 * @param originalSql Original SQL text
 */
public void setOriginalSql(String originalSql) {
  this.originalSql = originalSql;
}

/**
 * Returns the SQL text.
 */
public String getOriginalSql() {
  return originalSql;
}

/**
 * Change parser state.
 *
 * @param stateName new state.
 */
public abstract void switchTo(String stateName);
//~ Inner Interfaces -------------------------------------------------------

/**
 * Metadata about the parser. For example:
 *
 * <ul>
 * <li>"KEY" is a keyword: it is meaningful in certain contexts, such as
 * "CREATE FOREIGN KEY", but can be used as an identifier, as in <code>
 * "CREATE TABLE t (key INTEGER)"</code>.
 * <li>"SELECT" is a reserved word. It can not be used as an identifier.
 * <li>"CURRENT_USER" is the name of a context variable. It cannot be used
 * as an identifier.
 * <li>"ABS" is the name of a reserved function. It cannot be used as an
 * identifier.
 * <li>"DOMAIN" is a reserved word as specified by the SQL:92 standard.
 * </ul>
 */
public interface Metadata {
  /**
   * Returns true if token is a keyword but not a reserved word. For
   * example, "KEY".
   */
  boolean isNonReservedKeyword(String token);

  /**
   * Returns whether token is the name of a context variable such as
   * "CURRENT_USER".
   */
  boolean isContextVariableName(String token);

  /**
   * Returns whether token is a reserved function name such as
   * "ABS".
   */
  boolean isReservedFunctionName(String token);

  /**
   * Returns whether token is a keyword. (That is, a non-reserved keyword,
   * a context variable, or a reserved function name.)
   */
  boolean isKeyword(String token);

  /**
   * Returns whether token is a reserved word.
   */
  boolean isReservedWord(String token);

  /**
   * Returns whether token is a reserved word as specified by the SQL:92
   * standard.
   */
  boolean isSql92ReservedWord(String token);

  /**
   * Returns comma-separated list of JDBC keywords.
   */
  String getJdbcKeywords();

  /**
   * Returns a list of all tokens in alphabetical order.
   */
  List<String> getTokens();
}
//~ Inner Classes ----------------------------------------------------------
/**
* Default implementation of the {@link Metadata} interface.
*/
public static class MetadataImpl implements Metadata {
private final Set<String> reservedFunctionNames = new HashSet<>();
private final Set<String> contextVariableNames = new HashSet<>();
private final Set<String> nonReservedKeyWordSet = new HashSet<>();
/**
* Set of all tokens.
*/
private final SortedSet<String> tokenSet = new TreeSet<>();
/**
* Immutable list of all tokens, in alphabetical order.
*/
private final List<String> tokenList;
private final Set<String> reservedWords = new HashSet<>();
private final String sql92ReservedWords;
/**
* Creates a MetadataImpl.
*
* @param sqlParser Parser
*/
public MetadataImpl(SqlAbstractParserImpl sqlParser) {
initList(sqlParser, reservedFunctionNames, "ReservedFunctionName");
initList(sqlParser, contextVariableNames, "ContextVariable");
initList(sqlParser, nonReservedKeyWordSet, "NonReservedKeyWord");
tokenList = ImmutableList.copyOf(tokenSet);
sql92ReservedWords = constructSql92ReservedWordList();
Set<String> reservedWordSet = new TreeSet<>();
reservedWordSet.addAll(tokenSet);
reservedWordSet.removeAll(nonReservedKeyWordSet);
reservedWords.addAll(reservedWordSet);
}
/**
* Initializes lists of keywords.
*/
private void initList(
SqlAbstractParserImpl parserImpl,
Set<String> keywords,
String name) {
parserImpl.ReInit(new StringReader("1"));
try {
Object o = virtualCall(parserImpl, name);
throw new AssertionError("expected call to fail, got " + o);
} catch (SqlParseException parseException) {
// First time through, build the list of all tokens.
final String[] tokenImages = parseException.getTokenImages();
if (tokenSet.isEmpty()) {
for (String token : tokenImages) {
String tokenVal = SqlParserUtil.getTokenVal(token);
if (tokenVal != null) {
tokenSet.add(tokenVal);
}
}
}
// Add the tokens which would have been expected in this
// syntactic context to the list we're building.
final int[][] expectedTokenSequences =
parseException.getExpectedTokenSequences();
for (final int[] tokens : expectedTokenSequences) {
assert tokens.length == 1;
final int tokenId = tokens[0];
String token = tokenImages[tokenId];
String tokenVal = SqlParserUtil.getTokenVal(token);
if (tokenVal != null) {
keywords.add(tokenVal);
}
}
} catch (Throwable e) {
throw new RuntimeException("While building token lists", e);
}
}
/**
* Uses reflection to invoke a method on this parser. The method must be
* public and have no parameters.
*
* @param parserImpl Parser
* @param name Name of method. For example "ReservedFunctionName".
* @return Result of calling method
*/
private Object virtualCall(
SqlAbstractParserImpl parserImpl,
String name) throws Throwable {
Class<?> clazz = parserImpl.getClass();
try {
final Method method = clazz.getMethod(name, (Class[]) null);
return method.invoke(parserImpl, (Object[]) null);
} catch (InvocationTargetException e) {
Throwable cause = e.getCause();
throw parserImpl.normalizeException(cause);
}
}
/**
* Builds a comma-separated list of JDBC reserved words.
*/
private String constructSql92ReservedWordList() {
StringBuilder sb = new StringBuilder();
TreeSet<String> jdbcReservedSet = new TreeSet<>();
jdbcReservedSet.addAll(tokenSet);
jdbcReservedSet.removeAll(SQL_92_RESERVED_WORD_SET);
jdbcReservedSet.removeAll(nonReservedKeyWordSet);
int j = 0;
for (String jdbcReserved : jdbcReservedSet) {
if (j++ > 0) {
sb.append(",");
}
sb.append(jdbcReserved);
}
return sb.toString();
}
public List<String> getTokens() {
return tokenList;
}
public boolean isSql92ReservedWord(String token) {
return SQL_92_RESERVED_WORD_SET.contains(token);
}
public String getJdbcKeywords() {
return sql92ReservedWords;
}
public boolean isKeyword(String token) {
return isNonReservedKeyword(token)
|| isReservedFunctionName(token)
|| isContextVariableName(token)
|| isReservedWord(token);
}
public boolean isNonReservedKeyword(String token) {
return nonReservedKeyWordSet.contains(token);
}
public boolean isReservedFunctionName(String token) {
return reservedFunctionNames.contains(token);
}
public boolean isContextVariableName(String token) {
return contextVariableNames.contains(token);
}
public boolean isReservedWord(String token) {
return reservedWords.contains(token);
}
}
}
// End SqlAbstractParserImpl.java
| |
/*
* Copyright (C) 2016 Intel Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package OptimizationTests.LoopPeeling.ShortLoop_01;
import OptimizationTests.LoopPeeling.shared.*;

import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.Arrays;
import java.util.Comparator;
/**
 * Harness for the LoopPeeling/ShortLoop_01 optimization tests.
 *
 * <p>Each {@code test_*} method is a hand-built loop-peeling candidate.  Their
 * bodies are deliberately unusual (short trip counts, odd increments, mixed
 * field accesses) and must not be "cleaned up" -- the compiler IR patterns they
 * produce are the point of the test.  Only the reflective harness
 * ({@link #runTests()}, {@link #main}) has been tidied.
 */
public class Main {
    // Trip-count knob shared by several tests.
    public final int iterations = 10000;
    public static int s_field1 = 33;
    private double i_field1 = 0.00001234500012345d;
    public final static int s_final_field1 = 100;
    private volatile int i_volatile_field1 = 0;

    // Trivial method intended to be inlined by the compiler under test.
    public static int foo_inlined(int x) {
        return x;
    }

    /* Peeling candidate: iget, 1 iteration - known at compile time, simple for loop */
    public double test_01(int x) {
        double sum = x;
        for (int i = 0; i < 1; i++) {
            sum = i_field1;
        }
        return sum;
    }

    /* Peeling candidate: iget, 1 iteration - unknown at compile time, simple for loop */
    public double test_02(int x) {
        double sum = x;
        for (int i = 0; i < x - 9; i++) {
            sum = i_field1;
        }
        return sum;
    }

    /* Peeling candidate: LoadString, 0 iterations - unknown at compile time, simple for loop */
    public String test_03(int x) {
        String sum = "";
        for (int i = 0; i < x - 10; i++) {
            sum = "abc";
        }
        return sum;
    }

    /* Peeling candidate: LoadString, Invoke of 0 iterations - unknown at compile time, do-while loop */
    public String test_04(int x) {
        String sum = "";
        do {
            sum = "abcd";
        } while (sum == null);
        return sum;
    }

    /* 4 loops in a row with 1-2 iterations each, 2 sget (in particular, inherited class sget), 1 sput; Math.sqrt, non-constant increment */
    public double test_05(int x) {
        double sum = 0.0;
        Cls1_ext.s_field2 = x;
        Cls1_ext obj = new Cls1_ext();
        double smallvalue = Double.MIN_VALUE;
        for (int i = 1; i < iterations/1200; i *= 2) {
            sum += Cls1.s_field2 + x + Cls1_ext.s_field2 + Math.sqrt(i);
            double temp = smallvalue;
            smallvalue *= obj.i_field1;
            sum += obj.s_field2 + smallvalue;
            smallvalue = temp;
        }
        for (int i = 1; i < iterations/1099; i += 2) {
            sum += Cls1.s_field1 + x + Cls1_ext.s_field2 + Math.sqrt(i/2);
            double temp = smallvalue;
            smallvalue += obj.i_field1;
            sum += obj.i_field1 + smallvalue;
            smallvalue = temp;
        }
        for (int i = iterations; i > 1; i /= 2000) {
            sum += Cls1.s_field1 + x + Cls1_ext.s_field1 + Math.sqrt(i);
            double temp = smallvalue*3;
            smallvalue *= Cls1.s_field2;
            sum += obj.s_field2 + smallvalue;
            smallvalue = temp;
        }
        //+ sput, only 2 or 3 iterations
        for (int i = iterations; i > 1; i -= iterations) {
            Cls1_ext.s_field2 = x + i;
            sum += Cls1.s_field1 + x + Cls1_ext.s_field1 + Math.sqrt(i);
            double temp = smallvalue*3;
            smallvalue *= Cls1.s_field2;
            sum += obj.s_field2 + smallvalue;
            smallvalue = temp;
        }
        return sum;
    }

    /* float-point iterator, 1 iteration, sput, sget, class loading, aput, aget - a lot of peeling candidates */
    public double test_06(int x) {
        double sum = 0;
        Cls1_ext[] obj_array = {new Cls1_ext(), new Cls1_ext()};
        for (float i = 0.0f; i < iterations; i += Float.POSITIVE_INFINITY) {
            obj_array[0] = new Cls1_ext();
            obj_array[1].arr_field1[0] = -1;
            double temp = obj_array[0].s_field1 + obj_array[1].s_field1 + obj_array[1].arr_field1[0];
            sum += ((1.0+2.0*(-1.0)*(i%2))*temp * x)/(double)(2*i+1);
        }
        return sum*4;
    }

    /* float-point iterator, 0 iterations , inlined func call in loop header and body, LoadClass, NullCheck, sput, sget, class loading, aput, aget, sget/sput before and after loop, if-else - a lot of peeling candidates */
    public double test_07(int x) {
        double sum = 0;
        Cls1_ext[] obj_array = {new Cls1_ext(), new Cls1_ext()};
        obj_array[1].arr_field1[0] = 0;
        // NaN comparison is always false, so the loop never runs.
        for (float i = iterations; i > x + Float.NaN + foo_inlined(iterations); i -= 1.33f) {
            obj_array[1].arr_field1[0] = -1;
            double temp = obj_array[0].s_field1 + obj_array[1].s_field1 + foo_inlined(obj_array[1].arr_field1[0]);
            sum += ((1.0+2.0*(-1.0)*(i%2))*temp * x)/(double)(2*i+1);
            if (i > 10) {
                obj_array[1].arr_field1[0] += 1;
            }
        }
        obj_array[1].s_field1++;
        return sum*4 + obj_array[1].s_field1++;
    }

    /* Parent environment update in loop header, 1 iteration; 1 loop is peeled */
    public int test_08(int n) {
        int testVar = 0;
        Cls2 c = new Cls2(n);
        Cls2 c1 = new Cls2(n*2);
        boolean[] b = new boolean[n];
        int m = n - 1;
        for (int i = 0; i < c.getArrInlined(m).length - 8; i++) {
            i++;
            b[i] = true;
            c1.count = 5;
            testVar = c1.arr.length;
        }
        m++;
        n++;
        testVar++;
        if (b[0] || c1.arr[0]) {
            return n;
        } else {
            return n + 1;
        }
    }

    /**
     * Reflectively invokes every {@code test*} method in name order with the
     * argument 10 and prints "Test <package>; Subtest <method>; Result: <r>".
     */
    public void runTests() {
        Class<?> c = Main.class;
        Method[] methods = c.getDeclaredMethods();
        // Arrays.sort is stable, matching the original insertion sort's order.
        Arrays.sort(methods, new Comparator<Method>() {
            @Override
            public int compare(Method a, Method b) {
                return a.getName().compareTo(b.getName());
            }
        });
        for (Method m : methods) {
            if (m.getName().startsWith("test")) {
                try {
                    String[] names = c.getPackage().getName().split("\\.");
                    String testName = names[names.length-1];
                    System.out.println("Test "+testName+"; Subtest "+m.getName()+"; Result: "+ m.invoke(this, 10));
                } catch (IllegalArgumentException e) {
                    e.printStackTrace();
                } catch (IllegalAccessException e) {
                    e.printStackTrace();
                } catch (InvocationTargetException e) {
                    e.printStackTrace();
                }
            }
        }
    }

    public static void main(String[] args) {
        new Main().runTests();
    }
}
| |
// Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University
// Copyright (c) 2011, 2012 Open Networking Foundation
// Copyright (c) 2012, 2013 Big Switch Networks, Inc.
// This library was generated by the LoxiGen Compiler.
// See the file LICENSE.txt which should have been included in the source distribution
// Automatically generated by LOXI from template of_class.java
// Do not modify
package org.projectfloodlight.openflow.protocol.ver14;
import org.projectfloodlight.openflow.protocol.*;
import org.projectfloodlight.openflow.protocol.action.*;
import org.projectfloodlight.openflow.protocol.actionid.*;
import org.projectfloodlight.openflow.protocol.bsntlv.*;
import org.projectfloodlight.openflow.protocol.errormsg.*;
import org.projectfloodlight.openflow.protocol.meterband.*;
import org.projectfloodlight.openflow.protocol.instruction.*;
import org.projectfloodlight.openflow.protocol.instructionid.*;
import org.projectfloodlight.openflow.protocol.match.*;
import org.projectfloodlight.openflow.protocol.stat.*;
import org.projectfloodlight.openflow.protocol.oxm.*;
import org.projectfloodlight.openflow.protocol.oxs.*;
import org.projectfloodlight.openflow.protocol.queueprop.*;
import org.projectfloodlight.openflow.types.*;
import org.projectfloodlight.openflow.util.*;
import org.projectfloodlight.openflow.exceptions.*;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import io.netty.buffer.ByteBuf;
import com.google.common.hash.PrimitiveSink;
import com.google.common.hash.Funnel;
/**
 * OpenFlow 1.4 wire-format implementation of {@link OFActionSetNwTtl}.
 *
 * <p>Immutable value class.  On the wire: type (2 bytes, 0x17), length
 * (2 bytes, always 8), nwTtl (1 byte), 3 bytes of zero padding.
 *
 * <p>NOTE(review): LOXI-generated code ("Do not modify" per file header);
 * only documentation has been added in this review.
 */
class OFActionSetNwTtlVer14 implements OFActionSetNwTtl {
    private static final Logger logger = LoggerFactory.getLogger(OFActionSetNwTtlVer14.class);
    // version: 1.4
    final static byte WIRE_VERSION = 5;
    final static int LENGTH = 8;

    private final static short DEFAULT_NW_TTL = (short) 0x0;

    // OF message fields
    private final short nwTtl;
    //
    // Immutable default instance
    final static OFActionSetNwTtlVer14 DEFAULT = new OFActionSetNwTtlVer14(
        DEFAULT_NW_TTL
    );

    // package private constructor - used by readers, builders, and factory
    OFActionSetNwTtlVer14(short nwTtl) {
        this.nwTtl = nwTtl;
    }

    // Accessors for OF message fields
    @Override
    public OFActionType getType() {
        return OFActionType.SET_NW_TTL;
    }

    @Override
    public short getNwTtl() {
        return nwTtl;
    }

    @Override
    public OFVersion getVersion() {
        return OFVersion.OF_14;
    }

    // Builder seeded from this instance: unset fields fall back to this
    // object's values.
    public OFActionSetNwTtl.Builder createBuilder() {
        return new BuilderWithParent(this);
    }

    static class BuilderWithParent implements OFActionSetNwTtl.Builder {
        final OFActionSetNwTtlVer14 parentMessage;

        // OF message fields
        private boolean nwTtlSet;
        private short nwTtl;

        BuilderWithParent(OFActionSetNwTtlVer14 parentMessage) {
            this.parentMessage = parentMessage;
        }

        @Override
        public OFActionType getType() {
            return OFActionType.SET_NW_TTL;
        }

        @Override
        public short getNwTtl() {
            return nwTtl;
        }

        @Override
        public OFActionSetNwTtl.Builder setNwTtl(short nwTtl) {
            this.nwTtl = nwTtl;
            this.nwTtlSet = true;
            return this;
        }

        @Override
        public OFVersion getVersion() {
            return OFVersion.OF_14;
        }

        @Override
        public OFActionSetNwTtl build() {
            // Unset fields inherit the parent message's value.
            short nwTtl = this.nwTtlSet ? this.nwTtl : parentMessage.nwTtl;
            //
            return new OFActionSetNwTtlVer14(
                nwTtl
            );
        }
    }

    static class Builder implements OFActionSetNwTtl.Builder {
        // OF message fields
        private boolean nwTtlSet;
        private short nwTtl;

        @Override
        public OFActionType getType() {
            return OFActionType.SET_NW_TTL;
        }

        @Override
        public short getNwTtl() {
            return nwTtl;
        }

        @Override
        public OFActionSetNwTtl.Builder setNwTtl(short nwTtl) {
            this.nwTtl = nwTtl;
            this.nwTtlSet = true;
            return this;
        }

        @Override
        public OFVersion getVersion() {
            return OFVersion.OF_14;
        }

        //
        @Override
        public OFActionSetNwTtl build() {
            // Unset fields fall back to the field defaults.
            short nwTtl = this.nwTtlSet ? this.nwTtl : DEFAULT_NW_TTL;
            return new OFActionSetNwTtlVer14(
                nwTtl
            );
        }
    }

    final static Reader READER = new Reader();

    static class Reader implements OFMessageReader<OFActionSetNwTtl> {
        @Override
        public OFActionSetNwTtl readFrom(ByteBuf bb) throws OFParseError {
            int start = bb.readerIndex();
            // fixed value property type == 23
            short type = bb.readShort();
            if(type != (short) 0x17)
                throw new OFParseError("Wrong type: Expected=OFActionType.SET_NW_TTL(23), got="+type);
            int length = U16.f(bb.readShort());
            if(length != 8)
                throw new OFParseError("Wrong length: Expected=8(8), got="+length);
            if(bb.readableBytes() + (bb.readerIndex() - start) < length) {
                // Buffer does not have all data yet; rewind and signal the
                // caller to retry by returning null.
                bb.readerIndex(start);
                return null;
            }
            if(logger.isTraceEnabled())
                logger.trace("readFrom - length={}", length);
            short nwTtl = U8.f(bb.readByte());
            // pad: 3 bytes
            bb.skipBytes(3);
            OFActionSetNwTtlVer14 actionSetNwTtlVer14 = new OFActionSetNwTtlVer14(
                nwTtl
            );
            if(logger.isTraceEnabled())
                logger.trace("readFrom - read={}", actionSetNwTtlVer14);
            return actionSetNwTtlVer14;
        }
    }

    public void putTo(PrimitiveSink sink) {
        FUNNEL.funnel(this, sink);
    }

    final static OFActionSetNwTtlVer14Funnel FUNNEL = new OFActionSetNwTtlVer14Funnel();

    static class OFActionSetNwTtlVer14Funnel implements Funnel<OFActionSetNwTtlVer14> {
        private static final long serialVersionUID = 1L;
        @Override
        public void funnel(OFActionSetNwTtlVer14 message, PrimitiveSink sink) {
            // fixed value property type = 23
            sink.putShort((short) 0x17);
            // fixed value property length = 8
            sink.putShort((short) 0x8);
            // NOTE: hashed as a short even though the wire format (Writer)
            // serializes nwTtl as a single byte.
            sink.putShort(message.nwTtl);
            // skip pad (3 bytes)
        }
    }

    public void writeTo(ByteBuf bb) {
        WRITER.write(bb, this);
    }

    final static Writer WRITER = new Writer();

    static class Writer implements OFMessageWriter<OFActionSetNwTtlVer14> {
        @Override
        public void write(ByteBuf bb, OFActionSetNwTtlVer14 message) {
            // fixed value property type = 23
            bb.writeShort((short) 0x17);
            // fixed value property length = 8
            bb.writeShort((short) 0x8);
            bb.writeByte(U8.t(message.nwTtl));
            // pad: 3 bytes
            bb.writeZero(3);
        }
    }

    @Override
    public String toString() {
        StringBuilder b = new StringBuilder("OFActionSetNwTtlVer14(");
        b.append("nwTtl=").append(nwTtl);
        b.append(")");
        return b.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        OFActionSetNwTtlVer14 other = (OFActionSetNwTtlVer14) obj;

        if( nwTtl != other.nwTtl)
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + nwTtl;
        return result;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.sdk.io.gcp.healthcare;
import com.google.api.services.healthcare.v1beta1.model.Message;
import com.google.auto.value.AutoValue;
import java.io.IOException;
import java.text.ParseException;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import javax.annotation.Nullable;
import org.apache.beam.sdk.Pipeline;
import org.apache.beam.sdk.coders.CoderRegistry;
import org.apache.beam.sdk.coders.ListCoder;
import org.apache.beam.sdk.coders.StringUtf8Coder;
import org.apache.beam.sdk.io.gcp.pubsub.PubsubIO;
import org.apache.beam.sdk.io.range.OffsetRange;
import org.apache.beam.sdk.metrics.Counter;
import org.apache.beam.sdk.metrics.Distribution;
import org.apache.beam.sdk.metrics.Metrics;
import org.apache.beam.sdk.options.ValueProvider;
import org.apache.beam.sdk.options.ValueProvider.StaticValueProvider;
import org.apache.beam.sdk.transforms.Create;
import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.DoFn.BoundedPerElement;
import org.apache.beam.sdk.transforms.FlatMapElements;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.Reshuffle;
import org.apache.beam.sdk.transforms.splittabledofn.RestrictionTracker;
import org.apache.beam.sdk.values.PBegin;
import org.apache.beam.sdk.values.PCollection;
import org.apache.beam.sdk.values.PCollectionTuple;
import org.apache.beam.sdk.values.PInput;
import org.apache.beam.sdk.values.POutput;
import org.apache.beam.sdk.values.PValue;
import org.apache.beam.sdk.values.TupleTag;
import org.apache.beam.sdk.values.TupleTagList;
import org.apache.beam.sdk.values.TypeDescriptors;
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.annotations.VisibleForTesting;
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Throwables;
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.FluentIterable;
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableMap;
import org.joda.time.Duration;
import org.joda.time.Instant;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* {@link HL7v2IO} provides an API for reading from and writing to <a
* href="https://cloud.google.com/healthcare/docs/concepts/hl7v2">Google Cloud Healthcare HL7v2 API.
* </a>
*
* <p>Read
*
* <p>HL7v2 Messages can be fetched from the HL7v2 store in two ways Message Fetching and Message
* Listing.
*
* <p>Message Fetching
*
* <p>Message Fetching with {@link HL7v2IO.Read} supports use cases where you have a ${@link
* PCollection} of message IDS. This is appropriate for reading the HL7v2 notifications from a
* Pub/Sub subscription with {@link PubsubIO#readStrings()} or in cases where you have a manually
* prepared list of messages that you need to process (e.g. in a text file read with {@link
* org.apache.beam.sdk.io.TextIO}) .
*
* <p>Fetch Message contents from HL7v2 Store based on the {@link PCollection} of message ID strings
 * {@link HL7v2IO.Read.Result} where one can call {@link Read.Result#getMessages()} to retrieve a
* {@link PCollection} containing the successfully fetched {@link HL7v2Message}s and/or {@link
* Read.Result#getFailedReads()} to retrieve a {@link PCollection} of {@link HealthcareIOError}
* containing the msgID that could not be fetched and the exception as a {@link HealthcareIOError},
* this can be used to write to the dead letter storage system of your choosing. This error handling
* is mainly to catch scenarios where the upstream {@link PCollection} contains IDs that are not
* valid or are not reachable due to permissions issues.
*
* <p>Message Listing Message Listing with {@link HL7v2IO.ListHL7v2Messages} supports batch use
* cases where you want to process all the messages in an HL7v2 store or those matching a
 * filter @see <a
 * href="https://cloud.google.com/healthcare/docs/reference/rest/v1beta1/projects.locations.datasets.hl7V2Stores.messages/list#query-parameters">Messages.List query parameters</a>
 * This paginates through results of a Messages.List call @see <a
 * href="https://cloud.google.com/healthcare/docs/reference/rest/v1beta1/projects.locations.datasets.hl7V2Stores.messages/list">Messages.List</a>
* and outputs directly to a {@link PCollection} of {@link HL7v2Message}. In these use cases, the
* error handling similar to above is unnecessary because we are listing from the source of truth
* the pipeline should fail transparently if this transform fails to paginate through all the
* results.
*
* <p>Write
*
* <p>A bounded or unbounded {@link PCollection} of {@link HL7v2Message} can be ingested into an
* HL7v2 store using {@link HL7v2IO#ingestMessages(String)}. This will return a {@link
* HL7v2IO.Write.Result} on which you can call {@link Write.Result#getFailedInsertsWithErr()} to
* retrieve a {@link PCollection} of {@link HealthcareIOError} containing the {@link HL7v2Message}
* that failed to be ingested and the exception. This can be used to write to the dead letter
 * storage system of your choosing.
*
* <p>Unbounded Read Example:
*
* <pre>{@code
* PipelineOptions options = ...;
* Pipeline p = Pipeline.create(options);
*
* HL7v2IO.Read.Result readResult = p
* .apply(
* "Read HL7v2 notifications",
* PubsubIO.readStrings().fromSubscription(options.getNotificationSubscription()))
* .apply(HL7v2IO.getAll());
*
* // Write errors to your favorite dead letter queue (e.g. Pub/Sub, GCS, BigQuery)
* readResult.getFailedReads().apply("WriteToDeadLetterQueue", ...);
*
*
* // Go about your happy path transformations.
* PCollection<HL7v2Message> out = readResult.getMessages().apply("ProcessFetchedMessages", ...);
*
* // Write using the Message.Ingest method of the HL7v2 REST API.
* out.apply(HL7v2IO.ingestMessages(options.getOutputHL7v2Store()));
*
* pipeline.run();
*
* }***
* </pre>
*
* <p>Bounded Read Example:
*
* <pre>{@code
* PipelineOptions options = ...;
* Pipeline p = Pipeline.create(options);
*
* PCollection<HL7v2Message> out = p
* .apply(
* "List messages in HL7v2 store with filter",
* ListHL7v2Messages(
* Collections.singletonList(options.getInputHL7v2Store()), option.getHL7v2Filter()))
* // Go about your happy path transformations.
* .apply("Process HL7v2 Messages", ...);
* pipeline.run().waitUntilFinish();
* }***
* </pre>
*/
public class HL7v2IO {
/** Write HL7v2 Messages to a store. */
private static Write.Builder write(String hl7v2Store) {
  // Wrap the constant store path so both overloads share the provider-based builder field.
  return new AutoValue_HL7v2IO_Write.Builder().setHL7v2Store(StaticValueProvider.of(hl7v2Store));
}
/** Write HL7v2 Messages to a store. */
private static Write.Builder write(ValueProvider<String> hl7v2Store) {
  return new AutoValue_HL7v2IO_Write.Builder().setHL7v2Store(hl7v2Store);
}
/**
 * Retrieve all HL7v2 Messages from a PCollection of message IDs (such as from PubSub notification
 * subscription).
 */
public static Read getAll() {
  return new Read();
}
/** Read all HL7v2 Messages from multiple stores. */
public static ListHL7v2Messages readAll(List<String> hl7v2Stores) {
  // Null filter -- presumably interpreted downstream as "no filter"; see ListHL7v2Messages.
  return new ListHL7v2Messages(StaticValueProvider.of(hl7v2Stores), StaticValueProvider.of(null));
}
/** Read all HL7v2 Messages from multiple stores. */
public static ListHL7v2Messages readAll(ValueProvider<List<String>> hl7v2Stores) {
  return new ListHL7v2Messages(hl7v2Stores, StaticValueProvider.of(null));
}
/** Read all HL7v2 Messages from a single store. */
public static ListHL7v2Messages read(String hl7v2Store) {
  return new ListHL7v2Messages(
      StaticValueProvider.of(Collections.singletonList(hl7v2Store)),
      StaticValueProvider.of(null));
}
/** Read all HL7v2 Messages from a single store. */
public static ListHL7v2Messages read(ValueProvider<String> hl7v2Store) {
  // Defer hl7v2Store.get() until execution time: calling get() at pipeline
  // construction breaks templated pipelines, where the value only becomes
  // available at runtime.
  return new ListHL7v2Messages(
      ValueProvider.NestedValueProvider.of(hl7v2Store, Collections::singletonList),
      StaticValueProvider.of(null));
}
/**
 * Read all HL7v2 Messages from a single store matching a filter.
 *
 * @see <a
 *     href=https://cloud.google.com/healthcare/docs/reference/rest/v1beta1/projects.locations.datasets.hl7V2Stores.messages/list#query-parameters></a>
 */
public static ListHL7v2Messages readWithFilter(String hl7v2Store, String filter) {
  return new ListHL7v2Messages(
      StaticValueProvider.of(Collections.singletonList(hl7v2Store)),
      StaticValueProvider.of(filter));
}
/**
 * Read all HL7v2 Messages from a single store matching a filter.
 *
 * @see <a
 *     href=https://cloud.google.com/healthcare/docs/reference/rest/v1beta1/projects.locations.datasets.hl7V2Stores.messages/list#query-parameters></a>
 */
public static ListHL7v2Messages readWithFilter(
    ValueProvider<String> hl7v2Store, ValueProvider<String> filter) {
  // Defer hl7v2Store.get() until execution time: calling get() at pipeline
  // construction breaks templated pipelines.
  return new ListHL7v2Messages(
      ValueProvider.NestedValueProvider.of(hl7v2Store, Collections::singletonList), filter);
}
/**
 * Read all HL7v2 Messages from a multiple stores matching a filter.
 *
 * @see <a
 *     href=https://cloud.google.com/healthcare/docs/reference/rest/v1beta1/projects.locations.datasets.hl7V2Stores.messages/list#query-parameters></a>
 */
public static ListHL7v2Messages readAllWithFilter(List<String> hl7v2Stores, String filter) {
  return new ListHL7v2Messages(
      StaticValueProvider.of(hl7v2Stores), StaticValueProvider.of(filter));
}
/**
 * Read all HL7v2 Messages from a multiple stores matching a filter.
 *
 * @see <a
 *     href=https://cloud.google.com/healthcare/docs/reference/rest/v1beta1/projects.locations.datasets.hl7V2Stores.messages/list#query-parameters></a>
 */
public static ListHL7v2Messages readAllWithFilter(
    ValueProvider<List<String>> hl7v2Stores, ValueProvider<String> filter) {
  return new ListHL7v2Messages(hl7v2Stores, filter);
}
/**
 * Write with Messages.Ingest method. @see <a
 *     href=https://cloud.google.com/healthcare/docs/reference/rest/v1beta1/projects.locations.datasets.hl7V2Stores.messages/ingest></a>
 *
 * @param hl7v2Store the hl 7 v 2 store
 * @return the write
 */
public static Write ingestMessages(String hl7v2Store) {
  return write(hl7v2Store).setWriteMethod(Write.WriteMethod.INGEST).build();
}
/**
* The type Read that reads HL7v2 message contents given a PCollection of message IDs strings.
*
* <p>These could be sourced from any {@link PCollection} of {@link String}s but the most popular
* patterns would be {@link PubsubIO#readStrings()} reading a subscription on an HL7v2 Store's
* notification channel topic or using {@link ListHL7v2Messages} to list HL7v2 message IDs with an
* optional filter using Ingest write method. @see <a
* href=https://cloud.google.com/healthcare/docs/reference/rest/v1beta1/projects.locations.datasets.hl7V2Stores.messages/list></a>.
*/
public static class Read extends PTransform<PCollection<String>, Read.Result> {
public Read() {}
public static class Result implements POutput, PInput {
private PCollection<HL7v2Message> messages;
private PCollection<HealthcareIOError<String>> failedReads;
PCollectionTuple pct;
public static Result of(PCollectionTuple pct) throws IllegalArgumentException {
if (pct.getAll()
.keySet()
.containsAll((Collection<?>) TupleTagList.of(OUT).and(DEAD_LETTER))) {
return new Result(pct);
} else {
throw new IllegalArgumentException(
"The PCollection tuple must have the HL7v2IO.Read.OUT "
+ "and HL7v2IO.Read.DEAD_LETTER tuple tags");
}
}
private Result(PCollectionTuple pct) {
this.pct = pct;
this.messages = pct.get(OUT).setCoder(HL7v2MessageCoder.of());
this.failedReads =
pct.get(DEAD_LETTER).setCoder(HealthcareIOErrorCoder.of(StringUtf8Coder.of()));
}
public PCollection<HealthcareIOError<String>> getFailedReads() {
return failedReads;
}
public PCollection<HL7v2Message> getMessages() {
return messages;
}
@Override
public Pipeline getPipeline() {
return this.pct.getPipeline();
}
@Override
public Map<TupleTag<?>, PValue> expand() {
return ImmutableMap.of(OUT, messages);
}
@Override
public void finishSpecifyingOutput(
String transformName, PInput input, PTransform<?, ?> transform) {}
}
/** The tag for the main output of HL7v2 Messages. */
public static final TupleTag<HL7v2Message> OUT = new TupleTag<HL7v2Message>() {};
/** The tag for the deadletter output of HL7v2 Messages. */
public static final TupleTag<HealthcareIOError<String>> DEAD_LETTER =
new TupleTag<HealthcareIOError<String>>() {};
@Override
public Result expand(PCollection<String> input) {
CoderRegistry coderRegistry = input.getPipeline().getCoderRegistry();
coderRegistry.registerCoderForClass(HL7v2Message.class, HL7v2MessageCoder.of());
return input.apply("Fetch HL7v2 messages", new FetchHL7v2Message());
}
/**
* {@link PTransform} to fetch a message from an Google Cloud Healthcare HL7v2 store based on
* msgID.
*
* <p>This DoFn consumes a {@link PCollection} of notifications {@link String}s from the HL7v2
* store, and fetches the actual {@link HL7v2Message} object based on the id in the notification
* and will output a {@link PCollectionTuple} which contains the output and dead-letter {@link
* PCollection}.
*
* <p>The {@link PCollectionTuple} output will contain the following {@link PCollection}:
*
* <ul>
* <li>{@link HL7v2IO.Read#OUT} - Contains all {@link PCollection} records successfully read
* from the HL7v2 store.
* <li>{@link HL7v2IO.Read#DEAD_LETTER} - Contains all {@link PCollection} of {@link
* HealthcareIOError} message IDs which failed to be fetched from the HL7v2 store, with
* error message and stacktrace.
* </ul>
*/
public static class FetchHL7v2Message extends PTransform<PCollection<String>, Result> {
/** Instantiates a new Fetch HL7v2 message DoFn. */
public FetchHL7v2Message() {}
@Override
public Result expand(PCollection<String> msgIds) {
CoderRegistry coderRegistry = msgIds.getPipeline().getCoderRegistry();
coderRegistry.registerCoderForClass(HL7v2Message.class, HL7v2MessageCoder.of());
return new Result(
msgIds.apply(
ParDo.of(new FetchHL7v2Message.HL7v2MessageGetFn())
.withOutputTags(HL7v2IO.Read.OUT, TupleTagList.of(HL7v2IO.Read.DEAD_LETTER))));
}
/** DoFn for fetching messages from the HL7v2 store with error handling. */
public static class HL7v2MessageGetFn extends DoFn<String, HL7v2Message> {
private Counter failedMessageGets =
Metrics.counter(FetchHL7v2Message.HL7v2MessageGetFn.class, "failed-message-reads");
private static final Logger LOG =
LoggerFactory.getLogger(FetchHL7v2Message.HL7v2MessageGetFn.class);
private final Counter successfulHL7v2MessageGets =
Metrics.counter(
FetchHL7v2Message.HL7v2MessageGetFn.class, "successful-hl7v2-message-gets");
private HealthcareApiClient client;
/** Instantiates a new Hl 7 v 2 message get fn. */
HL7v2MessageGetFn() {}
/**
* Instantiate healthcare client.
*
* @throws IOException the io exception
*/
@Setup
public void instantiateHealthcareClient() throws IOException {
this.client = new HttpHealthcareApiClient();
}
/**
* Process element.
*
* @param context the context
*/
@ProcessElement
public void processElement(ProcessContext context) {
String msgId = context.element();
try {
context.output(HL7v2Message.fromModel(fetchMessage(this.client, msgId)));
} catch (Exception e) {
failedMessageGets.inc();
LOG.warn(
String.format(
"Error fetching HL7v2 message with ID %s writing to Dead Letter "
+ "Queue. Cause: %s Stack Trace: %s",
msgId, e.getMessage(), Throwables.getStackTraceAsString(e)));
context.output(HL7v2IO.Read.DEAD_LETTER, HealthcareIOError.of(msgId, e));
}
}
private Message fetchMessage(HealthcareApiClient client, String msgId)
throws IOException, ParseException, IllegalArgumentException, InterruptedException {
long startTime = System.currentTimeMillis();
try {
com.google.api.services.healthcare.v1beta1.model.Message msg =
client.getHL7v2Message(msgId);
if (msg == null) {
throw new IOException(String.format("GET request for %s returned null", msgId));
}
this.successfulHL7v2MessageGets.inc();
return msg;
} catch (Exception e) {
throw e;
}
}
}
}
}
/**
* List HL7v2 messages in HL7v2 Stores with optional filter.
*
* <p>This transform is optimized for splitting of message.list calls for large batches of
* historical data and assumes rather continuous stream of sendTimes.
*
* <p>Note on Benchmarking: The default initial splitting on day will make more queries than
* necessary when used with very small data sets (or very sparse data sets in the sendTime
* dimension). If you are looking to get an accurate benchmark be sure to use sufficient volume of
* data with messages that span sendTimes over a realistic time range (days)
*
* <p>Implementation includes overhead for:
*
* <ol>
* <li>two api calls to determine the min/max sendTime of the HL7v2 store at invocation time.
* <li>initial splitting into non-overlapping time ranges (default daily) to achieve
* parallelization in separate messages.list calls.
* </ol>
*
* If your use case doesn't lend itself to daily splitting, you can can control initial splitting
* with {@link ListHL7v2Messages#withInitialSplitDuration(Duration)}
*/
public static class ListHL7v2Messages extends PTransform<PBegin, PCollection<HL7v2Message>> {
private final ValueProvider<List<String>> hl7v2Stores;
private final ValueProvider<String> filter;
private Duration initialSplitDuration;
/**
* Instantiates a new List HL7v2 message IDs with filter.
*
* @param hl7v2Stores the HL7v2 stores
* @param filter the filter
*/
ListHL7v2Messages(ValueProvider<List<String>> hl7v2Stores, ValueProvider<String> filter) {
this.hl7v2Stores = hl7v2Stores;
this.filter = filter;
this.initialSplitDuration = null;
}
public ListHL7v2Messages withInitialSplitDuration(Duration initialSplitDuration) {
this.initialSplitDuration = initialSplitDuration;
return this;
}
@Override
public PCollection<HL7v2Message> expand(PBegin input) {
CoderRegistry coderRegistry = input.getPipeline().getCoderRegistry();
coderRegistry.registerCoderForClass(HL7v2Message.class, HL7v2MessageCoder.of());
return input
.apply(Create.ofProvider(this.hl7v2Stores, ListCoder.of(StringUtf8Coder.of())))
.apply(FlatMapElements.into(TypeDescriptors.strings()).via((x) -> x))
.apply(ParDo.of(new ListHL7v2MessagesFn(filter, initialSplitDuration)))
.setCoder(HL7v2MessageCoder.of())
// Break fusion to encourage parallelization of downstream processing.
.apply(Reshuffle.viaRandomKey());
}
}
  /**
   * Implemented as Splitable DoFn that claims millisecond resolutions of offset restrictions in the
   * Message.sendTime dimension.
   */
  @BoundedPerElement
  @VisibleForTesting
  static class ListHL7v2MessagesFn extends DoFn<String, HL7v2Message> {
    // These control the initial restriction split which means that the list of integer pairs
    // must comfortably fit in memory.
    private static final Duration DEFAULT_DESIRED_SPLIT_DURATION = Duration.standardDays(1);
    private static final Duration DEFAULT_MIN_SPLIT_DURATION = Duration.standardHours(1);
    private static final Logger LOG = LoggerFactory.getLogger(ListHL7v2MessagesFn.class);
    // Filter applied to every messages.list call.
    private ValueProvider<String> filter;
    // Width of each initial restriction split; defaults to one day (see constructor).
    private Duration initialSplitDuration;
    // Not serializable; re-created per worker in @Setup.
    private transient HealthcareApiClient client;

    /**
     * Instantiates a new List HL7v2 fn.
     *
     * @param filter the filter
     */
    ListHL7v2MessagesFn(String filter) {
      this(StaticValueProvider.of(filter), null);
    }

    /**
     * Instantiates a new List HL7v2 fn with a deferred filter and an optional split width.
     *
     * @param filter the filter
     * @param initialSplitDuration initial restriction split width; null selects the daily default
     */
    ListHL7v2MessagesFn(ValueProvider<String> filter, @Nullable Duration initialSplitDuration) {
      this.filter = filter;
      this.initialSplitDuration =
          (initialSplitDuration == null) ? DEFAULT_DESIRED_SPLIT_DURATION : initialSplitDuration;
    }

    /**
     * Init client.
     *
     * @throws IOException the io exception
     */
    @Setup
    public void initClient() throws IOException {
      this.client = new HttpHealthcareApiClient();
    }

    /**
     * Computes the full sendTime range of the store (two API calls) as the initial restriction.
     *
     * @throws IOException if either min/max sendTime lookup fails
     */
    @GetInitialRestriction
    public OffsetRange getEarliestToLatestRestriction(@Element String hl7v2Store)
        throws IOException {
      Instant from = this.client.getEarliestHL7v2SendTime(hl7v2Store, this.filter.get());
      // filters are [from, to) to match logic of OffsetRangeTracker but need latest element to be
      // included in results set to add an extra ms to the upper bound.
      Instant to = this.client.getLatestHL7v2SendTime(hl7v2Store, this.filter.get()).plus(1);
      return new OffsetRange(from.getMillis(), to.getMillis());
    }

    /** Splits the sendTime restriction into non-overlapping sub-ranges for parallel listing. */
    @SplitRestriction
    public void split(@Restriction OffsetRange timeRange, OutputReceiver<OffsetRange> out) {
      List<OffsetRange> splits =
          timeRange.split(initialSplitDuration.getMillis(), DEFAULT_MIN_SPLIT_DURATION.getMillis());
      Instant from = Instant.ofEpochMilli(timeRange.getFrom());
      Instant to = Instant.ofEpochMilli(timeRange.getTo());
      Duration totalDuration = new Duration(from, to);
      LOG.info(
          String.format(
              "splitting initial sendTime restriction of [minSendTime, now): [%s,%s), "
                  + "or [%s, %s). \n"
                  + "total days: %s \n"
                  + "into %s splits. \n"
                  + "Last split: %s",
              from,
              to,
              timeRange.getFrom(),
              timeRange.getTo(),
              totalDuration.getStandardDays(),
              splits.size(),
              splits.get(splits.size() - 1).toString()));
      for (OffsetRange s : splits) {
        out.output(s);
      }
    }

    /**
     * List messages.
     *
     * <p>Pages through messages ordered by sendTime, claiming each new millisecond offset with the
     * tracker so the runner can checkpoint/steal the remainder of the range.
     *
     * @param hl7v2Store the HL7v2 store to list messages from
     * @throws IOException the io exception
     */
    @ProcessElement
    public void listMessages(
        @Element String hl7v2Store,
        RestrictionTracker<OffsetRange, Long> tracker,
        OutputReceiver<HL7v2Message> outputReceiver)
        throws IOException {
      OffsetRange currentRestriction = (OffsetRange) tracker.currentRestriction();
      Instant startRestriction = Instant.ofEpochMilli(currentRestriction.getFrom());
      Instant endRestriction = Instant.ofEpochMilli(currentRestriction.getTo());
      HttpHealthcareApiClient.HL7v2MessagePages pages =
          new HttpHealthcareApiClient.HL7v2MessagePages(
              client, hl7v2Store, startRestriction, endRestriction, filter.get(), "sendTime");
      Instant cursor;
      // Start one ms before the range so the first message always triggers a claim.
      long lastClaimedMilliSecond = startRestriction.getMillis() - 1;
      for (HL7v2Message msg : FluentIterable.concat(pages)) {
        cursor = Instant.parse(msg.getSendTime());
        if (cursor.getMillis() > lastClaimedMilliSecond) {
          // Return early after the first claim failure preventing us from iterating
          // through the remaining messages.
          if (!tracker.tryClaim(cursor.getMillis())) {
            return;
          }
          lastClaimedMilliSecond = cursor.getMillis();
        }
        // Messages sharing an already-claimed millisecond are output without a new claim.
        outputReceiver.output(msg);
      }
      // We've paginated through all messages for this restriction but the last message may be
      // before the end of the restriction
      tracker.tryClaim(currentRestriction.getTo());
    }
  }
  /** {@link PTransform} that writes {@link HL7v2Message}s to a single HL7v2 store. */
  @AutoValue
  public abstract static class Write extends PTransform<PCollection<HL7v2Message>, Write.Result> {

    /** The tag for the successful writes to HL7v2 store. */
    // NOTE(review): SUCCESS is declared but Result.expand() only exposes FAILED — confirm whether
    // successful writes were meant to be surfaced as an output as well.
    public static final TupleTag<HealthcareIOError<HL7v2Message>> SUCCESS =
        new TupleTag<HealthcareIOError<HL7v2Message>>() {};

    /** The tag for the failed writes to HL7v2 store. */
    public static final TupleTag<HealthcareIOError<HL7v2Message>> FAILED =
        new TupleTag<HealthcareIOError<HL7v2Message>>() {};

    /**
     * Gets HL7v2 store.
     *
     * @return the HL7v2 store
     */
    abstract ValueProvider<String> getHL7v2Store();

    /**
     * Gets write method.
     *
     * @return the write method
     */
    abstract WriteMethod getWriteMethod();

    @Override
    public Result expand(PCollection<HL7v2Message> messages) {
      CoderRegistry coderRegistry = messages.getPipeline().getCoderRegistry();
      coderRegistry.registerCoderForClass(HL7v2Message.class, HL7v2MessageCoder.of());
      return messages.apply(new WriteHL7v2(this.getHL7v2Store(), this.getWriteMethod()));
    }

    /** The enum Write method. */
    public enum WriteMethod {
      /**
       * Ingest write method. @see <a
       * href=https://cloud.google.com/healthcare/docs/reference/rest/v1beta1/projects.locations.datasets.hl7V2Stores.messages/ingest></a>
       */
      INGEST,
      /**
       * Batch import write method. This is not yet supported by the HL7v2 API, but can be used to
       * improve throughput once available.
       */
      BATCH_IMPORT
    }

    /** AutoValue builder for {@link Write}. */
    @AutoValue.Builder
    abstract static class Builder {

      /**
       * Sets HL7v2 store.
       *
       * @param hl7v2Store the HL7v2 store
       * @return this builder
       */
      abstract Builder setHL7v2Store(ValueProvider<String> hl7v2Store);

      /**
       * Sets write method.
       *
       * @param writeMethod the write method
       * @return this builder
       */
      abstract Builder setWriteMethod(WriteMethod writeMethod);

      /**
       * Build write.
       *
       * @return the write
       */
      abstract Write build();
    }

    /** Output of {@link Write}: exposes the failed inserts with their errors. */
    public static class Result implements POutput {
      private final Pipeline pipeline;
      private final PCollection<HealthcareIOError<HL7v2Message>> failedInsertsWithErr;

      /** Creates a {@link HL7v2IO.Write.Result} in the given {@link Pipeline}. */
      static Result in(
          Pipeline pipeline, PCollection<HealthcareIOError<HL7v2Message>> failedInserts) {
        return new Result(pipeline, failedInserts);
      }

      /** @return the messages that failed to write, with error details. */
      public PCollection<HealthcareIOError<HL7v2Message>> getFailedInsertsWithErr() {
        return this.failedInsertsWithErr;
      }

      @Override
      public Pipeline getPipeline() {
        return this.pipeline;
      }

      @Override
      public Map<TupleTag<?>, PValue> expand() {
        failedInsertsWithErr.setCoder(HealthcareIOErrorCoder.of(HL7v2MessageCoder.of()));
        return ImmutableMap.of(FAILED, failedInsertsWithErr);
      }

      @Override
      public void finishSpecifyingOutput(
          String transformName, PInput input, PTransform<?, ?> transform) {}

      private Result(
          Pipeline pipeline, PCollection<HealthcareIOError<HL7v2Message>> failedInsertsWithErr) {
        this.pipeline = pipeline;
        this.failedInsertsWithErr = failedInsertsWithErr;
      }
    }
  }
/** The type Write hl 7 v 2. */
static class WriteHL7v2 extends PTransform<PCollection<HL7v2Message>, Write.Result> {
private final ValueProvider<String> hl7v2Store;
private final Write.WriteMethod writeMethod;
/**
* Instantiates a new Write hl 7 v 2.
*
* @param hl7v2Store the hl 7 v 2 store
* @param writeMethod the write method
*/
WriteHL7v2(ValueProvider<String> hl7v2Store, Write.WriteMethod writeMethod) {
this.hl7v2Store = hl7v2Store;
this.writeMethod = writeMethod;
}
@Override
public Write.Result expand(PCollection<HL7v2Message> input) {
PCollection<HealthcareIOError<HL7v2Message>> failedInserts =
input
.apply(ParDo.of(new WriteHL7v2Fn(hl7v2Store, writeMethod)))
.setCoder(HealthcareIOErrorCoder.of(HL7v2MessageCoder.of()));
return Write.Result.in(input.getPipeline(), failedInserts);
}
/** The type Write hl 7 v 2 fn. */
static class WriteHL7v2Fn extends DoFn<HL7v2Message, HealthcareIOError<HL7v2Message>> {
// TODO when the healthcare API releases a bulk import method this should use that to improve
// throughput.
private Distribution messageIngestLatencyMs =
Metrics.distribution(WriteHL7v2Fn.class, "message-ingest-latency-ms");
private Counter failedMessageWrites =
Metrics.counter(WriteHL7v2Fn.class, "failed-hl7v2-message-writes");
private final ValueProvider<String> hl7v2Store;
private final Counter successfulHL7v2MessageWrites =
Metrics.counter(WriteHL7v2.class, "successful-hl7v2-message-writes");
private final Write.WriteMethod writeMethod;
private static final Logger LOG = LoggerFactory.getLogger(WriteHL7v2.WriteHL7v2Fn.class);
private transient HealthcareApiClient client;
/**
* Instantiates a new Write HL7v2 fn.
*
* @param hl7v2Store the HL7v2 store
* @param writeMethod the write method
*/
WriteHL7v2Fn(ValueProvider<String> hl7v2Store, Write.WriteMethod writeMethod) {
this.hl7v2Store = hl7v2Store;
this.writeMethod = writeMethod;
}
/**
* Init client.
*
* @throws IOException the io exception
*/
@Setup
public void initClient() throws IOException {
this.client = new HttpHealthcareApiClient();
}
/**
* Write messages.
*
* @param context the context
*/
@ProcessElement
public void writeMessages(ProcessContext context) {
HL7v2Message msg = context.element();
// all fields but data and labels should be null for ingest.
Message model = new Message();
model.setData(msg.getData());
model.setLabels(msg.getLabels());
switch (writeMethod) {
case BATCH_IMPORT:
// TODO once healthcare API exposes batch import API add that functionality here to
// improve performance this should be the new default behavior/List.
throw new UnsupportedOperationException("The Batch import API is not available yet");
case INGEST:
default:
try {
long requestTimestamp = Instant.now().getMillis();
client.ingestHL7v2Message(hl7v2Store.get(), model);
messageIngestLatencyMs.update(Instant.now().getMillis() - requestTimestamp);
} catch (Exception e) {
failedMessageWrites.inc();
LOG.warn(
String.format(
"Failed to ingest message Error: %s Stacktrace: %s",
e.getMessage(), Throwables.getStackTraceAsString(e)));
HealthcareIOError<HL7v2Message> err = HealthcareIOError.of(msg, e);
LOG.warn(String.format("%s %s", err.getErrorMessage(), err.getStackTrace()));
context.output(err);
}
}
}
}
}
}
| |
package io.cloudslang.content.mail.services;
import com.sun.mail.util.ASCIIUtility;
import org.bouncycastle.cms.RecipientId;
import org.bouncycastle.cms.RecipientInformation;
import org.bouncycastle.cms.RecipientInformationStore;
import org.bouncycastle.jce.provider.BouncyCastleProvider;
import org.bouncycastle.mail.smime.SMIMEEnveloped;
import org.bouncycastle.mail.smime.SMIMEUtil;
import io.cloudslang.content.mail.entities.GetMailMessageInputs;
import io.cloudslang.content.mail.entities.SimpleAuthenticator;
import io.cloudslang.content.mail.entities.StringOutputStream;
import io.cloudslang.content.mail.sslconfig.EasyX509TrustManager;
import io.cloudslang.content.mail.sslconfig.SSLUtils;
import javax.mail.*;
import javax.mail.NoSuchProviderException;
import javax.mail.internet.MimeBodyPart;
import javax.mail.internet.MimeMultipart;
import javax.mail.internet.MimeUtility;
import javax.net.ssl.KeyManager;
import javax.net.ssl.SSLContext;
import javax.net.ssl.TrustManager;
import java.io.*;
import java.net.URL;
import java.security.*;
import java.security.cert.X509Certificate;
import java.util.*;
/**
* Created by giloan on 11/3/2014.
*/
public class GetMailMessage {
    // ---- Result map keys ----
    public static final String RETURN_RESULT = "returnResult";
    public static final String SUBJECT = "Subject";
    public static final String BODY_RESULT = "Body";
    public static final String PLAIN_TEXT_BODY_RESULT = "plainTextBody";
    public static final String ATTACHED_FILE_NAMES_RESULT = "AttachedFileNames";
    public static final String RETURN_CODE = "returnCode";
    public static final String EXCEPTION = "exception";
    // ---- Return codes / statuses ----
    public static final String SUCCESS = "success";
    public static final String FAILURE = "failure";
    public static final String SUCCESS_RETURN_CODE = "0";
    public static final String FAILURE_RETURN_CODE = "-1";
    // ---- URL scheme prefixes used when resolving keystore locations ----
    public static final String FILE = "file:";
    public static final String HTTP = "http";
    // Default password of the JRE's bundled cacerts keystore.
    public static final String DEFAULT_PASSWORD_FOR_STORE = "changeit";
    // ---- Supported protocols and their default ports ----
    public static final String POP3 = "pop3";
    public static final String IMAP = "imap";
    public static final String IMAP_4 = "imap4";
    public static final String IMAP_PORT = "143";
    public static final String POP3_PORT = "110";
    // ---- Input-validation messages ----
    public static final String PLEASE_SPECIFY_THE_PORT_FOR_THE_INDICATED_PROTOCOL = "Please specify the port for the indicated protocol.";
    public static final String PLEASE_SPECIFY_THE_PORT_THE_PROTOCOL_OR_BOTH = "Please specify the port, the protocol, or both.";
    public static final String PLEASE_SPECIFY_THE_PROTOCOL_FOR_THE_INDICATED_PORT = "Please specify the protocol for the indicated port.";
    // ---- MIME content types ----
    public static final String TEXT_PLAIN = "text/plain";
    public static final String TEXT_HTML = "text/html";
    private static final String MULTIPART_MIXED = "multipart/mixed";
    private static final String MULTIPART_RELATED = "multipart/related";
    public static final String CONTENT_TYPE = "Content-Type";
    // ---- SSL/TLS constants ----
    public static final String SSL = "SSL";
    public static final String STR_FALSE = "false";
    public static final String STR_TRUE = "true";
    public static final String MESSAGES_ARE_NUMBERED_STARTING_AT_1 = "Messages are numbered starting at 1 through the total number of messages in the folder!";
    public static final String STR_COMMA = ",";
    public static final String THE_SPECIFIED_FOLDER_DOES_NOT_EXIST_ON_THE_REMOTE_SERVER = "The specified folder does not exist on the remote server.";
    public static final String UNRECOGNIZED_SSL_MESSAGE = "Unrecognized SSL message";
    public static final String UNRECOGNIZED_SSL_MESSAGE_PLAINTEXT_CONNECTION = "Unrecognized SSL message, plaintext connection?";
    public static final String SSL_FACTORY = "javax.net.ssl.SSLSocketFactory";
    private static final String HOST_NOT_SPECIFIED = "The required host input is not specified!";
    private static final String MESSAGE_NUMBER_NOT_SPECIFIED = "The required messageNumber input is not specified!";
    private static final String USERNAME_NOT_SPECIFIED = "The required username input is not specified!";
    private static final String FOLDER_NOT_SPECIFIED = "The required folder input is not specified!";
    // ---- S/MIME decryption constants ----
    public static final String PKCS_KEYSTORE_TYPE = "PKCS12";
    public static final String BOUNCY_CASTLE_PROVIDER = "BC";
    public static final String ENCRYPTED_CONTENT_TYPE = "application/pkcs7-mime; name=\"smime.p7m\"; smime-type=enveloped-data";
    // Appended to the protocol name to select the secured JavaMail provider, e.g. "imaps".
    public static final String SECURE_SUFFIX_FOR_POP3_AND_IMAP = "s";
    //Operation inputs
    private String host;
    private String port;
    private String protocol;
    private String username;
    private String password;
    private String folder;
    private boolean trustAllRoots;
    /**
     * The relative position of the message in the folder. Numbering starts from 1.
     */
    private int messageNumber;
    // NOTE(review): initialized to true here, but processInputs() documents the default as false
    // and always overwrites this field — confirm the field initializer is intentional.
    private boolean subjectOnly = true;
    private boolean enableSSL;
    private boolean enableTLS;
    private String keystore;
    private String keystorePassword;
    private String trustKeystoreFile;
    private String trustPassword;
    private String characterSet;
    // ---- S/MIME decryption inputs and state ----
    private String decryptionKeystore;
    private String decryptionKeyAlias;
    private String decryptionKeystorePass;
    private boolean deleteUponRetrieval;
    private boolean decryptMessage;
    // Socket/connection timeout in milliseconds; <= 0 means "use the provider default".
    private int timeout = -1;
    // Recipient identifier derived from the decryption certificate (set by addDecryptionSettings).
    private RecipientId recId = null;
    // Keystore holding the S/MIME private key (set by addDecryptionSettings).
    private KeyStore ks = null;
    /**
     * Retrieves a single message from the configured mail store and returns its parts as a result
     * map (subject, body, plain-text body, attached file names, return code).
     *
     * @param getMailMessageInputs the operation inputs (host, credentials, folder, flags, ...)
     * @return result map keyed by the *_RESULT/RETURN_* constants of this class
     * @throws Exception on connection, folder, or decoding failures
     */
    public Map<String, String> execute(GetMailMessageInputs getMailMessageInputs) throws Exception {
        Map<String, String> result = new HashMap<>();
        try {
            processInputs(getMailMessageInputs);
            Message message = getMessage();
            if (decryptMessage) {
                // Prepares recId/ks for S/MIME decryption; decryption itself happens downstream
                // when the message content is read.
                addDecryptionSettings();
            }
            //delete message
            if (deleteUponRetrieval) {
                // Marks the message deleted; actual expunge depends on how the folder is closed.
                message.setFlag(Flags.Flag.DELETED, true);
            }
            if (subjectOnly) {
                String subject;
                if ((characterSet != null) && (characterSet.trim().length() > 0)) { //need to force the decode charset
                    subject = message.getHeader(SUBJECT)[0];
                    subject = changeHeaderCharset(subject, characterSet);
                    subject = MimeUtility.decodeText(subject);
                } else {
                    subject = message.getSubject();
                }
                if (subject == null) {
                    subject = "";
                }
                // NOTE(review): when characterSet is set, the subject is passed through
                // MimeUtility.decodeText twice (above and here) — confirm this is intentional.
                result.put(RETURN_RESULT, MimeUtility.decodeText(subject));
            } else {
                try {
                    // Get subject and attachedFileNames
                    if ((characterSet != null) && (characterSet.trim().length() > 0)) { //need to force the decode charset
                        String subject = message.getHeader(SUBJECT)[0];
                        subject = changeHeaderCharset(subject, characterSet);
                        result.put(SUBJECT, MimeUtility.decodeText(subject));
                        String attachedFileNames = changeHeaderCharset(getAttachedFileNames(message), characterSet);
                        result.put(ATTACHED_FILE_NAMES_RESULT, decodeAttachedFileNames(attachedFileNames));
                    } else { //let everything as the sender intended it to be :)
                        String subject = message.getSubject();
                        if (subject == null)
                            subject = "";
                        result.put(SUBJECT, MimeUtility.decodeText(subject));
                        result.put(ATTACHED_FILE_NAMES_RESULT, decodeAttachedFileNames((getAttachedFileNames(message))));
                    }
                    // Get the message body
                    Map<String, String> messageByTypes = getMessageByContentTypes(message, characterSet);
                    // BODY_RESULT carries the last content type found (e.g. HTML when both
                    // text/plain and text/html parts exist).
                    String lastMessageBody = "";
                    if(!messageByTypes.isEmpty()) {
                        lastMessageBody = new LinkedList<>(messageByTypes.values()).getLast();
                    }
                    if(lastMessageBody == null) {
                        lastMessageBody = "";
                    }
                    result.put(BODY_RESULT, MimeUtility.decodeText(lastMessageBody));
                    String plainTextBody = messageByTypes.containsKey(TEXT_PLAIN) ? messageByTypes.get(TEXT_PLAIN) : "";
                    result.put(PLAIN_TEXT_BODY_RESULT, MimeUtility.decodeText(plainTextBody));
                    // RETURN_RESULT holds the full raw message, with NUL characters stripped.
                    StringOutputStream stream = new StringOutputStream();
                    message.writeTo(stream);
                    result.put(RETURN_RESULT, stream.toString().replaceAll("" + (char) 0, ""));
                } catch (UnsupportedEncodingException except) {
                    // NOTE(review): rethrows without the original as cause — stack trace of the
                    // underlying failure is lost.
                    throw new UnsupportedEncodingException("The given encoding (" + characterSet + ") is invalid or not supported.");
                }
            }
            result.put(RETURN_CODE, SUCCESS_RETURN_CODE);
        } catch (Exception e) {
            // Translate the cryptic JSSE error raised when SSL is attempted against a
            // plaintext endpoint into an actionable message.
            if (e.toString().contains(UNRECOGNIZED_SSL_MESSAGE)) {
                throw new Exception(UNRECOGNIZED_SSL_MESSAGE_PLAINTEXT_CONNECTION);
            } else {
                throw e;
            }
        }
        return result;
    }
protected Message getMessage() throws Exception {
Store store = createMessageStore();
Folder f = store.getFolder(folder);
if (!f.exists()) {
throw new Exception(THE_SPECIFIED_FOLDER_DOES_NOT_EXIST_ON_THE_REMOTE_SERVER);
}
f.open(getFolderOpenMode());
if (messageNumber > f.getMessageCount())
throw new IndexOutOfBoundsException("message value was: " + messageNumber + " there are only " + f.getMessageCount() + " messages in folder");
return f.getMessage(messageNumber);
}
protected Store createMessageStore() throws Exception {
Properties props = new Properties();
if(timeout > 0) {
props.put("mail." + protocol + ".timeout", timeout);
}
Authenticator auth = new SimpleAuthenticator(username, password);
Store store;
if (enableTLS || enableSSL) addSSLSettings(trustAllRoots, keystore, keystorePassword, trustKeystoreFile, trustPassword);
if (enableTLS) {
store = tryTLSOtherwiseTrySSL(props, auth);
} else if (enableSSL) {
store = connectUsingSSL(props, auth);
} else {
store = configureStoreWithoutSSL(props, auth);
store.connect();
}
return store;
}
private Store tryTLSOtherwiseTrySSL(Properties props, Authenticator auth) throws MessagingException {
Store store = configureStoreWithTLS(props, auth);
try {
store.connect(host, username, password);
} catch (Exception e) {
if (enableSSL) {
clearTLSProperties(props);
store = connectUsingSSL(props, auth);
} else {
throw e;
}
}
return store;
}
private Store connectUsingSSL(Properties props, Authenticator auth) throws MessagingException {
Store store = configureStoreWithSSL(props, auth);
store.connect();
return store;
}
private void clearTLSProperties(Properties props) {
props.remove("mail." + protocol + ".ssl.enable");
props.remove("mail." + protocol + ".starttls.enable");
props.remove("mail." + protocol + ".starttls.required");
}
protected Store configureStoreWithSSL(Properties props, Authenticator auth) throws NoSuchProviderException {
props.setProperty("mail." + protocol + ".socketFactory.class", SSL_FACTORY);
props.setProperty("mail." + protocol + ".socketFactory.fallback", STR_FALSE);
props.setProperty("mail." + protocol + ".port", port);
props.setProperty("mail." + protocol + ".socketFactory.port", port);
URLName url = new URLName(protocol, host, Integer.parseInt(port), "", username, password);
Session session = Session.getInstance(props, auth);
return session.getStore(url);
}
protected Store configureStoreWithTLS(Properties props, Authenticator auth) throws NoSuchProviderException {
props.setProperty("mail." + protocol + ".ssl.enable", STR_FALSE);
props.setProperty("mail." + protocol + ".starttls.enable", STR_TRUE);
props.setProperty("mail." + protocol + ".starttls.required", STR_TRUE);
Session session = Session.getInstance(props, auth);
return session.getStore(protocol + SECURE_SUFFIX_FOR_POP3_AND_IMAP);
}
protected Store configureStoreWithoutSSL(Properties props, Authenticator auth) throws NoSuchProviderException {
props.put("mail." + protocol + ".host", host);
props.put("mail." + protocol + ".port", port);
Session s = Session.getInstance(props, auth);
return s.getStore(protocol);
}
    /**
     * Builds an {@link SSLContext} from the configured client/trust keystores (falling back to the
     * JRE's cacerts when none is given) and installs it as the process-wide default.
     *
     * @param trustAllRoots when true, installs a trust-everything manager instead of a keystore
     * @param keystore client-certificate keystore location ("" selects the JRE cacerts if present)
     * @param keystorePassword password for the client keystore ("" maps to the cacerts default)
     * @param trustKeystore trusted-roots keystore location ("" selects the JRE cacerts if present)
     * @param trustPassword password for the trust keystore ("" maps to the cacerts default)
     * @throws Exception on keystore loading or SSL context initialization failure
     */
    protected void addSSLSettings(boolean trustAllRoots, String keystore,
                                  String keystorePassword, String trustKeystore, String trustPassword) throws Exception {
        boolean useClientCert = false;
        boolean useTrustCert = false;
        String separator = getSystemFileSeparator();
        String javaKeystore = getSystemJavaHome() + separator + "lib" + separator + "security" + separator + "cacerts";
        if (keystore.length() == 0 && !trustAllRoots) {
            // No explicit client keystore: fall back to the JRE cacerts when it exists.
            boolean storeExists = new File(javaKeystore).exists();
            keystore = (storeExists) ? FILE + javaKeystore : null;
            if (null != keystorePassword) {
                if (keystorePassword.equals("")) {
                    keystorePassword = DEFAULT_PASSWORD_FOR_STORE;
                }
            }
            useClientCert = storeExists;
        } else {
            if (!trustAllRoots) {
                // Prefix bare paths with "file:" so they parse as URLs below.
                if (!keystore.startsWith(HTTP))
                    keystore = FILE + keystore;
                useClientCert = true;
            }
        }
        if (trustKeystore.length() == 0 && !trustAllRoots) {
            // Same cacerts fallback for the trust store.
            boolean storeExists = new File(javaKeystore).exists();
            trustKeystore = (storeExists) ? FILE + javaKeystore : null;
            // NOTE(review): trustPassword.equals("") will NPE when trustPassword is null and
            // cacerts exists — confirm callers always pass a non-null trustPassword here.
            trustPassword = (storeExists) ? ((trustPassword.equals("")) ? DEFAULT_PASSWORD_FOR_STORE : trustPassword) : null;
            useTrustCert = storeExists;
        } else {
            if (!trustAllRoots) {
                if (!trustKeystore.startsWith(HTTP))
                    trustKeystore = FILE + trustKeystore;
                useTrustCert = true;
            }
        }
        SSLContext context = SSLContext.getInstance(SSL);
        TrustManager[] trustManagers = null;
        KeyManager[] keyManagers = null;
        if (trustAllRoots) {
            trustManagers = new TrustManager[]{new EasyX509TrustManager()};
        }
        if (useTrustCert) {
            KeyStore trustKeyStore = SSLUtils.createKeyStore(new URL(trustKeystore), trustPassword);
            trustManagers = SSLUtils.createAuthTrustManagers(trustKeyStore);
        }
        if (useClientCert) {
            KeyStore clientKeyStore = SSLUtils.createKeyStore(new URL(keystore), keystorePassword);
            keyManagers = SSLUtils.createKeyManagers(clientKeyStore, keystorePassword);
        }
        context.init(keyManagers, trustManagers, new SecureRandom());
        // Process-wide side effect: all subsequent SSL sockets in this JVM use this context.
        SSLContext.setDefault(context);
    }
private void addDecryptionSettings() throws Exception {
char[] smimePw = new String(decryptionKeystorePass).toCharArray();
Security.addProvider(new BouncyCastleProvider());
ks = KeyStore.getInstance(PKCS_KEYSTORE_TYPE, BOUNCY_CASTLE_PROVIDER);
InputStream decryptionStream = new URL(decryptionKeystore).openStream();
try {
ks.load(decryptionStream, smimePw);
} finally {
decryptionStream.close();
}
if (decryptionKeyAlias.equals("")) {
Enumeration e = ks.aliases();
while (e.hasMoreElements()) {
String alias = (String) e.nextElement();
if (ks.isKeyEntry(alias)) {
decryptionKeyAlias = alias;
}
}
if (decryptionKeyAlias.equals(""))
{
throw new Exception("Can't find a private key!");
}
}
//
// find the certificate for the private key and generate a
// suitable recipient identifier.
//
X509Certificate cert = (X509Certificate)ks.getCertificate(decryptionKeyAlias);
if(null == cert) {
throw new Exception("Can't find a key pair with alias \"" + decryptionKeyAlias + "\" in the given keystore");
}
recId = new RecipientId();
recId.setSerialNumber(cert.getSerialNumber());
recId.setIssuer(cert.getIssuerX500Principal().getEncoded());
}
protected String getSystemFileSeparator() {
return System.getProperty("file.separator");
}
protected String getSystemJavaHome() {
return System.getProperty("java.home");
}
    /**
     * Validates and normalizes the raw operation inputs into the fields of
     * this class (host, port, protocol, credentials, folder, boolean flags,
     * decryption settings, timeout).
     *
     * @param getMailMessageInputs the raw, possibly-empty user inputs
     * @throws Exception if a mandatory input is missing, the message number
     *                   is not a positive integer, the protocol/port pair
     *                   cannot be resolved, or the timeout is not positive
     */
    protected void processInputs(GetMailMessageInputs getMailMessageInputs) throws Exception {
        String strHost = getMailMessageInputs.getHostname();
        if (null == strHost || strHost.equals("")) {
            throw new Exception(HOST_NOT_SPECIFIED);
        } else {
            host = strHost.trim();
        }
        port = getMailMessageInputs.getPort();
        protocol = getMailMessageInputs.getProtocol();
        String strUsername = getMailMessageInputs.getUsername();
        if (null == strUsername || strUsername.equals("")) {
            throw new Exception(USERNAME_NOT_SPECIFIED);
        } else {
            username = strUsername.trim();
        }
        String strPassword = getMailMessageInputs.getPassword();
        if (null == strPassword) {
            // A missing password is allowed and treated as empty.
            password = "";
        } else {
            password = strPassword.trim();
        }
        String strFolder = getMailMessageInputs.getFolder();
        if (null == strFolder || strFolder.equals("")) {
            throw new Exception(FOLDER_NOT_SPECIFIED);
        } else {
            folder = strFolder.trim();
        }
        String trustAll = getMailMessageInputs.getTrustAllRoots();
        // Default value of trustAllRoots is true
        trustAllRoots = !(null != trustAll && trustAll.equalsIgnoreCase(STR_FALSE));
        String strMessageNumber = getMailMessageInputs.getMessageNumber();
        if (strMessageNumber == null || strMessageNumber.equals("")) {
            throw new Exception(MESSAGE_NUMBER_NOT_SPECIFIED);
        } else {
            messageNumber = Integer.parseInt(strMessageNumber);
        }
        String strSubOnly = getMailMessageInputs.getSubjectOnly();
        // Default value of subjectOnly is false
        subjectOnly = (strSubOnly != null && strSubOnly.equalsIgnoreCase(STR_TRUE));
        String strEnableSSL = getMailMessageInputs.getEnableSSL();
        // Default value of enableSSL is false;
        enableSSL = (null != strEnableSSL && strEnableSSL.equalsIgnoreCase(STR_TRUE));
        String strEnableTLS = getMailMessageInputs.getEnableTLS();
        // Default value of enableTLS is false
        enableTLS = (null != strEnableTLS && strEnableTLS.equalsIgnoreCase(STR_TRUE));
        keystore = getMailMessageInputs.getKeystore();
        keystorePassword = getMailMessageInputs.getKeystorePassword();
        trustKeystoreFile = getMailMessageInputs.getTrustKeystore();
        trustPassword = getMailMessageInputs.getTrustPassword();
        characterSet = getMailMessageInputs.getCharacterSet();
        String strDeleteUponRetrieval = getMailMessageInputs.getDeleteUponRetrieval();
        // Default value for deleteUponRetrieval is false
        deleteUponRetrieval = (null != strDeleteUponRetrieval && strDeleteUponRetrieval.equalsIgnoreCase(STR_TRUE));
        if (messageNumber < 1) {
            throw new Exception(MESSAGES_ARE_NUMBERED_STARTING_AT_1);
        }
        // Resolve the protocol/port pair. The ORDER of these branches
        // matters: by the time a branch dereferences protocol or port
        // without a null check, an earlier branch has already guaranteed
        // it is non-null (e.g. at the port.trim() branches, protocol is
        // empty, so port must be non-empty or the first branch would have
        // thrown).
        if ((protocol == null || protocol.equals("")) && (port == null || port.equals(""))) {
            throw new Exception(PLEASE_SPECIFY_THE_PORT_THE_PROTOCOL_OR_BOTH);
        } else if ((protocol != null && !protocol.equals("")) && (!protocol.equalsIgnoreCase(IMAP)) && (!protocol.equalsIgnoreCase(POP3)) && (!protocol.equalsIgnoreCase(IMAP_4))
                && (port == null || port.equals(""))) {
            throw new Exception(PLEASE_SPECIFY_THE_PORT_FOR_THE_INDICATED_PROTOCOL);
        } else if ((protocol == null || protocol.equals("")) && (port != null && !port.equals(""))
                && (!port.equalsIgnoreCase(IMAP_PORT)) && (!port.equalsIgnoreCase(POP3_PORT))) {
            throw new Exception(PLEASE_SPECIFY_THE_PROTOCOL_FOR_THE_INDICATED_PORT);
        } else if ((protocol == null || protocol.equals("")) && (port.trim().equalsIgnoreCase(IMAP_PORT))) {
            // Infer the protocol from a well-known port ...
            protocol = IMAP;
        } else if ((protocol == null || protocol.equals("")) && (port.trim().equalsIgnoreCase(POP3_PORT))) {
            protocol = POP3;
        } else if ((protocol.trim().equalsIgnoreCase(POP3)) && (port == null || port.equals(""))) {
            // ... or infer the port from a well-known protocol.
            port = POP3_PORT;
        } else if ((protocol.trim().equalsIgnoreCase(IMAP)) && (port == null || port.equals(""))) {
            port = IMAP_PORT;
        } else if ((protocol.trim().equalsIgnoreCase(IMAP_4)) && (port == null || port.equals(""))) {
            port = IMAP_PORT;
        }
        //The protocol should be given in lowercase to be recognised.
        protocol = protocol.toLowerCase();
        if (protocol.trim().equalsIgnoreCase(IMAP_4)) {
            // "imap4" is an alias for "imap".
            protocol = IMAP;
        }
        this.decryptionKeystore = getMailMessageInputs.getDecryptionKeystore();
        if(this.decryptionKeystore != null && !this.decryptionKeystore.equals("")) {
            // A non-HTTP keystore location is treated as a local file path.
            if(!decryptionKeystore.startsWith(HTTP)) {
                decryptionKeystore = FILE + decryptionKeystore;
            }
            decryptMessage = true;
            decryptionKeyAlias = getMailMessageInputs.getDecryptionKeyAlias();
            if(null == decryptionKeyAlias) {
                decryptionKeyAlias = "";
            }
            decryptionKeystorePass = getMailMessageInputs.getDecryptionKeystorePassword();
            if(null == decryptionKeystorePass) {
                decryptionKeystorePass = "";
            }
        } else {
            decryptMessage = false;
        }
        String timeout = getMailMessageInputs.getTimeout();
        if(timeout != null && !timeout.isEmpty()) {
            this.timeout = Integer.parseInt(timeout);
            if(this.timeout <= 0) {
                throw new Exception("timeout value must be a positive number");
            }
            this.timeout *= 1000; //timeouts in seconds
        }
    }
    /**
     * Extracts the message body into a map keyed by content type
     * (text/plain, text/html, multipart/mixed), decoding each part with
     * {@link MimeUtility#decodeText} and optionally decrypting S/MIME
     * enveloped parts when {@code decryptMessage} is set.
     *
     * @param message      the retrieved mail message
     * @param characterSet optional character set that overrides the charset
     *                     declared in the part headers; may be null/empty
     * @return map from content type to decoded body text
     * @throws Exception on MIME parsing, decoding or decryption failures
     */
    protected Map<String, String> getMessageByContentTypes(Message message, String characterSet) throws Exception {
        Map<String, String> messageMap = new HashMap<>();
        if (message.isMimeType(TEXT_PLAIN)) {
            messageMap.put(TEXT_PLAIN, MimeUtility.decodeText(message.getContent().toString()));
        } else if (message.isMimeType(TEXT_HTML)) {
            messageMap.put(TEXT_HTML, MimeUtility.decodeText(convertMessage(message.getContent().toString())));
        }else if (message.isMimeType(MULTIPART_MIXED) || message.isMimeType(MULTIPART_RELATED)) {
            messageMap.put(MULTIPART_MIXED, extractMultipartMixedMessage(message, characterSet));
        }else {
            // Any other MIME type is assumed to be a multipart container.
            // NOTE(review): a non-multipart content here would throw a
            // ClassCastException — confirm all remaining types are multipart.
            Object obj = message.getContent();
            Multipart mpart = (Multipart) obj;
            for (int i = 0, n = mpart.getCount(); i < n; i++) {
                Part part = mpart.getBodyPart(i);
                // S/MIME enveloped parts are decrypted before inspection.
                if(decryptMessage && part.getContentType() != null && part.getContentType().equals("application/pkcs7-mime; name=\"smime.p7m\"; smime-type=enveloped-data")) {
                    part = decryptPart((MimeBodyPart)part);
                }
                String disposition = part.getDisposition();
                // Strip the parameters: keep only the bare type before ";".
                // NOTE(review): assumes the content type always contains ";"
                // — indexOf would return -1 otherwise; confirm.
                String partContentType = new String(part.getContentType().substring(0, part.getContentType().indexOf(";")));
                // A null disposition means inline content (not an attachment).
                if (disposition == null) {
                    if (part.getContent() instanceof MimeMultipart) { // multipart with attachment
                        MimeMultipart mm = (MimeMultipart) part.getContent();
                        for (int j = 0; j < mm.getCount(); j++) {
                            if (mm.getBodyPart(j).getContent() instanceof String) {
                                BodyPart bodyPart = mm.getBodyPart(j);
                                if ((characterSet != null) && (characterSet.trim().length() > 0)) {
                                    // Force the user-supplied charset by rewriting
                                    // the charset parameter of the Content-Type header.
                                    String contentType = bodyPart.getHeader(CONTENT_TYPE)[0];
                                    contentType = contentType.replace(contentType.substring(contentType.indexOf("=") + 1), characterSet);
                                    bodyPart.setHeader(CONTENT_TYPE, contentType);
                                }
                                String partContentType1 = new String(bodyPart.getContentType().substring(0, bodyPart.getContentType().indexOf(";")));
                                messageMap.put(partContentType1, MimeUtility.decodeText(bodyPart.getContent().toString()));
                            }
                        }
                    } else {//multipart - w/o attachment
                        //if the user has specified a certain characterSet we decode his way
                        if ((characterSet != null) && (characterSet.trim().length() > 0)) {
                            // Read the raw bytes and decode them with the
                            // user-supplied charset instead of the declared one.
                            // NOTE(review): istream is never closed here — confirm
                            // whether the enclosing message lifecycle handles it.
                            InputStream istream = part.getInputStream();
                            ByteArrayInputStream bis = new ByteArrayInputStream(ASCIIUtility.getBytes(istream));
                            int count = bis.available();
                            byte[] bytes = new byte[count];
                            count = bis.read(bytes, 0, count);
                            messageMap.put(partContentType, MimeUtility.decodeText(new String(bytes, 0, count, characterSet)));
                        } else {
                            messageMap.put(partContentType, MimeUtility.decodeText(part.getContent().toString()));
                        }
                    }
                }
            }//for
        }//else
        return messageMap;
    }
    /**
     * Extracts the first readable body from a multipart/mixed (or
     * multipart/related) message, skipping attachments and inline images,
     * and decrypting S/MIME enveloped parts when {@code decryptMessage} is
     * set.
     *
     * @param message      the retrieved mail message
     * @param characterSet currently unused by this method's body extraction
     * @return the extracted body text, or null if no readable part was found
     * @throws Exception on MIME parsing or decryption failures
     */
    private String extractMultipartMixedMessage(Message message, String characterSet) throws Exception {
        Object obj = message.getContent();
        Multipart mpart = (Multipart) obj;
        for (int i = 0, n = mpart.getCount(); i < n; i++) {
            Part part = mpart.getBodyPart(i);
            // S/MIME enveloped parts are decrypted before inspection.
            if(decryptMessage && part.getContentType() != null && part.getContentType().equals("application/pkcs7-mime; name=\"smime.p7m\"; smime-type=enveloped-data")) {
                part = decryptPart((MimeBodyPart)part);
            }
            String disposition = part.getDisposition();
            if (disposition != null) // this means the part is not an inline image or attached file.
                continue;
            if (part.isMimeType("multipart/related")) { // if related content then check it's parts
                String content = processMultipart(part);
                if (content != null)
                    return content;
            }
            if (part.isMimeType("multipart/alternative")) {
                return extractAlternativeContent(part);
            }
            if (part.isMimeType("text/plain") || part.isMimeType("text/html")) {
                // Plain body part: return it as-is (no charset override here).
                return part.getContent().toString();
            }
        }
        // No readable body part was found.
        return null;
    }
private String processMultipart(Part part) throws IOException,
MessagingException {
Multipart relatedparts = (Multipart)part.getContent();
for(int j=0; j < relatedparts.getCount(); j++){
Part rel = relatedparts.getBodyPart(j);
if (rel.getDisposition() == null) { // again, if it's not an image or attachment(only those have disposition not null)
if (rel.isMimeType("multipart/alternative")) { // last crawl through the alternative formats.
return extractAlternativeContent(rel);
}
}
}
return null;
}
private String extractAlternativeContent(Part part) throws IOException,
MessagingException {
Multipart alternatives = (Multipart)part.getContent();
Object content="";
for (int k = 0; k< alternatives.getCount(); k++) {
Part alternative = alternatives.getBodyPart(k);
if (alternative.getDisposition() == null) {
content = alternative.getContent();
}
}
return content.toString();
}
private MimeBodyPart decryptPart(MimeBodyPart part) throws Exception {
SMIMEEnveloped m = new SMIMEEnveloped(part);
RecipientInformationStore recipients = m.getRecipientInfos();
RecipientInformation recipient = recipients.get(recId);
if(null == recipient) {
StringBuilder errorMessage = new StringBuilder();
errorMessage.append("This email wasn't encrypted with \"" + recId.toString() + "\".\n");
errorMessage.append("The encryption recId is: ");
for(Object rec : recipients.getRecipients()) {
if(rec instanceof RecipientInformation) {
RecipientId recipientId = ((RecipientInformation) rec).getRID();
errorMessage.append("\"" + recipientId.toString() + "\"\n");
}
}
throw new Exception(errorMessage.toString());
}
return SMIMEUtil.toMimeBodyPart(recipient.getContent(ks.getKey(decryptionKeyAlias, null), BOUNCY_CASTLE_PROVIDER));
}
    /**
     * Recursively collects the attachment file names of a MIME part as a
     * comma-separated string. Multipart parts are walked depth-first;
     * encrypted parts are decrypted first when {@code decryptMessage} is set.
     *
     * @param part the MIME part (possibly a multipart container)
     * @return comma-separated attachment file names (possibly empty)
     * @throws Exception on MIME parsing or decryption failures
     */
    protected String getAttachedFileNames(Part part) throws Exception {
        String fileNames = "";
        Object content = part.getContent();
        if (!(content instanceof Multipart)) {
            if(decryptMessage && part.getContentType() != null && part.getContentType().equals(ENCRYPTED_CONTENT_TYPE)) {
                part = decryptPart((MimeBodyPart) part);
            }
            // non-Multipart MIME part ...
            // is the file name set for this MIME part? (i.e. is it an attachment?)
            if (part.getFileName() != null && !part.getFileName().equals("") && part.getInputStream() != null) {
                String fileName = part.getFileName();
                // is the file name encoded? (consider it is if it's in the =?charset?encoding?encoded text?= format)
                // NOTE(review): the '?' heuristic misclassifies plain names
                // that merely contain a '?' — confirm acceptable.
                if (fileName.indexOf('?') == -1)
                    // not encoded (i.e. a simple file name not containing '?')-> just return the file name
                    return fileName;
                // encoded file name -> remove any chars before the first "=?" and after the last "?="
                // (the reversed-string indexOf("=?") locates the last "?=" of the original)
                return fileName.substring(fileName.indexOf("=?"), fileName.length() -
                        ((new StringBuilder(fileName)).reverse()).indexOf("=?"));
            }
        } else {
            // a Multipart type of MIME part
            Multipart mpart = (Multipart) content;
            // iterate through all the parts in this Multipart ...
            for (int i = 0, n = mpart.getCount(); i < n; i++) {
                if (!fileNames.equals(""))
                    fileNames += STR_COMMA;
                // to the list of attachments built so far append the list of attachments in the current MIME part ...
                fileNames += getAttachedFileNames(mpart.getBodyPart(i));
            }
        }
        return fileNames;
    }
protected String decodeAttachedFileNames(String attachedFileNames) throws Exception {
StringBuilder sb = new StringBuilder();
String delimiter = "";
// splits the input into comma-separated chunks and decodes each chunk according to its encoding ...
for (String fileName : attachedFileNames.split(STR_COMMA)) {
sb.append(delimiter).append(MimeUtility.decodeText(fileName));
delimiter = STR_COMMA;
}
// return the concatenation of the decoded chunks ...
return sb.toString();
}
protected String convertMessage(String msg) throws Exception {
StringBuilder sb = new StringBuilder();
for (int i = 0; i < msg.length(); i++) {
char c = msg.charAt(i);
if (c == '\n') {
sb.append("<br>");
} else {
sb.append(c);
}
}
return sb.toString();
}
    /**
     * Returns the mode used when opening the mail folder. Defaults to
     * read-only.
     * NOTE(review): deleteUponRetrieval presumably requires READ_WRITE —
     * confirm which subclass/override supplies that mode.
     */
    protected int getFolderOpenMode() {
        return Folder.READ_ONLY;
    }
/**
* This method addresses the mail headers which contain encoded words. The syntax for an encoded word is defined
* in RFC 2047 section 2: http://www.faqs.org/rfcs/rfc2047.html In some cases the header is marked as having a certain charset
* but at decode not all the characters a properly decoded. This is why it can be useful to force it to decode the text with
* a different charset.
* For example when sending an email using Mozilla Thunderbird and JIS X 0213 characters the subject and attachment headers
* are marked as =?Shift_JIS? but the JIS X 0213 characters are only supported in windows-31j.
* <p/>
* This method replaces the charset tag of the header with the new charset provided by the user.
*
* @param header - The header in which the charset will be replaced.
* @param newCharset - The new charset that will be replaced in the given header.
* @return The header with the new charset.
*/
public String changeHeaderCharset(String header, String newCharset) {
return header.replaceAll("=\\?[^\\(\\)<>@,;:/\\[\\]\\?\\.= ]+\\?", "=?" + newCharset + "?"); //match for =?charset?
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.builder.endpoint.dsl;
import java.util.Map;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import javax.annotation.Generated;
import org.apache.camel.ExchangePattern;
import org.apache.camel.LoggingLevel;
import org.apache.camel.builder.EndpointConsumerBuilder;
import org.apache.camel.builder.EndpointProducerBuilder;
import org.apache.camel.builder.endpoint.AbstractEndpointBuilder;
import org.apache.camel.spi.ExceptionHandler;
import org.apache.camel.spi.PollingConsumerPollStrategy;
/**
* The Twilio component allows you to interact with the Twilio REST APIs using
* Twilio Java SDK.
*
* Generated by camel-package-maven-plugin - do not edit this file!
*/
@Generated("org.apache.camel.maven.packaging.EndpointDslMojo")
public interface TwilioEndpointBuilderFactory {
    /**
     * Builder for endpoint consumers for the Twilio component.
     *
     * NOTE: generated by camel-package-maven-plugin — do not hand-edit;
     * regenerate from the component metadata instead.
     */
    public interface TwilioEndpointConsumerBuilder
            extends
                EndpointConsumerBuilder {
        /** Switches to the advanced consumer builder with additional options. */
        default AdvancedTwilioEndpointConsumerBuilder advanced() {
            return (AdvancedTwilioEndpointConsumerBuilder) this;
        }
        /**
         * Sets the name of a parameter to be passed in the exchange In Body.
         *
         * The option is a: <code>java.lang.String</code> type.
         *
         * Group: common
         */
        default TwilioEndpointConsumerBuilder inBody(String inBody) {
            doSetProperty("inBody", inBody);
            return this;
        }
        /**
         * Allows for bridging the consumer to the Camel routing Error Handler,
         * which mean any exceptions occurred while the consumer is trying to
         * pickup incoming messages, or the likes, will now be processed as a
         * message and handled by the routing Error Handler. By default the
         * consumer will use the org.apache.camel.spi.ExceptionHandler to deal
         * with exceptions, that will be logged at WARN or ERROR level and
         * ignored.
         *
         * The option is a: <code>boolean</code> type.
         *
         * Group: consumer
         */
        default TwilioEndpointConsumerBuilder bridgeErrorHandler(
                boolean bridgeErrorHandler) {
            doSetProperty("bridgeErrorHandler", bridgeErrorHandler);
            return this;
        }
        /**
         * Allows for bridging the consumer to the Camel routing Error Handler,
         * which mean any exceptions occurred while the consumer is trying to
         * pickup incoming messages, or the likes, will now be processed as a
         * message and handled by the routing Error Handler. By default the
         * consumer will use the org.apache.camel.spi.ExceptionHandler to deal
         * with exceptions, that will be logged at WARN or ERROR level and
         * ignored.
         *
         * The option will be converted to a <code>boolean</code> type.
         *
         * Group: consumer
         */
        default TwilioEndpointConsumerBuilder bridgeErrorHandler(
                String bridgeErrorHandler) {
            doSetProperty("bridgeErrorHandler", bridgeErrorHandler);
            return this;
        }
        /**
         * If the polling consumer did not poll any files, you can enable this
         * option to send an empty message (no body) instead.
         *
         * The option is a: <code>boolean</code> type.
         *
         * Group: consumer
         */
        default TwilioEndpointConsumerBuilder sendEmptyMessageWhenIdle(
                boolean sendEmptyMessageWhenIdle) {
            doSetProperty("sendEmptyMessageWhenIdle", sendEmptyMessageWhenIdle);
            return this;
        }
        /**
         * If the polling consumer did not poll any files, you can enable this
         * option to send an empty message (no body) instead.
         *
         * The option will be converted to a <code>boolean</code> type.
         *
         * Group: consumer
         */
        default TwilioEndpointConsumerBuilder sendEmptyMessageWhenIdle(
                String sendEmptyMessageWhenIdle) {
            doSetProperty("sendEmptyMessageWhenIdle", sendEmptyMessageWhenIdle);
            return this;
        }
        /**
         * The number of subsequent error polls (failed due some error) that
         * should happen before the backoffMultipler should kick-in.
         *
         * The option is a: <code>int</code> type.
         *
         * Group: scheduler
         */
        default TwilioEndpointConsumerBuilder backoffErrorThreshold(
                int backoffErrorThreshold) {
            doSetProperty("backoffErrorThreshold", backoffErrorThreshold);
            return this;
        }
        /**
         * The number of subsequent error polls (failed due some error) that
         * should happen before the backoffMultipler should kick-in.
         *
         * The option will be converted to a <code>int</code> type.
         *
         * Group: scheduler
         */
        default TwilioEndpointConsumerBuilder backoffErrorThreshold(
                String backoffErrorThreshold) {
            doSetProperty("backoffErrorThreshold", backoffErrorThreshold);
            return this;
        }
        /**
         * The number of subsequent idle polls that should happen before the
         * backoffMultipler should kick-in.
         *
         * The option is a: <code>int</code> type.
         *
         * Group: scheduler
         */
        default TwilioEndpointConsumerBuilder backoffIdleThreshold(
                int backoffIdleThreshold) {
            doSetProperty("backoffIdleThreshold", backoffIdleThreshold);
            return this;
        }
        /**
         * The number of subsequent idle polls that should happen before the
         * backoffMultipler should kick-in.
         *
         * The option will be converted to a <code>int</code> type.
         *
         * Group: scheduler
         */
        default TwilioEndpointConsumerBuilder backoffIdleThreshold(
                String backoffIdleThreshold) {
            doSetProperty("backoffIdleThreshold", backoffIdleThreshold);
            return this;
        }
        /**
         * To let the scheduled polling consumer backoff if there has been a
         * number of subsequent idles/errors in a row. The multiplier is then
         * the number of polls that will be skipped before the next actual
         * attempt is happening again. When this option is in use then
         * backoffIdleThreshold and/or backoffErrorThreshold must also be
         * configured.
         *
         * The option is a: <code>int</code> type.
         *
         * Group: scheduler
         */
        default TwilioEndpointConsumerBuilder backoffMultiplier(
                int backoffMultiplier) {
            doSetProperty("backoffMultiplier", backoffMultiplier);
            return this;
        }
        /**
         * To let the scheduled polling consumer backoff if there has been a
         * number of subsequent idles/errors in a row. The multiplier is then
         * the number of polls that will be skipped before the next actual
         * attempt is happening again. When this option is in use then
         * backoffIdleThreshold and/or backoffErrorThreshold must also be
         * configured.
         *
         * The option will be converted to a <code>int</code> type.
         *
         * Group: scheduler
         */
        default TwilioEndpointConsumerBuilder backoffMultiplier(
                String backoffMultiplier) {
            doSetProperty("backoffMultiplier", backoffMultiplier);
            return this;
        }
        /**
         * Milliseconds before the next poll. You can also specify time values
         * using units, such as 60s (60 seconds), 5m30s (5 minutes and 30
         * seconds), and 1h (1 hour).
         *
         * The option is a: <code>long</code> type.
         *
         * Group: scheduler
         */
        default TwilioEndpointConsumerBuilder delay(long delay) {
            doSetProperty("delay", delay);
            return this;
        }
        /**
         * Milliseconds before the next poll. You can also specify time values
         * using units, such as 60s (60 seconds), 5m30s (5 minutes and 30
         * seconds), and 1h (1 hour).
         *
         * The option will be converted to a <code>long</code> type.
         *
         * Group: scheduler
         */
        default TwilioEndpointConsumerBuilder delay(String delay) {
            doSetProperty("delay", delay);
            return this;
        }
        /**
         * If greedy is enabled, then the ScheduledPollConsumer will run
         * immediately again, if the previous run polled 1 or more messages.
         *
         * The option is a: <code>boolean</code> type.
         *
         * Group: scheduler
         */
        default TwilioEndpointConsumerBuilder greedy(boolean greedy) {
            doSetProperty("greedy", greedy);
            return this;
        }
        /**
         * If greedy is enabled, then the ScheduledPollConsumer will run
         * immediately again, if the previous run polled 1 or more messages.
         *
         * The option will be converted to a <code>boolean</code> type.
         *
         * Group: scheduler
         */
        default TwilioEndpointConsumerBuilder greedy(String greedy) {
            doSetProperty("greedy", greedy);
            return this;
        }
        /**
         * Milliseconds before the first poll starts. You can also specify time
         * values using units, such as 60s (60 seconds), 5m30s (5 minutes and 30
         * seconds), and 1h (1 hour).
         *
         * The option is a: <code>long</code> type.
         *
         * Group: scheduler
         */
        default TwilioEndpointConsumerBuilder initialDelay(long initialDelay) {
            doSetProperty("initialDelay", initialDelay);
            return this;
        }
        /**
         * Milliseconds before the first poll starts. You can also specify time
         * values using units, such as 60s (60 seconds), 5m30s (5 minutes and 30
         * seconds), and 1h (1 hour).
         *
         * The option will be converted to a <code>long</code> type.
         *
         * Group: scheduler
         */
        default TwilioEndpointConsumerBuilder initialDelay(String initialDelay) {
            doSetProperty("initialDelay", initialDelay);
            return this;
        }
        /**
         * Specifies a maximum limit of number of fires. So if you set it to 1,
         * the scheduler will only fire once. If you set it to 5, it will only
         * fire five times. A value of zero or negative means fire forever.
         *
         * The option is a: <code>long</code> type.
         *
         * Group: scheduler
         */
        default TwilioEndpointConsumerBuilder repeatCount(long repeatCount) {
            doSetProperty("repeatCount", repeatCount);
            return this;
        }
        /**
         * Specifies a maximum limit of number of fires. So if you set it to 1,
         * the scheduler will only fire once. If you set it to 5, it will only
         * fire five times. A value of zero or negative means fire forever.
         *
         * The option will be converted to a <code>long</code> type.
         *
         * Group: scheduler
         */
        default TwilioEndpointConsumerBuilder repeatCount(String repeatCount) {
            doSetProperty("repeatCount", repeatCount);
            return this;
        }
        /**
         * The consumer logs a start/complete log line when it polls. This
         * option allows you to configure the logging level for that.
         *
         * The option is a: <code>org.apache.camel.LoggingLevel</code> type.
         *
         * Group: scheduler
         */
        default TwilioEndpointConsumerBuilder runLoggingLevel(
                LoggingLevel runLoggingLevel) {
            doSetProperty("runLoggingLevel", runLoggingLevel);
            return this;
        }
        /**
         * The consumer logs a start/complete log line when it polls. This
         * option allows you to configure the logging level for that.
         *
         * The option will be converted to a
         * <code>org.apache.camel.LoggingLevel</code> type.
         *
         * Group: scheduler
         */
        default TwilioEndpointConsumerBuilder runLoggingLevel(
                String runLoggingLevel) {
            doSetProperty("runLoggingLevel", runLoggingLevel);
            return this;
        }
        /**
         * Allows for configuring a custom/shared thread pool to use for the
         * consumer. By default each consumer has its own single threaded thread
         * pool.
         *
         * The option is a:
         * <code>java.util.concurrent.ScheduledExecutorService</code> type.
         *
         * Group: scheduler
         */
        default TwilioEndpointConsumerBuilder scheduledExecutorService(
                ScheduledExecutorService scheduledExecutorService) {
            doSetProperty("scheduledExecutorService", scheduledExecutorService);
            return this;
        }
        /**
         * Allows for configuring a custom/shared thread pool to use for the
         * consumer. By default each consumer has its own single threaded thread
         * pool.
         *
         * The option will be converted to a
         * <code>java.util.concurrent.ScheduledExecutorService</code> type.
         *
         * Group: scheduler
         */
        default TwilioEndpointConsumerBuilder scheduledExecutorService(
                String scheduledExecutorService) {
            doSetProperty("scheduledExecutorService", scheduledExecutorService);
            return this;
        }
        /**
         * To use a cron scheduler from either camel-spring or camel-quartz
         * component.
         *
         * The option is a: <code>java.lang.String</code> type.
         *
         * Group: scheduler
         */
        default TwilioEndpointConsumerBuilder scheduler(String scheduler) {
            doSetProperty("scheduler", scheduler);
            return this;
        }
        /**
         * To configure additional properties when using a custom scheduler or
         * any of the Quartz, Spring based scheduler.
         *
         * The option is a: <code>java.util.Map&lt;java.lang.String,
         * java.lang.Object&gt;</code> type.
         *
         * Group: scheduler
         */
        default TwilioEndpointConsumerBuilder schedulerProperties(
                Map<String, Object> schedulerProperties) {
            doSetProperty("schedulerProperties", schedulerProperties);
            return this;
        }
        /**
         * To configure additional properties when using a custom scheduler or
         * any of the Quartz, Spring based scheduler.
         *
         * The option will be converted to a
         * <code>java.util.Map&lt;java.lang.String, java.lang.Object&gt;</code>
         * type.
         *
         * Group: scheduler
         */
        default TwilioEndpointConsumerBuilder schedulerProperties(
                String schedulerProperties) {
            doSetProperty("schedulerProperties", schedulerProperties);
            return this;
        }
        /**
         * Whether the scheduler should be auto started.
         *
         * The option is a: <code>boolean</code> type.
         *
         * Group: scheduler
         */
        default TwilioEndpointConsumerBuilder startScheduler(
                boolean startScheduler) {
            doSetProperty("startScheduler", startScheduler);
            return this;
        }
        /**
         * Whether the scheduler should be auto started.
         *
         * The option will be converted to a <code>boolean</code> type.
         *
         * Group: scheduler
         */
        default TwilioEndpointConsumerBuilder startScheduler(
                String startScheduler) {
            doSetProperty("startScheduler", startScheduler);
            return this;
        }
        /**
         * Time unit for initialDelay and delay options.
         *
         * The option is a: <code>java.util.concurrent.TimeUnit</code> type.
         *
         * Group: scheduler
         */
        default TwilioEndpointConsumerBuilder timeUnit(TimeUnit timeUnit) {
            doSetProperty("timeUnit", timeUnit);
            return this;
        }
        /**
         * Time unit for initialDelay and delay options.
         *
         * The option will be converted to a
         * <code>java.util.concurrent.TimeUnit</code> type.
         *
         * Group: scheduler
         */
        default TwilioEndpointConsumerBuilder timeUnit(String timeUnit) {
            doSetProperty("timeUnit", timeUnit);
            return this;
        }
        /**
         * Controls if fixed delay or fixed rate is used. See
         * ScheduledExecutorService in JDK for details.
         *
         * The option is a: <code>boolean</code> type.
         *
         * Group: scheduler
         */
        default TwilioEndpointConsumerBuilder useFixedDelay(
                boolean useFixedDelay) {
            doSetProperty("useFixedDelay", useFixedDelay);
            return this;
        }
        /**
         * Controls if fixed delay or fixed rate is used. See
         * ScheduledExecutorService in JDK for details.
         *
         * The option will be converted to a <code>boolean</code> type.
         *
         * Group: scheduler
         */
        default TwilioEndpointConsumerBuilder useFixedDelay(String useFixedDelay) {
            doSetProperty("useFixedDelay", useFixedDelay);
            return this;
        }
    }
    /**
     * Advanced builder for endpoint consumers for the Twilio component.
     *
     * NOTE: generated by camel-package-maven-plugin — do not hand-edit;
     * regenerate from the component metadata instead.
     */
    public interface AdvancedTwilioEndpointConsumerBuilder
            extends
                EndpointConsumerBuilder {
        /** Switches back to the basic consumer builder. */
        default TwilioEndpointConsumerBuilder basic() {
            return (TwilioEndpointConsumerBuilder) this;
        }
        /**
         * To let the consumer use a custom ExceptionHandler. Notice if the
         * option bridgeErrorHandler is enabled then this option is not in use.
         * By default the consumer will deal with exceptions, that will be
         * logged at WARN or ERROR level and ignored.
         *
         * The option is a: <code>org.apache.camel.spi.ExceptionHandler</code>
         * type.
         *
         * Group: consumer (advanced)
         */
        default AdvancedTwilioEndpointConsumerBuilder exceptionHandler(
                ExceptionHandler exceptionHandler) {
            doSetProperty("exceptionHandler", exceptionHandler);
            return this;
        }
        /**
         * To let the consumer use a custom ExceptionHandler. Notice if the
         * option bridgeErrorHandler is enabled then this option is not in use.
         * By default the consumer will deal with exceptions, that will be
         * logged at WARN or ERROR level and ignored.
         *
         * The option will be converted to a
         * <code>org.apache.camel.spi.ExceptionHandler</code> type.
         *
         * Group: consumer (advanced)
         */
        default AdvancedTwilioEndpointConsumerBuilder exceptionHandler(
                String exceptionHandler) {
            doSetProperty("exceptionHandler", exceptionHandler);
            return this;
        }
        /**
         * Sets the exchange pattern when the consumer creates an exchange.
         *
         * The option is a: <code>org.apache.camel.ExchangePattern</code> type.
         *
         * Group: consumer (advanced)
         */
        default AdvancedTwilioEndpointConsumerBuilder exchangePattern(
                ExchangePattern exchangePattern) {
            doSetProperty("exchangePattern", exchangePattern);
            return this;
        }
        /**
         * Sets the exchange pattern when the consumer creates an exchange.
         *
         * The option will be converted to a
         * <code>org.apache.camel.ExchangePattern</code> type.
         *
         * Group: consumer (advanced)
         */
        default AdvancedTwilioEndpointConsumerBuilder exchangePattern(
                String exchangePattern) {
            doSetProperty("exchangePattern", exchangePattern);
            return this;
        }
        /**
         * A pluggable org.apache.camel.PollingConsumerPollingStrategy allowing
         * you to provide your custom implementation to control error handling
         * usually occurred during the poll operation before an Exchange have
         * been created and being routed in Camel.
         *
         * The option is a:
         * <code>org.apache.camel.spi.PollingConsumerPollStrategy</code> type.
         *
         * Group: consumer (advanced)
         */
        default AdvancedTwilioEndpointConsumerBuilder pollStrategy(
                PollingConsumerPollStrategy pollStrategy) {
            doSetProperty("pollStrategy", pollStrategy);
            return this;
        }
        /**
         * A pluggable org.apache.camel.PollingConsumerPollingStrategy allowing
         * you to provide your custom implementation to control error handling
         * usually occurred during the poll operation before an Exchange have
         * been created and being routed in Camel.
         *
         * The option will be converted to a
         * <code>org.apache.camel.spi.PollingConsumerPollStrategy</code> type.
         *
         * Group: consumer (advanced)
         */
        default AdvancedTwilioEndpointConsumerBuilder pollStrategy(
                String pollStrategy) {
            doSetProperty("pollStrategy", pollStrategy);
            return this;
        }
        /**
         * Whether the endpoint should use basic property binding (Camel 2.x) or
         * the newer property binding with additional capabilities.
         *
         * The option is a: <code>boolean</code> type.
         *
         * Group: advanced
         */
        default AdvancedTwilioEndpointConsumerBuilder basicPropertyBinding(
                boolean basicPropertyBinding) {
            doSetProperty("basicPropertyBinding", basicPropertyBinding);
            return this;
        }
        /**
         * Whether the endpoint should use basic property binding (Camel 2.x) or
         * the newer property binding with additional capabilities.
         *
         * The option will be converted to a <code>boolean</code> type.
         *
         * Group: advanced
         */
        default AdvancedTwilioEndpointConsumerBuilder basicPropertyBinding(
                String basicPropertyBinding) {
            doSetProperty("basicPropertyBinding", basicPropertyBinding);
            return this;
        }
        /**
         * Sets whether synchronous processing should be strictly used, or Camel
         * is allowed to use asynchronous processing (if supported).
         *
         * The option is a: <code>boolean</code> type.
         *
         * Group: advanced
         */
        default AdvancedTwilioEndpointConsumerBuilder synchronous(
                boolean synchronous) {
            doSetProperty("synchronous", synchronous);
            return this;
        }
        /**
         * Sets whether synchronous processing should be strictly used, or Camel
         * is allowed to use asynchronous processing (if supported).
         *
         * The option will be converted to a <code>boolean</code> type.
         *
         * Group: advanced
         */
        default AdvancedTwilioEndpointConsumerBuilder synchronous(
                String synchronous) {
            doSetProperty("synchronous", synchronous);
            return this;
        }
    }
/**
 * Builder for endpoint producers for the Twilio component.
 */
public interface TwilioEndpointProducerBuilder
        extends
            EndpointProducerBuilder {
    /**
     * @return this builder, narrowed to its advanced-options view
     */
    default AdvancedTwilioEndpointProducerBuilder advanced() {
        return (AdvancedTwilioEndpointProducerBuilder) this;
    }
    /**
     * Sets the name of a parameter to be passed in the exchange In Body.
     *
     * The option is a: <code>java.lang.String</code> type.
     *
     * Group: common
     *
     * @param inBody name of the API method parameter to take from the
     *        message body
     * @return this builder, for fluent method chaining
     */
    default TwilioEndpointProducerBuilder inBody(String inBody) {
        doSetProperty("inBody", inBody);
        return this;
    }
    /**
     * Whether the producer should be started lazy (on the first message).
     * By starting lazy you can use this to allow CamelContext and routes to
     * startup in situations where a producer may otherwise fail during
     * starting and cause the route to fail being started. By deferring this
     * startup to be lazy then the startup failure can be handled during
     * routing messages via Camel's routing error handlers. Beware that when
     * the first message is processed then creating and starting the
     * producer may take a little time and prolong the total processing time
     * of the processing.
     *
     * The option is a: <code>boolean</code> type.
     *
     * Group: producer
     *
     * @param lazyStartProducer true to defer creating/starting the producer
     *        until the first message arrives
     * @return this builder, for fluent method chaining
     */
    default TwilioEndpointProducerBuilder lazyStartProducer(
            boolean lazyStartProducer) {
        doSetProperty("lazyStartProducer", lazyStartProducer);
        return this;
    }
    /**
     * Whether the producer should be started lazy (on the first message).
     * By starting lazy you can use this to allow CamelContext and routes to
     * startup in situations where a producer may otherwise fail during
     * starting and cause the route to fail being started. By deferring this
     * startup to be lazy then the startup failure can be handled during
     * routing messages via Camel's routing error handlers. Beware that when
     * the first message is processed then creating and starting the
     * producer may take a little time and prolong the total processing time
     * of the processing.
     *
     * The option will be converted to a <code>boolean</code> type.
     *
     * Group: producer
     *
     * @param lazyStartProducer "true" to defer creating/starting the
     *        producer until the first message arrives; converted to a
     *        boolean at runtime
     * @return this builder, for fluent method chaining
     */
    default TwilioEndpointProducerBuilder lazyStartProducer(
            String lazyStartProducer) {
        doSetProperty("lazyStartProducer", lazyStartProducer);
        return this;
    }
}
/**
 * Advanced builder for endpoint producers for the Twilio component.
 */
public interface AdvancedTwilioEndpointProducerBuilder
        extends
            EndpointProducerBuilder {
    /**
     * @return this builder, narrowed back to its basic-options view
     */
    default TwilioEndpointProducerBuilder basic() {
        return (TwilioEndpointProducerBuilder) this;
    }
    /**
     * Whether the endpoint should use basic property binding (Camel 2.x) or
     * the newer property binding with additional capabilities.
     *
     * The option is a: <code>boolean</code> type.
     *
     * Group: advanced
     *
     * @param basicPropertyBinding true to use the legacy Camel 2.x property
     *        binding
     * @return this builder, for fluent method chaining
     */
    default AdvancedTwilioEndpointProducerBuilder basicPropertyBinding(
            boolean basicPropertyBinding) {
        doSetProperty("basicPropertyBinding", basicPropertyBinding);
        return this;
    }
    /**
     * Whether the endpoint should use basic property binding (Camel 2.x) or
     * the newer property binding with additional capabilities.
     *
     * The option will be converted to a <code>boolean</code> type.
     *
     * Group: advanced
     *
     * @param basicPropertyBinding "true" to use the legacy Camel 2.x
     *        property binding; converted to a boolean at runtime
     * @return this builder, for fluent method chaining
     */
    default AdvancedTwilioEndpointProducerBuilder basicPropertyBinding(
            String basicPropertyBinding) {
        doSetProperty("basicPropertyBinding", basicPropertyBinding);
        return this;
    }
    /**
     * Sets whether synchronous processing should be strictly used, or Camel
     * is allowed to use asynchronous processing (if supported).
     *
     * The option is a: <code>boolean</code> type.
     *
     * Group: advanced
     *
     * @param synchronous true to force strictly synchronous processing
     * @return this builder, for fluent method chaining
     */
    default AdvancedTwilioEndpointProducerBuilder synchronous(
            boolean synchronous) {
        doSetProperty("synchronous", synchronous);
        return this;
    }
    /**
     * Sets whether synchronous processing should be strictly used, or Camel
     * is allowed to use asynchronous processing (if supported).
     *
     * The option will be converted to a <code>boolean</code> type.
     *
     * Group: advanced
     *
     * @param synchronous "true" to force strictly synchronous processing;
     *        converted to a boolean at runtime
     * @return this builder, for fluent method chaining
     */
    default AdvancedTwilioEndpointProducerBuilder synchronous(
            String synchronous) {
        doSetProperty("synchronous", synchronous);
        return this;
    }
}
/**
 * Builder for endpoint for the Twilio component.
 */
public interface TwilioEndpointBuilder
        extends
            TwilioEndpointConsumerBuilder, TwilioEndpointProducerBuilder {
    /**
     * @return this builder, narrowed to its advanced-options view
     */
    default AdvancedTwilioEndpointBuilder advanced() {
        return (AdvancedTwilioEndpointBuilder) this;
    }
    /**
     * Sets the name of a parameter to be passed in the exchange In Body.
     *
     * The option is a: <code>java.lang.String</code> type.
     *
     * Group: common
     *
     * @param inBody name of the API method parameter to take from the
     *        message body
     * @return this builder, for fluent method chaining
     */
    default TwilioEndpointBuilder inBody(String inBody) {
        doSetProperty("inBody", inBody);
        return this;
    }
}
/**
 * Advanced builder for endpoint for the Twilio component.
 */
public interface AdvancedTwilioEndpointBuilder
        extends
            AdvancedTwilioEndpointConsumerBuilder, AdvancedTwilioEndpointProducerBuilder {
    /**
     * @return this builder, narrowed back to its basic-options view
     */
    default TwilioEndpointBuilder basic() {
        return (TwilioEndpointBuilder) this;
    }
    /**
     * Whether the endpoint should use basic property binding (Camel 2.x) or
     * the newer property binding with additional capabilities.
     *
     * The option is a: <code>boolean</code> type.
     *
     * Group: advanced
     *
     * @param basicPropertyBinding true to use the legacy Camel 2.x property
     *        binding
     * @return this builder, for fluent method chaining
     */
    default AdvancedTwilioEndpointBuilder basicPropertyBinding(
            boolean basicPropertyBinding) {
        doSetProperty("basicPropertyBinding", basicPropertyBinding);
        return this;
    }
    /**
     * Whether the endpoint should use basic property binding (Camel 2.x) or
     * the newer property binding with additional capabilities.
     *
     * The option will be converted to a <code>boolean</code> type.
     *
     * Group: advanced
     *
     * @param basicPropertyBinding "true" to use the legacy Camel 2.x
     *        property binding; converted to a boolean at runtime
     * @return this builder, for fluent method chaining
     */
    default AdvancedTwilioEndpointBuilder basicPropertyBinding(
            String basicPropertyBinding) {
        doSetProperty("basicPropertyBinding", basicPropertyBinding);
        return this;
    }
    /**
     * Sets whether synchronous processing should be strictly used, or Camel
     * is allowed to use asynchronous processing (if supported).
     *
     * The option is a: <code>boolean</code> type.
     *
     * Group: advanced
     *
     * @param synchronous true to force strictly synchronous processing
     * @return this builder, for fluent method chaining
     */
    default AdvancedTwilioEndpointBuilder synchronous(boolean synchronous) {
        doSetProperty("synchronous", synchronous);
        return this;
    }
    /**
     * Sets whether synchronous processing should be strictly used, or Camel
     * is allowed to use asynchronous processing (if supported).
     *
     * The option will be converted to a <code>boolean</code> type.
     *
     * Group: advanced
     *
     * @param synchronous "true" to force strictly synchronous processing;
     *        converted to a boolean at runtime
     * @return this builder, for fluent method chaining
     */
    default AdvancedTwilioEndpointBuilder synchronous(String synchronous) {
        doSetProperty("synchronous", synchronous);
        return this;
    }
}
/**
 * Twilio (camel-twilio)
 * The Twilio component allows you to interact with the Twilio REST APIs
 * using Twilio Java SDK.
 *
 * Category: api,messaging,cloud
 * Since: 2.20
 * Maven coordinates: org.apache.camel:camel-twilio
 *
 * Syntax: <code>twilio:apiName/methodName</code>
 *
 * Path parameter: apiName (required)
 * What kind of operation to perform
 * The value can be one of: ACCOUNT, ADDRESS, APPLICATION,
 * AVAILABLE_PHONE_NUMBER_COUNTRY, CALL, CONFERENCE, CONNECT_APP,
 * INCOMING_PHONE_NUMBER, KEY, MESSAGE, NEW_KEY, NEW_SIGNING_KEY,
 * NOTIFICATION, OUTGOING_CALLER_ID, QUEUE, RECORDING, SHORT_CODE,
 * SIGNING_KEY, TOKEN, TRANSCRIPTION, VALIDATION_REQUEST,
 * ADDRESS_DEPENDENT_PHONE_NUMBER, AVAILABLE_PHONE_NUMBER_COUNTRY_LOCAL,
 * AVAILABLE_PHONE_NUMBER_COUNTRY_MOBILE,
 * AVAILABLE_PHONE_NUMBER_COUNTRY_TOLL_FREE, CALL_FEEDBACK,
 * CALL_FEEDBACK_SUMMARY, CALL_NOTIFICATION, CALL_RECORDING,
 * CONFERENCE_PARTICIPANT, INCOMING_PHONE_NUMBER_LOCAL,
 * INCOMING_PHONE_NUMBER_MOBILE, INCOMING_PHONE_NUMBER_TOLL_FREE,
 * MESSAGE_FEEDBACK, MESSAGE_MEDIA, QUEUE_MEMBER, RECORDING_ADD_ON_RESULT,
 * RECORDING_TRANSCRIPTION, RECORDING_ADD_ON_RESULT_PAYLOAD,
 * SIP_CREDENTIAL_LIST, SIP_DOMAIN, SIP_IP_ACCESS_CONTROL_LIST,
 * SIP_CREDENTIAL_LIST_CREDENTIAL, SIP_DOMAIN_CREDENTIAL_LIST_MAPPING,
 * SIP_DOMAIN_IP_ACCESS_CONTROL_LIST_MAPPING,
 * SIP_IP_ACCESS_CONTROL_LIST_IP_ADDRESS, USAGE_RECORD, USAGE_TRIGGER,
 * USAGE_RECORD_ALL_TIME, USAGE_RECORD_DAILY, USAGE_RECORD_LAST_MONTH,
 * USAGE_RECORD_MONTHLY, USAGE_RECORD_THIS_MONTH, USAGE_RECORD_TODAY,
 * USAGE_RECORD_YEARLY, USAGE_RECORD_YESTERDAY
 *
 * Path parameter: methodName (required)
 * What sub operation to use for the selected operation
 * The value can be one of: create, delete, fetch, read, update
 *
 * @param path the endpoint path, of the form <code>apiName/methodName</code>
 * @return a builder for the twilio endpoint, supporting both the basic and
 *         the advanced option views
 */
default TwilioEndpointBuilder twilio(String path) {
    // Anonymous-style local class: binds the "twilio" scheme to this path and
    // exposes both the basic and advanced builder views over the same instance.
    class TwilioEndpointBuilderImpl extends AbstractEndpointBuilder implements TwilioEndpointBuilder, AdvancedTwilioEndpointBuilder {
        public TwilioEndpointBuilderImpl(String path) {
            super("twilio", path);
        }
    }
    return new TwilioEndpointBuilderImpl(path);
}
}
| |
package com.lothrazar.cyclic.block.laser;
import com.lothrazar.cyclic.block.TileBlockEntityCyclic;
import com.lothrazar.cyclic.data.BlockPosDim;
import com.lothrazar.cyclic.data.OffsetEnum;
import com.lothrazar.cyclic.item.datacard.LocationGpsCard;
import com.lothrazar.cyclic.registry.TileRegistry;
import net.minecraft.core.BlockPos;
import net.minecraft.core.Direction;
import net.minecraft.nbt.CompoundTag;
import net.minecraft.network.chat.Component;
import net.minecraft.network.chat.TextComponent;
import net.minecraft.world.MenuProvider;
import net.minecraft.world.entity.player.Inventory;
import net.minecraft.world.entity.player.Player;
import net.minecraft.world.inventory.AbstractContainerMenu;
import net.minecraft.world.item.ItemStack;
import net.minecraft.world.level.block.entity.BlockEntity;
import net.minecraft.world.level.block.state.BlockState;
import net.minecraft.world.phys.AABB;
import net.minecraftforge.common.capabilities.Capability;
import net.minecraftforge.common.util.LazyOptional;
import net.minecraftforge.items.CapabilityItemHandler;
import net.minecraftforge.items.IItemHandler;
import net.minecraftforge.items.ItemStackHandler;
public class TileLaser extends TileBlockEntityCyclic implements MenuProvider {

  /**
   * Synced-field ids used by the container/screen layer. The ordinal order is
   * the wire format, so entries must never be reordered.
   */
  static enum Fields {
    REDSTONE, THICK, RED, GREEN, BLUE, ALPHA, XOFF, YOFF, ZOFF;
  }

  protected OffsetEnum xOffset = OffsetEnum.CENTER;
  protected OffsetEnum yOffset = OffsetEnum.CENTER;
  protected OffsetEnum zOffset = OffsetEnum.CENTER;
  // Beam colour components stored as 0-255 ints; alpha and thickness are
  // stored as percentages (see getAlpha()/getThick()).
  private int red = 255;
  private int green = 0;
  private int blue = 0;
  private int alpha = 70;
  private int thick = 8;
  // Four slots, restricted to GPS location cards only.
  ItemStackHandler inventory = new ItemStackHandler(4) {

    @Override
    public boolean isItemValid(int slot, ItemStack stack) {
      return stack.getItem() instanceof LocationGpsCard;
    }
  };
  private LazyOptional<IItemHandler> itemCap = LazyOptional.of(() -> inventory);

  public TileLaser(BlockPos pos, BlockState state) {
    super(TileRegistry.LASER.get(), pos, state);
    this.needsRedstone = 0;
  }

  @Override
  public void invalidateCaps() {
    itemCap.invalidate();
    super.invalidateCaps();
  }

  @Override
  public <T> LazyOptional<T> getCapability(Capability<T> cap, Direction side) {
    // Expose the item handler; defer everything else to the parent.
    return cap == CapabilityItemHandler.ITEM_HANDLER_CAPABILITY
        ? itemCap.cast()
        : super.getCapability(cap, side);
  }

  @Override
  public Component getDisplayName() {
    return new TextComponent(getType().getRegistryName().getPath());
  }

  @Override
  public AbstractContainerMenu createMenu(int i, Inventory playerInventory, Player playerEntity) {
    return new ContainerLaser(i, level, worldPosition, playerInventory, playerEntity);
  }

  /**
   * Target of the beam: the position stored on the GPS card in slot 0 when one
   * is present, otherwise this block's own position.
   */
  BlockPos getPosTarget() {
    BlockPosDim stored = LocationGpsCard.getPosition(inventory.getStackInSlot(0));
    if (stored == null || stored.getPos() == null) {
      return this.getBlockPos();
    }
    return stored.getPos();
  }

  @Override
  public AABB getRenderBoundingBox() {
    // The beam can span arbitrary distances, so never cull the renderer.
    return BlockEntity.INFINITE_EXTENT_AABB;
  }

  @Override
  public int getField(int id) {
    final Fields field = Fields.values()[id];
    switch (field) {
      case REDSTONE:
        return this.needsRedstone;
      case THICK:
        return this.thick;
      case RED:
        return this.red;
      case GREEN:
        return this.green;
      case BLUE:
        return this.blue;
      case ALPHA:
        return this.alpha;
      case XOFF:
        return this.xOffset.ordinal();
      case YOFF:
        return this.yOffset.ordinal();
      case ZOFF:
        return this.zOffset.ordinal();
    }
    return -1;
  }

  @Override
  public void setField(int id, int value) {
    final Fields field = Fields.values()[id];
    switch (field) {
      case REDSTONE:
        this.needsRedstone = value % 2;
        break;
      case THICK:
        this.thick = value;
        break;
      case RED:
        this.red = value;
        break;
      case GREEN:
        this.green = value;
        break;
      case BLUE:
        this.blue = value;
        break;
      case ALPHA:
        this.alpha = value;
        break;
      case XOFF:
        this.xOffset = offsetForOrdinal(value);
        break;
      case YOFF:
        this.yOffset = offsetForOrdinal(value);
        break;
      case ZOFF:
        this.zOffset = offsetForOrdinal(value);
        break;
    }
  }

  /** Maps a synced int back to an OffsetEnum, wrapping out-of-range values to 0. */
  private static OffsetEnum offsetForOrdinal(int ordinal) {
    if (ordinal >= OffsetEnum.values().length) {
      ordinal = 0;
    }
    return OffsetEnum.values()[ordinal];
  }

  @Override
  public void load(CompoundTag tag) {
    inventory.deserializeNBT(tag.getCompound(NBTINV));
    red = tag.getInt("red");
    green = tag.getInt("green");
    blue = tag.getInt("blue");
    alpha = tag.getInt("alpha");
    thick = tag.getInt("thick");
    super.load(tag);
  }

  @Override
  public void saveAdditional(CompoundTag tag) {
    tag.put(NBTINV, inventory.serializeNBT());
    tag.putInt("red", red);
    tag.putInt("green", green);
    tag.putInt("blue", blue);
    tag.putInt("alpha", alpha);
    tag.putInt("thick", thick);
    super.saveAdditional(tag);
  }

  /** @return red channel scaled to [0, 1] for rendering */
  public float getRed() {
    return this.red / 255F;
  }

  /** @return blue channel scaled to [0, 1] for rendering */
  public float getBlue() {
    return this.blue / 255F;
  }

  /** @return green channel scaled to [0, 1] for rendering */
  public float getGreen() {
    return this.green / 255F;
  }

  /** @return alpha as a [0, 1] fraction (stored as a percentage) */
  public float getAlpha() {
    return this.alpha / 100F;
  }

  /** @return beam thickness as a [0, 1] fraction (stored as a percentage) */
  public float getThick() {
    return this.thick / 100F;
  }
}
| |
package ca.carleton.gcrc.couch.app.impl;
import java.util.Collection;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import org.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import ca.carleton.gcrc.couch.app.Attachment;
import ca.carleton.gcrc.couch.app.Document;
import ca.carleton.gcrc.couch.app.DocumentDigest;
import ca.carleton.gcrc.couch.app.DocumentUpdateProcess;
public class UpdateSpecifier {

	static final private Logger logger = LoggerFactory.getLogger(UpdateSpecifier.class);

	/**
	 * Returns the actions required during a document update to modify
	 * a target document so that it becomes equal to a source document.
	 * Uses the UPDATE_UNLESS_MODIFIED schedule and the default Nunaliit
	 * object comparator.
	 * @param sourceDoc Source document
	 * @param targetDoc Document currently on the target location
	 * @return Specifier that explains what needs to happen during an update
	 * @throws Exception if the digest of the source document can not be computed
	 */
	static public UpdateSpecifier computeUpdateSpecifier(
		Document sourceDoc
		,JSONObject targetDoc
		) throws Exception {

		DigestComputerSha1 digestComputer = new DigestComputerSha1();
		DocumentDigest dd = digestComputer.computeDocumentDigest(sourceDoc);

		return computeUpdateSpecifier(
			sourceDoc
			,dd
			,targetDoc
			,DocumentUpdateProcess.Schedule.UPDATE_UNLESS_MODIFIED
			,UpdateObjectComparator.getNunaliitComparator()
			);
	}

	/**
	 * Returns the actions required during a document update to modify
	 * a target document so that it becomes equal to a source document.
	 * @param sourceDoc Source document
	 * @param documentDigest Digest computed for source document
	 * @param targetDoc Document currently on the target location (null if the
	 *                  document does not yet exist)
	 * @param schedule Specifies the type of update required
	 * @param objectComparator Comparator used to decide whether the main
	 *                         documents are equal
	 * @return Specifier that explains what needs to happen during an update
	 * @throws Exception
	 */
	static public UpdateSpecifier computeUpdateSpecifier(
		Document sourceDoc
		,DocumentDigest documentDigest
		,JSONObject targetDoc
		,DocumentUpdateProcess.Schedule schedule
		,Comparator<JSONObject> objectComparator
		) throws Exception {

		UpdateSpecifier result = new UpdateSpecifier();

		// Verify main document
		if( schedule == DocumentUpdateProcess.Schedule.UPDATE_FORCED ){
			logger.debug("Update forced by schedule. Mark document modified");
			result.setDocumentModified(true);

		} else if( null == targetDoc ) {
			// Document creation
			logger.debug("Target document does not exist. Mark document modified");
			result.setDocumentModified(true);

		} else {
			if( 0 != objectComparator.compare(sourceDoc.getJSONObject(), targetDoc) ){
				logger.debug("Documents do not compare as equal. Mark document modified");
				result.setDocumentModified(true);
			}
		}

		// Attachments...

		// Get attachments from source document
		Map<String,Attachment> attachmentsByName = new HashMap<String,Attachment>();
		{
			Collection<Attachment> attachments = sourceDoc.getAttachments();
			if( null != attachments ) {
				for(Attachment attachment : attachments){
					attachmentsByName.put(attachment.getName(), attachment);
				}
			}
		}

		// Figure out which attachments should be deleted
		if( null != targetDoc ) {
			JSONObject targetAttachments = targetDoc.optJSONObject("_attachments");
			if( null != targetAttachments ){
				Iterator<?> it = targetAttachments.keys();
				while( it.hasNext() ){
					Object keyObj = it.next();
					if( keyObj instanceof String ) {
						String attachmentName = (String)keyObj;
						if( false == attachmentsByName.containsKey(attachmentName) ){
							// Target document has an attachment not available in the
							// source one. Delete.
							// (Fixed: this message was a copy-paste of the document
							// comparison message and did not describe this branch.)
							logger.debug("Attachment {} not found on source document. Mark attachment for deletion", attachmentName);
							result.addAttachmentToDelete(attachmentName);
						}
					}
				}
			}
		}

		// Figure out which attachments should be uploaded
		for(Attachment attachment : attachmentsByName.values()){
			String attachmentName = attachment.getName();

			boolean shouldUpload = false;
			if( null == targetDoc ) {
				// On creation, upload all attachments
				shouldUpload = true;
			} else if( schedule == DocumentUpdateProcess.Schedule.UPDATE_FORCED ) {
				// On forced update,
				shouldUpload = true;
			} else {
				String attachmentContentType = attachment.getContentType();

				shouldUpload = shouldAttachmentBeUploaded(
					targetDoc
					,attachmentName
					,documentDigest.getAttachmentDigest(attachmentName)
					,attachmentContentType
					);
			}

			if( shouldUpload ){
				result.addAttachmentToUpload(attachmentName);
			} else {
				result.addAttachmentNotModified(attachmentName);
			}
		}

		return result;
	}

	/**
	 * Returns true if the named attachment on the target document differs from
	 * the source attachment (by content-type or digest), or if the target does
	 * not carry enough manifest information to verify the digest.
	 */
	static private boolean shouldAttachmentBeUploaded(
		JSONObject targetDoc
		,String attachmentName
		,String attachmentDigest
		,String attachmentContentType
		) {

		JSONObject targetAttachments = targetDoc.optJSONObject("_attachments");
		if( null == targetAttachments ) {
			// No attachment on target doc. Upload.
			return true;
		}

		JSONObject targetAttachment = targetAttachments.optJSONObject(attachmentName);
		if( null == targetAttachment ) {
			// Target document does not have an attachment with this name
			return true;
		}

		String targetAttachmentContentType = targetAttachment.optString("content_type");
		if( null == targetAttachmentContentType ){
			// Attachment should have a content-type
			return true;
		}
		if( false == targetAttachmentContentType.equals(attachmentContentType) ){
			// content-type has changed
			return true;
		}

		JSONObject targetManifest = targetDoc.optJSONObject(DocumentManifest.MANIFEST_KEY);
		if( null == targetManifest ) {
			// Can not verify digest on target document
			return true;
		}

		JSONObject targetAttachmentManifests = targetManifest.optJSONObject("attachments");
		if( null == targetAttachmentManifests ) {
			// Can not verify digest on target document
			return true;
		}

		JSONObject targetAttachmentManifest = targetAttachmentManifests.optJSONObject(attachmentName);
		if( null == targetAttachmentManifest ) {
			// Can not verify digest on target document
			return true;
		}

		String targetAttachmentDigest = targetAttachmentManifest.optString("digest");
		if( null == targetAttachmentDigest ) {
			// Can not verify digest on target document
			return true;
		} else if( false == targetAttachmentDigest.equals(attachmentDigest) ){
			// Digest differs
			return true;
		}

		return false;
	}

	// Whether the main document body must be rewritten.
	private boolean documentModified = false;
	// Attachment names present on the target but absent from the source.
	private Set<String> attachmentsToDelete = new HashSet<String>();
	// Attachment names that must be (re-)uploaded.
	private Set<String> attachmentsToUpload = new HashSet<String>();
	// Attachment names verified to be unchanged.
	private Set<String> attachmentsNotModified = new HashSet<String>();

	public boolean isDocumentModified() {
		return documentModified;
	}

	public void setDocumentModified(boolean documentModified) {
		this.documentModified = documentModified;
	}

	public Set<String> getAttachmentsToDelete() {
		return attachmentsToDelete;
	}

	public void addAttachmentToDelete(String attachmentName) {
		this.attachmentsToDelete.add(attachmentName);
	}

	public Set<String> getAttachmentsToUpload() {
		return attachmentsToUpload;
	}

	public void addAttachmentToUpload(String attachmentName) {
		this.attachmentsToUpload.add(attachmentName);
	}

	public Set<String> getAttachmentsNotModified() {
		return attachmentsNotModified;
	}

	public void addAttachmentNotModified(String attachmentName) {
		this.attachmentsNotModified.add(attachmentName);
	}

	/**
	 * @return true when any action (document write, attachment upload or
	 *         deletion) is required to bring the target up to date
	 */
	public boolean isUpdateRequired(){
		if( documentModified ) {
			return true;
		} else if( attachmentsToDelete.size() > 0 ){
			return true;
		} else if( attachmentsToUpload.size() > 0 ){
			return true;
		}
		return false;
	}
}
| |
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.android.sunshine.app;
import android.content.Context;
import android.content.SharedPreferences;
import android.preference.PreferenceManager;
import android.text.format.Time;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Date;
public class Utility {

    /** Reads the user's preferred location setting, falling back to the default resource. */
    public static String getPreferredLocation(Context context) {
        SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(context);
        return prefs.getString(context.getString(R.string.pref_location_key),
                context.getString(R.string.pref_location_default));
    }

    /** @return true when the user's units preference is metric (the default). */
    public static boolean isMetric(Context context) {
        SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(context);
        return prefs.getString(context.getString(R.string.pref_units_key),
                context.getString(R.string.pref_units_metric))
                .equals(context.getString(R.string.pref_units_metric));
    }

    /**
     * Formats a temperature for display, converting from the stored Celsius value
     * to Fahrenheit when the user prefers imperial units.
     */
    public static String formatTemperature(Context context, double temperature) {
        // Data stored in Celsius by default. If user prefers to see in Fahrenheit, convert
        // the values here.
        // (Removed an unused "suffix" local; the degree sign comes from the
        // format_temperature resource.)
        if (!isMetric(context)) {
            temperature = (temperature * 1.8) + 32;
        }
        // For presentation, assume the user doesn't care about tenths of a degree.
        return String.format(context.getString(R.string.format_temperature), temperature);
    }

    /** Formats a millisecond timestamp with the default locale's date format. */
    static String formatDate(long dateInMilliseconds) {
        Date date = new Date(dateInMilliseconds);
        return DateFormat.getDateInstance().format(date);
    }

    // Format used for storing dates in the database. ALso used for converting those strings
    // back into date objects for comparison/processing.
    public static final String DATE_FORMAT = "yyyyMMdd";

    /**
     * Helper method to convert the database representation of the date into something to display
     * to users. As classy and polished a user experience as "20140102" is, we can do better.
     *
     * @param context Context to use for resource localization
     * @param dateInMillis The date in milliseconds
     * @return a user-friendly representation of the date.
     */
    public static String getFriendlyDayString(Context context, long dateInMillis) {
        // The day string for forecast uses the following logic:
        // For today: "Today, June 8"
        // For tomorrow: "Tomorrow"
        // For the next 5 days: "Wednesday" (just the day name)
        // For all days after that: "Mon Jun 8"
        Time time = new Time();
        time.setToNow();
        long currentTime = System.currentTimeMillis();
        int julianDay = Time.getJulianDay(dateInMillis, time.gmtoff);
        int currentJulianDay = Time.getJulianDay(currentTime, time.gmtoff);

        // If the date we're building the String for is today's date, the format
        // is "Today, June 24"
        if (julianDay == currentJulianDay) {
            String today = context.getString(R.string.today);
            int formatId = R.string.format_full_friendly_date;
            // BUG FIX: previously the arguments were passed to getString() and the
            // already-formatted result was run through String.format() a second time,
            // which throws if the rendered date happens to contain a '%'. Format once.
            return context.getString(
                    formatId,
                    today,
                    getFormattedMonthDay(context, dateInMillis));
        } else if (julianDay < currentJulianDay + 7) {
            // If the input date is less than a week in the future, just return the day name.
            return getDayName(context, dateInMillis);
        } else {
            // Otherwise, use the form "Mon Jun 3"
            SimpleDateFormat shortenedDateFormat = new SimpleDateFormat("EEE MMM dd");
            return shortenedDateFormat.format(dateInMillis);
        }
    }

    /**
     * Given a day, returns just the name to use for that day.
     * E.g "today", "tomorrow", "wednesday".
     *
     * @param context Context to use for resource localization
     * @param dateInMillis The date in milliseconds
     * @return the localized day name for the given date
     */
    public static String getDayName(Context context, long dateInMillis) {
        // If the date is today, return the localized version of "Today" instead of the actual
        // day name.
        Time t = new Time();
        t.setToNow();
        int julianDay = Time.getJulianDay(dateInMillis, t.gmtoff);
        int currentJulianDay = Time.getJulianDay(System.currentTimeMillis(), t.gmtoff);
        if (julianDay == currentJulianDay) {
            return context.getString(R.string.today);
        } else if (julianDay == currentJulianDay + 1) {
            return context.getString(R.string.tomorrow);
        } else {
            // Otherwise, the format is just the day of the week (e.g "Wednesday".
            // (Removed an unused Time local that was never read.)
            SimpleDateFormat dayFormat = new SimpleDateFormat("EEEE");
            return dayFormat.format(dateInMillis);
        }
    }

    /**
     * Converts db date format to the format "Month day", e.g "June 24".
     *
     * @param context Context to use for resource localization
     * @param dateInMillis The db formatted date string, expected to be of the form specified
     * in Utility.DATE_FORMAT
     * @return The day in the form of a string formatted "December 6"
     */
    public static String getFormattedMonthDay(Context context, long dateInMillis) {
        // (Removed an unused Time local and an unused dbDateFormat formatter.)
        SimpleDateFormat monthDayFormat = new SimpleDateFormat("MMMM dd");
        return monthDayFormat.format(dateInMillis);
    }

    /**
     * Formats wind speed and direction for display, converting km/h to mph when
     * the user prefers imperial units.
     */
    public static String getFormattedWind(Context context, float windSpeed, float degrees) {
        int windFormat;
        if (Utility.isMetric(context)) {
            windFormat = R.string.format_wind_kmh;
        } else {
            windFormat = R.string.format_wind_mph;
            windSpeed = .621371192237334f * windSpeed;
        }

        // From wind direction in degrees, determine compass direction as a string (e.g NW)
        String direction = "Unknown";
        if (degrees >= 337.5 || degrees < 22.5) {
            direction = "N";
        } else if (degrees >= 22.5 && degrees < 67.5) {
            direction = "NE";
        } else if (degrees >= 67.5 && degrees < 112.5) {
            direction = "E";
        } else if (degrees >= 112.5 && degrees < 157.5) {
            direction = "SE";
        } else if (degrees >= 157.5 && degrees < 202.5) {
            direction = "S";
        } else if (degrees >= 202.5 && degrees < 247.5) {
            direction = "SW";
        } else if (degrees >= 247.5 && degrees < 292.5) {
            direction = "W";
        } else if (degrees >= 292.5 && degrees < 337.5) {
            direction = "NW";
        }
        return String.format(context.getString(windFormat), windSpeed, direction);
    }

    /**
     * Helper method to provide the icon resource id according to the weather condition id returned
     * by the OpenWeatherMap call.
     *
     * @param weatherId from OpenWeatherMap API response
     * @return resource id for the corresponding icon. -1 if no relation is found.
     */
    public static int getIconResourceForWeatherCondition(int weatherId) {
        // Based on weather code data found at:
        // http://bugs.openweathermap.org/projects/api/wiki/Weather_Condition_Codes
        if (weatherId >= 200 && weatherId <= 232) {
            return R.drawable.ic_storm;
        } else if (weatherId >= 300 && weatherId <= 321) {
            return R.drawable.ic_light_rain;
        } else if (weatherId >= 500 && weatherId <= 504) {
            return R.drawable.ic_rain;
        } else if (weatherId == 511) {
            return R.drawable.ic_snow;
        } else if (weatherId >= 520 && weatherId <= 531) {
            return R.drawable.ic_rain;
        } else if (weatherId >= 600 && weatherId <= 622) {
            return R.drawable.ic_snow;
        } else if (weatherId >= 701 && weatherId <= 761) {
            return R.drawable.ic_fog;
        } else if (weatherId == 781) {
            // NOTE: 761 was listed here too, but it is already captured by the
            // 701-761 fog range above, so that condition was dead code.
            return R.drawable.ic_storm;
        } else if (weatherId == 800) {
            return R.drawable.ic_clear;
        } else if (weatherId == 801) {
            return R.drawable.ic_light_clouds;
        } else if (weatherId >= 802 && weatherId <= 804) {
            return R.drawable.ic_cloudy;
        }
        return -1;
    }

    /**
     * Helper method to provide the art resource id according to the weather condition id returned
     * by the OpenWeatherMap call.
     *
     * @param weatherId from OpenWeatherMap API response
     * @return resource id for the corresponding icon. -1 if no relation is found.
     */
    public static int getArtResourceForWeatherCondition(int weatherId) {
        // Based on weather code data found at:
        // http://bugs.openweathermap.org/projects/api/wiki/Weather_Condition_Codes
        if (weatherId >= 200 && weatherId <= 232) {
            return R.drawable.art_storm;
        } else if (weatherId >= 300 && weatherId <= 321) {
            return R.drawable.art_light_rain;
        } else if (weatherId >= 500 && weatherId <= 504) {
            return R.drawable.art_rain;
        } else if (weatherId == 511) {
            return R.drawable.art_snow;
        } else if (weatherId >= 520 && weatherId <= 531) {
            return R.drawable.art_rain;
        } else if (weatherId >= 600 && weatherId <= 622) {
            return R.drawable.art_snow;
        } else if (weatherId >= 701 && weatherId <= 761) {
            return R.drawable.art_fog;
        } else if (weatherId == 781) {
            // NOTE: 761 was listed here too, but it is already captured by the
            // 701-761 fog range above, so that condition was dead code.
            return R.drawable.art_storm;
        } else if (weatherId == 800) {
            return R.drawable.art_clear;
        } else if (weatherId == 801) {
            return R.drawable.art_light_clouds;
        } else if (weatherId >= 802 && weatherId <= 804) {
            return R.drawable.art_clouds;
        }
        return -1;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/* Copied from commons-validator:commons-validator:1.6, with [PATCH] modifications */
package jenkins.org.apache.commons.validator.routines;
import org.kohsuke.accmod.Restricted;
import org.kohsuke.accmod.restrictions.NoExternalUse;
import java.io.Serializable;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.Collections;
import java.util.HashSet;
import java.util.Locale;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* <p><b>URL Validation</b> routines.</p>
* Behavior of validation is modified by passing in options:
* <ul>
* <li>ALLOW_2_SLASHES - [FALSE] Allows double '/' characters in the path
* component.</li>
 * <li>NO_FRAGMENTS - [FALSE] By default fragments are allowed, if this option is
* included then fragments are flagged as illegal.</li>
* <li>ALLOW_ALL_SCHEMES - [FALSE] By default only http, https, and ftp are
* considered valid schemes. Enabling this option will let any scheme pass validation.</li>
* </ul>
*
 * <p>Originally based on a php script by Debbie Dyer, validation.php v1.2b, Date: 03/07/02,
* http://javascript.internet.com. However, this validation now bears little resemblance
* to the php original.</p>
* <pre>
* Example of usage:
* Construct a UrlValidator with valid schemes of "http", and "https".
*
* String[] schemes = {"http","https"}.
* UrlValidator urlValidator = new UrlValidator(schemes);
* if (urlValidator.isValidRootUrl("ftp://foo.bar.com/")) {
* System.out.println("url is valid");
* } else {
* System.out.println("url is invalid");
* }
*
* prints "url is invalid"
* If instead the default constructor is used.
*
* UrlValidator urlValidator = new UrlValidator();
* if (urlValidator.isValidRootUrl("ftp://foo.bar.com/")) {
* System.out.println("url is valid");
* } else {
* System.out.println("url is invalid");
* }
*
* prints out "url is valid"
* </pre>
*
* @see
* <a href="http://www.ietf.org/rfc/rfc2396.txt">
* Uniform Resource Identifiers (URI): Generic Syntax
* </a>
*
* @version $Revision: 1783203 $
* @since Validator 1.4
*/
//[PATCH]
@Restricted(NoExternalUse.class)
// end of [PATCH]
public class UrlValidator implements Serializable {

    private static final long serialVersionUID = 7557161713937335013L;

    /** Largest value of an unsigned 16-bit integer, i.e. the maximum legal port number. */
    private static final int MAX_UNSIGNED_16_BIT_INT = 0xFFFF; // port max

    /**
     * Allows all validly formatted schemes to pass validation instead of
     * supplying a set of valid schemes.
     */
    public static final long ALLOW_ALL_SCHEMES = 1 << 0;

    /**
     * Allow two slashes in the path component of the URL.
     */
    public static final long ALLOW_2_SLASHES = 1 << 1;

    /**
     * Enabling this option disallows any URL fragments.
     */
    public static final long NO_FRAGMENTS = 1 << 2;

    /**
     * Allow local URLs, such as http://localhost/ or http://machine/ .
     * This enables a broad-brush check, for complex local machine name
     * validation requirements you should create your validator with
     * a {@link RegexValidator} instead ({@link #UrlValidator(RegexValidator, long)})
     */
    public static final long ALLOW_LOCAL_URLS = 1 << 3; // CHECKSTYLE IGNORE MagicNumber

    /**
     * This expression derived/taken from the BNF for URI (RFC2396).
     * Capturing groups: 2 = scheme, 4 = authority, 5 = path, 7 = query, 9 = fragment
     * (see the PARSE_URL_* constants below).
     */
    private static final String URL_REGEX =
            "^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\\?([^#]*))?(#(.*))?";
    //       12            3  4          5       6  7        8 9
    private static final Pattern URL_PATTERN = Pattern.compile(URL_REGEX);

    /**
     * Schema/Protocol (e.g. http:, ftp:, file:, etc).
     * Capturing group index in {@link #URL_PATTERN}.
     */
    private static final int PARSE_URL_SCHEME = 2;

    /**
     * Includes hostname/ip and port number.
     * Capturing group index in {@link #URL_PATTERN}.
     */
    private static final int PARSE_URL_AUTHORITY = 4;

    /** Capturing group index of the path component in {@link #URL_PATTERN}. */
    private static final int PARSE_URL_PATH = 5;

    /** Capturing group index of the query component in {@link #URL_PATTERN}. */
    private static final int PARSE_URL_QUERY = 7;

    /** Capturing group index of the fragment component in {@link #URL_PATTERN}. */
    private static final int PARSE_URL_FRAGMENT = 9;

    /**
     * Protocol scheme (e.g. http, ftp, https): one alpha followed by
     * any number of alphanumerics, '+', '-' or '.'.
     */
    private static final String SCHEME_REGEX = "^\\p{Alpha}[\\p{Alnum}\\+\\-\\.]*";
    private static final Pattern SCHEME_PATTERN = Pattern.compile(SCHEME_REGEX);

    // Drop numeric, and "+-." for now
    // TODO does not allow for optional userinfo.
    // Validation of character set is done by isValidAuthority
    private static final String AUTHORITY_CHARS_REGEX = "\\p{Alnum}\\-\\."; // allows for IPV4 but not IPV6
    private static final String IPV6_REGEX = "[0-9a-fA-F:]+"; // do this as separate match because : could cause ambiguity with port prefix

    // userinfo    = *( unreserved / pct-encoded / sub-delims / ":" )
    // unreserved  = ALPHA / DIGIT / "-" / "." / "_" / "~"
    // sub-delims  = "!" / "$" / "&" / "'" / "(" / ")" / "*" / "+" / "," / ";" / "="
    // We assume that password has the same valid chars as user info
    private static final String USERINFO_CHARS_REGEX = "[a-zA-Z0-9%-._~!$&'()*+,;=]";
    // since neither ':' nor '@' are allowed chars, we don't need to use non-greedy matching
    private static final String USERINFO_FIELD_REGEX =
            USERINFO_CHARS_REGEX + "+" + // At least one character for the name
            "(?::" + USERINFO_CHARS_REGEX + "*)?@"; // colon and password may be absent
    // Groups: 1 = IPv6 literal (inside brackets), 2 = host/IPv4 (userinfo excluded),
    // 3 = port digits (leading colon excluded), 4 = trailing extra (should be empty)
    private static final String AUTHORITY_REGEX =
            "(?:\\[("+IPV6_REGEX+")\\]|(?:(?:"+USERINFO_FIELD_REGEX+")?([" + AUTHORITY_CHARS_REGEX + "]*)))(?::(\\d*))?(.*)?";
    //          1                            e.g. user:pass@           2                                        3      4
    private static final Pattern AUTHORITY_PATTERN = Pattern.compile(AUTHORITY_REGEX);

    /** Capturing group index of an IPv6 literal in {@link #AUTHORITY_PATTERN}. */
    private static final int PARSE_AUTHORITY_IPV6 = 1;

    /** Capturing group index of the hostname/IPv4 in {@link #AUTHORITY_PATTERN}. */
    private static final int PARSE_AUTHORITY_HOST_IP = 2; // excludes userinfo, if present

    /** Capturing group index of the port digits in {@link #AUTHORITY_PATTERN}. */
    private static final int PARSE_AUTHORITY_PORT = 3; // excludes leading colon

    /**
     * Should always be empty. The code currently allows spaces.
     */
    private static final int PARSE_AUTHORITY_EXTRA = 4;

    /** Path: optional, must start with '/' and contain only the listed characters. */
    private static final String PATH_REGEX = "^(/[-\\w:@&?=+,.!/~*'%$_;\\(\\)]*)?$";
    private static final Pattern PATH_PATTERN = Pattern.compile(PATH_REGEX);

    /** Query: any run of non-whitespace characters (possibly empty). */
    private static final String QUERY_REGEX = "^(\\S*)$";
    private static final Pattern QUERY_PATTERN = Pattern.compile(QUERY_REGEX);

    /**
     * Holds the set of current validation options.
     */
    private final long options;

    /**
     * The set of schemes that are allowed to be in a URL.
     */
    private final Set<String> allowedSchemes; // Must be lower-case

    /**
     * Regular expressions used to manually validate authorities if IANA
     * domain name validation isn't desired.
     */
    private final RegexValidator authorityValidator;

    /**
     * If no schemes are provided, default to this set.
     */
    private static final String[] DEFAULT_SCHEMES = {"http", "https", "ftp"}; // Must be lower-case

    /**
     * Singleton instance of this class with default schemes and options.
     */
    private static final UrlValidator DEFAULT_URL_VALIDATOR = new UrlValidator();

    /**
     * Returns the singleton instance of this class with default schemes and options.
     * @return singleton instance with default schemes and options
     */
    public static UrlValidator getInstance() {
        return DEFAULT_URL_VALIDATOR;
    }

    /**
     * Create a UrlValidator with default properties.
     */
    public UrlValidator() {
        this(null);
    }

    /**
     * Behavior of validation is modified by passing in several strings options:
     * @param schemes Pass in one or more url schemes to consider valid, passing in
     *        a null will default to "http,https,ftp" being valid.
     *        If a non-null schemes is specified then all valid schemes must
     *        be specified. Setting the ALLOW_ALL_SCHEMES option will
     *        ignore the contents of schemes.
     */
    public UrlValidator(String[] schemes) {
        this(schemes, 0L);
    }

    /**
     * Initialize a UrlValidator with the given validation options.
     * @param options The options should be set using the public constants declared in
     * this class. To set multiple options you simply add them together. For example,
     * ALLOW_2_SLASHES + NO_FRAGMENTS enables both of those options.
     */
    public UrlValidator(long options) {
        this(null, null, options);
    }

    /**
     * Behavior of validation is modified by passing in options:
     * @param schemes The set of valid schemes. Ignored if the ALLOW_ALL_SCHEMES option is set.
     * @param options The options should be set using the public constants declared in
     * this class. To set multiple options you simply add them together. For example,
     * ALLOW_2_SLASHES + NO_FRAGMENTS enables both of those options.
     */
    public UrlValidator(String[] schemes, long options) {
        this(schemes, null, options);
    }

    /**
     * Initialize a UrlValidator with the given validation options.
     * @param authorityValidator Regular expression validator used to validate the authority part
     * This allows the user to override the standard set of domains.
     * @param options Validation options. Set using the public constants of this class.
     * To set multiple options, simply add them together:
     * <p>{@code ALLOW_2_SLASHES + NO_FRAGMENTS}</p>
     * enables both of those options.
     */
    public UrlValidator(RegexValidator authorityValidator, long options) {
        this(null, authorityValidator, options);
    }

    /**
     * Customizable constructor. Validation behavior is modified by passing in options.
     * @param schemes the set of valid schemes. Ignored if the ALLOW_ALL_SCHEMES option is set.
     * @param authorityValidator Regular expression validator used to validate the authority part
     * @param options Validation options. Set using the public constants of this class.
     * To set multiple options, simply add them together:
     * <p>{@code ALLOW_2_SLASHES + NO_FRAGMENTS}</p>
     * enables both of those options.
     */
    public UrlValidator(String[] schemes, RegexValidator authorityValidator, long options) {
        this.options = options;
        if (isOn(ALLOW_ALL_SCHEMES)) {
            // Scheme list is never consulted in this mode, so keep it empty.
            allowedSchemes = Collections.emptySet();
        } else {
            if (schemes == null) {
                schemes = DEFAULT_SCHEMES;
            }
            allowedSchemes = new HashSet<>(schemes.length);
            for (String scheme : schemes) {
                // Stored lower-case so scheme matching is case-blind.
                allowedSchemes.add(scheme.toLowerCase(Locale.ENGLISH));
            }
        }
        this.authorityValidator = authorityValidator;
    }

    /**
     * <p>Checks if a field has a valid url address.</p>
     *
     * Note that the method calls #isValidAuthority()
     * which checks that the domain is valid.
     *
     * @param value The value validation is being performed on. A {@code null}
     * value is considered invalid.
     * @return true if the url is valid.
     */
    public boolean isValid(String value) {
        if (value == null) {
            return false;
        }
        // Check the whole url address structure
        Matcher urlMatcher = URL_PATTERN.matcher(value);
        if (!urlMatcher.matches()) {
            return false;
        }
        String scheme = urlMatcher.group(PARSE_URL_SCHEME);
        if (!isValidScheme(scheme)) {
            return false;
        }
        String authority = urlMatcher.group(PARSE_URL_AUTHORITY);
        if ("file".equals(scheme)) {// Special case - file: allows an empty authority
            if (authority != null) {
                if (authority.contains(":")) { // but cannot allow trailing :
                    return false;
                }
            }
            // drop through to continue validation
        } else { // not file:
            // Validate the authority
            if (!isValidAuthority(authority)) {
                return false;
            }
        }
        if (!isValidPath(urlMatcher.group(PARSE_URL_PATH))) {
            return false;
        }
        if (!isValidQuery(urlMatcher.group(PARSE_URL_QUERY))) {
            return false;
        }
        if (!isValidFragment(urlMatcher.group(PARSE_URL_FRAGMENT))) {
            return false;
        }
        return true;
    }

    /**
     * Validate scheme. If schemes[] was initialized to a non null,
     * then only those schemes are allowed.
     * Otherwise the default schemes are "http", "https", "ftp".
     * Matching is case-blind.
     * @param scheme The scheme to validate.  A {@code null} value is considered
     * invalid.
     * @return true if valid.
     */
    protected boolean isValidScheme(String scheme) {
        if (scheme == null) {
            return false;
        }
        // TODO could be removed if external schemes were checked in the ctor before being stored
        if (!SCHEME_PATTERN.matcher(scheme).matches()) {
            return false;
        }
        if (isOff(ALLOW_ALL_SCHEMES) && !allowedSchemes.contains(scheme.toLowerCase(Locale.ENGLISH))) {
            return false;
        }
        return true;
    }

    /**
     * Returns true if the authority is properly formatted.  An authority is the combination
     * of hostname and port.  A {@code null} authority value is considered invalid.
     * Note: this implementation validates the domain unless a RegexValidator was provided.
     * If a RegexValidator was supplied and it matches, then the authority is regarded
     * as valid with no further checks, otherwise the method checks against the
     * AUTHORITY_PATTERN and the DomainValidator (ALLOW_LOCAL_URLS)
     * @param authority Authority value to validate, allows IDN
     * @return true if authority (hostname and port) is valid.
     */
    protected boolean isValidAuthority(String authority) {
        if (authority == null) {
            return false;
        }
        // check manual authority validation if specified
        if (authorityValidator != null && authorityValidator.isValid(authority)) {
            return true;
        }
        // convert to ASCII if possible
        final String authorityASCII = DomainValidator.unicodeToASCII(authority);
        Matcher authorityMatcher = AUTHORITY_PATTERN.matcher(authorityASCII);
        if (!authorityMatcher.matches()) {
            return false;
        }
        // We have to process IPV6 separately because that is parsed in a different group
        String ipv6 = authorityMatcher.group(PARSE_AUTHORITY_IPV6);
        if (ipv6 != null) {
            InetAddressValidator inetAddressValidator = InetAddressValidator.getInstance();
            if (!inetAddressValidator.isValidInet6Address(ipv6)) {
                return false;
            }
        } else {
            String hostLocation = authorityMatcher.group(PARSE_AUTHORITY_HOST_IP);
            // check if authority is hostname or IP address:
            // try a hostname first since that's much more likely
            DomainValidator domainValidator = DomainValidator.getInstance(isOn(ALLOW_LOCAL_URLS));
            if (!domainValidator.isValid(hostLocation)) {
                // try an IPv4 address
                InetAddressValidator inetAddressValidator = InetAddressValidator.getInstance();
                if (!inetAddressValidator.isValidInet4Address(hostLocation)) {
                    // isn't IPv4, so the URL is invalid
                    return false;
                }
            }
            String port = authorityMatcher.group(PARSE_AUTHORITY_PORT);
            if (port != null && port.length() > 0) {
                try {
                    int iPort = Integer.parseInt(port);
                    if (iPort < 0 || iPort > MAX_UNSIGNED_16_BIT_INT) {
                        return false;
                    }
                } catch (NumberFormatException nfe) {
                    return false; // this can happen for big numbers
                }
            }
        }
        // Anything left over after host[:port] makes the authority invalid
        // (whitespace-only residue is tolerated, see PARSE_AUTHORITY_EXTRA).
        String extra = authorityMatcher.group(PARSE_AUTHORITY_EXTRA);
        if (extra != null && extra.trim().length() > 0){
            return false;
        }
        return true;
    }

    /**
     * Returns true if the path is valid.  A {@code null} value is considered invalid.
     * @param path Path value to validate.
     * @return true if path is valid.
     */
    protected boolean isValidPath(String path) {
        if (path == null) {
            return false;
        }
        if (!PATH_PATTERN.matcher(path).matches()) {
            return false;
        }
        // Reject paths that escape above the root via "..".
        try {
            URI uri = new URI(null,null,path,null);
            String norm = uri.normalize().getPath();
            if (norm.startsWith("/../") // Trying to go via the parent dir
                    || norm.equals("/..")) { // Trying to go to the parent dir
                return false;
            }
        } catch (URISyntaxException e) {
            return false;
        }
        int slash2Count = countToken("//", path);
        if (isOff(ALLOW_2_SLASHES) && slash2Count > 0) {
            return false;
        }
        return true;
    }

    /**
     * Returns true if the query is null or it's a properly formatted query string.
     * @param query Query value to validate.
     * @return true if query is valid.
     */
    protected boolean isValidQuery(String query) {
        if (query == null) {
            return true;
        }
        return QUERY_PATTERN.matcher(query).matches();
    }

    /**
     * Returns true if the given fragment is null or fragments are allowed.
     * @param fragment Fragment value to validate.
     * @return true if fragment is valid.
     */
    protected boolean isValidFragment(String fragment) {
        if (fragment == null) {
            return true;
        }
        // Any non-null fragment is valid unless NO_FRAGMENTS was requested.
        return isOff(NO_FRAGMENTS);
    }

    /**
     * Returns the number of times the token appears in the target.
     * Note: occurrences are counted with overlap, since the scan advances
     * one character (not one token length) past each match.
     * @param token Token value to be counted.
     * @param target Target value to count tokens in.
     * @return the number of tokens.
     */
    protected int countToken(String token, String target) {
        int tokenIndex = 0;
        int count = 0;
        while (tokenIndex != -1) {
            tokenIndex = target.indexOf(token, tokenIndex);
            if (tokenIndex > -1) {
                tokenIndex++;
                count++;
            }
        }
        return count;
    }

    /**
     * Tests whether the given flag is on.  If the flag is not a power of 2
     * (e.g. 3) this tests whether the combination of flags is on.
     *
     * @param flag Flag value to check.
     *
     * @return whether the specified flag value is on.
     */
    private boolean isOn(long flag) {
        return (options & flag) > 0;
    }

    /**
     * Tests whether the given flag is off.  If the flag is not a power of 2
     * (e.g. 3) this tests whether the combination of flags is off.
     *
     * @param flag Flag value to check.
     *
     * @return whether the specified flag value is off.
     */
    private boolean isOff(long flag) {
        return (options & flag) == 0;
    }

    // Unit test access to pattern matcher
    Matcher matchURL(String value) {
        return URL_PATTERN.matcher(value);
    }
}
| |
// Copyright 2014 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.runtime;
import static com.google.common.base.Strings.isNullOrEmpty;
import com.google.devtools.build.lib.profiler.MemoryProfiler.MemoryProfileStableHeapParameters;
import com.google.devtools.build.lib.profiler.ProfilerTask;
import com.google.devtools.build.lib.runtime.CommandLineEvent.ToolCommandLineEvent;
import com.google.devtools.build.lib.util.OptionsUtils;
import com.google.devtools.build.lib.vfs.PathFragment;
import com.google.devtools.common.options.Converter;
import com.google.devtools.common.options.Converters;
import com.google.devtools.common.options.Converters.AssignmentConverter;
import com.google.devtools.common.options.EnumConverter;
import com.google.devtools.common.options.Option;
import com.google.devtools.common.options.OptionDocumentationCategory;
import com.google.devtools.common.options.OptionEffectTag;
import com.google.devtools.common.options.OptionMetadataTag;
import com.google.devtools.common.options.OptionsBase;
import com.google.devtools.common.options.OptionsParsingException;
import com.google.devtools.common.options.TriState;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import java.util.logging.Level;
/** Options common to all commands. */
public class CommonCommandOptions extends OptionsBase {
/**
* To create a new incompatible change, see the javadoc for {@link
* AllIncompatibleChangesExpansion}.
*/
@Option(
name = "all_incompatible_changes",
defaultValue = "null",
documentationCategory = OptionDocumentationCategory.UNCATEGORIZED,
effectTags = {OptionEffectTag.UNKNOWN},
metadataTags = {OptionMetadataTag.INCOMPATIBLE_CHANGE},
expansionFunction = AllIncompatibleChangesExpansion.class,
help =
"Enables all options of the form --incompatible_*. Use this option to find places where "
+ "your build may break in the future due to deprecations or other changes.")
public Void allIncompatibleChanges;
@Option(
name = "enable_platform_specific_config",
defaultValue = "false",
documentationCategory = OptionDocumentationCategory.UNCATEGORIZED,
effectTags = {OptionEffectTag.UNKNOWN},
help =
"If true, Bazel picks up host-OS-specific config lines from bazelrc files. For example, "
+ "if the host OS is Linux and you run bazel build, Bazel picks up lines starting "
+ "with build:linux. Supported OS identifiers are linux, macos, windows, freebsd, "
+ "and openbsd. Enabling this flag is equivalent to using --config=linux on Linux, "
+ "--config=windows on Windows, etc.")
public boolean enablePlatformSpecificConfig;
@Option(
name = "config",
defaultValue = "null",
documentationCategory = OptionDocumentationCategory.UNCATEGORIZED,
effectTags = {OptionEffectTag.UNKNOWN},
allowMultiple = true,
help =
"Selects additional config sections from the rc files; for every <command>, it "
+ "also pulls in the options from <command>:<config> if such a section exists; "
+ "if this section doesn't exist in any .rc file, Blaze fails with an error. "
+ "The config sections and flag combinations they are equivalent to are "
+ "located in the tools/*.blazerc config files.")
public List<String> configs;
@Option(
name = "logging",
defaultValue = "3", // Level.INFO
documentationCategory = OptionDocumentationCategory.LOGGING,
effectTags = {OptionEffectTag.AFFECTS_OUTPUTS},
converter = Converters.LogLevelConverter.class,
help = "The logging level.")
public Level verbosity;
@Option(
name = "client_cwd",
defaultValue = "",
documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
metadataTags = {OptionMetadataTag.HIDDEN},
effectTags = {OptionEffectTag.CHANGES_INPUTS},
converter = OptionsUtils.PathFragmentConverter.class,
help = "A system-generated parameter which specifies the client's working directory")
public PathFragment clientCwd;
@Option(
name = "announce_rc",
defaultValue = "false",
documentationCategory = OptionDocumentationCategory.LOGGING,
effectTags = {OptionEffectTag.AFFECTS_OUTPUTS},
help = "Whether to announce rc options.")
public boolean announceRcOptions;
@Option(
name = "always_profile_slow_operations",
defaultValue = "true",
documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
effectTags = {OptionEffectTag.AFFECTS_OUTPUTS, OptionEffectTag.BAZEL_INTERNAL_CONFIGURATION},
help = "Whether profiling slow operations is always turned on")
public boolean alwaysProfileSlowOperations;
/** Converter for UUID. Accepts values as specified by {@link UUID#fromString(String)}. */
public static class UUIDConverter implements Converter<UUID> {

  /**
   * Parses an option value into a {@link UUID}.
   *
   * @param input the raw option value; a null or empty string maps to {@code null}
   * @return the parsed UUID, or {@code null} when no value was supplied
   * @throws OptionsParsingException if {@code input} is not a well-formed UUID
   */
  @Override
  public UUID convert(String input) throws OptionsParsingException {
    if (isNullOrEmpty(input)) {
      return null;
    }
    try {
      return UUID.fromString(input);
    } catch (IllegalArgumentException e) {
      // Fixed typo in the user-facing message: previously read "is not a value UUID."
      throw new OptionsParsingException(
          String.format("Value '%s' is not a valid UUID.", input), e);
    }
  }

  @Override
  public String getTypeDescription() {
    return "a UUID";
  }
}
/**
 * Converter for options (--build_request_id) that accept prefixed UUIDs. Since we do not care
 * about the structure of this value after validation, we store it as a string.
 */
public static class PrefixedUUIDConverter implements Converter<String> {

  /**
   * Validates that {@code input} ends in a canonical UUID and returns it unchanged.
   *
   * @param input the raw option value; a null or empty string maps to {@code null}
   * @return the original input string, or {@code null} when no value was supplied
   * @throws OptionsParsingException if the last 36 characters are not a valid UUID
   */
  @Override
  public String convert(String input) throws OptionsParsingException {
    if (isNullOrEmpty(input)) {
      return null;
    }
    // A canonical UUID string (as accepted by UUID#fromString) is exactly 36
    // characters long. Treat the trailing 36 characters as the UUID and whatever
    // precedes them as an arbitrary prefix whose contents are deliberately
    // left unchecked.
    try {
      String uuidCandidate = input.substring(input.length() - 36);
      UUID.fromString(uuidCandidate);
      return input;
    } catch (IllegalArgumentException | IndexOutOfBoundsException e) {
      // Also reached when input is shorter than 36 chars (substring throws).
      throw new OptionsParsingException(
          String.format("Value '%s' does not end in a valid UUID.", input), e);
    }
  }

  @Override
  public String getTypeDescription() {
    return "An optionally prefixed UUID. The last 36 characters will be verified as a UUID.";
  }
}
// Command ID and build request ID can be set either by flag or environment variable. In most
// cases, the internally generated ids should be sufficient, but we allow these to be set
// externally if required. Option wins over environment variable, if both are set.
// TODO(b/67895628) Stop reading ids from the environment after the compatibility window has
// passed.
@Option(
name = "invocation_id",
defaultValue = "",
converter = UUIDConverter.class,
documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
effectTags = {OptionEffectTag.BAZEL_MONITORING, OptionEffectTag.BAZEL_INTERNAL_CONFIGURATION},
help =
"Unique identifier, in UUID format, for the command being run. If explicitly specified"
+ " uniqueness must be ensured by the caller. The UUID is printed to stderr, the BEP"
+ " and remote execution protocol.")
public UUID invocationId;
@Option(
name = "build_request_id",
defaultValue = "",
converter = PrefixedUUIDConverter.class,
documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
effectTags = {OptionEffectTag.BAZEL_MONITORING, OptionEffectTag.BAZEL_INTERNAL_CONFIGURATION},
metadataTags = {OptionMetadataTag.HIDDEN},
help = "Unique string identifier for the build being run.")
public String buildRequestId;
@Option(
name = "build_metadata",
converter = AssignmentConverter.class,
defaultValue = "null",
allowMultiple = true,
documentationCategory = OptionDocumentationCategory.UNCATEGORIZED,
effectTags = {OptionEffectTag.TERMINAL_OUTPUT},
help = "Custom key-value string pairs to supply in a build event.")
public List<Map.Entry<String, String>> buildMetadata;
@Option(
name = "oom_message",
defaultValue = "",
documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
effectTags = {OptionEffectTag.BAZEL_MONITORING, OptionEffectTag.TERMINAL_OUTPUT},
metadataTags = {OptionMetadataTag.HIDDEN},
help = "Custom message to be emitted on an out of memory failure.")
public String oomMessage;
@Option(
name = "generate_json_trace_profile",
oldName = "experimental_generate_json_trace_profile",
defaultValue = "auto",
documentationCategory = OptionDocumentationCategory.LOGGING,
effectTags = {OptionEffectTag.AFFECTS_OUTPUTS, OptionEffectTag.BAZEL_MONITORING},
help =
"If enabled, Bazel profiles the build and writes a JSON-format profile into a file in"
+ " the output base. View profile by loading into chrome://tracing. By default Bazel"
+ " writes the profile for all build-like commands and query.")
public TriState enableTracer;
@Option(
name = "json_trace_compression",
oldName = "experimental_json_trace_compression",
defaultValue = "auto",
documentationCategory = OptionDocumentationCategory.LOGGING,
effectTags = {OptionEffectTag.AFFECTS_OUTPUTS, OptionEffectTag.BAZEL_MONITORING},
help =
"If enabled, Bazel compresses the JSON-format profile with gzip. "
+ "By default, this is decided based on the extension of the file specified in "
+ "--profile.")
public TriState enableTracerCompression;
@Option(
name = "experimental_profile_cpu_usage",
defaultValue = "true",
documentationCategory = OptionDocumentationCategory.LOGGING,
effectTags = {OptionEffectTag.AFFECTS_OUTPUTS, OptionEffectTag.BAZEL_MONITORING},
help = "If set, Bazel will measure cpu usage and add it to the JSON profile.")
public boolean enableCpuUsageProfiling;
@Option(
name = "experimental_profile_additional_tasks",
converter = ProfilerTaskConverter.class,
defaultValue = "null",
allowMultiple = true,
documentationCategory = OptionDocumentationCategory.LOGGING,
effectTags = {OptionEffectTag.AFFECTS_OUTPUTS, OptionEffectTag.BAZEL_MONITORING},
help = "Specifies additional profile tasks to be included in the profile.")
public List<ProfilerTask> additionalProfileTasks;
@Option(
name = "slim_profile",
oldName = "experimental_slim_json_profile",
defaultValue = "true",
documentationCategory = OptionDocumentationCategory.LOGGING,
effectTags = {OptionEffectTag.AFFECTS_OUTPUTS, OptionEffectTag.BAZEL_MONITORING},
help =
"Slims down the size of the JSON profile by merging events if the profile gets "
+ " too large.")
public boolean slimProfile;
@Option(
name = "experimental_profile_include_primary_output",
oldName = "experimental_include_primary_output",
defaultValue = "false",
documentationCategory = OptionDocumentationCategory.LOGGING,
effectTags = {OptionEffectTag.AFFECTS_OUTPUTS, OptionEffectTag.BAZEL_MONITORING},
help =
"Includes the extra \"out\" attribute in action events that contains the exec path "
+ "to the action's primary output.")
public boolean includePrimaryOutput;
@Option(
name = "experimental_profile_include_target_label",
defaultValue = "false",
documentationCategory = OptionDocumentationCategory.LOGGING,
effectTags = {OptionEffectTag.AFFECTS_OUTPUTS, OptionEffectTag.BAZEL_MONITORING},
help = "Includes target label in action events' JSON profile data.")
public boolean profileIncludeTargetLabel;
@Option(
name = "experimental_announce_profile_path",
defaultValue = "false",
documentationCategory = OptionDocumentationCategory.LOGGING,
effectTags = {OptionEffectTag.AFFECTS_OUTPUTS, OptionEffectTag.BAZEL_MONITORING},
help = "If enabled, adds the JSON profile path to the log.")
public boolean announceProfilePath;
@Option(
name = "profile",
defaultValue = "null",
documentationCategory = OptionDocumentationCategory.LOGGING,
effectTags = {OptionEffectTag.AFFECTS_OUTPUTS, OptionEffectTag.BAZEL_MONITORING},
converter = OptionsUtils.PathFragmentConverter.class,
help =
"If set, profile Bazel and write data to the specified "
+ "file. Use bazel analyze-profile to analyze the profile.")
public PathFragment profilePath;
@Option(
name = "starlark_cpu_profile",
defaultValue = "",
documentationCategory = OptionDocumentationCategory.LOGGING,
effectTags = {OptionEffectTag.BAZEL_MONITORING},
help = "Writes into the specified file a pprof profile of CPU usage by all Starlark threads.")
public String starlarkCpuProfile;
@Option(
name = "record_full_profiler_data",
defaultValue = "false",
documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
effectTags = {OptionEffectTag.AFFECTS_OUTPUTS, OptionEffectTag.BAZEL_MONITORING},
help =
"By default, Bazel profiler will record only aggregated data for fast but numerous "
+ "events (such as statting the file). If this option is enabled, profiler will "
+ "record each event - resulting in more precise profiling data but LARGE "
+ "performance hit. Option only has effect if --profile used as well.")
public boolean recordFullProfilerData;
@Option(
name = "memory_profile",
defaultValue = "null",
documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
effectTags = {OptionEffectTag.AFFECTS_OUTPUTS, OptionEffectTag.BAZEL_MONITORING},
converter = OptionsUtils.PathFragmentConverter.class,
help =
"If set, write memory usage data to the specified file at phase ends and stable heap to"
+ " master log at end of build.")
public PathFragment memoryProfilePath;
@Option(
name = "memory_profile_stable_heap_parameters",
defaultValue = "1,0",
documentationCategory = OptionDocumentationCategory.LOGGING,
effectTags = {OptionEffectTag.BAZEL_MONITORING},
converter = MemoryProfileStableHeapParameters.Converter.class,
help =
"Tune memory profile's computation of stable heap at end of build. Should be two"
+ " integers separated by a comma. First parameter is the number of GCs to perform."
+ " Second parameter is the number of seconds to wait between GCs.")
public MemoryProfileStableHeapParameters memoryProfileStableHeapParameters;
@Option(
name = "experimental_oom_more_eagerly_threshold",
defaultValue = "100",
documentationCategory = OptionDocumentationCategory.EXECUTION_STRATEGY,
effectTags = {OptionEffectTag.HOST_MACHINE_RESOURCE_OPTIMIZATIONS},
help =
"If this flag is set to a value less than 100, Bazel will OOM if, after two full GC's, "
+ "more than this percentage of the (old gen) heap is still occupied.")
public int oomMoreEagerlyThreshold;
@Option(
name = "heap_dump_on_eager_oom",
defaultValue = "false",
documentationCategory = OptionDocumentationCategory.LOGGING,
effectTags = {OptionEffectTag.BAZEL_MONITORING},
help =
"Whether to manually output a heap dump if an OOM is thrown due to"
+ " --experimental_oom_more_eagerly_threshold. The dump will be written to"
+ " <invocation_id>.heapdump.hprof")
public boolean heapDumpOnEagerOom;
@Option(
name = "startup_time",
defaultValue = "0",
documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
effectTags = {OptionEffectTag.AFFECTS_OUTPUTS, OptionEffectTag.BAZEL_MONITORING},
metadataTags = {OptionMetadataTag.HIDDEN},
help = "The time in ms the launcher spends before sending the request to the bazel server.")
public long startupTime;
@Option(
name = "extract_data_time",
defaultValue = "0",
documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
effectTags = {OptionEffectTag.AFFECTS_OUTPUTS, OptionEffectTag.BAZEL_MONITORING},
metadataTags = {OptionMetadataTag.HIDDEN},
help = "The time in ms spent on extracting the new bazel version.")
public long extractDataTime;
@Option(
name = "command_wait_time",
defaultValue = "0",
documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
effectTags = {OptionEffectTag.AFFECTS_OUTPUTS, OptionEffectTag.BAZEL_MONITORING},
metadataTags = {OptionMetadataTag.HIDDEN},
help = "The time in ms a command had to wait on a busy Bazel server process.")
public long waitTime;
@Option(
name = "tool_tag",
defaultValue = "",
documentationCategory = OptionDocumentationCategory.LOGGING,
effectTags = {OptionEffectTag.AFFECTS_OUTPUTS, OptionEffectTag.BAZEL_MONITORING},
help = "A tool name to attribute this Bazel invocation to.")
public String toolTag;
@Option(
name = "restart_reason",
defaultValue = "no_restart",
documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
effectTags = {OptionEffectTag.AFFECTS_OUTPUTS, OptionEffectTag.BAZEL_MONITORING},
metadataTags = {OptionMetadataTag.HIDDEN},
help = "The reason for the server restart.")
public String restartReason;
@Option(
name = "binary_path",
defaultValue = "",
documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
effectTags = {OptionEffectTag.AFFECTS_OUTPUTS, OptionEffectTag.BAZEL_MONITORING},
metadataTags = {OptionMetadataTag.HIDDEN},
help = "The absolute path of the bazel binary.")
public String binaryPath;
@Option(
name = "experimental_allow_project_files",
defaultValue = "false",
documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
effectTags = {OptionEffectTag.CHANGES_INPUTS},
metadataTags = {OptionMetadataTag.EXPERIMENTAL, OptionMetadataTag.HIDDEN},
help = "Enable processing of +<file> parameters.")
public boolean allowProjectFiles;
@Option(
name = "block_for_lock",
defaultValue = "true",
documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
effectTags = {OptionEffectTag.BAZEL_INTERNAL_CONFIGURATION},
metadataTags = {OptionMetadataTag.HIDDEN},
help =
"If set (the default), a command will block if there is another one running. If "
+ "unset, these commands will immediately return with an error.")
public boolean blockForLock;
// We could accept multiple of these, in the event where there's a chain of tools that led to a
// Bazel invocation. We would not want to expect anything from the order of these, and would need
// to guarantee that the "label" for each command line is unique. Unless a need is demonstrated,
// though, logs are a better place to track this information than flags, so let's try to avoid it.
@Option(
// In May 2018, this feature will have been out for 6 months. If the format we accept has not
// changed in that time, we can remove the "experimental" prefix and tag.
name = "experimental_tool_command_line",
defaultValue = "",
documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
effectTags = {OptionEffectTag.AFFECTS_OUTPUTS},
// Keep this flag HIDDEN so that it is not listed with our reported command lines, it being
// reported separately.
metadataTags = {OptionMetadataTag.EXPERIMENTAL, OptionMetadataTag.HIDDEN},
converter = ToolCommandLineEvent.Converter.class,
help =
"An extra command line to report with this invocation's command line. Useful for tools "
+ "that invoke Bazel and want the original information that the tool received to be "
+ "logged with the rest of the Bazel invocation.")
public ToolCommandLineEvent toolCommandLine;
@Option(
name = "unconditional_warning",
defaultValue = "null",
documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
effectTags = {OptionEffectTag.TERMINAL_OUTPUT},
allowMultiple = true,
help =
"A warning that will unconditionally get printed with build warnings and errors. This is"
+ " useful to deprecate bazelrc files or --config definitions. If the intent is to"
+ " effectively deprecate some flag or combination of flags, this is NOT sufficient."
+ " The flag or flags should use the deprecationWarning field in the option"
+ " definition, or the bad combination should be checked for programmatically.")
public List<String> deprecationWarnings;
@Option(
name = "track_incremental_state",
oldName = "keep_incrementality_data",
defaultValue = "true",
documentationCategory = OptionDocumentationCategory.BUILD_TIME_OPTIMIZATION,
effectTags = {OptionEffectTag.LOSES_INCREMENTAL_STATE},
help =
"If false, Blaze will not persist data that allows for invalidation and re-evaluation "
+ "on incremental builds in order to save memory on this build. Subsequent builds "
+ "will not have any incrementality with respect to this one. Usually you will want "
+ "to specify --batch when setting this to false.")
public boolean trackIncrementalState;
@Option(
name = "keep_state_after_build",
defaultValue = "true",
documentationCategory = OptionDocumentationCategory.BUILD_TIME_OPTIMIZATION,
effectTags = {OptionEffectTag.LOSES_INCREMENTAL_STATE},
help =
"If false, Blaze will discard the inmemory state from this build when the build "
+ "finishes. Subsequent builds will not have any incrementality with respect to this "
+ "one.")
public boolean keepStateAfterBuild;
@Option(
name = "repo_env",
converter = Converters.OptionalAssignmentConverter.class,
allowMultiple = true,
defaultValue = "null",
documentationCategory = OptionDocumentationCategory.OUTPUT_PARAMETERS,
effectTags = {OptionEffectTag.ACTION_COMMAND_LINES},
help =
"Specifies additional environment variables to be available only for repository rules."
+ " Note that repository rules see the full environment anyway, but in this way"
+ " configuration information can be passed to repositories through options without"
+ " invalidating the action graph.")
public List<Map.Entry<String, String>> repositoryEnvironment;
/** The option converter to check that the user can only specify legal profiler tasks. */
  public static class ProfilerTaskConverter extends EnumConverter<ProfilerTask> {
    public ProfilerTaskConverter() {
      // "profiler task" is the human-readable type name handed to EnumConverter,
      // presumably used when reporting an illegal value — confirm in EnumConverter.
      super(ProfilerTask.class, "profiler task");
    }
  }
}
| |
// Copyright (c) 2011, Mike Samuel
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
//
// Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
// Neither the name of the OWASP nor the names of its contributors may
// be used to endorse or promote products derived from this software
// without specific prior written permission.
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
// COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
package org.owasp.html;
import org.junit.Test;
import junit.framework.TestCase;
/**
 * Tests for the prepackaged policies in {@link Sanitizers} (FORMATTING, BLOCKS,
 * STYLES, LINKS, IMAGES) and for combining policies via {@code and}.
 */
public class SanitizersTest extends TestCase {
  @Test
  public static final void testFormatting() {
    // Null and empty inputs sanitize to the empty string.
    assertEquals("", Sanitizers.FORMATTING.sanitize(null));
    assertEquals("", Sanitizers.FORMATTING.sanitize(""));
    assertEquals(
        "Hello, World!",
        Sanitizers.FORMATTING.sanitize("Hello, World!"));
    assertEquals(
        "Hello, <b>World</b>!",
        Sanitizers.FORMATTING.sanitize("Hello, <b>World</b>!"));
    // The block-level <p> is dropped and the onclick handler stripped, while
    // the inline <b> survives.
    assertEquals(
        "Hello, <b>World</b>!",
        Sanitizers.FORMATTING.sanitize(
            "<p>Hello, <b onclick=alert(1337)>World</b>!</p>"));
  }
  @Test
  public static final void testBlockElements() {
    assertEquals("", Sanitizers.BLOCKS.sanitize(null));
    assertEquals(
        "Hello, World!",
        Sanitizers.BLOCKS.sanitize("Hello, World!"));
    // Inline formatting tags are not allowed by BLOCKS, so <b> is removed.
    assertEquals(
        "Hello, World!",
        Sanitizers.BLOCKS.sanitize("Hello, <b>World</b>!"));
    assertEquals(
        "<p>Hello, World!</p>",
        Sanitizers.BLOCKS.sanitize(
            "<p onclick=alert(1337)>Hello, <b>World</b>!</p>"));
  }
  @Test
  public static final void testBlockAndFormattingElements() {
    // Re-anding an already-included policy must be a no-op: s, r1, and r2
    // are expected to behave identically.
    PolicyFactory s = Sanitizers.BLOCKS.and(Sanitizers.FORMATTING);
    PolicyFactory r1 = Sanitizers.BLOCKS.and(Sanitizers.FORMATTING)
        .and(Sanitizers.BLOCKS);
    PolicyFactory r2 = Sanitizers.BLOCKS.and(Sanitizers.FORMATTING)
        .and(Sanitizers.FORMATTING);
    for (PolicyFactory f : new PolicyFactory[] { s, r1, r2 }) {
      assertEquals("", f.sanitize(null));
      assertEquals("Hello, World!", f.sanitize("Hello, World!"));
      assertEquals("Hello, <b>World</b>!", f.sanitize("Hello, <b>World</b>!"));
      assertEquals(
          "<p>Hello, <b>World</b>!</p>",
          f.sanitize("<p onclick=alert(1337)>Hello, <b>World</b>!</p>"));
    }
  }
  @Test
  public static final void testStylesAndFormatting() {
    // A span with allowed style properties should round-trip unchanged.
    PolicyFactory sanitizer = Sanitizers.FORMATTING
        .and(Sanitizers.BLOCKS).and(Sanitizers.STYLES).and(Sanitizers.LINKS);
    String input = "<span style=\"font-weight:bold;"
        + "text-decoration:underline;background-color:yellow\""
        + ">aaaaaaaaaaaaaaaaaaaaaaa</span>";
    String got = sanitizer.sanitize(input);
    String want = input;
    assertEquals(want, got);
  }
  @Test
  public static final void testAndIntersects() {
    // and() should intersect attribute/protocol allowances regardless of the
    // order in which the two policies are combined.
    PolicyFactory restrictedLink = new HtmlPolicyBuilder()
        .allowElements("a")
        .allowUrlProtocols("https")
        .allowAttributes("href", "title").onElements("a")
        .toFactory();
    PolicyFactory inline = Sanitizers.FORMATTING.and(Sanitizers.LINKS);
    String inputHtml =
        "<a href='http://foo.com/'>Hello, <b>World</b></a>"
        + "<a title='!' href='https://foo.com/#!'>!</a>";
    PolicyFactory and1 = restrictedLink.and(inline);
    PolicyFactory and2 = inline.and(restrictedLink);
    assertEquals(
        "https-only links",
        "Hello, World<a title=\"!\" href=\"https://foo.com/#!\">!</a>",
        restrictedLink.sanitize(inputHtml));
    assertEquals(
        "inline els",
        "<a href=\"http://foo.com/\" rel=\"nofollow\">Hello, <b>World</b></a>"
        + "<a href=\"https://foo.com/#!\" rel=\"nofollow\">!</a>",
        inline.sanitize(inputHtml));
    assertEquals(
        "https-only links and inline els",
        "Hello, <b>World</b>"
        + "<a title=\"!\" href=\"https://foo.com/#!\" rel=\"nofollow\">!</a>",
        and1.sanitize(inputHtml));
    assertEquals(
        "inline els and https-only links",
        "Hello, <b>World</b>"
        + "<a title=\"!\" href=\"https://foo.com/#!\" rel=\"nofollow\">!</a>",
        and2.sanitize(inputHtml));
  }
  @Test
  public static final void testImages() {
    PolicyFactory s = Sanitizers.IMAGES;
    assertEquals(
        "foo", s.sanitize("<a href=\"javascript:alert(1337)\">foo</a>"));
    assertEquals(
        "<img src=\"foo.gif\" />", s.sanitize("<img src=\"foo.gif\">"));
    // javascript: src is disallowed, which leaves the img with no src, so the
    // whole element is dropped.
    assertEquals(
        "", s.sanitize("<img src=\"javascript://alert(1337)\">"));
    assertEquals(
        "<img src=\"x.gif\" alt=\"y\""
        + " width=\"96\" height=\"64\" border=\"0\" />",
        s.sanitize(
            "<img src=\"x.gif\" alt=\"y\" width=96 height=64 border=0>"));
    // A non-numeric width is rejected; the remaining attributes survive.
    assertEquals(
        "<img src=\"x.png\" alt=\"y\" height=\"64\" border=\"0\" />",
        s.sanitize(
            "<img src=\"x.png\" alt=\"y\" width=\"widgy\" height=64 border=0>")
        );
  }
  @Test
  public static final void testLinks() {
    PolicyFactory s = Sanitizers.LINKS;
    // rel="nofollow" is added to every kept link; onclick handlers are stripped.
    assertEquals(
        "<a href=\"foo.html\" rel=\"nofollow\">Link text</a>",
        s.sanitize("<a href=\"foo.html\">Link text</a>"));
    assertEquals(
        "<a href=\"foo.html\" rel=\"nofollow\">Link text</a>",
        s.sanitize(
            "<a href=\"foo.html\" onclick=\"alert(1337)\">Link text</a>"));
    assertEquals(
        "<a href=\"http://example.com/x.html\" rel=\"nofollow\">Link text</a>",
        s.sanitize(
            "<a href=\"http://example.com/x.html\""
            + " onclick=\"alert(1337)\">Link text</a>"));
    assertEquals(
        "<a href=\"https://example.com/x.html\" rel=\"nofollow\">Link text</a>",
        s.sanitize(
            "<a href=\"https://example.com/x.html\""
            + " onclick=\"alert(1337)\">Link text</a>"));
    // Protocol matching is case-insensitive.
    assertEquals(
        "<a href=\"HTTPS://example.com/x.html\" rel=\"nofollow\">Link text</a>",
        s.sanitize(
            "<a href=\"HTTPS://example.com/x.html\""
            + " onclick=\"alert(1337)\">Link text</a>"));
    // Protocol-relative URLs are kept.
    assertEquals(
        "<a href=\"//example.com/x.html\" rel=\"nofollow\">Link text</a>",
        s.sanitize(
            "<a href=\"//example.com/x.html\""
            + " onclick=\"alert(1337)\">Link text</a>"));
    // javascript: links are removed entirely, leaving only the text content.
    assertEquals(
        "Link text",
        s.sanitize(
            "<a href=\"javascript:alert(1337).html\""
            + " onclick=\"alert(1337)\">Link text</a>"));
    // Not a link.  Instead, an attempt to intercept URL references that has
    // not been explicitly allowed.
    assertEquals(
        "Header text",
        s.sanitize("<a name=\"header\" id=\"header\">Header text</a>"));
  }
  @Test
  public static final void testExplicitlyAllowedProtocolsAreCaseInsensitive() {
    // Issue 24.
    PolicyFactory s = new HtmlPolicyBuilder()
        .allowElements("a")
        .allowAttributes("href").onElements("a")
        .allowStandardUrlProtocols()
        .allowUrlProtocols("file")  // Don't try this at home
        .toFactory();
    // ASCII case-folding must match "FILE" but NOT the Turkish dotted capital
    // I variant (\u0130), and unknown protocols like "fail" must be dropped.
    String input = (
        "<a href='file:///etc/passwd'>Copy and paste this into email</a>"
        + "<a href='FILE:///etc/passwd'>Or this one</a>"
        + "<a href='F\u0130LE:///etc/passwd'>not with Turkish dotted I's</a>"
        + "<a href='fail:///etc/passed'>The fail protocol needs to happen</a>");
    String want = (
        "<a href=\"file:///etc/passwd\">Copy and paste this into email</a>"
        + "<a href=\"FILE:///etc/passwd\">Or this one</a>"
        + "not with Turkish dotted I's"
        + "The fail protocol needs to happen");
    assertEquals(want, s.sanitize(input));
  }
  @Test
  public static final void testIssue9StylesInTables() {
    String input = ""
        + "<table style=\"color: rgb(0, 0, 0);"
        + " font-family: Arial, Geneva, sans-serif;\">"
        + "<tbody>"
        + "<tr>"
        + "<th>Column One</th><th>Column Two</th>"
        + "</tr>"
        + "<tr>"
        + "<td align=\"center\""
        + " style=\"background-color: rgb(255, 255, 254);\">"
        + "<font size=\"2\">Size 2</font></td>"
        + "<td align=\"center\""
        + " style=\"background-color: rgb(255, 255, 254);\">"
        + "<font size=\"7\">Size 7</font></td>"
        + "</tr>"
        + "</tbody>"
        + "</table>";
    PolicyFactory s = new HtmlPolicyBuilder()
        .allowElements("table", "tbody", "thead", "tr", "td", "th")
        .allowCommonBlockElements()
        .allowCommonInlineFormattingElements()
        .allowStyling()
        .allowAttributes("align").matching(true, "left", "center", "right")
            .onElements("table", "tr", "td", "th")
        .allowAttributes("size").onElements("font", "img")
        .toFactory();
    // Styles survive but are re-emitted in the sanitizer's normalized CSS form
    // (lowercased, re-spaced, quoted font names).
    String sanitized = ""
        + "<table style=\"color:rgb( 0 , 0 , 0 );"
        + "font-family:'arial' , 'geneva' , sans-serif\">"
        + "<tbody>"
        + "<tr>"
        + "<th>Column One</th><th>Column Two</th>"
        + "</tr>"
        + "<tr>"
        + "<td align=\"center\""
        + " style=\"background-color:rgb( 255 , 255 , 254 )\">"
        + "<font size=\"2\">Size 2</font></td>"
        + "<td align=\"center\""
        + " style=\"background-color:rgb( 255 , 255 , 254 )\">"
        + "<font size=\"7\">Size 7</font></td>"
        + "</tr>"
        + "</tbody>"
        + "</table>";
    assertEquals(sanitized, s.sanitize(input));
  }
  @Test
  public static final void testSkipIfEmptyUnionsProperly() {
    // Issue 23
    PolicyFactory extras = new HtmlPolicyBuilder()
        .allowWithoutAttributes("span", "div")
        .allowElements("span", "div", "textarea")
        // This is not the proper way to require the attribute disabled on
        // textareas.  This is a test.  This is only a test.
        .allowAttributes("disabled").onElements("textarea")
        .disallowWithoutAttributes("textarea")
        .toFactory();
    PolicyFactory policy = Sanitizers.FORMATTING
        .and(Sanitizers.BLOCKS)
        .and(Sanitizers.IMAGES)
        .and(Sanitizers.STYLES)
        .and(extras);
    // The attribute-less <textarea> is dropped (leaving its text), while the
    // disabled one is kept; the div/span pair survives without the onclick.
    String input =
        "<textarea>text</textarea><textarea disabled></textarea>"
        + "<div onclick='redirect()'><span>Styled by span</span></div>";
    String want = "text<textarea disabled=\"disabled\"></textarea>"
        + "<div><span>Styled by span</span></div>";
    assertEquals(want, policy.sanitize(input));
  }
}
| |
import java.awt.*;
import java.awt.event.*;
import javax.swing.*;
/**
* This class runs and displays
* the program and makes the menu
* bar options etc.
*
* @author Eileen Balci
* @version 20.3.2007
*/
public class MineSweeperDriver {
    // Single shared game panel added to the frame and driven by the menu actions.
    private static MineSweeperGUI game = new MineSweeperGUI();
    /**
     * Runs the Mine Sweeper Game: builds the main window, installs the
     * menu bar, and shows the (non-resizable) frame.
     */
    public static void main(String[] args) {
        JFrame window = new JFrame("MINE SWEEPER");
        window.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
        window.getContentPane().add(game);
        makeMenuBar(window);//adds the menu bar
        //makes it so users can't resize window
        window.setResizable(false);
        window.pack();
        window.setVisible(true);
    }
    /**
     * Create the main frame's menu bar.
     * which will hold menu options such as
     * Quit which will quit the program
     * and
     * New Game which will start a new game!
     *
     * Taken from:
     * Objects First with Java a Practical Intro Using BlueJ
     * Authors: Michael Kolling and David J Barnes
     * Modified by: Eileen Balci
     */
    private static void makeMenuBar(JFrame frame)
    {
        //used to make shortcut keys
        // NOTE(review): getMenuShortcutKeyMask() is deprecated since Java 10 in
        // favor of getMenuShortcutKeyMaskEx() — confirm target JDK before changing.
        final int SHORTCUT_MASK =
            Toolkit.getDefaultToolkit().getMenuShortcutKeyMask();
        JMenuBar menubar = new JMenuBar();
        frame.setJMenuBar(menubar);
        JMenu menu;
        JMenuItem item;
        // create the Game menu
        menu = new JMenu("Game");
        menubar.add(menu);
        //will make a new game when user selects it
        //or if user uses the shortcut ctrl+N
        item = new JMenuItem("New Game");
        item.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_N, SHORTCUT_MASK));
        item.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent e)
            {
                game.addBackComponents();
            }
        });
        menu.add(item);//adds new game to menu
        menu.addSeparator();//separates the new game from change color
        //lets the user change the board's color scheme (shortcut ctrl+M)
        item = new JMenuItem("Change Color");
        item.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_M, SHORTCUT_MASK));
        item.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent e)
            {
                game.changeColor();
            }
        });
        menu.add(item);//adds change color to menu
        menu.addSeparator(); //separates change color from difficulty settings
        //will create a beginner game
        item = new JMenuItem("Beginner");
        item.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent e)
            {
                //haven't gotten this to work
            }
        });
        menu.add(item);//adds Beginner to menu
        //will create an intermediate game
        item = new JMenuItem("Intermediate");
        item.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent e)
            {
                //haven't gotten this to work
            }
        });
        menu.add(item);//adds Intermediate to menu
        //will create an expert leveled game
        item = new JMenuItem("Expert");
        item.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent e)
            {
                //haven't gotten this to work
            }
        });
        menu.add(item);//adds Expert to menu
        menu.addSeparator();//separates the standard difficulties from the custom one
        //will create a custom game board the user wants
        item = new JMenuItem("Custom...");
        item.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_C, SHORTCUT_MASK));
        item.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent e)
            {
                //haven't gotten this to work
            }
        });
        menu.add(item);//adds reset to menu
        menu.addSeparator();//separates the new game and reset options from quit
        //will allow user to quit but selecting it from the menu
        //or by using the shortcut ctrl+Q
        item = new JMenuItem("Quit");
        item.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_Q, SHORTCUT_MASK));
        item.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent e)
            {
                quit();
            }
        });
        menu.add(item);
        // put a spacer into the menubar, so the next menu appears to the right
        menubar.add(Box.createHorizontalGlue());
        // create the Help menu
        menu = new JMenu("Help");
        menubar.add(menu);
        item = new JMenuItem("How To Play...");
        item.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent e)
            {
                showAbout();
            }
        });
        menu.add(item);
    }
    /**
     * Quit function: quit the application.
     */
    private static void quit()
    {
        System.exit(0);
    }
    /**
     * An explanation of the
     * Mine Sweeper game and how to play,
     * shown in a modal dialog.
     */
    private static void showAbout()
    {
        JOptionPane.showMessageDialog(null,
            "MINE SWEEPER\n" +
            "A one player game that involves puzzle solving skills. " +
            "The grid holds a number of mines\n" +
            "if you click on a square and a mine pops up it's game over!" +
            " The object of the game is to clear all the squares without\n" +
            "clicking on a mine. If you can do this, you win.\n\n" +
            "Sometimes when you click on a square a number (from 1-8) will appear." +
            " \nThis tells you how many mines are touching the square.\n" +
            "The mine can be touching a square" +
            " at the corners and sides \n(there are 8 places a mine can touch a\n" +
            "single square in)." +
            " use the settings to make the game harder" +
            " \nif you single Right click you can place a flag at a location you\n" +
            "think a mine might be located at, if you want to remove the flag, " +
            "double Right click.\n\n\nENJOY THE GAME!");
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.phoenix.util;
import static org.apache.phoenix.query.BaseTest.generateUniqueName;
import static org.apache.phoenix.query.QueryConstants.MILLIS_IN_DAY;
import static org.apache.phoenix.query.QueryConstants.SINGLE_COLUMN_FAMILY_NAME;
import static org.apache.phoenix.query.QueryConstants.SINGLE_COLUMN_NAME;
import static org.apache.phoenix.util.PhoenixRuntime.CONNECTIONLESS;
import static org.apache.phoenix.util.PhoenixRuntime.JDBC_PROTOCOL;
import static org.apache.phoenix.util.PhoenixRuntime.JDBC_PROTOCOL_SEPARATOR;
import static org.apache.phoenix.util.PhoenixRuntime.JDBC_PROTOCOL_TERMINATOR;
import static org.apache.phoenix.util.PhoenixRuntime.PHOENIX_TEST_DRIVER_URL_PARAM;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.File;
import java.io.IOException;
import java.math.BigDecimal;
import java.net.ServerSocket;
import java.sql.Connection;
import java.sql.Date;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Properties;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellScanner;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.client.coprocessor.Batch;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils.BlockingRpcCallback;
import org.apache.hadoop.hbase.ipc.ServerRpcController;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.phoenix.compile.AggregationManager;
import org.apache.phoenix.compile.QueryPlan;
import org.apache.phoenix.compile.SequenceManager;
import org.apache.phoenix.compile.StatementContext;
import org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheRequest;
import org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheResponse;
import org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataService;
import org.apache.phoenix.execute.MutationState;
import org.apache.phoenix.expression.AndExpression;
import org.apache.phoenix.expression.ByteBasedLikeExpression;
import org.apache.phoenix.expression.ComparisonExpression;
import org.apache.phoenix.expression.Expression;
import org.apache.phoenix.expression.InListExpression;
import org.apache.phoenix.expression.KeyValueColumnExpression;
import org.apache.phoenix.expression.LiteralExpression;
import org.apache.phoenix.expression.NotExpression;
import org.apache.phoenix.expression.OrExpression;
import org.apache.phoenix.expression.RowKeyColumnExpression;
import org.apache.phoenix.expression.StringBasedLikeExpression;
import org.apache.phoenix.expression.aggregator.ClientAggregators;
import org.apache.phoenix.expression.function.SingleAggregateFunction;
import org.apache.phoenix.expression.function.SubstrFunction;
import org.apache.phoenix.expression.function.SumAggregateFunction;
import org.apache.phoenix.filter.MultiCQKeyValueComparisonFilter;
import org.apache.phoenix.filter.MultiEncodedCQKeyValueComparisonFilter;
import org.apache.phoenix.filter.MultiKeyValueComparisonFilter;
import org.apache.phoenix.filter.RowKeyComparisonFilter;
import org.apache.phoenix.filter.SingleCQKeyValueComparisonFilter;
import org.apache.phoenix.filter.SingleKeyValueComparisonFilter;
import org.apache.phoenix.jdbc.PhoenixConnection;
import org.apache.phoenix.jdbc.PhoenixDatabaseMetaData;
import org.apache.phoenix.jdbc.PhoenixPreparedStatement;
import org.apache.phoenix.jdbc.PhoenixStatement;
import org.apache.phoenix.parse.LikeParseNode.LikeType;
import org.apache.phoenix.query.ConnectionQueryServices;
import org.apache.phoenix.query.KeyRange;
import org.apache.phoenix.query.QueryConstants;
import org.apache.phoenix.query.QueryServices;
import org.apache.phoenix.query.QueryServicesOptions;
import org.apache.phoenix.schema.PColumn;
import org.apache.phoenix.schema.PIndexState;
import org.apache.phoenix.schema.PLongColumn;
import org.apache.phoenix.schema.PName;
import org.apache.phoenix.schema.PTable;
import org.apache.phoenix.schema.PTable.QualifierEncodingScheme;
import org.apache.phoenix.schema.PTableKey;
import org.apache.phoenix.schema.RowKeyValueAccessor;
import org.apache.phoenix.schema.SortOrder;
import org.apache.phoenix.schema.TableRef;
import org.apache.phoenix.schema.stats.GuidePostsInfo;
import org.apache.phoenix.schema.stats.GuidePostsKey;
import org.apache.phoenix.schema.tuple.Tuple;
import org.apache.phoenix.schema.types.PDataType;
import org.apache.phoenix.transaction.TransactionFactory;
import com.google.common.base.Objects;
import com.google.common.collect.Lists;
public class TestUtil {
private static final Log LOG = LogFactory.getLog(TestUtil.class);
private static final Long ZERO = new Long(0);
public static final String DEFAULT_SCHEMA_NAME = "S";
public static final String DEFAULT_DATA_TABLE_NAME = "T";
public static final String DEFAULT_INDEX_TABLE_NAME = "I";
public static final String DEFAULT_DATA_TABLE_FULL_NAME = SchemaUtil.getTableName(DEFAULT_SCHEMA_NAME, "T");
public static final String DEFAULT_INDEX_TABLE_FULL_NAME = SchemaUtil.getTableName(DEFAULT_SCHEMA_NAME, "I");
public static final String TEST_TABLE_SCHEMA = "(" +
" varchar_pk VARCHAR NOT NULL, " +
" char_pk CHAR(10) NOT NULL, " +
" int_pk INTEGER NOT NULL, "+
" long_pk BIGINT NOT NULL, " +
" decimal_pk DECIMAL(31, 10) NOT NULL, " +
" date_pk DATE NOT NULL, " +
" a.varchar_col1 VARCHAR, " +
" a.char_col1 CHAR(10), " +
" a.int_col1 INTEGER, " +
" a.long_col1 BIGINT, " +
" a.decimal_col1 DECIMAL(31, 10), " +
" a.date1 DATE, " +
" b.varchar_col2 VARCHAR, " +
" b.char_col2 CHAR(10), " +
" b.int_col2 INTEGER, " +
" b.long_col2 BIGINT, " +
" b.decimal_col2 DECIMAL(31, 10), " +
" b.date2 DATE " +
" CONSTRAINT pk PRIMARY KEY (varchar_pk, char_pk, int_pk, long_pk DESC, decimal_pk, date_pk)) ";
    // Utility class: private constructor prevents instantiation.
    private TestUtil() {
    }
public static final String CF_NAME = "a";
public static final byte[] CF = Bytes.toBytes(CF_NAME);
public static final String CF2_NAME = "b";
public final static String A_VALUE = "a";
public final static byte[] A = Bytes.toBytes(A_VALUE);
public final static String B_VALUE = "b";
public final static byte[] B = Bytes.toBytes(B_VALUE);
public final static String C_VALUE = "c";
public final static byte[] C = Bytes.toBytes(C_VALUE);
public final static String D_VALUE = "d";
public final static byte[] D = Bytes.toBytes(D_VALUE);
public final static String E_VALUE = "e";
public final static byte[] E = Bytes.toBytes(E_VALUE);
public final static String ROW1 = "00A123122312312";
public final static String ROW2 = "00A223122312312";
public final static String ROW3 = "00A323122312312";
public final static String ROW4 = "00A423122312312";
public final static String ROW5 = "00B523122312312";
public final static String ROW6 = "00B623122312312";
public final static String ROW7 = "00B723122312312";
public final static String ROW8 = "00B823122312312";
public final static String ROW9 = "00C923122312312";
public final static String PARENTID1 = "0500x0000000001";
public final static String PARENTID2 = "0500x0000000002";
public final static String PARENTID3 = "0500x0000000003";
public final static String PARENTID4 = "0500x0000000004";
public final static String PARENTID5 = "0500x0000000005";
public final static String PARENTID6 = "0500x0000000006";
public final static String PARENTID7 = "0500x0000000007";
public final static String PARENTID8 = "0500x0000000008";
public final static String PARENTID9 = "0500x0000000009";
public final static List<String> PARENTIDS = Lists.newArrayList(PARENTID1, PARENTID2, PARENTID3, PARENTID4, PARENTID5, PARENTID6, PARENTID7, PARENTID8, PARENTID9);
public final static String ENTITYHISTID1 = "017x00000000001";
public final static String ENTITYHISTID2 = "017x00000000002";
public final static String ENTITYHISTID3 = "017x00000000003";
public final static String ENTITYHISTID4 = "017x00000000004";
public final static String ENTITYHISTID5 = "017x00000000005";
public final static String ENTITYHISTID6 = "017x00000000006";
public final static String ENTITYHISTID7 = "017x00000000007";
public final static String ENTITYHISTID8 = "017x00000000008";
public final static String ENTITYHISTID9 = "017x00000000009";
public final static List<String> ENTITYHISTIDS = Lists.newArrayList(ENTITYHISTID1, ENTITYHISTID2, ENTITYHISTID3, ENTITYHISTID4, ENTITYHISTID5, ENTITYHISTID6, ENTITYHISTID7, ENTITYHISTID8, ENTITYHISTID9);
public static final String LOCALHOST = "localhost";
public static final String PHOENIX_JDBC_URL = JDBC_PROTOCOL + JDBC_PROTOCOL_SEPARATOR + LOCALHOST + JDBC_PROTOCOL_TERMINATOR + PHOENIX_TEST_DRIVER_URL_PARAM;
public static final String PHOENIX_CONNECTIONLESS_JDBC_URL = JDBC_PROTOCOL + JDBC_PROTOCOL_SEPARATOR + CONNECTIONLESS + JDBC_PROTOCOL_TERMINATOR + PHOENIX_TEST_DRIVER_URL_PARAM;
public static final String TEST_SCHEMA_FILE_NAME = "config" + File.separator + "test-schema.xml";
public static final String CED_SCHEMA_FILE_NAME = "config" + File.separator + "schema.xml";
public static final String ENTITY_HISTORY_TABLE_NAME = "ENTITY_HISTORY";
public static final String ENTITY_HISTORY_SALTED_TABLE_NAME = "ENTITY_HISTORY_SALTED";
public static final String ATABLE_NAME = "ATABLE";
public static final String TABLE_WITH_ARRAY = "TABLE_WITH_ARRAY";
public static final String SUM_DOUBLE_NAME = "SumDoubleTest";
public static final String ATABLE_SCHEMA_NAME = "";
public static final String BTABLE_NAME = "BTABLE";
public static final String STABLE_NAME = "STABLE";
public static final String STABLE_PK_NAME = "ID";
public static final String STABLE_SCHEMA_NAME = "";
public static final String CUSTOM_ENTITY_DATA_FULL_NAME = "CORE.CUSTOM_ENTITY_DATA";
public static final String CUSTOM_ENTITY_DATA_NAME = "CUSTOM_ENTITY_DATA";
public static final String CUSTOM_ENTITY_DATA_SCHEMA_NAME = "CORE";
public static final String HBASE_NATIVE = "HBASE_NATIVE";
public static final String HBASE_NATIVE_SCHEMA_NAME = "";
public static final String HBASE_DYNAMIC_COLUMNS = "HBASE_DYNAMIC_COLUMNS";
public static final String HBASE_DYNAMIC_COLUMNS_SCHEMA_NAME = "";
public static final String PRODUCT_METRICS_NAME = "PRODUCT_METRICS";
// Fixed table/schema names shared by tests that pre-date generateUniqueName().
public static final String PTSDB_NAME = "PTSDB";
public static final String PTSDB2_NAME = "PTSDB2";
public static final String PTSDB3_NAME = "PTSDB3";
public static final String PTSDB_SCHEMA_NAME = "";
public static final String FUNKY_NAME = "FUNKY_NAMES";
public static final String MULTI_CF_NAME = "MULTI_CF";
public static final String MDTEST_NAME = "MDTEST";
public static final String MDTEST_SCHEMA_NAME = "";
public static final String KEYONLY_NAME = "KEYONLY";
public static final String TABLE_WITH_SALTING = "TABLE_WITH_SALTING";
public static final String INDEX_DATA_SCHEMA = "INDEX_TEST";
public static final String INDEX_DATA_TABLE = "INDEX_DATA_TABLE";
public static final String MUTABLE_INDEX_DATA_TABLE = "MUTABLE_INDEX_DATA_TABLE";
public static final String TRANSACTIONAL_DATA_TABLE = "TRANSACTIONAL_DATA_TABLE";
// Join-test tables live in a case-sensitive (quoted) schema/table namespace.
public static final String JOIN_SCHEMA = "Join";
public static final String JOIN_ORDER_TABLE = "OrderTable";
public static final String JOIN_CUSTOMER_TABLE = "CustomerTable";
public static final String JOIN_ITEM_TABLE = "ItemTable";
public static final String JOIN_SUPPLIER_TABLE = "SupplierTable";
public static final String JOIN_COITEM_TABLE = "CoitemTable";
// Fully quoted forms, e.g. "Join"."OrderTable", for use in SQL text.
public static final String JOIN_ORDER_TABLE_FULL_NAME = '"' + JOIN_SCHEMA + "\".\"" + JOIN_ORDER_TABLE + '"';
public static final String JOIN_CUSTOMER_TABLE_FULL_NAME = '"' + JOIN_SCHEMA + "\".\"" + JOIN_CUSTOMER_TABLE + '"';
public static final String JOIN_ITEM_TABLE_FULL_NAME = '"' + JOIN_SCHEMA + "\".\"" + JOIN_ITEM_TABLE + '"';
public static final String JOIN_SUPPLIER_TABLE_FULL_NAME = '"' + JOIN_SCHEMA + "\".\"" + JOIN_SUPPLIER_TABLE + '"';
public static final String JOIN_COITEM_TABLE_FULL_NAME = '"' + JOIN_SCHEMA + "\".\"" + JOIN_COITEM_TABLE + '"';
// Unquoted display forms, e.g. Join.OrderTable, for messages/EXPLAIN output.
public static final String JOIN_ORDER_TABLE_DISPLAY_NAME = JOIN_SCHEMA + "." + JOIN_ORDER_TABLE;
public static final String JOIN_CUSTOMER_TABLE_DISPLAY_NAME = JOIN_SCHEMA + "." + JOIN_CUSTOMER_TABLE;
public static final String JOIN_ITEM_TABLE_DISPLAY_NAME = JOIN_SCHEMA + "." + JOIN_ITEM_TABLE;
public static final String JOIN_SUPPLIER_TABLE_DISPLAY_NAME = JOIN_SCHEMA + "." + JOIN_SUPPLIER_TABLE;
public static final String JOIN_COITEM_TABLE_DISPLAY_NAME = JOIN_SCHEMA + "." + JOIN_COITEM_TABLE;
public static final String BINARY_NAME = "BinaryTable";
/**
 * Read-only properties used by all tests.
 * <p>
 * Overriding put()/remove()/clear() alone does NOT make a Hashtable-derived
 * class immutable: Hashtable supplies its own implementations of the
 * java.util.Map default methods (putIfAbsent, replace, merge, compute*) that
 * mutate the table directly without delegating to put(). Those are blocked
 * here as well. setProperty() and putAll() funnel through put() and are
 * already covered by it.
 */
public static final Properties TEST_PROPERTIES = new Properties() {
    @Override
    public String put(Object key, Object value) {
        throw new UnsupportedOperationException();
    }
    @Override
    public void clear() {
        throw new UnsupportedOperationException();
    }
    @Override
    public Object remove(Object key) {
        throw new UnsupportedOperationException();
    }
    @Override
    public boolean remove(Object key, Object value) {
        throw new UnsupportedOperationException();
    }
    @Override
    public Object putIfAbsent(Object key, Object value) {
        throw new UnsupportedOperationException();
    }
    @Override
    public Object replace(Object key, Object value) {
        throw new UnsupportedOperationException();
    }
    @Override
    public boolean replace(Object key, Object oldValue, Object newValue) {
        throw new UnsupportedOperationException();
    }
    @Override
    public Object merge(Object key, Object value, java.util.function.BiFunction<? super Object, ? super Object, ? extends Object> remappingFunction) {
        throw new UnsupportedOperationException();
    }
    @Override
    public Object compute(Object key, java.util.function.BiFunction<? super Object, ? super Object, ? extends Object> remappingFunction) {
        throw new UnsupportedOperationException();
    }
    @Override
    public Object computeIfAbsent(Object key, java.util.function.Function<? super Object, ? extends Object> mappingFunction) {
        throw new UnsupportedOperationException();
    }
    @Override
    public Object computeIfPresent(Object key, java.util.function.BiFunction<? super Object, ? super Object, ? extends Object> remappingFunction) {
        throw new UnsupportedOperationException();
    }
};
/**
 * Pre-split points for a four-region test table: an empty boundary followed
 * by {@code tenantId + "00A"/"00B"/"00C"}.
 */
public static byte[][] getSplits(String tenantId) {
    String[] suffixes = { "00A", "00B", "00C" };
    byte[][] splits = new byte[suffixes.length + 1][];
    splits[0] = HConstants.EMPTY_BYTE_ARRAY;
    for (int i = 0; i < suffixes.length; i++) {
        splits[i + 1] = Bytes.toBytes(tenantId + suffixes[i]);
    }
    return splits;
}
/**
 * Asserts two BigDecimals are numerically equal after rounding both with
 * Phoenix's default MathContext (compareTo, so scale differences are ignored).
 */
public static void assertRoundEquals(BigDecimal bd1, BigDecimal bd2) {
    BigDecimal rounded1 = bd1.round(PDataType.DEFAULT_MATH_CONTEXT);
    BigDecimal rounded2 = bd2.round(PDataType.DEFAULT_MATH_CONTEXT);
    if (rounded1.compareTo(rounded2) != 0) {
        fail("expected:<" + rounded1 + "> but was:<" + rounded2 + ">");
    }
}
/** Average sum/count as a BigDecimal under Phoenix's default MathContext. */
public static BigDecimal computeAverage(double sum, long count) {
    BigDecimal dividend = BigDecimal.valueOf(sum);
    BigDecimal divisor = BigDecimal.valueOf(count);
    return dividend.divide(divisor, PDataType.DEFAULT_MATH_CONTEXT);
}
/** Average sum/count as a BigDecimal under Phoenix's default MathContext. */
public static BigDecimal computeAverage(long sum, long count) {
    BigDecimal dividend = BigDecimal.valueOf(sum);
    BigDecimal divisor = BigDecimal.valueOf(count);
    return dividend.divide(divisor, PDataType.DEFAULT_MATH_CONTEXT);
}
// Builds the predicate "c <op> o" over a non-PK (KeyValue) column.
public static Expression constantComparison(CompareOp op, PColumn c, Object o) {
        return new ComparisonExpression(Arrays.<Expression>asList(new KeyValueColumnExpression(c), LiteralExpression.newConstant(o)), op);
}
// Wraps a non-PK column as a KeyValue column-reference expression.
public static Expression kvColumn(PColumn c) {
        return new KeyValueColumnExpression(c);
}
// Wraps a PK column as a row-key accessor expression, positioned by the
// column's index within the given PK column list.
public static Expression pkColumn(PColumn c, List<PColumn> columns) {
        return new RowKeyColumnExpression(c, new RowKeyValueAccessor(columns, columns.indexOf(c)));
}
// Builds the predicate "e <op> o" for an arbitrary expression.
public static Expression constantComparison(CompareOp op, Expression e, Object o) {
        return new ComparisonExpression(Arrays.asList(e, LiteralExpression.newConstant(o)), op);
}
// True when the connection is configured to evaluate LIKE/regex on raw bytes
// rather than decoded strings (QueryServices.USE_BYTE_BASED_REGEX_ATTRIB).
private static boolean useByteBasedRegex(StatementContext context) {
        return context
                .getConnection()
                .getQueryServices()
                .getProps()
                .getBoolean(QueryServices.USE_BYTE_BASED_REGEX_ATTRIB,
                        QueryServicesOptions.DEFAULT_USE_BYTE_BASED_REGEX);
}
// Case-sensitive LIKE: e LIKE o. The implementation class (byte- vs
// string-based) follows the connection's regex configuration.
public static Expression like(Expression e, Object o, StatementContext context) {
        return useByteBasedRegex(context)?
           ByteBasedLikeExpression.create(Arrays.asList(e, LiteralExpression.newConstant(o)), LikeType.CASE_SENSITIVE):
           StringBasedLikeExpression.create(Arrays.asList(e, LiteralExpression.newConstant(o)), LikeType.CASE_SENSITIVE);
}
// Case-insensitive LIKE: e ILIKE o.
public static Expression ilike(Expression e, Object o, StatementContext context) {
        return useByteBasedRegex(context)?
           ByteBasedLikeExpression.create(Arrays.asList(e, LiteralExpression.newConstant(o)), LikeType.CASE_INSENSITIVE):
           StringBasedLikeExpression.create(Arrays.asList(e, LiteralExpression.newConstant(o)), LikeType.CASE_INSENSITIVE);
}
// SUBSTR(e, offset, length) with constant offset/length.
public static Expression substr(Expression e, Object offset, Object length) {
        return  new SubstrFunction(Arrays.asList(e, LiteralExpression.newConstant(offset), LiteralExpression.newConstant(length)));
}
// SUBSTR(e, offset) — two-argument form; length is passed as a null constant.
public static Expression substr2(Expression e, Object offset) {
        return  new SubstrFunction(Arrays.asList(e, LiteralExpression.newConstant(offset), LiteralExpression.newConstant(null)));
}
// Builds the predicate "c1 <op> c2" between two expressions.
public static Expression columnComparison(CompareOp op, Expression c1, Expression c2) {
        return  new ComparisonExpression(Arrays.<Expression>asList(c1, c2), op);
}
// Filter evaluating a predicate that touches a single column qualifier.
public static SingleKeyValueComparisonFilter singleKVFilter(Expression e) {
        return  new SingleCQKeyValueComparisonFilter(e);
}
// Filter evaluating a predicate over row-key columns only.
public static RowKeyComparisonFilter rowKeyFilter(Expression e) {
        return  new RowKeyComparisonFilter(e, QueryConstants.DEFAULT_COLUMN_FAMILY_BYTES);
}
// Filter evaluating a predicate spanning multiple column qualifiers.
public static MultiKeyValueComparisonFilter multiKVFilter(Expression e) {
        return  new MultiCQKeyValueComparisonFilter(e, false, ByteUtil.EMPTY_BYTE_ARRAY);
}
// Multi-qualifier filter for tables using encoded column qualifiers.
public static MultiEncodedCQKeyValueComparisonFilter multiEncodedKVFilter(Expression e, QualifierEncodingScheme encodingScheme) {
        return  new MultiEncodedCQKeyValueComparisonFilter(e, encodingScheme, false, null);
}
// Boolean AND over the given child expressions.
public static Expression and(Expression... expressions) {
        return new AndExpression(Arrays.asList(expressions));
}
// Boolean NOT of the given child expression.
public static Expression not(Expression expression) {
        return new NotExpression(expression);
}
// Boolean OR over the given child expressions.
public static Expression or(Expression... expressions) {
        return new OrExpression(Arrays.asList(expressions));
}
// IN-list over pre-built expressions; first element is the LHS.
public static Expression in(Expression... expressions) throws SQLException {
        return InListExpression.create(Arrays.asList(expressions), false, new ImmutableBytesWritable(), true);
}
/**
 * Builds "e IN (literals...)", coercing every literal to the data type of
 * {@code e} before constructing the IN-list expression.
 */
public static Expression in(Expression e, Object... literals) throws SQLException {
    PDataType childType = e.getDataType();
    List<Expression> children = new ArrayList<Expression>(literals.length + 1);
    children.add(e);
    for (int i = 0; i < literals.length; i++) {
        children.add(LiteralExpression.newConstant(literals[i], childType));
    }
    return InListExpression.create(children, false, new ImmutableBytesWritable(), true);
}
/** Asserts the scan built for {@code context} is degenerate (returns no rows). */
public static void assertDegenerate(StatementContext context) {
    assertDegenerate(context.getScan());
}
/**
 * Asserts the scan is degenerate: no filter and both start and stop row set
 * to the empty range's lower bound, i.e. the scan can return nothing.
 * (The original repeated the null-filter check via a redundant
 * assertEquals(null, ...) at the end; assertNull covers it.)
 */
public static void assertDegenerate(Scan scan) {
    assertNull(scan.getFilter());
    assertArrayEquals(KeyRange.EMPTY_RANGE.getLowerRange(), scan.getStartRow());
    assertArrayEquals(KeyRange.EMPTY_RANGE.getLowerRange(), scan.getStopRow());
}
/** Asserts the scan is NOT degenerate: start and stop row are not both empty. */
public static void assertNotDegenerate(Scan scan) {
    byte[] emptyLower = KeyRange.EMPTY_RANGE.getLowerRange();
    boolean startIsEmpty = Bytes.compareTo(emptyLower, scan.getStartRow()) == 0;
    boolean stopIsEmpty = Bytes.compareTo(emptyLower, scan.getStopRow()) == 0;
    assertFalse(startIsEmpty && stopIsEmpty);
}
/**
 * Asserts the scan is a full-table scan: no filter, empty start and stop row.
 * (The original repeated the null-filter check via a redundant
 * assertEquals(null, ...) at the end; assertNull covers it.)
 */
public static void assertEmptyScanKey(Scan scan) {
    assertNull(scan.getFilter());
    assertArrayEquals(ByteUtil.EMPTY_BYTE_ARRAY, scan.getStartRow());
    assertArrayEquals(ByteUtil.EMPTY_BYTE_ARRAY, scan.getStopRow());
}
/**
 * Deep-compares two result tuples cell by cell.
 *
 * @param res1 first tuple to compare
 * @param res2 second tuple to compare
 * @throws Exception if res2 is null, the sizes differ, or any cell differs
 */
public static void compareTuples(Tuple res1, Tuple res2)
    throws Exception {
    if (res2 == null) {
        throw new Exception("There wasn't enough rows, we stopped at " + res1);
    }
    int cellCount = res1.size();
    if (cellCount != res2.size()) {
        throw new Exception("This row doesn't have the same number of KVs: "
            + res1.toString() + " compared to " + res2.toString());
    }
    for (int idx = 0; idx < cellCount; idx++) {
        Cell lhs = res1.getValue(idx);
        Cell rhs = res2.getValue(idx);
        if (!lhs.equals(rhs)) {
            throw new Exception("This result was different: "
                + res1.toString() + " compared to " + res2.toString());
        }
    }
}
/**
 * Invokes the ClearCache coprocessor endpoint on SYSTEM.CATALOG so the
 * server-side metadata cache is dropped. Any server-side failure captured by
 * the RPC controller is rethrown to the caller.
 */
public static void clearMetaDataCache(Connection conn) throws Throwable {
        PhoenixConnection pconn = conn.unwrap(PhoenixConnection.class);
        Table htable = pconn.getQueryServices().getTable(PhoenixDatabaseMetaData.SYSTEM_CATALOG_NAME_BYTES);
        // Empty start/end rows: run the endpoint against every region of SYSTEM.CATALOG.
        htable.coprocessorService(MetaDataService.class, HConstants.EMPTY_START_ROW,
                HConstants.EMPTY_END_ROW, new Batch.Call<MetaDataService, ClearCacheResponse>() {
                    @Override
                    public ClearCacheResponse call(MetaDataService instance) throws IOException {
                        ServerRpcController controller = new ServerRpcController();
                        BlockingRpcCallback<ClearCacheResponse> rpcCallback =
                                new BlockingRpcCallback<ClearCacheResponse>();
                        ClearCacheRequest.Builder builder = ClearCacheRequest.newBuilder();
                        instance.clearCache(controller, builder.build(), rpcCallback);
                        // Propagate any exception the server attached to the controller.
                        if(controller.getFailedOn() != null) {
                            throw controller.getFailedOn();
                        }
                        return rpcCallback.get();
                    }
                  });
}
// Best-effort close: deliberately swallows everything (including NPE when
// stmt is null) so cleanup in test finally-blocks never masks the real failure.
public static void closeStatement(Statement stmt) {
        try {
            stmt.close();
        } catch (Throwable ignore) {}
}
// Best-effort close of a connection; see closeStatement for rationale.
public static void closeConnection(Connection conn) {
        try {
            conn.close();
        } catch (Throwable ignore) {}
}
// Best-effort close of both; statement first, then its connection.
public static void closeStmtAndConn(Statement stmt, Connection conn) {
        closeStatement(stmt);
        closeConnection(conn);
}
/** Binds each value of {@code binds} to the statement, 1-based, in list order. */
public static void bindParams(PhoenixPreparedStatement stmt, List<Object> binds) throws SQLException {
    int paramIndex = 1;
    for (Object bind : binds) {
        stmt.setObject(paramIndex++, bind);
    }
}
/**
 * Upserts one (id, input) row into the {@code tableName_<sortOrder>} table
 * created by {@link #initTables} and commits.
 *
 * @param conn connection to be used
 * @param sortOrder sort order suffix of the target table ("ASC"/"DESC")
 * @param id id of the row being inserted
 * @param input value to be inserted; String, Integer, Double, Float,
 *        Boolean or Long
 * @throws UnsupportedOperationException for any other input type
 */
public static void upsertRow(Connection conn, String tableName, String sortOrder, int id, Object input) throws SQLException {
    String dml = String.format("UPSERT INTO " + tableName + "_%s VALUES(?,?)", sortOrder);
    // try-with-resources: the original leaked the PreparedStatement.
    try (PreparedStatement stmt = conn.prepareStatement(dml)) {
        stmt.setInt(1, id);
        if (input instanceof String)
            stmt.setString(2, (String) input);
        else if (input instanceof Integer)
            stmt.setInt(2, (Integer) input);
        else if (input instanceof Double)
            stmt.setDouble(2, (Double) input);
        else if (input instanceof Float)
            stmt.setFloat(2, (Float) input);
        else if (input instanceof Boolean)
            stmt.setBoolean(2, (Boolean) input);
        else if (input instanceof Long)
            stmt.setLong(2, (Long) input);
        else
            throw new UnsupportedOperationException("" + input.getClass() + " is not supported by upsertRow");
        stmt.execute();
    }
    conn.commit();
}
/** Creates the (id, uri, appcpu) table used by the GROUP BY tests. */
public static void createGroupByTestTable(Connection conn, String tableName) throws SQLException {
    String ddl = "create table " + tableName +
            " (id varchar not null primary key,\n" +
            " uri varchar, appcpu integer)";
    conn.createStatement().execute(ddl);
}
/**
 * Creates {@code tableName_<sortOrder>} with a (id, pk) composite primary key
 * where pk uses the given SQL type and sort order, plus a kv column of the
 * same type, then commits.
 */
private static void createTable(Connection conn, String inputSqlType, String tableName, String sortOrder) throws SQLException {
    String template =
            "CREATE TABLE " + tableName + "_%s (id INTEGER NOT NULL, pk %s NOT NULL, " + "kv %s "
                    + "CONSTRAINT PK_CONSTRAINT PRIMARY KEY (id, pk %s))";
    String ddl = String.format(template, sortOrder, inputSqlType, inputSqlType, sortOrder);
    conn.createStatement().execute(ddl);
    conn.commit();
}
/**
 * Creates the ASC and DESC variants of a uniquely named test table and
 * populates both with the given values.
 *
 * @param conn connection to be used
 * @param inputSqlType sql type of the pk/kv columns
 * @param inputList values inserted into the pk column; list index becomes id
 * @return the generated base table name (without the _ASC/_DESC suffix)
 */
public static String initTables(Connection conn, String inputSqlType, List<Object> inputList) throws Exception {
    String tableName = generateUniqueName();
    createTable(conn, inputSqlType, tableName, "ASC");
    createTable(conn, inputSqlType, tableName, "DESC");
    for (int i = 0; i < inputList.size(); ++i) {
        upsertRow(conn, tableName, "ASC", i, inputList.get(i));
        upsertRow(conn, tableName, "DESC", i, inputList.get(i));
    }
    return tableName;
}
// Splits for an unbounded NO_INDEX count over the table.
public static List<KeyRange> getAllSplits(Connection conn, String tableName) throws SQLException {
        return getSplits(conn, tableName, null, null, null, null, null);
}
// Splits for an unbounded NO_INDEX query with an extra WHERE suffix and
// caller-chosen select list.
public static List<KeyRange> getAllSplits(Connection conn, String tableName, String where, String selectClause) throws SQLException {
        return getSplits(conn, tableName, null, null, null, where, selectClause);
}
/**
 * Returns the query-plan splits for a NO_INDEX SELECT over {@code tableName},
 * optionally bounded by {@code lowerRange <= pkCol < upperRange} and further
 * filtered by {@code whereClauseSuffix}.
 *
 * @param pkCol primary key column name; required when a range is given
 * @param lowerRange inclusive lower bound bind value, or null
 * @param upperRange exclusive upper bound bind value, or null
 * @param whereClauseSuffix extra predicate ANDed onto the range, or null
 */
public static List<KeyRange> getSplits(Connection conn, String tableName, String pkCol, byte[] lowerRange, byte[] upperRange, String whereClauseSuffix, String selectClause) throws SQLException {
    // Range predicate over the PK column; empty when no bounds were given.
    String rangeClause =
            (lowerRange == null && upperRange == null ? "" :
            " WHERE " + ((lowerRange != null ? (pkCol + " >= ? " + (upperRange != null ? " AND " : "")) : "")
                    + (upperRange != null ? (pkCol + " < ?") : "")));
    String whereClause;
    if (whereClauseSuffix == null) {
        whereClause = rangeClause;
    } else if (rangeClause.length() == 0) {
        whereClause = " WHERE " + whereClauseSuffix;
    } else {
        // Bug fix: the range predicate used to be dropped here, producing
        // " AND <suffix>" with no WHERE clause and leaving the ?-binds
        // below without matching parameters.
        whereClause = rangeClause + " AND " + whereClauseSuffix;
    }
    String query = "SELECT /*+ NO_INDEX */ " + selectClause + " FROM " + tableName + whereClause;
    PhoenixPreparedStatement pstmt = conn.prepareStatement(query).unwrap(PhoenixPreparedStatement.class);
    if (lowerRange != null) {
        pstmt.setBytes(1, lowerRange);
    }
    if (upperRange != null) {
        pstmt.setBytes(lowerRange != null ? 2 : 1, upperRange);
    }
    pstmt.execute();
    List<KeyRange> keyRanges = pstmt.getQueryPlan().getSplits();
    return keyRanges;
}
// Guide posts for the whole table (no range, no extra predicate).
public static Collection<GuidePostsInfo> getGuidePostsList(Connection conn, String tableName) throws SQLException {
        return getGuidePostsList(conn, tableName, null, null, null, null);
}
// Guide posts restricted by an extra WHERE suffix.
public static Collection<GuidePostsInfo> getGuidePostsList(Connection conn, String tableName, String where)
        throws SQLException {
        return getGuidePostsList(conn, tableName, null, null, null, where);
}
/**
 * Compiles a NO_INDEX COUNT(*) over {@code tableName} (optionally bounded by
 * {@code lowerRange <= pkCol < upperRange} plus {@code whereClauseSuffix})
 * and returns the guide-posts statistics of the table it resolves to.
 */
public static Collection<GuidePostsInfo> getGuidePostsList(Connection conn, String tableName, String pkCol,
        byte[] lowerRange, byte[] upperRange, String whereClauseSuffix) throws SQLException {
    // Range predicate over the PK column; empty when no bounds were given.
    String rangeClause = (lowerRange == null && upperRange == null ? ""
            : " WHERE "
                    + ((lowerRange != null ? (pkCol + " >= ? " + (upperRange != null ? " AND " : "")) : "") + (upperRange != null ? (pkCol + " < ?")
                            : "")));
    String whereClause;
    if (whereClauseSuffix == null) {
        whereClause = rangeClause;
    } else if (rangeClause.length() == 0) {
        whereClause = " WHERE " + whereClauseSuffix;
    } else {
        // Bug fix: the range predicate used to be dropped here, producing
        // " AND <suffix>" with no WHERE clause and dangling ?-binds.
        whereClause = rangeClause + " AND " + whereClauseSuffix;
    }
    String query = "SELECT /*+ NO_INDEX */ COUNT(*) FROM " + tableName + whereClause;
    PhoenixPreparedStatement pstmt = conn.prepareStatement(query).unwrap(PhoenixPreparedStatement.class);
    if (lowerRange != null) {
        pstmt.setBytes(1, lowerRange);
    }
    if (upperRange != null) {
        pstmt.setBytes(lowerRange != null ? 2 : 1, upperRange);
    }
    pstmt.execute();
    TableRef tableRef = pstmt.getQueryPlan().getTableRef();
    PhoenixConnection pconn = conn.unwrap(PhoenixConnection.class);
    PTable table = tableRef.getTable();
    GuidePostsInfo info = pconn.getQueryServices().getTableStats(new GuidePostsKey(table.getName().getBytes(), SchemaUtil.getEmptyColumnFamily(table)));
    return Collections.singletonList(info);
}
// Collects statistics for the table (non-transactional variant).
public static void analyzeTable(Connection conn, String tableName) throws IOException, SQLException {
        analyzeTable(conn, tableName, false);
}
// Runs UPDATE STATISTICS and commits.
// NOTE(review): the transactional parameter is never consulted — the commit
// below is unconditional even though the comment suggests it exists to burn
// a txn only for transactional tables; confirm before relying on the flag.
public static void analyzeTable(Connection conn, String tableName, boolean transactional) throws IOException, SQLException {
        String query = "UPDATE STATISTICS " + tableName;
        conn.createStatement().execute(query);
        // if the table is transactional burn a txn in order to make sure the next txn read pointer is close to wall clock time
        conn.commit();
}
// Collects statistics for the table's indexes only (UPDATE STATISTICS ... INDEX).
public static void analyzeTableIndex(Connection conn, String tableName) throws IOException, SQLException {
        String query = "UPDATE STATISTICS " + tableName+ " INDEX";
        conn.createStatement().execute(query);
}
// Collects statistics for the table's columns only (UPDATE STATISTICS ... COLUMNS).
public static void analyzeTableColumns(Connection conn, String tableName) throws IOException, SQLException {
        String query = "UPDATE STATISTICS " + tableName + " COLUMNS";
        conn.createStatement().execute(query);
}
/**
 * Opens a fresh connection to {@code url}, runs UPDATE STATISTICS on the
 * table, and closes the connection. try-with-resources ensures the
 * connection is released even when the statement throws (the original
 * leaked it on failure).
 */
public static void analyzeTable(String url, Properties props, String tableName) throws IOException, SQLException {
    try (Connection conn = DriverManager.getConnection(url, props)) {
        analyzeTable(conn, tableName);
    }
}
/**
 * Binds the six standard row-key/column values for logical row {@code i}:
 * varchar, char, int, long, decimal and a date offset (i-1) days from
 * 2015-01-01.
 */
public static void setRowKeyColumns(PreparedStatement stmt, int i) throws SQLException {
    stmt.setString(1, "varchar" + String.valueOf(i));
    stmt.setString(2, "char" + String.valueOf(i));
    stmt.setInt(3, i);
    stmt.setLong(4, i);
    stmt.setBigDecimal(5, new BigDecimal(i * 0.5d));
    long baseMillis = DateUtil.parseDate("2015-01-01 00:00:00").getTime();
    stmt.setDate(6, new Date(baseMillis + (i - 1) * MILLIS_IN_DAY));
}
/**
 * Asserts the next row of {@code rs} matches what setRowKeyColumns(stmt, i)
 * wrote. JUnit's assertEquals takes the EXPECTED value first; the original
 * passed the arguments reversed, which made failure messages report the
 * actual value as "expected". Column 4 is written with setLong, so it is
 * read back with getLong.
 */
public static void validateRowKeyColumns(ResultSet rs, int i) throws SQLException {
    assertTrue(rs.next());
    assertEquals("varchar" + String.valueOf(i), rs.getString(1));
    assertEquals("char" + String.valueOf(i), rs.getString(2));
    assertEquals(i, rs.getInt(3));
    assertEquals(i, rs.getLong(4));
    assertEquals(new BigDecimal(i * 0.5d), rs.getBigDecimal(5));
    Date date = new Date(DateUtil.parseDate("2015-01-01 00:00:00").getTime() + (i - 1) * MILLIS_IN_DAY);
    assertEquals(date, rs.getDate(6));
}
/**
 * Builds a ClientAggregators instance containing a single SUM aggregate over
 * a synthetic long column (SINGLE_COLUMN_NAME / SINGLE_COLUMN_FAMILY_NAME),
 * registered on a throwaway statement context. Used by aggregation tests.
 */
public static ClientAggregators getSingleSumAggregator(String url, Properties props) throws SQLException {
        try (PhoenixConnection pconn = DriverManager.getConnection(url, props).unwrap(PhoenixConnection.class)) {
            PhoenixStatement statement = new PhoenixStatement(pconn);
            StatementContext context = new StatementContext(statement, null, new Scan(), new SequenceManager(statement));
            AggregationManager aggregationManager = context.getAggregationManager();
            // Minimal stub PLongColumn: only name/family/position/sort order matter
            // for the SUM function; everything else returns inert defaults.
            SumAggregateFunction func = new SumAggregateFunction(Arrays.<Expression>asList(new KeyValueColumnExpression(new PLongColumn() {
                @Override
                public PName getName() {
                    return SINGLE_COLUMN_NAME;
                }
                @Override
                public PName getFamilyName() {
                    return SINGLE_COLUMN_FAMILY_NAME;
                }
                @Override
                public int getPosition() {
                    return 0;
                }
                @Override
                public SortOrder getSortOrder() {
                    return SortOrder.getDefault();
                }
                @Override
                public Integer getArraySize() {
                    return 0;
                }
                @Override
                public byte[] getViewConstant() {
                    return null;
                }
                @Override
                public boolean isViewReferenced() {
                    return false;
                }
                @Override
                public String getExpressionStr() {
                    return null;
                }
                @Override
                public long getTimestamp() {
                    return HConstants.LATEST_TIMESTAMP;
                }
                @Override
                public boolean isDerived() {
                    return false;
                }
                @Override
                public boolean isExcluded() {
                    return false;
                }
                @Override
                public boolean isRowTimestamp() {
                    return false;
                }
                @Override
                public boolean isDynamic() {
                    return false;
                }
                @Override
                public byte[] getColumnQualifierBytes() {
                    return SINGLE_COLUMN_NAME.getBytes();
                }
            })), null);
            // One aggregator slot, holding the single SUM function.
            aggregationManager.setAggregators(new ClientAggregators(Collections.<SingleAggregateFunction>singletonList(func), 1));
            ClientAggregators aggregators = aggregationManager.getAggregators();
            return aggregators;
        }
}
/**
 * Creates a test table with a five-column composite PK (long_pk DESC) and two
 * column families "a" and "b", each holding one column of every common type;
 * {@code options} (may be null) is appended verbatim to the DDL.
 * NOTE(review): there is no comma between "b.date_col DATE" and the
 * CONSTRAINT clause — presumably accepted by Phoenix's grammar; confirm
 * before editing the DDL string.
 */
public static void createMultiCFTestTable(Connection conn, String tableName, String options) throws SQLException {
        String ddl = "create table if not exists " + tableName + "(" +
                "   varchar_pk VARCHAR NOT NULL, " +
                "   char_pk CHAR(5) NOT NULL, " +
                "   int_pk INTEGER NOT NULL, "+
                "   long_pk BIGINT NOT NULL, " +
                "   decimal_pk DECIMAL(31, 10) NOT NULL, " +
                "   a.varchar_col1 VARCHAR, " +
                "   a.char_col1 CHAR(5), " +
                "   a.int_col1 INTEGER, " +
                "   a.long_col1 BIGINT, " +
                "   a.decimal_col1 DECIMAL(31, 10), " +
                "   b.varchar_col2 VARCHAR, " +
                "   b.char_col2 CHAR(5), " +
                "   b.int_col2 INTEGER, " +
                "   b.long_col2 BIGINT, " +
                "   b.decimal_col2 DECIMAL, " +
                "   b.date_col DATE " +
                "   CONSTRAINT pk PRIMARY KEY (varchar_pk, char_pk, int_pk, long_pk DESC, decimal_pk)) "
                + (options!=null? options : "");
            conn.createStatement().execute(ddl);
}
/**
 * Runs a major compaction, and then waits until the compaction is complete before returning.
 *
 * Strategy: write a marker row, delete it, flush + major-compact, then poll a
 * raw scan until the marker (and its delete marker) has been physically
 * removed — which only happens once the compaction has completed.
 *
 * @param tableName name of the table to be compacted
 */
public static void doMajorCompaction(Connection conn, String tableName) throws Exception {
    tableName = SchemaUtil.normalizeIdentifier(tableName);
    // We simply write a marker row, request a major compaction, and then wait until the marker
    // row is gone
    PhoenixConnection pconn = conn.unwrap(PhoenixConnection.class);
    PTable table = pconn.getTable(new PTableKey(pconn.getTenantId(), tableName));
    ConnectionQueryServices services = conn.unwrap(PhoenixConnection.class).getQueryServices();
    MutationState mutationState = pconn.getMutationState();
    // Transactional tables need an open transaction for the HTable writes below.
    if (table.isTransactional()) {
        mutationState.startTransaction(table.getTransactionProvider());
    }
    try (Table htable = mutationState.getHTable(table)) {
        byte[] markerRowKey = Bytes.toBytes("TO_DELETE");
        Put put = new Put(markerRowKey);
        put.addColumn(QueryConstants.DEFAULT_COLUMN_FAMILY_BYTES, QueryConstants.EMPTY_COLUMN_VALUE_BYTES, QueryConstants.EMPTY_COLUMN_VALUE_BYTES);
        htable.put(put);
        Delete delete = new Delete(markerRowKey);
        delete.addColumn(QueryConstants.DEFAULT_COLUMN_FAMILY_BYTES, QueryConstants.EMPTY_COLUMN_VALUE_BYTES);
        htable.delete(delete);
        // NOTE(review): explicit close inside try-with-resources means the
        // table is closed twice — presumably Table.close() is idempotent; confirm.
        htable.close();
        if (table.isTransactional()) {
            mutationState.commit();
        }
        Admin hbaseAdmin = services.getAdmin();
        hbaseAdmin.flush(TableName.valueOf(tableName));
        hbaseAdmin.majorCompact(TableName.valueOf(tableName));
        hbaseAdmin.close();
        boolean compactionDone = false;
        while (!compactionDone) {
            Thread.sleep(6000L);
            // Raw scan over just the marker row: sees delete markers too, so the
            // row only disappears once compaction has physically purged it.
            Scan scan = new Scan();
            scan.setStartRow(markerRowKey);
            scan.setStopRow(Bytes.add(markerRowKey, new byte[] { 0 }));
            scan.setRaw(true);
            try (Table htableForRawScan = services.getTable(Bytes.toBytes(tableName))) {
                ResultScanner scanner = htableForRawScan.getScanner(scan);
                List<Result> results = Lists.newArrayList(scanner);
                LOG.info("Results: " + results);
                compactionDone = results.isEmpty();
                scanner.close();
            }
            LOG.info("Compaction done: " + compactionDone);
            // need to run compaction after the next txn snapshot has been written so that compaction can remove deleted rows
            if (!compactionDone && table.isTransactional()) {
                hbaseAdmin = services.getAdmin();
                hbaseAdmin.flush(TableName.valueOf(tableName));
                hbaseAdmin.majorCompact(TableName.valueOf(tableName));
                hbaseAdmin.close();
            }
        }
    }
}
// Creates a transactional test table with no extra table properties.
public static void createTransactionalTable(Connection conn, String tableName) throws SQLException {
        createTransactionalTable(conn, tableName, "");
}
// Creates a transactional test table using the shared TEST_TABLE_SCHEMA
// column list; extraProps, when non-empty, is appended after TRANSACTIONAL=true.
public static void createTransactionalTable(Connection conn, String tableName, String extraProps) throws SQLException {
        conn.createStatement().execute("create table " + tableName + TestUtil.TEST_TABLE_SCHEMA + "TRANSACTIONAL=true" + (extraProps.length() == 0 ? "" : ("," + extraProps)));
}
/**
 * Dumps every raw cell of the table to stdout — all versions, including
 * delete markers — for debugging. (Also removes a stray empty statement
 * `;;` and the dead `current` local from the original.)
 */
public static void dumpTable(Table table) throws IOException {
    System.out.println("************ dumping " + table + " **************");
    Scan s = new Scan();
    s.setRaw(true); // include delete markers
    s.setMaxVersions();
    try (ResultScanner scanner = table.getScanner(s)) {
        Result result = null;
        while ((result = scanner.next()) != null) {
            CellScanner cellScanner = result.cellScanner();
            while (cellScanner.advance()) {
                System.out.println(cellScanner.current());
            }
        }
    }
    System.out.println("-----------------------------------------------");
}
/**
 * Counts the rows visible to a raw scan (all versions, including delete
 * markers). The original also iterated every cell of each Result into a dead
 * local; only the row count matters, so that loop is removed, along with a
 * stray empty statement `;;`.
 */
public static int getRawRowCount(Table table) throws IOException {
    Scan s = new Scan();
    s.setRaw(true); // rows consisting only of delete markers still count
    s.setMaxVersions();
    int rows = 0;
    try (ResultScanner scanner = table.getScanner(s)) {
        while (scanner.next() != null) {
            rows++;
        }
    }
    return rows;
}
/**
 * Dumps every historical INDEX_STATE value recorded in SYSTEM.CATALOG for the
 * given index (raw scan, all versions) to stdout, for debugging.
 */
public static void dumpIndexStatus(Connection conn, String indexName) throws IOException, SQLException {
    try (Table table = conn.unwrap(PhoenixConnection.class).getQueryServices().getTable(PhoenixDatabaseMetaData.SYSTEM_CATALOG_NAME_BYTES)) {
        System.out.println("************ dumping index status for " + indexName + " **************");
        Scan s = new Scan();
        s.setRaw(true);
        s.setMaxVersions();
        // Restrict the scan to the catalog rows of this one index.
        byte[] startRow = SchemaUtil.getTableKeyFromFullName(indexName);
        s.setStartRow(startRow);
        s.setStopRow(ByteUtil.nextKey(ByteUtil.concat(startRow, QueryConstants.SEPARATOR_BYTE_ARRAY)));
        try (ResultScanner scanner = table.getScanner(s)) {
            Result result = null;
            while ((result = scanner.next()) != null) {
                CellScanner cellScanner = result.cellScanner();
                Cell current = null;
                while (cellScanner.advance()) {
                    current = cellScanner.current();
                    // Only print cells for the INDEX_STATE qualifier.
                    if (Bytes.compareTo(current.getQualifierArray(), current.getQualifierOffset(), current.getQualifierLength(), PhoenixDatabaseMetaData.INDEX_STATE_BYTES, 0, PhoenixDatabaseMetaData.INDEX_STATE_BYTES.length) == 0) {
                        System.out.println(current.getTimestamp() + "/INDEX_STATE=" + PIndexState.fromSerializedValue(current.getValueArray()[current.getValueOffset()]));
                    }
                }
            }
        }
        System.out.println("-----------------------------------------------");
    }
}
// Waits for a rebuilding index to reach the given state with a disable
// timestamp of 0 (i.e. no longer disabled).
public static void waitForIndexRebuild(Connection conn, String fullIndexName, PIndexState indexState) throws InterruptedException, SQLException {
        waitForIndexState(conn, fullIndexName, indexState, 0L);
}
/**
 * Snapshot of an index's observed state for the polling helpers.
 * success is tri-state: TRUE = matched the expectation, FALSE = can no
 * longer match (disable timestamp already reached zero), null = keep polling.
 */
private static class IndexStateCheck {
        public final PIndexState indexState;
        public final Long indexDisableTimestamp;
        public final Boolean success;
        public IndexStateCheck(PIndexState indexState, Long indexDisableTimestamp, Boolean success) {
            this.indexState = indexState;
            this.indexDisableTimestamp = indexDisableTimestamp;
            this.success = success;
        }
}
/**
 * Polls SYSTEM.CATALOG once per second (up to 60 tries) until the index
 * reaches the expected state/disable-timestamp; fails the test if the state
 * becomes unreachable or the polling budget runs out.
 */
public static void waitForIndexState(Connection conn, String fullIndexName, PIndexState expectedIndexState, Long expectedIndexDisableTimestamp) throws InterruptedException, SQLException {
    final int maxTries = 60;
    for (int attempt = 0; attempt < maxTries; attempt++) {
        Thread.sleep(1000); // sleep 1 sec between polls
        IndexStateCheck state = checkIndexStateInternal(conn, fullIndexName, expectedIndexState, expectedIndexDisableTimestamp);
        if (state.success != null) {
            if (Boolean.TRUE.equals(state.success)) {
                return;
            }
            fail("Index state will not become " + expectedIndexState);
        }
    }
    fail("Ran out of time waiting for index state to become " + expectedIndexState);
}
// True only when the index currently matches both expectations.
public static boolean checkIndexState(Connection conn, String fullIndexName, PIndexState expectedIndexState, Long expectedIndexDisableTimestamp) throws SQLException {
        return Boolean.TRUE.equals(checkIndexStateInternal(conn,fullIndexName, expectedIndexState, expectedIndexDisableTimestamp).success);
}
// Asserts the index matches the (non-null) expectations, reporting the
// actual state/timestamp on mismatch.
public static void assertIndexState(Connection conn, String fullIndexName, PIndexState expectedIndexState, Long expectedIndexDisableTimestamp) throws SQLException {
        IndexStateCheck state = checkIndexStateInternal(conn,fullIndexName, expectedIndexState, expectedIndexDisableTimestamp);
        if (!Boolean.TRUE.equals(state.success)) {
            if (expectedIndexState != null) {
                assertEquals(expectedIndexState, state.indexState);
            }
            if (expectedIndexDisableTimestamp != null) {
                assertEquals(expectedIndexDisableTimestamp, state.indexDisableTimestamp);
            }
        }
}
// Reads the index's current state without asserting anything.
public static PIndexState getIndexState(Connection conn, String fullIndexName) throws SQLException {
        IndexStateCheck state = checkIndexStateInternal(conn, fullIndexName, null, null);
        return state.indexState;
}
/**
 * Reads the index's INDEX_STATE and INDEX_DISABLE_TIMESTAMP from the header
 * row of SYSTEM.CATALOG and compares them against the (nullable)
 * expectations. Returns success=TRUE on match, FALSE when the disable
 * timestamp is already ZERO without a match (state can no longer converge),
 * and null when the caller should keep polling.
 */
private static IndexStateCheck checkIndexStateInternal(Connection conn, String fullIndexName, PIndexState expectedIndexState, Long expectedIndexDisableTimestamp) throws SQLException {
        String schema = SchemaUtil.getSchemaNameFromFullName(fullIndexName);
        String index = SchemaUtil.getTableNameFromFullName(fullIndexName);
        // Header row only: COLUMN_FAMILY and COLUMN_NAME are NULL there.
        String query = "SELECT CAST(" + PhoenixDatabaseMetaData.INDEX_DISABLE_TIMESTAMP + " AS BIGINT)," + PhoenixDatabaseMetaData.INDEX_STATE + " FROM " +
                PhoenixDatabaseMetaData.SYSTEM_CATALOG_NAME + " WHERE (" + PhoenixDatabaseMetaData.TABLE_SCHEM + "," + PhoenixDatabaseMetaData.TABLE_NAME
                + ") = (" + "'" + schema + "','" + index + "') "
                + "AND " + PhoenixDatabaseMetaData.COLUMN_FAMILY + " IS NULL AND " + PhoenixDatabaseMetaData.COLUMN_NAME + " IS NULL";
        ResultSet rs = conn.createStatement().executeQuery(query);
        Long actualIndexDisableTimestamp = null;
        PIndexState actualIndexState = null;
        if (rs.next()) {
            // NOTE(review): getLong returns 0 when the column is SQL NULL, so a
            // NULL timestamp is indistinguishable from ZERO here — confirm intended.
            actualIndexDisableTimestamp = rs.getLong(1);
            actualIndexState = PIndexState.fromSerializedValue(rs.getString(2));
            boolean matchesExpected = (expectedIndexDisableTimestamp == null || Objects.equal(actualIndexDisableTimestamp, expectedIndexDisableTimestamp))
                    && (expectedIndexState == null || actualIndexState == expectedIndexState);
            if (matchesExpected) {
                return new IndexStateCheck(actualIndexState, actualIndexDisableTimestamp, Boolean.TRUE);
            }
            if (ZERO.equals(actualIndexDisableTimestamp)) {
                return new IndexStateCheck(actualIndexState, actualIndexDisableTimestamp, Boolean.FALSE);
            }
        }
        return new IndexStateCheck(actualIndexState, actualIndexDisableTimestamp, null);
}
/**
 * Returns COUNT(*) for the table via a NO_INDEX query. try-with-resources
 * closes the Statement and ResultSet the original leaked.
 */
public static long getRowCount(Connection conn, String tableName) throws SQLException {
    try (Statement stmt = conn.createStatement();
         ResultSet rs = stmt.executeQuery("SELECT /*+ NO_INDEX */ count(*) FROM " + tableName)) {
        assertTrue(rs.next());
        return rs.getLong(1);
    }
}
/**
 * Installs {@code coprocessorClass} on the table (no-op if already present)
 * at a priority above Phoenix's default, then waits up to ~10s for the
 * modified descriptor to be observed.
 */
public static void addCoprocessor(Connection conn, String tableName, Class coprocessorClass) throws Exception {
    int priority = QueryServicesOptions.DEFAULT_COPROCESSOR_PRIORITY + 100;
    ConnectionQueryServices services = conn.unwrap(PhoenixConnection.class).getQueryServices();
    TableDescriptor descriptor = services.getTableDescriptor(Bytes.toBytes(tableName));
    if (descriptor.getCoprocessors().contains(coprocessorClass.getName())) {
        return; // already installed
    }
    TableDescriptorBuilder descriptorBuilder = TableDescriptorBuilder.newBuilder(descriptor);
    descriptorBuilder.addCoprocessor(coprocessorClass.getName(), null, priority, null);
    final int retries = 10;
    // Consistency fix: was a hard-coded 10 that could silently drift from
    // `retries` (removeCoprocessor already uses `retries`).
    int numTries = retries;
    descriptor = descriptorBuilder.build();
    try (Admin admin = services.getAdmin()) {
        admin.modifyTable(descriptor);
        while (!admin.getDescriptor(TableName.valueOf(tableName)).equals(descriptor)
                && numTries > 0) {
            numTries--;
            if (numTries == 0) {
                throw new Exception(
                        "Failed to add " + coprocessorClass.getName() + " after "
                                + retries + " retries.");
            }
            Thread.sleep(1000);
        }
    }
}
/**
 * Uninstalls {@code coprocessorClass} from the table (no-op if absent), then
 * waits up to ~10s for the modified descriptor to be observed.
 */
public static void removeCoprocessor(Connection conn, String tableName, Class coprocessorClass) throws Exception {
        ConnectionQueryServices services = conn.unwrap(PhoenixConnection.class).getQueryServices();
        TableDescriptor descriptor = services.getTableDescriptor(Bytes.toBytes(tableName));
        TableDescriptorBuilder descriptorBuilder = null;
        if (descriptor.getCoprocessors().contains(coprocessorClass.getName())) {
            descriptorBuilder=TableDescriptorBuilder.newBuilder(descriptor);
            descriptorBuilder.removeCoprocessor(coprocessorClass.getName());
        }else{
            return; // not installed — nothing to remove
        }
        final int retries = 10;
        int numTries = retries;
        descriptor = descriptorBuilder.build();
        try (Admin admin = services.getAdmin()) {
            admin.modifyTable(descriptor);
            // Poll until the cluster reports the updated descriptor or we give up.
            while (!admin.getDescriptor(TableName.valueOf(tableName)).equals(descriptor)
                    && numTries > 0) {
                numTries--;
                if (numTries == 0) {
                    throw new Exception(
                            "Failed to remove " + coprocessorClass.getName() + " after "
                                    + retries + " retries.");
                }
                Thread.sleep(1000);
            }
        }
}
/** Applies {@code op} to the lexicographic byte comparison of the two pointers. */
public static boolean compare(CompareOp op, ImmutableBytesWritable lhsOutPtr, ImmutableBytesWritable rhsOutPtr) {
    int cmp = Bytes.compareTo(
            lhsOutPtr.get(), lhsOutPtr.getOffset(), lhsOutPtr.getLength(),
            rhsOutPtr.get(), rhsOutPtr.getOffset(), rhsOutPtr.getLength());
    return ByteUtil.compare(op, cmp);
}
/** Compiles and optimizes {@code sql}, forcing plan initialization via iterator(). */
public static QueryPlan getOptimizeQueryPlan(Connection conn,String sql) throws SQLException {
    PhoenixPreparedStatement stmt = conn.prepareStatement(sql).unwrap(PhoenixPreparedStatement.class);
    QueryPlan plan = stmt.optimizeQuery(sql);
    plan.iterator();
    return plan;
}
/**
 * Asserts {@code rs} yields exactly rows.length rows whose 1-based column
 * {@code j} equals rows[i][j-1] (an expected null must match SQL NULL), and
 * that no further row exists. (Replaces assertTrue(!rs.next()) with the
 * clearer assertFalse.)
 */
public static void assertResultSet(ResultSet rs,Object[][] rows) throws Exception {
    for(int rowIndex=0; rowIndex < rows.length; rowIndex++) {
        assertTrue("rowIndex:["+rowIndex+"] rs.next error!",rs.next());
        for(int columnIndex = 1; columnIndex <= rows[rowIndex].length; columnIndex++) {
            Object realValue = rs.getObject(columnIndex);
            Object expectedValue = rows[rowIndex][columnIndex-1];
            if(realValue == null) {
                assertNull("rowIndex:["+rowIndex+"],columnIndex:["+columnIndex+"]",expectedValue);
            }
            else {
                assertEquals("rowIndex:["+rowIndex+"],columnIndex:["+columnIndex+"],realValue:["+
                        realValue+"],expectedValue:["+expectedValue+"]",
                        expectedValue,
                        realValue
                );
            }
        }
    }
    assertFalse(rs.next());
}
/**
 * Filters parameterized-test data by transaction provider: returns all rows
 * when every provider runs tests, none when no provider does, otherwise only
 * the rows whose provider (at {@code index}, null = keep) runs tests.
 */
public static Collection<Object[]> filterTxParamData(Collection<Object[]> data, int index) {
    boolean anyEnabled = false;
    boolean allEnabled = true;
    for (TransactionFactory.Provider provider : TransactionFactory.Provider.values()) {
        boolean runs = provider.runTests();
        anyEnabled |= runs;
        allEnabled &= runs;
    }
    if (!anyEnabled) {
        return Collections.emptySet();
    }
    if (allEnabled) {
        return data;
    }
    List<Object[]> filtered = Lists.newArrayListWithExpectedSize(data.size());
    for (Object[] params : data) {
        String providerName = (String) params[index];
        if (providerName == null || TransactionFactory.Provider.valueOf(providerName).runTests()) {
            filtered.add(params);
        }
    }
    return filtered;
}
/**
 * Finds a free port on localhost by binding an ephemeral-port socket and
 * reporting its port number after close.
 *
 * @return a free port number, or -1 on failure to bind
 */
public static int getRandomPort() {
    try (ServerSocket probe = new ServerSocket(0)) {
        probe.setReuseAddress(true);
        return probe.getLocalPort();
    } catch (IOException ioe) {
        return -1;
    }
}
}
| |
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.ui.popup.tree;
import com.intellij.icons.AllIcons;
import com.intellij.ide.util.treeView.AlphaComparator;
import com.intellij.ide.util.treeView.NodeRenderer;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ModalityState;
import consulo.awt.TargetAWT;
import consulo.logging.Logger;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.popup.JBPopup;
import com.intellij.openapi.ui.popup.PopupStep;
import com.intellij.openapi.ui.popup.TreePopup;
import com.intellij.openapi.ui.popup.TreePopupStep;
import com.intellij.ui.awt.RelativePoint;
import com.intellij.ui.popup.NextStepHandler;
import com.intellij.ui.popup.WizardPopup;
import com.intellij.ui.treeStructure.SimpleTree;
import com.intellij.ui.treeStructure.filtered.FilteringTreeBuilder;
import com.intellij.ui.treeStructure.filtered.FilteringTreeStructure;
import com.intellij.util.ui.tree.TreeUtil;
import consulo.ui.annotation.RequiredUIAccess;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.swing.*;
import javax.swing.tree.DefaultMutableTreeNode;
import javax.swing.tree.TreePath;
import javax.swing.tree.TreeSelectionModel;
import java.awt.*;
import java.awt.event.*;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;
/**
 * A {@link WizardPopup} that renders a {@link TreePopupStep} as a filterable tree.
 * Expansion and selection state is captured on dispose and restored the next time
 * the popup is shown, and rows with substeps get a "next step" arrow painted on
 * the right edge.
 */
public class TreePopupImpl extends WizardPopup implements TreePopup, NextStepHandler {
  private static final Logger LOG = Logger.getInstance(TreePopupImpl.class);

  private MyTree myWizardTree;

  private MouseMotionListener myMouseMotionListener;
  private MouseListener myMouseListener;

  // Expansion/selection state captured in dispose() and replayed in beforeShow().
  private final List<TreePath> mySavedExpanded = new ArrayList<>();
  private TreePath mySavedSelected;

  // Path of the child step currently showing / about to show, used to suppress
  // redundant re-opening of the same child popup.
  private TreePath myShowingChildPath;
  private TreePath myPendingChildPath;

  private FilteringTreeBuilder myBuilder;

  public TreePopupImpl(@Nullable Project project, @Nullable JBPopup parent, @Nonnull TreePopupStep<Object> aStep, @Nullable Object parentValue) {
    super(project, parent, aStep);
    setParentValue(parentValue);
  }

  /**
   * Builds the tree component, its filtering builder, and all keyboard/mouse
   * bindings. Returns the tree, which is also the popup's content component.
   */
  @Override
  protected JComponent createContent() {
    myWizardTree = new MyTree();
    myWizardTree.getAccessibleContext().setAccessibleName("WizardTree");
    myBuilder = new FilteringTreeBuilder(myWizardTree, this, getTreeStep().getStructure(), AlphaComparator.INSTANCE) {
      @Override
      protected boolean isSelectable(final Object nodeObject) {
        return getTreeStep().isSelectable(nodeObject, nodeObject);
      }
    };
    myBuilder.updateFromRoot();
    myWizardTree.getSelectionModel().setSelectionMode(TreeSelectionModel.SINGLE_TREE_SELECTION);
    // Space is repurposed for expand/collapse below, so disable the default
    // selection-toggling action it would otherwise trigger.
    Action action = myWizardTree.getActionMap().get("toggleSelectionPreserveAnchor");
    if (action != null) {
      action.setEnabled(false);
    }
    myWizardTree.addKeyListener(new KeyAdapter() {
      @Override
      public void keyPressed(KeyEvent e) {
        if (e.getKeyCode() == KeyEvent.VK_SPACE) {
          toggleExpansion(myWizardTree.getAnchorSelectionPath());
        }
      }
    });
    myWizardTree.setRootVisible(getTreeStep().isRootVisible());
    myWizardTree.setShowsRootHandles(true);
    ToolTipManager.sharedInstance().registerComponent(myWizardTree);
    myWizardTree.setCellRenderer(new MyRenderer());
    myMouseMotionListener = new MyMouseMotionListener();
    myMouseListener = new MyMouseListener();
    // Enter commits the current selection as a final choice.
    registerAction("select", KeyEvent.VK_ENTER, 0, new AbstractAction() {
      @Override
      public void actionPerformed(ActionEvent e) {
        handleSelect(true, null);
      }
    });
    // Space toggles expansion of the selected node.
    registerAction("toggleExpansion", KeyEvent.VK_SPACE, 0, new AbstractAction() {
      @Override
      public void actionPerformed(ActionEvent e) {
        toggleExpansion(myWizardTree.getSelectionPath());
      }
    });
    // Right-arrow on a leaf opens its substep (non-final select) instead of expanding.
    final Action oldExpandAction = getActionMap().get("selectChild");
    getActionMap().put("selectChild", new AbstractAction() {
      @Override
      public void actionPerformed(ActionEvent e) {
        final TreePath path = myWizardTree.getSelectionPath();
        if (path != null && 0 == myWizardTree.getModel().getChildCount(path.getLastPathComponent())) {
          handleSelect(false, null);
          return;
        }
        oldExpandAction.actionPerformed(e);
      }
    });
    // Left-arrow at a collapsed top-level node closes this popup and returns
    // to the parent popup rather than collapsing further.
    final Action oldCollapseAction = getActionMap().get("selectParent");
    getActionMap().put("selectParent", new AbstractAction() {
      @Override
      public void actionPerformed(ActionEvent e) {
        final TreePath path = myWizardTree.getSelectionPath();
        if (shouldHidePopup(path)) {
          goBack();
          return;
        }
        oldCollapseAction.actionPerformed(e);
      }
    });
    return myWizardTree;
  }

  /**
   * True when left-arrow should close this child popup: the selection is a
   * collapsed node at the tree's top level and a parent popup exists.
   */
  private boolean shouldHidePopup(TreePath path) {
    if (getParent() == null) return false;
    if (path == null) return false;
    if (!myWizardTree.isCollapsed(path)) return false;
    if (myWizardTree.isRootVisible()) {
      return path.getPathCount() == 1;
    }
    // With a hidden root, top-level nodes have a path count of 2 (root + node).
    return path.getPathCount() == 2;
  }

  @Override
  protected ActionMap getActionMap() {
    return myWizardTree.getActionMap();
  }

  @Override
  protected InputMap getInputMap() {
    return myWizardTree.getInputMap();
  }

  private void addListeners() {
    myWizardTree.addMouseMotionListener(myMouseMotionListener);
    myWizardTree.addMouseListener(myMouseListener);
  }

  /** Saves expansion/selection state for a later re-show, then detaches listeners. */
  @Override
  public void dispose() {
    mySavedExpanded.clear();
    final Enumeration<TreePath> expanded = myWizardTree.getExpandedDescendants(new TreePath(myWizardTree.getModel().getRoot()));
    if (expanded != null) {
      while (expanded.hasMoreElements()) {
        mySavedExpanded.add(expanded.nextElement());
      }
    }
    mySavedSelected = myWizardTree.getSelectionPath();
    myWizardTree.removeMouseMotionListener(myMouseMotionListener);
    myWizardTree.removeMouseListener(myMouseListener);
    super.dispose();
  }

  @Override
  protected boolean beforeShow() {
    addListeners();
    // Expand-then-collapse normalizes the tree before the saved state is replayed.
    expandAll();
    collapseAll();
    restoreExpanded();
    if (mySavedSelected != null) {
      myWizardTree.setSelectionPath(mySavedSelected);
    }
    return super.beforeShow();
  }

  @Override
  protected void afterShow() {
    selectFirstSelectableItem();
  }

  // TODO: not-tested code:
  /** Moves selection to the first row whose value the step reports as selectable. */
  private void selectFirstSelectableItem() {
    for (int i = 0; i < myWizardTree.getRowCount(); i++) {
      TreePath path = myWizardTree.getPathForRow(i);
      if (getTreeStep().isSelectable(path.getLastPathComponent(), extractUserObject(path.getLastPathComponent()))) {
        myWizardTree.setSelectionPath(path);
        break;
      }
    }
  }

  /** Replays the expansion state saved in dispose(); expands everything on first show. */
  private void restoreExpanded() {
    if (mySavedExpanded.isEmpty()) {
      expandAll();
      return;
    }
    for (TreePath each : mySavedExpanded) {
      myWizardTree.expandPath(each);
    }
  }

  private void expandAll() {
    // Expanding row i may insert new rows below it; getRowCount() is re-read
    // each iteration, so those get expanded too.
    for (int i = 0; i < myWizardTree.getRowCount(); i++) {
      myWizardTree.expandRow(i);
    }
  }

  private void collapseAll() {
    // Collapse bottom-up, leaving row 0 (the root / first node) expanded.
    int row = myWizardTree.getRowCount() - 1;
    while (row > 0) {
      myWizardTree.collapseRow(row);
      row--;
    }
  }

  private TreePopupStep getTreeStep() {
    return (TreePopupStep)myStep;
  }

  /** Follows the mouse: selects hovered rows and arms the auto-open timer for substeps. */
  private class MyMouseMotionListener extends MouseMotionAdapter {
    private Point myLastMouseLocation;

    // Swing can deliver synthetic move events; only react to real pointer motion.
    private boolean isMouseMoved(Point location) {
      if (myLastMouseLocation == null) {
        myLastMouseLocation = location;
        return false;
      }
      return !myLastMouseLocation.equals(location);
    }

    @Override
    public void mouseMoved(MouseEvent e) {
      if (!isMouseMoved(e.getLocationOnScreen())) return;
      final TreePath path = getPath(e);
      if (path != null) {
        myWizardTree.setSelectionPath(path);
        notifyParentOnChildSelection();
        if (getTreeStep().isSelectable(path.getLastPathComponent(), extractUserObject(path.getLastPathComponent()))) {
          myWizardTree.setCursor(Cursor.getPredefinedCursor(Cursor.HAND_CURSOR));
          if (myPendingChildPath == null || !myPendingChildPath.equals(path)) {
            myPendingChildPath = path;
            restartTimer();
          }
          return;
        }
      }
      myWizardTree.setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR));
    }
  }

  private TreePath getPath(MouseEvent e) {
    return myWizardTree.getClosestPathForLocation(e.getPoint().x, e.getPoint().y);
  }

  /** Left-click selects selectable nodes; elsewhere it toggles expansion. */
  private class MyMouseListener extends MouseAdapter {
    @Override
    public void mousePressed(MouseEvent e) {
      final TreePath path = getPath(e);
      if (path == null) {
        return;
      }
      if (e.getButton() != MouseEvent.BUTTON1) {
        return;
      }
      final Object selected = path.getLastPathComponent();
      if (getTreeStep().isSelectable(selected, extractUserObject(selected))) {
        handleSelect(true, e);
      }
      else {
        // Clicks on the expand/collapse control are handled by the tree itself.
        if (!TreeUtil.isLocationInExpandControl(myWizardTree, path, e.getX(), e.getY())) {
          toggleExpansion(path);
        }
      }
    }

    @Override
    public void mouseReleased(MouseEvent e) {
    }
  }

  private void toggleExpansion(TreePath path) {
    if (path == null) {
      return;
    }
    if (getTreeStep().isSelectable(path.getLastPathComponent(), extractUserObject(path.getLastPathComponent()))) {
      if (myWizardTree.isExpanded(path)) {
        myWizardTree.collapsePath(path);
      }
      else {
        myWizardTree.expandPath(path);
      }
    }
  }

  /**
   * Commits the current selection: asks the step via onChosen, then either
   * finishes the whole popup chain or opens the returned substep as a child.
   *
   * @param handleFinalChoices false for hover-driven auto-select, where a
   *                           leaf without a substep must not close the popup
   * @param e                  originating mouse event, or null for keyboard/timer
   */
  private void handleSelect(boolean handleFinalChoices, MouseEvent e) {
    final boolean pathIsAlreadySelected = myShowingChildPath != null && myShowingChildPath.equals(myWizardTree.getSelectionPath());
    if (pathIsAlreadySelected) return;
    myPendingChildPath = null;
    Object selected = myWizardTree.getLastSelectedPathComponent();
    if (selected != null) {
      final Object userObject = extractUserObject(selected);
      if (getTreeStep().isSelectable(selected, userObject)) {
        disposeChildren();
        final boolean hasNextStep = myStep.hasSubstep(userObject);
        if (!hasNextStep && !handleFinalChoices) {
          myShowingChildPath = null;
          return;
        }
        // Detects handlers that show dialogs synchronously from onChosen,
        // which causes focus problems; the invokeLater fires only if onChosen
        // has not returned by the time the event queue gets to it.
        AtomicBoolean insideOnChosen = new AtomicBoolean(true);
        ApplicationManager.getApplication().invokeLater(() -> {
          if (insideOnChosen.get()) {
            LOG.error("Showing dialogs from popup onChosen can result in focus issues. Please put the handler into BaseStep.doFinalStep or PopupStep.getFinalRunnable.");
          }
        }, ModalityState.any());
        final PopupStep queriedStep;
        try {
          queriedStep = myStep.onChosen(userObject, handleFinalChoices);
        }
        finally {
          insideOnChosen.set(false);
        }
        if (queriedStep == PopupStep.FINAL_CHOICE || !hasNextStep) {
          setFinalRunnable(myStep.getFinalRunnable());
          setOk(true);
          disposeAllParents(e);
        }
        else {
          myShowingChildPath = myWizardTree.getSelectionPath();
          handleNextStep(queriedStep, myShowingChildPath);
          myShowingChildPath = null;
        }
      }
    }
  }

  /** Opens {@code nextStep} as a child popup to the right of the selected row. */
  @Override
  public void handleNextStep(PopupStep nextStep, Object parentValue) {
    // NOTE(review): getPathBounds returns null for an invisible path — assumed
    // non-null here because the selection was just made visible; confirm.
    final Rectangle pathBounds = myWizardTree.getPathBounds(myWizardTree.getSelectionPath());
    final Point point = new RelativePoint(myWizardTree, new Point(getContent().getWidth() + 2, (int)pathBounds.getY())).getScreenPoint();
    myChild = createPopup(this, nextStep, parentValue);
    myChild.show(getContent(), point.x - STEP_X_PADDING, point.y, true);
  }

  /**
   * Renderer that paints non-selectable nodes without the selection background,
   * while still giving them the focus decoration when selected.
   */
  private class MyRenderer extends NodeRenderer {
    @RequiredUIAccess
    @Override
    public void customizeCellRenderer(@Nonnull JTree tree, Object value, boolean selected, boolean expanded, boolean leaf, int row, boolean hasFocus) {
      // Hoisted: the original evaluated this identical test three times.
      final boolean selectable = getTreeStep().isSelectable(value, extractUserObject(value));
      final boolean shouldPaintSelected = selectable && (selected || hasFocus);
      final boolean shouldPaintFocus = (!selectable && selected) || shouldPaintSelected || hasFocus;
      super.customizeCellRenderer(tree, value, shouldPaintSelected, expanded, leaf, row, shouldPaintFocus);
    }
  }

  @Override
  protected void process(KeyEvent aEvent) {
    myWizardTree.processKeyEvent(aEvent);
  }

  /** Unwraps the step's user object from a tree node, seeing through filtering nodes. */
  private Object extractUserObject(Object aNode) {
    Object object = ((DefaultMutableTreeNode)aNode).getUserObject();
    if (object instanceof FilteringTreeStructure.FilteringNode) {
      return ((FilteringTreeStructure.FilteringNode)object).getDelegate();
    }
    return object;
  }

  private class MyTree extends SimpleTree {
    @Override
    public void processKeyEvent(KeyEvent e) {
      // Events are forwarded from the popup; retarget them at the tree.
      e.setSource(this);
      super.processKeyEvent(e);
    }

    @Override
    public Dimension getPreferredSize() {
      // Extra width leaves room for the "next step" arrow painted below.
      final Dimension pref = super.getPreferredSize();
      return new Dimension(pref.width + 10, pref.height);
    }

    @Override
    protected void paintChildren(Graphics g) {
      super.paintChildren(g);
      // Overlay a "next step" arrow on every visible row whose value has a substep.
      Rectangle visibleRect = getVisibleRect();
      int rowForLocation = getClosestRowForLocation(0, visibleRect.y);
      int limit = rowForLocation + TreeUtil.getVisibleRowCount(this) + 1;
      for (int i = rowForLocation; i < limit; i++) {
        final TreePath eachPath = getPathForRow(i);
        if (eachPath == null) continue;
        final Object lastPathComponent = eachPath.getLastPathComponent();
        final boolean hasNextStep = getTreeStep().hasSubstep(extractUserObject(lastPathComponent));
        if (!hasNextStep) continue;
        Icon icon = TargetAWT.to(isPathSelected(eachPath) ? AllIcons.Icons.Ide.NextStep : AllIcons.Icons.Ide.NextStepInverted);
        final Rectangle rec = getPathBounds(eachPath);
        int x = getSize().width - icon.getIconWidth() - 1;
        // FIX: center vertically using the icon's height; the previous code used
        // getIconWidth() here, which is only correct for square icons.
        int y = rec.y + (rec.height - icon.getIconHeight()) / 2;
        icon.paintIcon(this, g, x, y);
      }
    }
  }

  @Override
  protected void onAutoSelectionTimer() {
    // Hover dwell expired: open the substep, but never treat it as a final choice.
    handleSelect(false, null);
  }

  @Override
  protected JComponent getPreferredFocusableComponent() {
    return myWizardTree;
  }

  @Override
  protected void onSpeedSearchPatternChanged() {
    myBuilder.refilterAsync();
  }

  @Override
  protected void onChildSelectedFor(Object value) {
    TreePath path = (TreePath)value;
    if (myWizardTree.getSelectionPath() != path) {
      myWizardTree.setSelectionPath(path);
    }
  }

  @Override
  public boolean isModalContext() {
    return true;
  }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.curator.ensemble.exhibitor;
import com.google.common.collect.Lists;
import com.google.common.io.Closeables;
import org.apache.curator.BaseClassForTests;
import org.apache.curator.CuratorZookeeperClient;
import org.apache.curator.RetryLoop;
import org.apache.curator.retry.ExponentialBackoffRetry;
import org.apache.curator.retry.RetryOneTime;
import org.apache.curator.test.TestingServer;
import org.apache.curator.test.Timing;
import org.apache.zookeeper.CreateMode;
import org.apache.zookeeper.ZooDefs;
import org.apache.zookeeper.data.Stat;
import org.testng.Assert;
import org.testng.annotations.Test;
import java.io.IOException;
import java.util.concurrent.Callable;
import java.util.concurrent.Semaphore;
import java.util.concurrent.atomic.AtomicReference;
/**
 * Tests for {@link ExhibitorEnsembleProvider}: failover to the backup
 * connection string, picking up a changed ensemble, and basic connectivity.
 * Each test stubs {@link ExhibitorRestClient} so no real Exhibitor is needed.
 */
public class TestExhibitorEnsembleProvider extends BaseClassForTests
{
    // Backup provider that never supplies a fallback connection string.
    private static final Exhibitors.BackupConnectionStringProvider dummyConnectionStringProvider = new Exhibitors.BackupConnectionStringProvider()
    {
        @Override
        public String getBackupConnectionString() throws Exception
        {
            return null;
        }
    };

    /**
     * Simulates Exhibitor outages: when the REST client throws, the provider must
     * fall back to the (mutable) backup connection string, and recover once the
     * REST client responds again.
     */
    @Test
    public void testExhibitorFailures() throws Exception
    {
        final AtomicReference<String> backupConnectionString = new AtomicReference<String>("backup1:1");
        // null here later simulates a total Exhibitor failure (getRaw throws).
        final AtomicReference<String> connectionString = new AtomicReference<String>("count=1&port=2&server0=localhost");
        Exhibitors exhibitors = new Exhibitors
        (
            Lists.newArrayList("foo", "bar"),
            1000,
            new Exhibitors.BackupConnectionStringProvider()
            {
                @Override
                public String getBackupConnectionString()
                {
                    return backupConnectionString.get();
                }
            }
        );
        ExhibitorRestClient mockRestClient = new ExhibitorRestClient()
        {
            @Override
            public String getRaw(String hostname, int port, String uriPath, String mimeType) throws Exception
            {
                String localConnectionString = connectionString.get();
                if ( localConnectionString == null )
                {
                    throw new IOException();
                }
                return localConnectionString;
            }
        };
        // Released after every poll so the test can deterministically wait for one.
        final Semaphore semaphore = new Semaphore(0);
        ExhibitorEnsembleProvider provider = new ExhibitorEnsembleProvider(exhibitors, mockRestClient, "/foo", 10, new RetryOneTime(1))
        {
            @Override
            protected void poll()
            {
                super.poll();
                semaphore.release();
            }
        };
        provider.pollForInitialEnsemble();
        try
        {
            provider.start();
            Assert.assertEquals(provider.getConnectionString(), "localhost:2");

            // Exhibitor "goes down": next poll should switch to the backup string.
            connectionString.set(null);
            semaphore.drainPermits();
            semaphore.acquire(); // wait for next poll
            Assert.assertEquals(provider.getConnectionString(), "backup1:1");

            // Backup string itself changes while Exhibitor is still down.
            backupConnectionString.set("backup2:2");
            semaphore.drainPermits();
            semaphore.acquire(); // wait for next poll
            Assert.assertEquals(provider.getConnectionString(), "backup2:2");

            // Exhibitor recovers with a new ensemble description.
            connectionString.set("count=1&port=3&server0=localhost3");
            semaphore.drainPermits();
            semaphore.acquire(); // wait for next poll
            Assert.assertEquals(provider.getConnectionString(), "localhost3:3");
        }
        finally
        {
            Closeables.closeQuietly(provider);
        }
    }

    /**
     * Verifies that when Exhibitor reports a different ensemble, the client ends
     * up talking to the new ZooKeeper server: a node created on the first server
     * must not be visible after the switch.
     */
    @Test
    public void testChanging() throws Exception
    {
        TestingServer secondServer = new TestingServer();
        try
        {
            String mainConnectionString = "count=1&port=" + server.getPort() + "&server0=localhost";
            String secondConnectionString = "count=1&port=" + secondServer.getPort() + "&server0=localhost";

            // Released on every REST poll so the test can wait for the provider
            // to have observed the updated connection string.
            final Semaphore semaphore = new Semaphore(0);
            final AtomicReference<String> connectionString = new AtomicReference<String>(mainConnectionString);
            Exhibitors exhibitors = new Exhibitors(Lists.newArrayList("foo", "bar"), 1000, dummyConnectionStringProvider);
            ExhibitorRestClient mockRestClient = new ExhibitorRestClient()
            {
                @Override
                public String getRaw(String hostname, int port, String uriPath, String mimeType) throws Exception
                {
                    semaphore.release();
                    return connectionString.get();
                }
            };
            ExhibitorEnsembleProvider provider = new ExhibitorEnsembleProvider(exhibitors, mockRestClient, "/foo", 10, new RetryOneTime(1));
            provider.pollForInitialEnsemble();

            Timing timing = new Timing().multiple(4);
            final CuratorZookeeperClient client = new CuratorZookeeperClient(provider, timing.session(), timing.connection(), null, new RetryOneTime(2));
            client.start();
            try
            {
                RetryLoop.callWithRetry
                (
                    client,
                    new Callable<Object>()
                    {
                        @Override
                        public Object call() throws Exception
                        {
                            client.getZooKeeper().create("/test", new byte[0], ZooDefs.Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT);
                            return null;
                        }
                    }
                );

                // Point the provider at the second server, then wait for a poll
                // that has seen the new value.
                connectionString.set(secondConnectionString);
                semaphore.drainPermits();
                semaphore.acquire();

                server.stop(); // create situation where the current zookeeper gets a sys-disconnected

                Stat stat = RetryLoop.callWithRetry
                (
                    client,
                    new Callable<Stat>()
                    {
                        @Override
                        public Stat call() throws Exception
                        {
                            return client.getZooKeeper().exists("/test", false);
                        }
                    }
                );
                Assert.assertNull(stat); // it's a different server so should be null
            }
            finally
            {
                client.close();
            }
        }
        finally
        {
            Closeables.closeQuietly(secondServer);
        }
    }

    /**
     * Smoke test: with a REST client that always reports the test server's
     * ensemble, a client built on the provider must connect and serve reads.
     */
    @Test
    public void testSimple() throws Exception
    {
        Exhibitors exhibitors = new Exhibitors(Lists.newArrayList("foo", "bar"), 1000, dummyConnectionStringProvider);
        ExhibitorRestClient mockRestClient = new ExhibitorRestClient()
        {
            @Override
            public String getRaw(String hostname, int port, String uriPath, String mimeType) throws Exception
            {
                return "count=1&port=" + server.getPort() + "&server0=localhost";
            }
        };
        ExhibitorEnsembleProvider provider = new ExhibitorEnsembleProvider(exhibitors, mockRestClient, "/foo", 10, new RetryOneTime(1));
        provider.pollForInitialEnsemble();

        Timing timing = new Timing(4);
        CuratorZookeeperClient client = new CuratorZookeeperClient(provider, timing.session(), timing.connection(), null, new ExponentialBackoffRetry(timing.milliseconds(), 3));
        client.start();
        try
        {
            client.blockUntilConnectedOrTimedOut();
            client.getZooKeeper().exists("/", false);
        }
        catch ( Exception e )
        {
            // Diagnostics before failing so CI logs show what was resolved.
            System.out.println("provider.getConnectionString(): " + provider.getConnectionString() + " server.getPort(): " + server.getPort());
            e.printStackTrace();
            // NOTE(review): Assert.fail() throws, so the rethrow below is
            // effectively unreachable; kept as-is to preserve behavior.
            Assert.fail();
            throw e;
        }
        finally
        {
            client.close();
        }
    }
}
| |
/*
* Copyright 2010-2012 Luca Garulli (l.garulli--at--orientechnologies.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.orientechnologies.orient.core.db.tool;
import static com.orientechnologies.orient.core.record.impl.ODocumentHelper.makeDbCall;
import java.io.IOException;
import java.util.Collection;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import com.orientechnologies.orient.core.Orient;
import com.orientechnologies.orient.core.command.OCommandOutputListener;
import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx;
import com.orientechnologies.orient.core.db.record.OIdentifiable;
import com.orientechnologies.orient.core.id.OClusterPosition;
import com.orientechnologies.orient.core.id.OClusterPositionFactory;
import com.orientechnologies.orient.core.id.ORID;
import com.orientechnologies.orient.core.id.ORecordId;
import com.orientechnologies.orient.core.index.OIndex;
import com.orientechnologies.orient.core.index.OIndexManager;
import com.orientechnologies.orient.core.metadata.OMetadataDefault;
import com.orientechnologies.orient.core.metadata.schema.OType;
import com.orientechnologies.orient.core.record.impl.ODocument;
import com.orientechnologies.orient.core.record.impl.ODocumentHelper;
import com.orientechnologies.orient.core.record.impl.ODocumentHelper.ODbRelatedCall;
import com.orientechnologies.orient.core.record.impl.ORecordFlat;
import com.orientechnologies.orient.core.storage.OPhysicalPosition;
import com.orientechnologies.orient.core.storage.ORawBuffer;
import com.orientechnologies.orient.core.storage.OStorage;
public class ODatabaseCompare extends ODatabaseImpExpAbstract {
  // Raw storages of the two databases being compared (always set by both ctors).
  private OStorage storage1;
  private OStorage storage2;
  // Document-level handles; only set by the credentials constructor. Several
  // comparison paths require these to be non-null (see compare()).
  private ODatabaseDocumentTx databaseDocumentTxOne;
  private ODatabaseDocumentTx databaseDocumentTxTwo;
  // When true, automatic (non-manual) index entries are compared one by one.
  private boolean compareEntriesForAutomaticIndexes = false;
  // When true, try to load the export/import RID-mapping index from DB2.
  private boolean autoDetectExportImportMap = true;
  // RID translation table written by ODatabaseImport, if present in DB2.
  private OIndex<OIdentifiable> exportImportHashTable = null;
  // Running count of detected mismatches; zero at the end means the DBs match.
  private int differences = 0;
  /**
   * Compares two databases at the raw storage level (no credentials). Both
   * storages are opened immediately; document-level index comparison is not
   * available through this constructor.
   *
   * @param iDb1URL   URL of the first database
   * @param iDb2URL   URL of the second database
   * @param iListener receives progress/diff messages
   * @throws IOException declared for API symmetry with the other constructor
   */
  public ODatabaseCompare(String iDb1URL, String iDb2URL, final OCommandOutputListener iListener) throws IOException {
    super(null, null, iListener);

    listener.onMessage("\nComparing two local databases:\n1) " + iDb1URL + "\n2) " + iDb2URL + "\n");

    storage1 = Orient.instance().loadStorage(iDb1URL);
    storage1.open(null, null, null);

    storage2 = Orient.instance().loadStorage(iDb2URL);
    storage2.open(null, null, null);
  }
  /**
   * Compares two document databases, opening both with the given credentials.
   * Automatically generated clusters (orids, index, manindex) are excluded
   * from the comparison since they differ legitimately between exports.
   *
   * @param iDb1URL      URL of the first database
   * @param iDb2URL      URL of the second database
   * @param userName     user for both databases
   * @param userPassword password for both databases
   * @param iListener    receives progress/diff messages
   * @throws IOException declared by the import/export base API
   */
  public ODatabaseCompare(String iDb1URL, String iDb2URL, final String userName, final String userPassword,
      final OCommandOutputListener iListener) throws IOException {
    super(null, null, iListener);

    listener.onMessage("\nComparing two local databases:\n1) " + iDb1URL + "\n2) " + iDb2URL + "\n");

    databaseDocumentTxOne = new ODatabaseDocumentTx(iDb1URL);
    databaseDocumentTxOne.open(userName, userPassword);

    databaseDocumentTxTwo = new ODatabaseDocumentTx(iDb2URL);
    databaseDocumentTxTwo.open(userName, userPassword);

    // exclude automatically generated clusters
    excludeClusters.add("orids");
    excludeClusters.add(OMetadataDefault.CLUSTER_INDEX_NAME);
    excludeClusters.add(OMetadataDefault.CLUSTER_MANUAL_INDEX_NAME);
  }
  /** @return true when automatic-index entries are compared one by one. */
  public boolean isCompareEntriesForAutomaticIndexes() {
    return compareEntriesForAutomaticIndexes;
  }
  /** Enables/disables loading the export/import RID-mapping index from DB2. */
  public void setAutoDetectExportImportMap(boolean autoDetectExportImportMap) {
    this.autoDetectExportImportMap = autoDetectExportImportMap;
  }
  /** Enables/disables per-entry comparison of automatic indexes. */
  public void setCompareEntriesForAutomaticIndexes(boolean compareEntriesForAutomaticIndexes) {
    this.compareEntriesForAutomaticIndexes = compareEntriesForAutomaticIndexes;
  }
  /**
   * Runs the full comparison: clusters, records and (for document databases)
   * indexes. Optionally loads the export/import RID mapping so that records
   * whose RIDs changed during import can still be matched.
   *
   * @return true when no differences were found
   * @throws ODatabaseExportException wrapping any failure during comparison;
   *         both storages are closed in all cases
   */
  public boolean compare() {
    // Credentials must have been supplied iff the URLs are document databases:
    // only the credentials constructor populates the databaseDocumentTx fields.
    if (isDocumentDatabases() && (databaseDocumentTxOne == null || databaseDocumentTxTwo == null)) {
      listener.onMessage("\nPassed in URLs are related to document databases but credentials "
          + "were not provided to open them. Please provide user name + password for databases to compare");
      return false;
    }

    if (!isDocumentDatabases() && (databaseDocumentTxOne != null || databaseDocumentTxTwo != null)) {
      listener.onMessage("\nPassed in URLs are not related to document databases but credentials "
          + "were provided to open them. Please do not provide user name + password for databases to compare");
      return false;
    }

    try {
      ODocumentHelper.RIDMapper ridMapper = null;
      if (autoDetectExportImportMap) {
        listener
            .onMessage("\nAuto discovery of mapping between RIDs of exported and imported records is switched on, try to discover mapping data on disk.");
        // NOTE(review): databaseDocumentTxTwo is dereferenced here, but with the
        // storage-only constructor it is null while autoDetectExportImportMap
        // defaults to true — confirm that flow disables auto-detection first.
        exportImportHashTable = (OIndex<OIdentifiable>) databaseDocumentTxTwo.getMetadata().getIndexManager()
            .getIndex(ODatabaseImport.EXPORT_IMPORT_MAP_NAME);
        if (exportImportHashTable != null) {
          listener.onMessage("\nMapping data were found and will be loaded.");
          // Translates a persistent RID from DB1 to its imported RID in DB2;
          // returns null for null/non-persistent/unmapped RIDs (no translation).
          ridMapper = new ODocumentHelper.RIDMapper() {
            @Override
            public ORID map(ORID rid) {
              if (rid == null)
                return null;

              if (!rid.isPersistent())
                return null;

              final OIdentifiable result = exportImportHashTable.get(rid);
              if (result == null)
                return null;

              return result.getIdentity();
            }
          };
        } else
          listener.onMessage("\nMapping data were not found.");
      }

      compareClusters();
      compareRecords(ridMapper);

      if (isDocumentDatabases())
        compareIndexes(ridMapper);

      if (differences == 0) {
        listener.onMessage("\n\nDatabases match.");
        return true;
      } else {
        listener.onMessage("\n\nDatabases do not match. Found " + differences + " difference(s).");
        return false;
      }
    } catch (Exception e) {
      e.printStackTrace();
      throw new ODatabaseExportException("Error on compare of database '" + storage1.getName() + "' against '" + storage2.getName()
          + "'", e);
    } finally {
      storage1.close();
      storage2.close();
    }
  }
  /**
   * Compares the index managers of both databases: index count, per-index
   * type/clusters/definition/size/metadata, and — for manual indexes (and for
   * automatic ones when enabled) — each key/value entry. Every mismatch is
   * reported via the listener and bumps the {@code differences} counter.
   * All reads go through makeDbCall so they run against the right database
   * context on this thread.
   *
   * @param ridMapper optional RID translation (DB1 RID -> DB2 RID), may be null
   */
  @SuppressWarnings({ "unchecked", "rawtypes" })
  private void compareIndexes(ODocumentHelper.RIDMapper ridMapper) {
    listener.onMessage("\nStarting index comparison:");

    boolean ok = true;

    final OIndexManager indexManagerOne = makeDbCall(databaseDocumentTxOne, new ODbRelatedCall<OIndexManager>() {
      public OIndexManager call() {
        return databaseDocumentTxOne.getMetadata().getIndexManager();
      }
    });

    final OIndexManager indexManagerTwo = makeDbCall(databaseDocumentTxTwo, new ODbRelatedCall<OIndexManager>() {
      public OIndexManager call() {
        return databaseDocumentTxTwo.getMetadata().getIndexManager();
      }
    });

    final Collection<? extends OIndex<?>> indexesOne = makeDbCall(databaseDocumentTxOne,
        new ODbRelatedCall<Collection<? extends OIndex<?>>>() {
          public Collection<? extends OIndex<?>> call() {
            return indexManagerOne.getIndexes();
          }
        });

    // NOTE(review): DB1's index count is computed under DB2's context
    // (databaseDocumentTxTwo) — looks like a copy-paste of the call below.
    // Harmless if the collection is already materialized, but confirm.
    int indexesSizeOne = makeDbCall(databaseDocumentTxTwo, new ODbRelatedCall<Integer>() {
      public Integer call() {
        return indexesOne.size();
      }
    });

    int indexesSizeTwo = makeDbCall(databaseDocumentTxTwo, new ODbRelatedCall<Integer>() {
      public Integer call() {
        return indexManagerTwo.getIndexes().size();
      }
    });

    // The export/import mapping index exists only in DB2; exclude it from the count.
    if (exportImportHashTable != null)
      indexesSizeTwo--;

    if (indexesSizeOne != indexesSizeTwo) {
      ok = false;
      listener.onMessage("\n- ERR: Amount of indexes are different.");
      listener.onMessage("\n--- DB1: " + indexesSizeOne);
      listener.onMessage("\n--- DB2: " + indexesSizeTwo);
      listener.onMessage("\n");
      ++differences;
    }

    final Iterator<? extends OIndex<?>> iteratorOne = makeDbCall(databaseDocumentTxOne,
        new ODbRelatedCall<Iterator<? extends OIndex<?>>>() {
          public Iterator<? extends OIndex<?>> call() {
            return indexesOne.iterator();
          }
        });

    // Iterate DB1's indexes; look each one up by name in DB2 and compare.
    while (makeDbCall(databaseDocumentTxOne, new ODbRelatedCall<Boolean>() {
      public Boolean call() {
        return iteratorOne.hasNext();
      }
    })) {
      final OIndex indexOne = makeDbCall(databaseDocumentTxOne, new ODbRelatedCall<OIndex<?>>() {
        public OIndex<?> call() {
          return iteratorOne.next();
        }
      });

      final OIndex<?> indexTwo = makeDbCall(databaseDocumentTxTwo, new ODbRelatedCall<OIndex<?>>() {
        public OIndex<?> call() {
          return indexManagerTwo.getIndex(indexOne.getName());
        }
      });

      if (indexTwo == null) {
        ok = false;
        listener.onMessage("\n- ERR: Index " + indexOne.getName() + " is absent in DB2.");
        ++differences;
        continue;
      }

      if (!indexOne.getType().equals(indexTwo.getType())) {
        ok = false;
        listener.onMessage("\n- ERR: Index types for index " + indexOne.getName() + " are different.");
        listener.onMessage("\n--- DB1: " + indexOne.getType());
        listener.onMessage("\n--- DB2: " + indexTwo.getType());
        listener.onMessage("\n");
        ++differences;
        continue;
      }

      if (!indexOne.getClusters().equals(indexTwo.getClusters())) {
        ok = false;
        listener.onMessage("\n- ERR: Clusters to index for index " + indexOne.getName() + " are different.");
        listener.onMessage("\n--- DB1: " + indexOne.getClusters());
        listener.onMessage("\n--- DB2: " + indexTwo.getClusters());
        listener.onMessage("\n");
        ++differences;
        continue;
      }

      // Definitions must be either both null or equal.
      if (indexOne.getDefinition() == null && indexTwo.getDefinition() != null) {
        ok = false;
        listener.onMessage("\n- ERR: Index definition for index " + indexOne.getName() + " for DB2 is not null.");
        ++differences;
        continue;
      } else if (indexOne.getDefinition() != null && indexTwo.getDefinition() == null) {
        ok = false;
        listener.onMessage("\n- ERR: Index definition for index " + indexOne.getName() + " for DB2 is null.");
        ++differences;
        continue;
      } else if (indexOne.getDefinition() != null && !indexOne.getDefinition().equals(indexTwo.getDefinition())) {
        ok = false;
        listener.onMessage("\n- ERR: Index definitions for index " + indexOne.getName() + " are different.");
        listener.onMessage("\n--- DB1: " + indexOne.getDefinition());
        listener.onMessage("\n--- DB2: " + indexTwo.getDefinition());
        listener.onMessage("\n");
        ++differences;
        continue;
      }

      final long indexOneSize = makeDbCall(databaseDocumentTxOne, new ODbRelatedCall<Long>() {
        public Long call() {
          return indexOne.getSize();
        }
      });

      final long indexTwoSize = makeDbCall(databaseDocumentTxTwo, new ODbRelatedCall<Long>() {
        public Long call() {
          return indexTwo.getSize();
        }
      });

      if (indexOneSize != indexTwoSize) {
        ok = false;
        listener.onMessage("\n- ERR: Amount of entries for index " + indexOne.getName() + " are different.");
        listener.onMessage("\n--- DB1: " + indexOneSize);
        listener.onMessage("\n--- DB2: " + indexTwoSize);
        listener.onMessage("\n");
        ++differences;
      }

      // Metadata documents must be both null, or deep-equal under RID mapping.
      final ODocument metadataOne = indexOne.getMetadata();
      final ODocument metadataTwo = indexTwo.getMetadata();

      if (metadataOne == null && metadataTwo != null) {
        ok = false;
        listener.onMessage("\n- ERR: Metadata for index " + indexOne.getName() + " for DB1 is null but for DB2 is not.");
        listener.onMessage("\n");
        ++differences;
      } else if (metadataOne != null && metadataTwo == null) {
        ok = false;
        listener.onMessage("\n- ERR: Metadata for index " + indexOne.getName() + " for DB1 is not null but for DB2 is null.");
        listener.onMessage("\n");
        ++differences;
      } else if (metadataOne != null && metadataTwo != null
          && !ODocumentHelper.hasSameContentOf(metadataOne, databaseDocumentTxOne, metadataTwo, databaseDocumentTxTwo, ridMapper)) {
        ok = false;
        listener.onMessage("\n- ERR: Metadata for index " + indexOne.getName() + " for DB1 and for DB2 are different.");
        makeDbCall(databaseDocumentTxOne, new ODbRelatedCall<Object>() {
          @Override
          public Object call() {
            listener.onMessage("\n--- M1: " + metadataOne);
            return null;
          }
        });
        makeDbCall(databaseDocumentTxTwo, new ODbRelatedCall<Object>() {
          @Override
          public Object call() {
            listener.onMessage("\n--- M2: " + metadataTwo);
            return null;
          }
        });
        listener.onMessage("\n");
        ++differences;
      }

      // NOTE(review): entry-by-entry comparison runs for every manual index,
      // and for automatic non-DICTIONARY indexes only when
      // compareEntriesForAutomaticIndexes is enabled — confirm intent.
      if (((compareEntriesForAutomaticIndexes && !indexOne.getType().equals("DICTIONARY")) || !indexOne.isAutomatic())) {
        final Iterator<Map.Entry<Object, Object>> indexIteratorOne = makeDbCall(databaseDocumentTxOne,
            new ODbRelatedCall<Iterator<Map.Entry<Object, Object>>>() {
              public Iterator<Map.Entry<Object, Object>> call() {
                return indexOne.iterator();
              }
            });

        while (makeDbCall(databaseDocumentTxOne, new ODbRelatedCall<Boolean>() {
          public Boolean call() {
            return indexIteratorOne.hasNext();
          }
        })) {
          final Map.Entry<Object, Object> indexOneEntry = makeDbCall(databaseDocumentTxOne,
              new ODbRelatedCall<Map.Entry<Object, Object>>() {
                public Map.Entry<Object, Object> call() {
                  return indexIteratorOne.next();
                }
              });

          final Object key = makeDbCall(databaseDocumentTxOne, new ODbRelatedCall<Object>() {
            public Object call() {
              return indexOneEntry.getKey();
            }
          });

          Object indexOneValue = makeDbCall(databaseDocumentTxOne, new ODbRelatedCall<Object>() {
            public Object call() {
              return indexOneEntry.getValue();
            }
          });

          final Object indexTwoValue = makeDbCall(databaseDocumentTxTwo, new ODbRelatedCall<Object>() {
            public Object call() {
              return indexTwo.get(key);
            }
          });

          if (indexTwoValue == null) {
            ok = false;
            listener.onMessage("\n- ERR: Entry with key " + key + " is absent in index " + indexOne.getName() + " for DB2.");
            ++differences;
            continue;
          }

          // Values may be RID sets, single RIDs, or plain values; RIDs from DB1
          // are translated through ridMapper before comparison when applicable.
          if (indexOneValue instanceof Set && indexTwoValue instanceof Set) {
            final Set<Object> indexOneValueSet = (Set<Object>) indexOneValue;
            final Set<Object> indexTwoValueSet = (Set<Object>) indexTwoValue;

            if (!ODocumentHelper.compareSets(databaseDocumentTxOne, indexOneValueSet, databaseDocumentTxTwo, indexTwoValueSet,
                ridMapper)) {
              ok = false;
              reportIndexDiff(indexOne, key, indexOneValue, indexTwoValue);
            }
          } else if (indexOneValue instanceof ORID && indexTwoValue instanceof ORID) {
            if (ridMapper != null && ((ORID) indexOneValue).isPersistent()) {
              OIdentifiable identifiable = ridMapper.map((ORID) indexOneValue);

              if (identifiable != null)
                indexOneValue = identifiable.getIdentity();
            }
            if (!indexOneValue.equals(indexTwoValue)) {
              ok = false;
              reportIndexDiff(indexOne, key, indexOneValue, indexTwoValue);
            }
          } else if (!indexOneValue.equals(indexTwoValue)) {
            ok = false;
            reportIndexDiff(indexOne, key, indexOneValue, indexTwoValue);
          }
        }
      }
    }

    if (ok)
      listener.onMessage("OK");
  }
/**
 * Shallow cluster comparison: verifies both databases have the same number of
 * clusters and that each cluster of storage1 exists in storage2 with the same
 * id and the same record count. Differences are reported through the listener
 * and counted in {@code differences}.
 *
 * @return always true; problems are signalled via the listener/differences counter
 */
private boolean compareClusters() {
  listener.onMessage("\nStarting shallow comparison of clusters:");
  listener.onMessage("\nChecking the number of clusters...");
  // FIX: the original compared storage1's cluster count against itself, so a
  // size mismatch between the two databases could never be detected.
  if (storage1.getClusterNames().size() != storage2.getClusterNames().size()) {
    listener.onMessage("ERR: cluster sizes are different: " + storage1.getClusterNames().size() + " <-> "
        + storage2.getClusterNames().size());
    ++differences;
  }
  int cluster2Id;
  boolean ok;
  for (String clusterName : storage1.getClusterNames()) {
    // CHECK IF THE CLUSTER IS INCLUDED
    if (includeClusters != null) {
      if (!includeClusters.contains(clusterName))
        continue;
    } else if (excludeClusters != null) {
      if (excludeClusters.contains(clusterName))
        continue;
    }
    ok = true;
    cluster2Id = storage2.getClusterIdByName(clusterName);
    listener.onMessage("\n- Checking cluster " + String.format("%-25s: ", "'" + clusterName + "'"));
    if (cluster2Id == -1) {
      listener.onMessage("ERR: cluster name " + clusterName + " was not found on database " + storage2);
      ++differences;
      ok = false;
    }
    final int cluster1Id = storage1.getClusterIdByName(clusterName);
    if (cluster2Id != cluster1Id) {
      listener.onMessage("ERR: cluster id is different for cluster " + clusterName + ": "
          + cluster1Id + " <-> " + cluster2Id);
      ++differences;
      ok = false;
    }
    // FIX: count storage1's records with storage1's own cluster id; the
    // original used cluster2Id on both sides, which is wrong whenever the two
    // databases assign different ids to the same cluster name.
    if (storage1.count(cluster1Id) != storage2.count(cluster2Id)) {
      listener.onMessage("ERR: number of records different in cluster '" + clusterName + "' (id=" + cluster2Id + "): "
          + storage1.count(cluster1Id) + " <-> " + storage2.count(cluster2Id));
      ++differences;
      ok = false;
    }
    if (ok)
      listener.onMessage("OK");
  }
  listener.onMessage("\n\nShallow analysis done.");
  return true;
}
/**
 * Deep comparison: walks every physical position of every (included) cluster
 * and compares the record stored in storage1 against the record with the same
 * RID (or the RID produced by {@code ridMapper}) in storage2. Documents are
 * compared field-by-field; other record types byte-by-byte. Differences are
 * reported through the listener and counted in {@code differences}.
 *
 * @param ridMapper optional RID translation between the two databases; may be null
 * @return always true; problems are signalled via the listener/differences counter
 */
private boolean compareRecords(ODocumentHelper.RIDMapper ridMapper) {
  listener.onMessage("\nStarting deep comparison record by record. This may take a few minutes. Wait please...");
  int clusterId;
  for (String clusterName : storage1.getClusterNames()) {
    // CHECK IF THE CLUSTER IS INCLUDED
    if (includeClusters != null) {
      if (!includeClusters.contains(clusterName))
        continue;
    } else if (excludeClusters != null) {
      if (excludeClusters.contains(clusterName))
        continue;
    }
    clusterId = storage1.getClusterIdByName(clusterName);
    // Data range of the cluster in each database; index 1 holds the highest
    // cluster position.
    OClusterPosition[] db1Range = storage1.getClusterDataRange(clusterId);
    OClusterPosition[] db2Range = storage2.getClusterDataRange(clusterId);
    final OClusterPosition db1Max = db1Range[1];
    final OClusterPosition db2Max = db2Range[1];
    // Reusable document instances to avoid allocating one per record.
    final ODocument doc1 = new ODocument();
    final ODocument doc2 = new ODocument();
    final ORecordId rid = new ORecordId(clusterId);
    // TODO why this maximums can be different?
    // Iterate using whichever storage has the larger range so records present
    // only on one side are still visited.
    final OClusterPosition clusterMax = db1Max.compareTo(db2Max) > 0 ? db1Max : db2Max;
    final OStorage storage;
    if (clusterMax.equals(db1Max))
      storage = storage1;
    else
      storage = storage2;
    OPhysicalPosition[] physicalPositions = storage.ceilingPhysicalPositions(clusterId, new OPhysicalPosition(
        OClusterPositionFactory.INSTANCE.valueOf(0)));
    long recordsCounter = 0;
    // Walk the cluster batch by batch until no more physical positions remain.
    while (physicalPositions.length > 0) {
      for (OPhysicalPosition physicalPosition : physicalPositions) {
        recordsCounter++;
        final OClusterPosition position = physicalPosition.clusterPosition;
        rid.clusterPosition = position;
        // Skip the index-manager record: index content is compared separately.
        if (isDocumentDatabases() && rid.equals(new ORecordId(storage1.getConfiguration().indexMgrRecordId))
            && rid.equals(new ORecordId(storage2.getConfiguration().indexMgrRecordId)))
          continue;
        final ORawBuffer buffer1 = storage1.readRecord(rid, null, true, null, false).getResult();
        final ORawBuffer buffer2;
        if (ridMapper == null)
          buffer2 = storage2.readRecord(rid, null, true, null, false).getResult();
        else {
          // Translate the RID if a mapping exists; otherwise fall back to the
          // same RID on DB2.
          final ORID newRid = ridMapper.map(rid);
          if (newRid == null)
            buffer2 = storage2.readRecord(rid, null, true, null, false).getResult();
          else
            buffer2 = storage2.readRecord(new ORecordId(newRid), null, true, null, false).getResult();
        }
        if (buffer1 == null && buffer2 == null)
          // BOTH RECORD NULL, OK
          continue;
        else if (buffer1 == null && buffer2 != null) {
          // REC1 NULL
          listener.onMessage("\n- ERR: RID=" + clusterId + ":" + position + " is null in DB1");
          ++differences;
        } else if (buffer1 != null && buffer2 == null) {
          // REC2 NULL
          listener.onMessage("\n- ERR: RID=" + clusterId + ":" + position + " is null in DB2");
          ++differences;
        } else {
          // Both present: compare record type, then content.
          if (buffer1.recordType != buffer2.recordType) {
            listener.onMessage("\n- ERR: RID=" + clusterId + ":" + position + " recordType is different: "
                + (char) buffer1.recordType + " <-> " + (char) buffer2.recordType);
            ++differences;
          }
          if (buffer1.buffer == null && buffer2.buffer == null) {
            // Both contents empty: nothing to compare.
          } else if (buffer1.buffer == null && buffer2.buffer != null) {
            listener.onMessage("\n- ERR: RID=" + clusterId + ":" + position + " content is different: null <-> "
                + buffer2.buffer.length);
            ++differences;
          } else if (buffer1.buffer != null && buffer2.buffer == null) {
            listener.onMessage("\n- ERR: RID=" + clusterId + ":" + position + " content is different: " + buffer1.buffer.length
                + " <-> null");
            ++differences;
          } else {
            if (buffer1.recordType == ODocument.RECORD_TYPE) {
              // DOCUMENT: TRY TO INSTANTIATE AND COMPARE
              makeDbCall(databaseDocumentTxOne, new ODocumentHelper.ODbRelatedCall<Object>() {
                public Object call() {
                  doc1.reset();
                  doc1.fromStream(buffer1.buffer);
                  return null;
                }
              });
              makeDbCall(databaseDocumentTxTwo, new ODocumentHelper.ODbRelatedCall<Object>() {
                public Object call() {
                  doc2.reset();
                  doc2.fromStream(buffer2.buffer);
                  return null;
                }
              });
              // Schema records need normalization before a field-level compare.
              if (rid.toString().equals(storage1.getConfiguration().schemaRecordId)
                  && rid.toString().equals(storage2.getConfiguration().schemaRecordId)) {
                makeDbCall(databaseDocumentTxOne, new ODocumentHelper.ODbRelatedCall<java.lang.Object>() {
                  public Object call() {
                    convertSchemaDoc(doc1);
                    return null;
                  }
                });
                makeDbCall(databaseDocumentTxTwo, new ODocumentHelper.ODbRelatedCall<java.lang.Object>() {
                  public Object call() {
                    convertSchemaDoc(doc2);
                    return null;
                  }
                });
              }
              if (!ODocumentHelper.hasSameContentOf(doc1, databaseDocumentTxOne, doc2, databaseDocumentTxTwo, ridMapper)) {
                listener.onMessage("\n- ERR: RID=" + clusterId + ":" + position + " document content is different");
                listener.onMessage("\n--- REC1: " + new String(buffer1.buffer));
                listener.onMessage("\n--- REC2: " + new String(buffer2.buffer));
                listener.onMessage("\n");
                ++differences;
              }
            } else {
              // Non-document record: compare raw bytes.
              if (buffer1.buffer.length != buffer2.buffer.length) {
                // CHECK IF THE TRIMMED SIZE IS THE SAME
                final String rec1 = new String(buffer1.buffer).trim();
                final String rec2 = new String(buffer2.buffer).trim();
                if (rec1.length() != rec2.length()) {
                  listener.onMessage("\n- ERR: RID=" + clusterId + ":" + position + " content length is different: "
                      + buffer1.buffer.length + " <-> " + buffer2.buffer.length);
                  if (buffer1.recordType == ODocument.RECORD_TYPE || buffer1.recordType == ORecordFlat.RECORD_TYPE)
                    listener.onMessage("\n--- REC1: " + rec1);
                  if (buffer2.recordType == ODocument.RECORD_TYPE || buffer2.recordType == ORecordFlat.RECORD_TYPE)
                    listener.onMessage("\n--- REC2: " + rec2);
                  listener.onMessage("\n");
                  ++differences;
                }
              } else {
                // CHECK BYTE PER BYTE
                for (int b = 0; b < buffer1.buffer.length; ++b) {
                  if (buffer1.buffer[b] != buffer2.buffer[b]) {
                    listener.onMessage("\n- ERR: RID=" + clusterId + ":" + position + " content is different at byte #" + b
                        + ": " + buffer1.buffer[b] + " <-> " + buffer2.buffer[b]);
                    listener.onMessage("\n--- REC1: " + new String(buffer1.buffer));
                    listener.onMessage("\n--- REC2: " + new String(buffer2.buffer));
                    listener.onMessage("\n");
                    ++differences;
                    break;
                  }
                }
              }
            }
          }
        }
      }
      // Fetch the next batch of positions after the last one processed.
      physicalPositions = storage.higherPhysicalPositions(clusterId, physicalPositions[physicalPositions.length - 1]);
      // NOTE(review): this progress check runs once per batch, not per record,
      // so the "% 10000" message may be skipped — confirm if that is intended.
      if (recordsCounter % 10000 == 0)
        listener.onMessage("\n" + recordsCounter + " records were processed for cluster " + clusterName + " ...");
    }
    listener.onMessage("\nCluster comparison was finished, " + recordsCounter + " records were processed for cluster "
        + clusterName + " ...");
  }
  return true;
}
/**
 * Normalizes a schema document for comparison: forces the "classes" collection
 * and each class's "properties" collection to the EMBEDDEDSET field type so
 * the two schema records can be compared field-by-field.
 */
private void convertSchemaDoc(final ODocument document) {
  final Set<ODocument> classes = document.<Set<ODocument>> field("classes");
  if (classes == null)
    return;
  document.setFieldType("classes", OType.EMBEDDEDSET);
  for (final ODocument classDoc : classes)
    classDoc.setFieldType("properties", OType.EMBEDDEDSET);
}
/**
 * @return true when both storages carry a schema record id, i.e. both sides
 *         are document databases
 */
private boolean isDocumentDatabases() {
  if (storage1.getConfiguration().schemaRecordId == null)
    return false;
  return storage2.getConfiguration().schemaRecordId != null;
}
/**
 * Reports an index entry whose values differ between the two databases: logs
 * both values (each rendered inside its own database's context, since
 * rendering may touch linked records) and bumps the difference counter.
 */
private void reportIndexDiff(OIndex<?> indexOne, Object key, final Object indexOneValue, final Object indexTwoValue) {
  listener.onMessage("\n- ERR: Entry values for key '" + key + "' are different for index " + indexOne.getName());
  listener.onMessage("\n--- DB1: " + makeDbCall(databaseDocumentTxOne, new ODbRelatedCall<String>() {
    public String call() {
      return indexOneValue.toString();
    }
  }));
  // FIX: the DB2 value must be rendered within DB2's database context; the
  // original passed databaseDocumentTxOne here as well.
  listener.onMessage("\n--- DB2: " + makeDbCall(databaseDocumentTxTwo, new ODbRelatedCall<String>() {
    public String call() {
      return indexTwoValue.toString();
    }
  }));
  listener.onMessage("\n");
  ++differences;
}
}
| |
import java.awt.Font;
import java.awt.FontFormatException;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.text.DecimalFormat;
import org.newdawn.slick.Color;
import org.newdawn.slick.GameContainer;
import org.newdawn.slick.Graphics;
import org.newdawn.slick.Input;
import org.newdawn.slick.SlickException;
import org.newdawn.slick.TrueTypeFont;
import org.newdawn.slick.geom.Circle;
import org.newdawn.slick.geom.Rectangle;
import org.newdawn.slick.geom.RoundedRectangle;
import org.newdawn.slick.state.BasicGameState;
import org.newdawn.slick.state.StateBasedGame;
import org.newdawn.slick.state.transition.FadeInTransition;
import org.newdawn.slick.state.transition.FadeOutTransition;
/**
 * Options screen: lets the player adjust the music and sound FX volumes or
 * exit back to the previous state. An option is chosen by holding its key
 * (H / J / K, or the matching pad input) until that option's timer fills up;
 * while adjusting, H lowers and K raises the value, and H+K together confirms.
 */
public class OptionsState extends BasicGameState
{
	// Window width and height of the game, cached from the container in init()
	int windowWidth;
	int windowHeight;
	// Custom font used for all string drawing in this state
	TrueTypeFont wordFont;
	// Gamepad input shared across states
	PadInput pads;
	// Universal state handler shared across all states; stores the volumes
	StateHandler stateHandler;
	// The three selectable circles with their labels and current fill colors
	Circle[] optionCircles;
	String[] optionCircleStrings;
	Color[] optionCircleColors;
	Color[] selectorColors;
	// Index of the option currently being adjusted (0 = music, 1 = sound FX)
	int selected;
	// Whether an option is currently selected for adjustment
	boolean selectedMode;
	// Opacity of the darkening overlay shown while adjusting a value
	float selectedOpacity;
	// Hold timers, one per option; an option activates once its timer hits 100
	int[] timers;
	// Working copies of the music and sound FX volumes (0..1)
	float musicVolume;
	float soundVolume;

	/**
	 * @param sh state handler shared across all states
	 * @param p  gamepad input shared across all states
	 */
	public OptionsState(StateHandler sh, PadInput p)
	{
		stateHandler = sh;
		pads = p;
	}

	/** Loads the font and initializes geometry, colors, timers and volumes. */
	public void init(GameContainer gc, StateBasedGame state) throws SlickException
	{
		// Cache the game's window dimensions
		windowWidth = gc.getWidth();
		windowHeight = gc.getHeight();
		// Load the custom TrueType font from disk.
		// FIX: the original swallowed load failures (printStackTrace) and then
		// crashed with an NPE on font.deriveFont; it also leaked the stream.
		// Now a descriptive SlickException is thrown and the stream is closed.
		Font font;
		FileInputStream fontStream = null;
		try
		{
			fontStream = new FileInputStream("data/Fonts/belerenbold.ttf");
			font = Font.createFont(Font.TRUETYPE_FONT, fontStream);
		}
		catch (FontFormatException e)
		{
			throw new SlickException("Unable to load font data/Fonts/belerenbold.ttf", e);
		}
		catch (IOException e)
		{
			throw new SlickException("Unable to load font data/Fonts/belerenbold.ttf", e);
		}
		finally
		{
			if (fontStream != null)
			{
				try { fontStream.close(); } catch (IOException ignored) {}
			}
		}
		// Plain style, size 30, wrapped into a Slick TrueTypeFont for drawing
		font = font.deriveFont(Font.PLAIN, 30);
		wordFont = new TrueTypeFont(font, false);
		// Start with nothing selected
		selectedMode = false;
		optionCircleStrings = new String[]{"Music Volume", "Sound FX Volume", "Exit"};
		optionCircles = new Circle[3];
		optionCircles[0] = new Circle(windowWidth/6, windowHeight/5, windowHeight/6);
		optionCircles[1] = new Circle(windowWidth/2, windowHeight/5, windowHeight/6);
		optionCircles[2] = new Circle(windowWidth * 5/6, windowHeight/5, windowHeight/6);
		optionCircleColors = new Color[]{Color.white, Color.white, Color.white};
		selectorColors = new Color[3];
		selectorColors[0] = new Color(173, 16, 16);
		selectorColors[1] = new Color(22, 161, 22);
		selectorColors[2] = new Color(10, 29, 145);
		selectedOpacity = 0;
		timers = new int[]{0, 0, 0};
		// Start from the volumes currently stored in the shared state handler
		musicVolume = stateHandler.getMusicVolume();
		soundVolume = stateHandler.getSoundVolume();
	}

	/** Polls input and updates the selection timers and volume values. */
	public void update(GameContainer gc, StateBasedGame state, int delta) throws SlickException
	{
		// Gets the input from the keyboard and puts it into a variable
		Input input = gc.getInput();
		if (selectedMode)
		{
			// Fade in the dark overlay while adjusting
			if (selectedOpacity < .75f)
				selectedOpacity += .01f;
			// H+K together (or pad code 6) confirms: persist the volumes and
			// leave selected mode
			if (input.isKeyDown(Input.KEY_H) && input.isKeyDown(Input.KEY_K) || pads.input == 6)
			{
				stateHandler.setMusicVolume(musicVolume);
				stateHandler.setSoundVolume(soundVolume);
				selectedMode = false;
				for (int i = 0; i < optionCircleColors.length; i++)
					optionCircleColors[i] = Color.white;
			}
			// H (or pad 1) lowers the selected volume; K (or pad 5) raises it.
			// NOTE(review): on the confirm frame above H is still down, so the
			// branch below fires once more — behavior preserved from the
			// original; confirm whether that is intended.
			if (input.isKeyDown(Input.KEY_H) || pads.input == 1)
			{
				if (selected == 0)
				{
					if (musicVolume > .01f)
						musicVolume -= .01f;
				}
				else
				{
					if (soundVolume > .01f)
						soundVolume -= .01f;
				}
			}
			else if (input.isKeyDown(Input.KEY_K) || pads.input == 5)
			{
				if (selected == 0)
				{
					if (musicVolume < 1)
						musicVolume += .01f;
				}
				else
				{
					if (soundVolume < 1)
						soundVolume += .01f;
				}
			}
		}
		else
		{
			// Fade the overlay back out while browsing
			if (selectedOpacity > 0)
				selectedOpacity -= .01f;
			// Holding exactly one key advances that option's timer and resets
			// the others; releasing everything resets all timers
			if (input.isKeyDown(Input.KEY_H) || pads.input == 1)
			{
				timers[0]++;
				timers[1] = 0;
				timers[2] = 0;
			}
			else if (input.isKeyDown(Input.KEY_J) || pads.input == 3)
			{
				timers[1]++;
				timers[0] = 0;
				timers[2] = 0;
			}
			else if (input.isKeyDown(Input.KEY_K) || pads.input == 5)
			{
				timers[2]++;
				timers[0] = 0;
				timers[1] = 0;
			}
			else
			{
				for (int i = 0; i < timers.length; i++)
					timers[i] = 0;
			}
			// A full timer (100 ticks) activates the option
			if (timers[0] == 100)
			{
				selected = 0;
				optionCircleColors[0] = selectorColors[0];
				selectedMode = true;
				for (int i = 0; i < timers.length; i++)
					timers[i] = 0;
			}
			else if (timers[1] == 100)
			{
				selected = 1;
				optionCircleColors[1] = selectorColors[1];
				selectedMode = true;
				for (int i = 0; i < timers.length; i++)
					timers[i] = 0;
			}
			else if (timers[2] == 100)
			{
				// Exit option: fade back to state 0
				for (int i = 0; i < timers.length; i++)
					timers[i] = 0;
				state.enterState(0, new FadeOutTransition(Color.black, 750), new FadeInTransition(Color.black, 750));
			}
		}
		// Clears the keyboard input record.
		// Needed so that key presses do not get accidentally passed between states
		input.clearKeyPressedRecord();
	}

	/** Draws the option circles, hold-progress arcs and the adjustment overlay. */
	public void render(GameContainer gc, StateBasedGame state, Graphics g) throws SlickException
	{
		// Smoother rendering for the circles and arcs
		g.setAntiAlias(true);
		g.setFont(wordFont);
		g.setLineWidth(7);
		// Gray background
		g.setColor(new Color(84, 84, 84));
		g.fill(new Rectangle(0, 0, windowWidth, windowHeight));
		for (int i = 0; i < optionCircles.length; i++)
		{
			g.setColor(optionCircleColors[i]);
			g.fill(optionCircles[i]);
			g.setColor(Color.black);
			g.drawString(optionCircleStrings[i], optionCircles[i].getCenterX() - g.getFont().getWidth(optionCircleStrings[i])/2, optionCircles[i].getCenterY() - g.getFont().getHeight(optionCircleStrings[i])/2);
			g.setColor(new Color(selectorColors[i].r, selectorColors[i].g, selectorColors[i].b, .25f));
			g.draw(optionCircles[i]);
			g.setColor(selectorColors[i]);
			// Progress arc fills clockwise as the option's hold timer grows
			g.drawArc(optionCircles[i].getX(), optionCircles[i].getY(), optionCircles[i].getWidth(), optionCircles[i].getHeight(), 270, 270 + (timers[i] * (360f/100f)));
		}
		// Darkening overlay (opacity animated in update())
		g.setColor(new Color(0, 0, 0, selectedOpacity));
		g.fill(new Rectangle(0, 0, windowWidth, windowHeight));
		if (selectedMode)
		{
			g.setAntiAlias(false);
			DecimalFormat df = new DecimalFormat();
			df.setMaximumFractionDigits(2);
			g.setColor(new Color(selectorColors[selected].r, selectorColors[selected].g, selectorColors[selected].b, .25f + selectedOpacity));
			if (selected == 0)
			{
				g.fillArc(windowWidth/2 - windowHeight/5, windowHeight * 3/4 - windowHeight/5, windowHeight/5 * 2, windowHeight/5 * 2, 270, 270 + (musicVolume * 360));
				g.setColor(Color.black);
				g.drawString(df.format(musicVolume), windowWidth/2 - g.getFont().getWidth(df.format(musicVolume))/2, windowHeight * 3/4 - g.getFont().getHeight(df.format(musicVolume))/2);
			}
			else
			{
				g.fillArc(windowWidth/2 - windowHeight/5, windowHeight * 3/4 - windowHeight/5, windowHeight/5 * 2, windowHeight/5 * 2, 270, 270 + (soundVolume * 360));
				g.setColor(Color.black);
				// FIX: the original formatted and measured musicVolume here, so
				// the sound FX branch displayed a mis-centered value based on
				// the wrong text
				g.drawString(df.format(soundVolume), windowWidth/2 - g.getFont().getWidth(df.format(soundVolume))/2, windowHeight * 3/4 - g.getFont().getHeight(df.format(soundVolume))/2);
			}
		}
	}

	/** @return this state's id (2) */
	public int getID()
	{
		return 2;
	}
}
| |
/*
* Copyright 2018 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.dataproc.v1.stub;
import static com.google.cloud.dataproc.v1.JobControllerClient.ListJobsPagedResponse;
import com.google.api.core.BetaApi;
import com.google.api.gax.core.BackgroundResource;
import com.google.api.gax.core.BackgroundResourceAggregation;
import com.google.api.gax.grpc.GrpcCallSettings;
import com.google.api.gax.grpc.GrpcStubCallableFactory;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.cloud.dataproc.v1.CancelJobRequest;
import com.google.cloud.dataproc.v1.DeleteJobRequest;
import com.google.cloud.dataproc.v1.GetJobRequest;
import com.google.cloud.dataproc.v1.Job;
import com.google.cloud.dataproc.v1.ListJobsRequest;
import com.google.cloud.dataproc.v1.ListJobsResponse;
import com.google.cloud.dataproc.v1.SubmitJobRequest;
import com.google.cloud.dataproc.v1.UpdateJobRequest;
import com.google.protobuf.Empty;
import io.grpc.MethodDescriptor;
import io.grpc.protobuf.ProtoUtils;
import java.io.IOException;
import java.util.concurrent.TimeUnit;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND CLASS
/**
* gRPC stub implementation for Google Cloud Dataproc API.
*
* <p>This class is for advanced usage and reflects the underlying API directly.
*/
@Generated("by gapic-generator")
@BetaApi("A restructuring of stub classes is planned, so this may break in the future")
public class GrpcJobControllerStub extends JobControllerStub {
  // NOTE: this class is auto-generated; change the generator rather than
  // hand-editing this file.

  // Static gRPC method descriptors, one per JobController RPC. Each pins the
  // fully-qualified method name and the protobuf request/response marshallers.
  private static final MethodDescriptor<SubmitJobRequest, Job> submitJobMethodDescriptor =
      MethodDescriptor.<SubmitJobRequest, Job>newBuilder()
          .setType(MethodDescriptor.MethodType.UNARY)
          .setFullMethodName("google.cloud.dataproc.v1.JobController/SubmitJob")
          .setRequestMarshaller(ProtoUtils.marshaller(SubmitJobRequest.getDefaultInstance()))
          .setResponseMarshaller(ProtoUtils.marshaller(Job.getDefaultInstance()))
          .build();
  private static final MethodDescriptor<GetJobRequest, Job> getJobMethodDescriptor =
      MethodDescriptor.<GetJobRequest, Job>newBuilder()
          .setType(MethodDescriptor.MethodType.UNARY)
          .setFullMethodName("google.cloud.dataproc.v1.JobController/GetJob")
          .setRequestMarshaller(ProtoUtils.marshaller(GetJobRequest.getDefaultInstance()))
          .setResponseMarshaller(ProtoUtils.marshaller(Job.getDefaultInstance()))
          .build();
  private static final MethodDescriptor<ListJobsRequest, ListJobsResponse>
      listJobsMethodDescriptor =
          MethodDescriptor.<ListJobsRequest, ListJobsResponse>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.cloud.dataproc.v1.JobController/ListJobs")
              .setRequestMarshaller(ProtoUtils.marshaller(ListJobsRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(ListJobsResponse.getDefaultInstance()))
              .build();
  private static final MethodDescriptor<UpdateJobRequest, Job> updateJobMethodDescriptor =
      MethodDescriptor.<UpdateJobRequest, Job>newBuilder()
          .setType(MethodDescriptor.MethodType.UNARY)
          .setFullMethodName("google.cloud.dataproc.v1.JobController/UpdateJob")
          .setRequestMarshaller(ProtoUtils.marshaller(UpdateJobRequest.getDefaultInstance()))
          .setResponseMarshaller(ProtoUtils.marshaller(Job.getDefaultInstance()))
          .build();
  private static final MethodDescriptor<CancelJobRequest, Job> cancelJobMethodDescriptor =
      MethodDescriptor.<CancelJobRequest, Job>newBuilder()
          .setType(MethodDescriptor.MethodType.UNARY)
          .setFullMethodName("google.cloud.dataproc.v1.JobController/CancelJob")
          .setRequestMarshaller(ProtoUtils.marshaller(CancelJobRequest.getDefaultInstance()))
          .setResponseMarshaller(ProtoUtils.marshaller(Job.getDefaultInstance()))
          .build();
  private static final MethodDescriptor<DeleteJobRequest, Empty> deleteJobMethodDescriptor =
      MethodDescriptor.<DeleteJobRequest, Empty>newBuilder()
          .setType(MethodDescriptor.MethodType.UNARY)
          .setFullMethodName("google.cloud.dataproc.v1.JobController/DeleteJob")
          .setRequestMarshaller(ProtoUtils.marshaller(DeleteJobRequest.getDefaultInstance()))
          .setResponseMarshaller(ProtoUtils.marshaller(Empty.getDefaultInstance()))
          .build();

  // Aggregated background resources of the client context; lifecycle methods
  // below delegate to this.
  private final BackgroundResource backgroundResources;
  // One callable per RPC, plus the paged view over ListJobs; all wired in the
  // constructor from the transport settings and the stub settings.
  private final UnaryCallable<SubmitJobRequest, Job> submitJobCallable;
  private final UnaryCallable<GetJobRequest, Job> getJobCallable;
  private final UnaryCallable<ListJobsRequest, ListJobsResponse> listJobsCallable;
  private final UnaryCallable<ListJobsRequest, ListJobsPagedResponse> listJobsPagedCallable;
  private final UnaryCallable<UpdateJobRequest, Job> updateJobCallable;
  private final UnaryCallable<CancelJobRequest, Job> cancelJobCallable;
  private final UnaryCallable<DeleteJobRequest, Empty> deleteJobCallable;
  private final GrpcStubCallableFactory callableFactory;

  // Factory: build a stub from explicit settings.
  public static final GrpcJobControllerStub create(JobControllerStubSettings settings)
      throws IOException {
    return new GrpcJobControllerStub(settings, ClientContext.create(settings));
  }

  // Factory: build a stub with default settings over an existing client context.
  public static final GrpcJobControllerStub create(ClientContext clientContext) throws IOException {
    return new GrpcJobControllerStub(JobControllerStubSettings.newBuilder().build(), clientContext);
  }

  // Factory: build a stub with default settings and a custom callable factory.
  public static final GrpcJobControllerStub create(
      ClientContext clientContext, GrpcStubCallableFactory callableFactory) throws IOException {
    return new GrpcJobControllerStub(
        JobControllerStubSettings.newBuilder().build(), clientContext, callableFactory);
  }

  /**
   * Constructs an instance of GrpcJobControllerStub, using the given settings. This is protected so
   * that it is easy to make a subclass, but otherwise, the static factory methods should be
   * preferred.
   */
  protected GrpcJobControllerStub(JobControllerStubSettings settings, ClientContext clientContext)
      throws IOException {
    this(settings, clientContext, new GrpcJobControllerCallableFactory());
  }

  /**
   * Constructs an instance of GrpcJobControllerStub, using the given settings. This is protected so
   * that it is easy to make a subclass, but otherwise, the static factory methods should be
   * preferred.
   */
  protected GrpcJobControllerStub(
      JobControllerStubSettings settings,
      ClientContext clientContext,
      GrpcStubCallableFactory callableFactory)
      throws IOException {
    this.callableFactory = callableFactory;

    // Per-RPC transport settings bound to the static method descriptors above.
    GrpcCallSettings<SubmitJobRequest, Job> submitJobTransportSettings =
        GrpcCallSettings.<SubmitJobRequest, Job>newBuilder()
            .setMethodDescriptor(submitJobMethodDescriptor)
            .build();
    GrpcCallSettings<GetJobRequest, Job> getJobTransportSettings =
        GrpcCallSettings.<GetJobRequest, Job>newBuilder()
            .setMethodDescriptor(getJobMethodDescriptor)
            .build();
    GrpcCallSettings<ListJobsRequest, ListJobsResponse> listJobsTransportSettings =
        GrpcCallSettings.<ListJobsRequest, ListJobsResponse>newBuilder()
            .setMethodDescriptor(listJobsMethodDescriptor)
            .build();
    GrpcCallSettings<UpdateJobRequest, Job> updateJobTransportSettings =
        GrpcCallSettings.<UpdateJobRequest, Job>newBuilder()
            .setMethodDescriptor(updateJobMethodDescriptor)
            .build();
    GrpcCallSettings<CancelJobRequest, Job> cancelJobTransportSettings =
        GrpcCallSettings.<CancelJobRequest, Job>newBuilder()
            .setMethodDescriptor(cancelJobMethodDescriptor)
            .build();
    GrpcCallSettings<DeleteJobRequest, Empty> deleteJobTransportSettings =
        GrpcCallSettings.<DeleteJobRequest, Empty>newBuilder()
            .setMethodDescriptor(deleteJobMethodDescriptor)
            .build();

    // Wire each callable from its transport settings plus the retry/timeout
    // configuration carried by the stub settings. Note ListJobs gets both a
    // plain and a paged callable over the same transport settings.
    this.submitJobCallable =
        callableFactory.createUnaryCallable(
            submitJobTransportSettings, settings.submitJobSettings(), clientContext);
    this.getJobCallable =
        callableFactory.createUnaryCallable(
            getJobTransportSettings, settings.getJobSettings(), clientContext);
    this.listJobsCallable =
        callableFactory.createUnaryCallable(
            listJobsTransportSettings, settings.listJobsSettings(), clientContext);
    this.listJobsPagedCallable =
        callableFactory.createPagedCallable(
            listJobsTransportSettings, settings.listJobsSettings(), clientContext);
    this.updateJobCallable =
        callableFactory.createUnaryCallable(
            updateJobTransportSettings, settings.updateJobSettings(), clientContext);
    this.cancelJobCallable =
        callableFactory.createUnaryCallable(
            cancelJobTransportSettings, settings.cancelJobSettings(), clientContext);
    this.deleteJobCallable =
        callableFactory.createUnaryCallable(
            deleteJobTransportSettings, settings.deleteJobSettings(), clientContext);

    backgroundResources = new BackgroundResourceAggregation(clientContext.getBackgroundResources());
  }

  // Accessors returning the pre-wired callables.
  public UnaryCallable<SubmitJobRequest, Job> submitJobCallable() {
    return submitJobCallable;
  }

  public UnaryCallable<GetJobRequest, Job> getJobCallable() {
    return getJobCallable;
  }

  public UnaryCallable<ListJobsRequest, ListJobsPagedResponse> listJobsPagedCallable() {
    return listJobsPagedCallable;
  }

  public UnaryCallable<ListJobsRequest, ListJobsResponse> listJobsCallable() {
    return listJobsCallable;
  }

  public UnaryCallable<UpdateJobRequest, Job> updateJobCallable() {
    return updateJobCallable;
  }

  public UnaryCallable<CancelJobRequest, Job> cancelJobCallable() {
    return cancelJobCallable;
  }

  public UnaryCallable<DeleteJobRequest, Empty> deleteJobCallable() {
    return deleteJobCallable;
  }

  // Lifecycle management: everything delegates to the aggregated background
  // resources of the client context.
  @Override
  public final void close() {
    shutdown();
  }

  @Override
  public void shutdown() {
    backgroundResources.shutdown();
  }

  @Override
  public boolean isShutdown() {
    return backgroundResources.isShutdown();
  }

  @Override
  public boolean isTerminated() {
    return backgroundResources.isTerminated();
  }

  @Override
  public void shutdownNow() {
    backgroundResources.shutdownNow();
  }

  @Override
  public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException {
    return backgroundResources.awaitTermination(duration, unit);
  }
}
| |
package org.zstack.storage.primary;
import org.springframework.beans.factory.annotation.Autowire;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Configurable;
import org.springframework.transaction.annotation.Transactional;
import org.zstack.core.cascade.CascadeConstant;
import org.zstack.core.cascade.CascadeFacade;
import org.zstack.core.cloudbus.CloudBus;
import org.zstack.core.cloudbus.CloudBusCallBack;
import org.zstack.core.cloudbus.CloudBusListCallBack;
import org.zstack.core.cloudbus.EventFacade;
import org.zstack.core.db.DatabaseFacade;
import org.zstack.core.db.Q;
import org.zstack.core.db.SQL;
import org.zstack.core.db.SimpleQuery;
import org.zstack.core.db.SimpleQuery.Op;
import org.zstack.core.errorcode.ErrorFacade;
import org.zstack.core.inventory.InventoryFacade;
import org.zstack.core.job.JobQueueFacade;
import org.zstack.core.thread.ChainTask;
import org.zstack.core.thread.SyncTaskChain;
import org.zstack.core.thread.ThreadFacade;
import org.zstack.core.workflow.FlowChainBuilder;
import org.zstack.core.workflow.ShareFlow;
import org.zstack.header.core.Completion;
import org.zstack.header.core.NoErrorCompletion;
import org.zstack.header.core.NopeCompletion;
import org.zstack.header.core.ReturnValueCompletion;
import org.zstack.header.core.workflow.*;
import org.zstack.header.errorcode.ErrorCode;
import org.zstack.header.errorcode.OperationFailureException;
import org.zstack.header.errorcode.SysErrors;
import org.zstack.header.message.APIDeleteMessage;
import org.zstack.header.message.APIMessage;
import org.zstack.header.message.Message;
import org.zstack.header.message.MessageReply;
import org.zstack.header.storage.primary.*;
import org.zstack.header.storage.primary.PrimaryStorageCanonicalEvent.PrimaryStorageDeletedData;
import org.zstack.header.storage.primary.PrimaryStorageCanonicalEvent.PrimaryStorageStatusChangedData;
import org.zstack.header.storage.snapshot.ChangeVolumeSnapshotStatusReply;
import org.zstack.header.storage.snapshot.VolumeSnapshotConstant;
import org.zstack.header.storage.snapshot.VolumeSnapshotReportPrimaryStorageCapacityUsageMsg;
import org.zstack.header.storage.snapshot.VolumeSnapshotReportPrimaryStorageCapacityUsageReply;
import org.zstack.header.vm.StopVmInstanceMsg;
import org.zstack.header.vm.VmAttachVolumeValidatorMethod;
import org.zstack.header.vm.VmInstanceConstant;
import org.zstack.header.volume.VolumeConstant;
import org.zstack.header.volume.VolumeReportPrimaryStorageCapacityUsageMsg;
import org.zstack.header.volume.VolumeReportPrimaryStorageCapacityUsageReply;
import org.zstack.header.volume.VolumeType;
import org.zstack.utils.DebugUtils;
import org.zstack.utils.Utils;
import org.zstack.utils.logging.CLogger;
import static org.zstack.core.Platform.operr;
import javax.persistence.LockModeType;
import javax.persistence.Query;
import javax.persistence.TypedQuery;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
@Configurable(preConstruction = true, autowire = Autowire.BY_TYPE, dependencyCheck = true)
public abstract class PrimaryStorageBase extends AbstractPrimaryStorage {
// Shared logger for all primary-storage implementations.
private final static CLogger logger = Utils.getLogger(PrimaryStorageBase.class);
// The database record (VO) this instance wraps; handlers operate on it.
protected PrimaryStorageVO self;
// Cloud message bus facade.
@Autowired
protected CloudBus bus;
// Database access facade.
@Autowired
protected DatabaseFacade dbf;
// Job queue facade.
@Autowired
protected JobQueueFacade jobf;
// Emits primary-storage extension-point callbacks.
@Autowired
protected PrimaryStorageExtensionPointEmitter extpEmitter;
// Inventory (VO <-> API view) facade.
@Autowired
protected InventoryFacade invf;
// Cascade facade (cascaded operations across related resources).
@Autowired
protected CascadeFacade casf;
// Error code construction facade.
@Autowired
protected ErrorFacade errf;
// Thread facade; used with getSyncId() to serialize work per storage.
@Autowired
protected ThreadFacade thdf;
// Over-provisioning ratio manager for capacity accounting.
@Autowired
protected PrimaryStorageOverProvisioningManager ratioMgr;
// Event facade for publishing canonical events.
@Autowired
protected EventFacade evtf;
// Tracker that pings this primary storage (see pingHook).
@Autowired
protected PrimaryStoragePingTracker tracker;
// Value holder for backend-reported physical capacity
// (presumably in bytes — TODO confirm against implementations).
public static class PhysicalCapacityUsage {
    public long totalPhysicalSize;
    public long availablePhysicalSize;
}
// Parameter object passed to connectHook().
public static class ConnectParam {
    // True when the storage was just added and is connecting for the first time.
    private boolean newAdded;

    public boolean isNewAdded() {
        return newAdded;
    }

    public void setNewAdded(boolean newAdded) {
        this.newAdded = newAdded;
    }
}
protected abstract void handle(InstantiateVolumeOnPrimaryStorageMsg msg);
protected abstract void handle(DeleteVolumeOnPrimaryStorageMsg msg);
protected abstract void handle(CreateTemplateFromVolumeOnPrimaryStorageMsg msg);
protected abstract void handle(DownloadDataVolumeToPrimaryStorageMsg msg);
protected abstract void handle(DeleteBitsOnPrimaryStorageMsg msg);
protected abstract void handle(DownloadIsoToPrimaryStorageMsg msg);
protected abstract void handle(DeleteIsoFromPrimaryStorageMsg msg);
protected abstract void handle(AskVolumeSnapshotCapabilityMsg msg);
protected abstract void handle(SyncVolumeSizeOnPrimaryStorageMsg msg);
protected abstract void handle(MergeVolumeSnapshotOnPrimaryStorageMsg msg);
protected abstract void handle(DeleteSnapshotOnPrimaryStorageMsg msg);
protected abstract void handle(RevertVolumeFromSnapshotOnPrimaryStorageMsg msg);
protected abstract void handle(ReInitRootVolumeFromTemplateOnPrimaryStorageMsg msg);
protected abstract void connectHook(ConnectParam param, Completion completion);
protected abstract void pingHook(Completion completion);
protected abstract void syncPhysicalCapacity(ReturnValueCompletion<PhysicalCapacityUsage> completion);
public PrimaryStorageBase(PrimaryStorageVO self) {
this.self = self;
}
/** Returns an inventory (API view) of the underlying {@link PrimaryStorageVO}. */
protected PrimaryStorageInventory getSelfInventory() {
    return PrimaryStorageInventory.valueOf(self);
}

/** Sync-queue signature used to serialize operations on this primary storage. */
protected String getSyncId() {
    return String.format("primaryStorage-%s", self.getUuid());
}

// Default hook implementations: succeed immediately / do nothing.
// Subclasses override when attach/detach/delete/state-change needs backend work.
@Override
public void attachHook(String clusterUuid, Completion completion) {
    completion.success();
}

@Override
public void detachHook(String clusterUuid, Completion completion) {
    completion.success();
}

@Override
public void deleteHook() {
}

@Override
public void changeStateHook(PrimaryStorageStateEvent evt, PrimaryStorageState nextState) {
}
/**
 * Entry point for all messages routed to this primary storage.
 * Dispatches to API or local handling; any exception is dumped and
 * converted into an error reply/event so the sender is never left hanging.
 */
@Override
public void handleMessage(Message msg) {
    try {
        if (msg instanceof APIMessage) {
            handleApiMessage((APIMessage) msg);
        } else {
            handleLocalMessage(msg);
        }
    } catch (Exception e) {
        bus.logExceptionWithMessageDump(msg, e);
        bus.replyErrorByMessageType(msg, e);
    }
}

// If a new kind of storage is added, override this method.
// Forbids attaching an ISO that is not yet cached when the storage is Disabled
// (a disabled storage must not download new bits).
protected void checkImageIfNeedToDownload(DownloadIsoToPrimaryStorageMsg msg){
    logger.debug("check if image exist in disabled primary storage");
    if(self.getState() != PrimaryStorageState.Disabled){
        return ;
    }
    if( !Q.New(ImageCacheVO.class)
            .eq(ImageCacheVO_.primaryStorageUuid, self.getUuid())
            .eq(ImageCacheVO_.imageUuid, msg.getIsoSpec().getInventory().getUuid())
            .isExists()){

        throw new OperationFailureException(errf.stringToOperationError(
                String.format("cannot attach ISO to a primary storage[uuid:%s] which is disabled",
                        self.getUuid())));
    }
}
/**
 * Throws an {@link OperationFailureException} when the given state string
 * equals the forbidden state; otherwise does nothing.
 *
 * The two public-facing variants below were near-duplicates; additionally the
 * Maintenance variant logged its debug line unconditionally (even when the
 * state did not match) while the Disabled variant logged only on a match.
 * Both now log only when the operation is actually forbidden.
 *
 * @param primaryStorageState the current state, as a string
 * @param forbidden           the state in which the operation must be rejected
 */
private void forbidOperationWhenStateIs(String primaryStorageState, PrimaryStorageState forbidden) {
    if (primaryStorageState.equals(forbidden.toString())) {
        logger.debug(String.format("checking primary storage status whether %s", forbidden));
        String error = String.format(
                "Operation is not permitted when primary storage status is '%s', please check primary storage status",
                forbidden);
        ErrorCode errorCode = new ErrorCode();
        errorCode.setCode(PrimaryStorageErrors.ALLOCATE_ERROR.toString());
        errorCode.setDetails(error);
        errorCode.setDescription("Operation is not permitted");
        throw new OperationFailureException(errorCode);
    }
}

/** Rejects the operation when the primary storage is Disabled. */
private void forbidOperationWhenPrimaryStorageDisable(String primaryStorageState) {
    forbidOperationWhenStateIs(primaryStorageState, PrimaryStorageState.Disabled);
}

/** Rejects the operation when the primary storage is in Maintenance. */
private void forbidOperationWhenPrimaryStorageMaintenance(String primaryStorageState) {
    forbidOperationWhenStateIs(primaryStorageState, PrimaryStorageState.Maintenance);
}
/**
 * Pre-dispatch state gate: rejects messages that must not run while the
 * primary storage is Disabled and/or in Maintenance.
 *
 * Improvement: the original recomputed {@code self.getState().toString()}
 * up to twice per branch; the invariant value is now computed once.
 * Branch order and the per-message checks are unchanged.
 */
private void checkPrimaryStatus(Message msg) {
    final String state = self.getState().toString();
    if (msg instanceof InstantiateVolumeOnPrimaryStorageMsg) {
        forbidOperationWhenPrimaryStorageMaintenance(state);
        forbidOperationWhenPrimaryStorageDisable(state);
    } else if (msg instanceof DeleteVolumeOnPrimaryStorageMsg) {
        forbidOperationWhenPrimaryStorageMaintenance(state);
    } else if (msg instanceof CreateTemplateFromVolumeOnPrimaryStorageMsg) {
        forbidOperationWhenPrimaryStorageDisable(state);
        forbidOperationWhenPrimaryStorageMaintenance(state);
    } else if (msg instanceof PrimaryStorageDeletionMsg) {
        forbidOperationWhenPrimaryStorageMaintenance(state);
        forbidOperationWhenPrimaryStorageDisable(state);
    } else if (msg instanceof DownloadDataVolumeToPrimaryStorageMsg) {
        forbidOperationWhenPrimaryStorageDisable(state);
        forbidOperationWhenPrimaryStorageMaintenance(state);
    } else if (msg instanceof DeleteBitsOnPrimaryStorageMsg) {
        forbidOperationWhenPrimaryStorageMaintenance(state);
    } else if (msg instanceof DeleteIsoFromPrimaryStorageMsg) {
        forbidOperationWhenPrimaryStorageMaintenance(state);
    } else if (msg instanceof AskVolumeSnapshotCapabilityMsg) {
        forbidOperationWhenPrimaryStorageDisable(state);
        forbidOperationWhenPrimaryStorageMaintenance(state);
    } else if (msg instanceof MergeVolumeSnapshotOnPrimaryStorageMsg) {
        forbidOperationWhenPrimaryStorageMaintenance(state);
    } else if (msg instanceof DeleteSnapshotOnPrimaryStorageMsg) {
        forbidOperationWhenPrimaryStorageMaintenance(state);
    } else if (msg instanceof RevertVolumeFromSnapshotOnPrimaryStorageMsg) {
        forbidOperationWhenPrimaryStorageMaintenance(state);
    } else if (msg instanceof ReInitRootVolumeFromTemplateOnPrimaryStorageMsg) {
        forbidOperationWhenPrimaryStorageMaintenance(state);
        forbidOperationWhenPrimaryStorageDisable(state);
    }
}
/**
 * Dispatches non-API (internal) messages to their handlers, after
 * {@link #checkPrimaryStatus} has vetoed state-forbidden operations.
 * Messages suffixed "Base" go through a wrapper that performs common
 * pre-checks before calling the subclass handler.
 */
protected void handleLocalMessage(Message msg) {
    checkPrimaryStatus(msg);
    if (msg instanceof PrimaryStorageReportPhysicalCapacityMsg) {
        handle((PrimaryStorageReportPhysicalCapacityMsg) msg);
    } else if (msg instanceof RecalculatePrimaryStorageCapacityMsg) {
        handle((RecalculatePrimaryStorageCapacityMsg) msg);
    } else if (msg instanceof InstantiateVolumeOnPrimaryStorageMsg) {
        handle((InstantiateVolumeOnPrimaryStorageMsg) msg);
    } else if (msg instanceof DeleteVolumeOnPrimaryStorageMsg) {
        handle((DeleteVolumeOnPrimaryStorageMsg) msg);
    } else if (msg instanceof CreateTemplateFromVolumeOnPrimaryStorageMsg) {
        handleBase((CreateTemplateFromVolumeOnPrimaryStorageMsg) msg);
    } else if (msg instanceof PrimaryStorageDeletionMsg) {
        handle((PrimaryStorageDeletionMsg) msg);
    } else if (msg instanceof DetachPrimaryStorageFromClusterMsg) {
        handle((DetachPrimaryStorageFromClusterMsg) msg);
    } else if (msg instanceof DownloadDataVolumeToPrimaryStorageMsg) {
        handleBase((DownloadDataVolumeToPrimaryStorageMsg) msg);
    } else if (msg instanceof DeleteBitsOnPrimaryStorageMsg) {
        handle((DeleteBitsOnPrimaryStorageMsg) msg);
    } else if (msg instanceof ConnectPrimaryStorageMsg) {
        handle((ConnectPrimaryStorageMsg) msg);
    } else if (msg instanceof DownloadIsoToPrimaryStorageMsg) {
        handleBase((DownloadIsoToPrimaryStorageMsg) msg);
    } else if (msg instanceof DeleteIsoFromPrimaryStorageMsg) {
        handle((DeleteIsoFromPrimaryStorageMsg) msg);
    } else if (msg instanceof AskVolumeSnapshotCapabilityMsg) {
        handle((AskVolumeSnapshotCapabilityMsg) msg);
    } else if (msg instanceof SyncVolumeSizeOnPrimaryStorageMsg) {
        handle((SyncVolumeSizeOnPrimaryStorageMsg) msg);
    } else if (msg instanceof PingPrimaryStorageMsg) {
        handle((PingPrimaryStorageMsg) msg);
    } else if (msg instanceof ChangePrimaryStorageStatusMsg) {
        handle((ChangePrimaryStorageStatusMsg) msg);
    } else if (msg instanceof ReconnectPrimaryStorageMsg) {
        handle((ReconnectPrimaryStorageMsg) msg);
    } else if (msg instanceof RevertVolumeFromSnapshotOnPrimaryStorageMsg) {
        handle((RevertVolumeFromSnapshotOnPrimaryStorageMsg) msg);
    } else if (msg instanceof ReInitRootVolumeFromTemplateOnPrimaryStorageMsg) {
        handle((ReInitRootVolumeFromTemplateOnPrimaryStorageMsg) msg);
    } else if (msg instanceof MergeVolumeSnapshotOnPrimaryStorageMsg) {
        handle((MergeVolumeSnapshotOnPrimaryStorageMsg) msg);
    } else if (msg instanceof DeleteSnapshotOnPrimaryStorageMsg) {
        handle((DeleteSnapshotOnPrimaryStorageMsg) msg);
    } else {
        bus.dealWithUnknownMessage(msg);
    }
}
/** Recomputes this storage's capacity records from volume/snapshot usage. */
protected void handle(RecalculatePrimaryStorageCapacityMsg msg) {
    RecalculatePrimaryStorageCapacityReply reply = new RecalculatePrimaryStorageCapacityReply();
    PrimaryStorageCapacityRecalculator recalculator = new PrimaryStorageCapacityRecalculator();
    recalculator.psUuids = Arrays.asList(msg.getPrimaryStorageUuid());
    recalculator.recalculate();
    bus.reply(msg, reply);
}

/** Internal reconnect request; delegates to {@link #doConnect}. */
protected void handle(ReconnectPrimaryStorageMsg msg) {
    ReconnectPrimaryStorageReply reply = new ReconnectPrimaryStorageReply();
    doConnect(new ConnectParam(), new Completion(msg) {
        @Override
        public void success() {
            bus.reply(msg, reply);
        }

        @Override
        public void fail(ErrorCode errorCode) {
            reply.setError(errorCode);
            bus.reply(msg, reply);
        }
    });
}

// NOTE(review): this replies with a ChangeVolumeSnapshotStatusReply to a
// primary-storage status message — looks like a copy-paste slip; verify
// whether a ChangePrimaryStorageStatusReply exists and is expected by senders.
private void handle(ChangePrimaryStorageStatusMsg msg) {
    changeStatus(PrimaryStorageStatus.valueOf(msg.getStatus()));
    ChangeVolumeSnapshotStatusReply reply = new ChangeVolumeSnapshotStatusReply();
    bus.reply(msg, reply);
}

/**
 * Liveness probe. On success while the storage is Disconnected, a reconnect
 * is kicked off as a side effect; on failure the storage is marked Disconnected.
 */
private void handle(final PingPrimaryStorageMsg msg) {
    final PingPrimaryStorageReply reply = new PingPrimaryStorageReply();

    pingHook(new Completion(msg) {
        @Override
        public void success() {
            if (self.getStatus() == PrimaryStorageStatus.Disconnected) {
                // fire-and-forget reconnect; the ping reply does not wait for it
                doConnect(new ConnectParam(), new NopeCompletion());
            }

            reply.setConnected(true);
            bus.reply(msg, reply);
        }

        @Override
        public void fail(ErrorCode errorCode) {
            changeStatus(PrimaryStorageStatus.Disconnected);
            reply.setConnected(false);
            reply.setError(errorCode);
            bus.reply(msg, reply);
        }
    });
}

/** Common pre-checks (backup storage attached to zone, disabled-state cache check) before the subclass handler. */
private void handleBase(DownloadIsoToPrimaryStorageMsg msg) {
    checkIfBackupStorageAttachedToMyZone(msg.getIsoSpec().getSelectedBackupStorage().getBackupStorageUuid());
    checkImageIfNeedToDownload(msg);
    handle(msg);
}
/**
 * Serialized (re)connect: flips status to Connecting, runs the subclass
 * {@link #connectHook}, then on success triggers a capacity recalculation and
 * marks the storage Connected; on failure marks it Disconnected. In both
 * outcomes the storage is handed to the ping tracker.
 */
private void doConnect(ConnectParam param, final Completion completion) {
    thdf.chainSubmit(new ChainTask(completion) {
        @Override
        public String getSyncSignature() {
            return String.format("reconnect-primary-storage-%s", self.getUuid());
        }

        @Override
        public void run(SyncTaskChain chain) {
            changeStatus(PrimaryStorageStatus.Connecting);

            connectHook(param, new Completion(chain, completion) {
                @Override
                public void success() {
                    // async capacity refresh; connect success does not wait for it
                    RecalculatePrimaryStorageCapacityMsg rmsg = new RecalculatePrimaryStorageCapacityMsg();
                    rmsg.setPrimaryStorageUuid(self.getUuid());
                    bus.makeLocalServiceId(rmsg, PrimaryStorageConstant.SERVICE_ID);
                    bus.send(rmsg);

                    self = dbf.reload(self);
                    changeStatus(PrimaryStorageStatus.Connected);
                    logger.debug(String.format("successfully connected primary storage[uuid:%s]", self.getUuid()));

                    tracker.track(self.getUuid());

                    completion.success();
                    chain.next();
                }

                @Override
                public void fail(ErrorCode errorCode) {
                    // keep tracking even on failure so pings can recover the storage later
                    tracker.track(self.getUuid());
                    self = dbf.reload(self);
                    changeStatus(PrimaryStorageStatus.Disconnected);
                    logger.debug(String.format("failed to connect primary storage[uuid:%s], %s", self.getUuid(), errorCode));
                    completion.fail(errorCode);
                    chain.next();
                }
            });
        }

        @Override
        public String getName() {
            return getSyncSignature();
        }
    });
}
/**
 * Connect request. For a newly-added storage a connect failure is reported as
 * an error (so the add fails); for an existing storage it only reports
 * connected=false.
 */
private void handle(final ConnectPrimaryStorageMsg msg) {
    final ConnectPrimaryStorageReply reply = new ConnectPrimaryStorageReply();

    ConnectParam param = new ConnectParam();
    param.newAdded = msg.isNewAdded();

    doConnect(param, new Completion(msg) {
        @Override
        public void success() {
            reply.setConnected(true);
            bus.reply(msg, reply);
        }

        @Override
        public void fail(ErrorCode errorCode) {
            if (msg.isNewAdded()) {
                reply.setError(errorCode);
            } else {
                reply.setConnected(false);
            }
            bus.reply(msg, reply);
        }
    });
}

/** Zone-attachment pre-check before the subclass handles the download. */
private void handleBase(DownloadDataVolumeToPrimaryStorageMsg msg) {
    checkIfBackupStorageAttachedToMyZone(msg.getBackupStorageRef().getBackupStorageUuid());
    handle(msg);
}

/**
 * Fails the operation unless the given backup storage is attached to the zone
 * this primary storage belongs to.
 *
 * @throws OperationFailureException when the backup storage is not attached
 */
@Transactional(readOnly = true)
private void checkIfBackupStorageAttachedToMyZone(String bsUuid) {
    String sql = "select bs.uuid" +
            " from BackupStorageVO bs, BackupStorageZoneRefVO ref" +
            " where bs.uuid = ref.backupStorageUuid" +
            " and ref.zoneUuid = :zoneUuid" +
            " and bs.uuid = :bsUuid";
    TypedQuery<String> q = dbf.getEntityManager().createQuery(sql, String.class);
    q.setParameter("zoneUuid", self.getZoneUuid());
    q.setParameter("bsUuid", bsUuid);
    if (q.getResultList().isEmpty()) {
        throw new OperationFailureException(operr("backup storage[uuid:%s] is not attached to zone[uuid:%s] the primary storage[uuid:%s] belongs to",
                bsUuid, self.getZoneUuid(), self.getUuid()));
    }
}

/** Zone-attachment pre-check before the subclass creates the template. */
private void handleBase(CreateTemplateFromVolumeOnPrimaryStorageMsg msg) {
    checkIfBackupStorageAttachedToMyZone(msg.getBackupStorageUuid());
    handle(msg);
}
/**
 * Serialized detach of this primary storage from a cluster; runs the
 * extension-point before/after hooks around the subclass {@link #detachHook}.
 */
private void handle(final DetachPrimaryStorageFromClusterMsg msg) {
    final DetachPrimaryStorageFromClusterReply reply = new DetachPrimaryStorageFromClusterReply();
    thdf.chainSubmit(new ChainTask(msg) {
        @Override
        public String getSyncSignature() {
            return getSyncId();
        }

        @Override
        public void run(final SyncTaskChain chain) {
            extpEmitter.beforeDetach(self, msg.getClusterUuid());
            detachHook(msg.getClusterUuid(), new Completion(msg, chain) {
                @Override
                public void success() {
                    self = dbf.reload(self);
                    extpEmitter.afterDetach(self, msg.getClusterUuid());

                    logger.debug(String.format("successfully detached primary storage[name: %s, uuid:%s]",
                            self.getName(), self.getUuid()));
                    bus.reply(msg, reply);
                    chain.next();
                }

                @Override
                public void fail(ErrorCode errorCode) {
                    extpEmitter.failToDetach(self, msg.getClusterUuid());
                    logger.warn(errorCode.toString());
                    reply.setError(errf.instantiateErrorCode(PrimaryStorageErrors.DETACH_ERROR, errorCode));
                    bus.reply(msg, reply);
                    chain.next();
                }
            });
        }

        @Override
        public String getName() {
            return String.format("detach-primary-storage-%s-from-%s", self.getUuid(), msg.getClusterUuid());
        }
    });
}

/** Final deletion callback: fires delete hooks and stops ping tracking. */
private void handle(PrimaryStorageDeletionMsg msg) {
    PrimaryStorageInventory inv = PrimaryStorageInventory.valueOf(self);
    extpEmitter.beforeDelete(inv);
    deleteHook();
    extpEmitter.afterDelete(inv);

    tracker.untrack(self.getUuid());
    PrimaryStorageDeletionReply reply = new PrimaryStorageDeletionReply();
    bus.reply(msg, reply);
}

/**
 * Persists a new physical capacity reading under a pessimistic row lock so
 * concurrent reports cannot interleave.
 */
@Transactional
private void updateCapacity(long total, long avail) {
    PrimaryStorageCapacityVO cvo = dbf.getEntityManager().find(PrimaryStorageCapacityVO.class,
            self.getUuid(), LockModeType.PESSIMISTIC_WRITE);
    DebugUtils.Assert(cvo != null, String.format("how can there is no PrimaryStorageCapacityVO[uuid:%s]", self.getUuid()));

    cvo.setTotalPhysicalCapacity(total);
    cvo.setAvailablePhysicalCapacity(avail);
    dbf.getEntityManager().merge(cvo);
}

/** Agent-reported physical capacity update. */
private void handle(PrimaryStorageReportPhysicalCapacityMsg msg) {
    updateCapacity(msg.getTotalCapacity(), msg.getAvailableCapacity());
    bus.reply(msg, new MessageReply());
}
/** Dispatches API messages to their handlers; unknown messages are reported via the bus. */
protected void handleApiMessage(APIMessage msg) {
    if (msg instanceof APIDeletePrimaryStorageMsg) {
        handle((APIDeletePrimaryStorageMsg) msg);
    } else if (msg instanceof APIChangePrimaryStorageStateMsg) {
        handle((APIChangePrimaryStorageStateMsg) msg);
    } else if (msg instanceof APIAttachPrimaryStorageToClusterMsg) {
        handle((APIAttachPrimaryStorageToClusterMsg) msg);
    } else if (msg instanceof APIDetachPrimaryStorageFromClusterMsg) {
        handle((APIDetachPrimaryStorageFromClusterMsg) msg);
    } else if (msg instanceof APIReconnectPrimaryStorageMsg) {
        handle((APIReconnectPrimaryStorageMsg) msg);
    } else if (msg instanceof APIUpdatePrimaryStorageMsg) {
        handle((APIUpdatePrimaryStorageMsg) msg);
    } else if (msg instanceof APISyncPrimaryStorageCapacityMsg) {
        handle((APISyncPrimaryStorageCapacityMsg) msg);
    } else if (msg instanceof APICleanUpImageCacheOnPrimaryStorageMsg) {
        handle((APICleanUpImageCacheOnPrimaryStorageMsg) msg);
    } else {
        bus.dealWithUnknownMessage(msg);
    }
}

/** Default: image-cache cleanup is unsupported; storage types that support it override this. */
protected void handle(APICleanUpImageCacheOnPrimaryStorageMsg msg) {
    throw new OperationFailureException(operr("operation not supported"));
}
/**
 * Recomputes logical and physical capacity for this primary storage:
 * 1) sums volume usage (scaled by the over-provisioning ratio),
 * 2) sums snapshot usage (physical size, no ratio applied),
 * 3) asks the backend for physical totals,
 * then writes all figures back in one capacity-updater transaction and
 * publishes the API event.
 */
private void handle(final APISyncPrimaryStorageCapacityMsg msg) {
    final APISyncPrimaryStorageCapacityEvent evt = new APISyncPrimaryStorageCapacityEvent(msg.getId());

    FlowChain chain = FlowChainBuilder.newShareFlowChain();
    chain.setName(String.format("sync-capacity-of-primary-storage-%s", self.getUuid()));
    chain.then(new ShareFlow() {
        // results collected across the flows below
        Long volumeUsage;
        Long snapshotUsage;
        Long totalPhysicalSize;
        Long availablePhysicalSize;

        @Override
        public void setup() {
            flow(new NoRollbackFlow() {
                String __name__ = "sync-capacity-used-by-volumes";

                @Override
                public void run(final FlowTrigger trigger, Map data) {
                    // local 'msg' intentionally shadows the API message here
                    VolumeReportPrimaryStorageCapacityUsageMsg msg = new VolumeReportPrimaryStorageCapacityUsageMsg();
                    msg.setPrimaryStorageUuid(self.getUuid());
                    bus.makeLocalServiceId(msg, VolumeConstant.SERVICE_ID);
                    bus.send(msg, new CloudBusCallBack(trigger) {
                        @Override
                        public void run(MessageReply reply) {
                            if (!reply.isSuccess()) {
                                trigger.fail(reply.getError());
                                return;
                            }

                            VolumeReportPrimaryStorageCapacityUsageReply r = reply.castReply();
                            volumeUsage = r.getUsedCapacity();
                            // apply the over-provisioning ratio to volume usage
                            volumeUsage = ratioMgr.calculateByRatio(self.getUuid(), volumeUsage);
                            trigger.next();
                        }
                    });
                }
            });

            flow(new NoRollbackFlow() {
                String __name__ = "sync-capacity-used-by-volume-snapshots";

                @Override
                public void run(final FlowTrigger trigger, Map data) {
                    VolumeSnapshotReportPrimaryStorageCapacityUsageMsg msg = new VolumeSnapshotReportPrimaryStorageCapacityUsageMsg();
                    msg.setPrimaryStorageUuid(self.getUuid());
                    bus.makeLocalServiceId(msg, VolumeSnapshotConstant.SERVICE_ID);
                    bus.send(msg, new CloudBusCallBack(trigger) {
                        @Override
                        public void run(MessageReply reply) {
                            if (!reply.isSuccess()) {
                                trigger.fail(reply.getError());
                                return;
                            }

                            // note: snapshot size is physical size,
                            // don't calculate over-provisioning here
                            VolumeSnapshotReportPrimaryStorageCapacityUsageReply r = reply.castReply();
                            snapshotUsage = r.getUsedSize();
                            trigger.next();
                        }
                    });
                }
            });

            flow(new NoRollbackFlow() {
                String __name__ = "sync-physical-capacity";

                @Override
                public void run(final FlowTrigger trigger, Map data) {
                    syncPhysicalCapacity(new ReturnValueCompletion<PhysicalCapacityUsage>(trigger) {
                        @Override
                        public void success(PhysicalCapacityUsage returnValue) {
                            totalPhysicalSize = returnValue.totalPhysicalSize;
                            availablePhysicalSize = returnValue.availablePhysicalSize;
                            // clamp: a backend may report negative free space
                            availablePhysicalSize = availablePhysicalSize < 0 ? 0 : availablePhysicalSize;
                            trigger.next();
                        }

                        @Override
                        public void fail(ErrorCode errorCode) {
                            trigger.fail(errorCode);
                        }
                    });
                }
            });

            done(new FlowDoneHandler(msg) {
                @Override
                public void handle(Map data) {
                    writeToDb();
                    self = dbf.reload(self);
                    evt.setInventory(getSelfInventory());
                    bus.publish(evt);
                }

                // available logical = total - (ratio-adjusted) volume usage - snapshot usage
                private void writeToDb() {
                    PrimaryStorageCapacityUpdater updater = new PrimaryStorageCapacityUpdater(self.getUuid());
                    updater.run(new PrimaryStorageCapacityUpdaterRunnable() {
                        @Override
                        public PrimaryStorageCapacityVO call(PrimaryStorageCapacityVO cap) {
                            long avail = cap.getTotalCapacity() - volumeUsage - snapshotUsage;
                            cap.setAvailableCapacity(avail);
                            cap.setAvailablePhysicalCapacity(availablePhysicalSize);
                            cap.setTotalPhysicalCapacity(totalPhysicalSize);
                            return cap;
                        }
                    });
                }
            });

            error(new FlowErrorHandler(msg) {
                @Override
                public void handle(ErrorCode errCode, Map data) {
                    evt.setError(errCode);
                    bus.publish(evt);
                }
            });
        }
    }).start();
}
/**
 * Applies name/description changes from the API message to {@code self}.
 * Completes with the modified VO when something changed, or {@code null}
 * when nothing did (so the caller can skip the DB write). Subclasses may
 * override to support additional fields.
 */
protected void updatePrimaryStorage(APIUpdatePrimaryStorageMsg msg, ReturnValueCompletion<PrimaryStorageVO> completion) {
    boolean update = false;
    if (msg.getName() != null) {
        self.setName(msg.getName());
        update = true;
    }
    if (msg.getDescription() != null) {
        self.setDescription(msg.getDescription());
        update = true;
    }
    completion.success(update? self : null);
}

/** API update handler; persists only when {@link #updatePrimaryStorage} reports a change. */
private void handle(APIUpdatePrimaryStorageMsg msg) {
    APIUpdatePrimaryStorageEvent evt = new APIUpdatePrimaryStorageEvent(msg.getId());
    updatePrimaryStorage(msg, new ReturnValueCompletion<PrimaryStorageVO>(msg) {
        @Override
        public void success(PrimaryStorageVO vo) {
            if (vo != null){
                self = dbf.updateAndRefresh(vo);
            }
            evt.setInventory(getSelfInventory());
            bus.publish(evt);
        }

        @Override
        public void fail(ErrorCode errorCode) {
            evt.setError(errorCode);
            bus.publish(evt);
        }
    });
}
/**
 * Transitions the storage's status, persists it, and fires the canonical
 * status-changed event. No-op when the status is unchanged.
 */
protected void changeStatus(PrimaryStorageStatus status) {
    if (status == self.getStatus()) {
        return;
    }

    PrimaryStorageStatus oldStatus = self.getStatus();
    self.setStatus(status);
    self = dbf.updateAndRefresh(self);

    PrimaryStorageStatusChangedData d = new PrimaryStorageStatusChangedData();
    d.setInventory(PrimaryStorageInventory.valueOf(self));
    d.setPrimaryStorageUuid(self.getUuid());
    d.setOldStatus(oldStatus.toString());
    d.setNewStatus(status.toString());
    evtf.fire(PrimaryStorageCanonicalEvent.PRIMARY_STORAGE_STATUS_CHANGED_PATH, d);

    logger.debug(String.format("the primary storage[uuid:%s, name:%s] changed status from %s to %s",
            self.getUuid(), self.getName(), oldStatus, status));
}
/** API reconnect: serialized on this storage's sync queue, then delegates to {@link #doConnect}. */
protected void handle(APIReconnectPrimaryStorageMsg msg) {
    thdf.chainSubmit(new ChainTask(msg) {
        @Override
        public String getSyncSignature() {
            return getSyncId();
        }

        @Override
        public void run(SyncTaskChain chain) {
            final APIReconnectPrimaryStorageEvent evt = new APIReconnectPrimaryStorageEvent(msg.getId());
            doConnect(new ConnectParam(), new Completion(msg, chain) {
                @Override
                public void success() {
                    evt.setInventory(getSelfInventory());
                    bus.publish(evt);
                    chain.next();
                }

                @Override
                public void fail(ErrorCode errorCode) {
                    evt.setError(errorCode);
                    bus.publish(evt);
                    chain.next();
                }
            });
        }

        @Override
        public String getName() {
            return "reconnect-primary-storage";
        }
    });
}
// Don't use a ChainTask for this method; the subsequent
// DetachPrimaryStorageFromClusterMsg is already serialized in the queue.
/**
 * API detach: eagerly removes the PS↔cluster refs so HA/allocation stops
 * seeing the attachment, then runs the detach cascade; on cascade failure
 * the removed refs are re-inserted.
 *
 * Fix: the original rollback restored only {@code refs.get(0)} — it lost any
 * additional refs and threw IndexOutOfBoundsException when the list was
 * empty. All removed refs are now restored.
 */
protected void handle(final APIDetachPrimaryStorageFromClusterMsg msg) {
    final APIDetachPrimaryStorageFromClusterEvent evt = new APIDetachPrimaryStorageFromClusterEvent(msg.getId());

    try {
        extpEmitter.preDetach(self, msg.getClusterUuid());
    } catch (PrimaryStorageException e) {
        throw new OperationFailureException(errf.instantiateErrorCode(PrimaryStorageErrors.DETACH_ERROR, e.getMessage()));
    }

    // remove refs first so HA won't allocate the wrong host; rolled back on API failure
    SimpleQuery<PrimaryStorageClusterRefVO> q = dbf.createQuery(PrimaryStorageClusterRefVO.class);
    q.add(PrimaryStorageClusterRefVO_.clusterUuid, Op.EQ, msg.getClusterUuid());
    q.add(PrimaryStorageClusterRefVO_.primaryStorageUuid, Op.EQ, msg.getPrimaryStorageUuid());
    final List<PrimaryStorageClusterRefVO> refs = q.list();
    dbf.removeCollection(refs, PrimaryStorageClusterRefVO.class);

    String issuer = PrimaryStorageVO.class.getSimpleName();
    List<PrimaryStorageDetachStruct> ctx = new ArrayList<>();
    PrimaryStorageDetachStruct struct = new PrimaryStorageDetachStruct();
    struct.setClusterUuid(msg.getClusterUuid());
    struct.setPrimaryStorageUuid(msg.getPrimaryStorageUuid());
    ctx.add(struct);
    casf.asyncCascade(PrimaryStorageConstant.PRIMARY_STORAGE_DETACH_CODE, issuer, ctx, new Completion(msg) {
        @Override
        public void success() {
            self = dbf.reload(self);
            evt.setInventory(PrimaryStorageInventory.valueOf(self));
            bus.publish(evt);
        }

        @Override
        public void fail(ErrorCode errorCode) {
            // refs were removed above; re-insert every one of them on failure
            for (PrimaryStorageClusterRefVO ref : refs) {
                dbf.updateAndRefresh(ref);
            }
            evt.setError(errorCode);
            bus.publish(evt);
        }
    });
}
/** API attach: serialized on this storage's sync queue, then delegates to {@link #attachCluster}. */
protected void handle(final APIAttachPrimaryStorageToClusterMsg msg) {
    thdf.chainSubmit(new ChainTask(msg) {
        @Override
        public String getSyncSignature() {
            return getSyncId();
        }

        @Override
        public void run(final SyncTaskChain chain) {
            attachCluster(msg, new NoErrorCompletion(msg, chain) {
                @Override
                public void done() {
                    chain.next();
                }
            });
        }

        @Override
        public String getName() {
            return String.format("attach-primary-storage-%s-to-cluster-%s", self.getUuid(), msg.getClusterUuid());
        }
    });
}

/**
 * Attaches this primary storage to a cluster: pre/before hooks, subclass
 * {@link #attachHook}, and on success persists the PS↔cluster ref and
 * publishes the event. Any failure path also publishes the event with an
 * ATTACH_ERROR and always signals {@code completion.done()}.
 */
private void attachCluster(final APIAttachPrimaryStorageToClusterMsg msg, final NoErrorCompletion completion) {
    final APIAttachPrimaryStorageToClusterEvent evt = new APIAttachPrimaryStorageToClusterEvent(msg.getId());
    try {
        extpEmitter.preAttach(self, msg.getClusterUuid());
    } catch (PrimaryStorageException pe) {
        evt.setError(errf.instantiateErrorCode(PrimaryStorageErrors.ATTACH_ERROR, pe.getMessage()));
        bus.publish(evt);
        completion.done();
        return;
    }

    extpEmitter.beforeAttach(self, msg.getClusterUuid());
    attachHook(msg.getClusterUuid(), new Completion(msg, completion) {
        @Override
        public void success() {
            PrimaryStorageClusterRefVO ref = new PrimaryStorageClusterRefVO();
            ref.setClusterUuid(msg.getClusterUuid());
            ref.setPrimaryStorageUuid(self.getUuid());
            dbf.persist(ref);

            self = dbf.reload(self);
            extpEmitter.afterAttach(self, msg.getClusterUuid());

            PrimaryStorageInventory pinv = (PrimaryStorageInventory) invf.valueOf(self);
            evt.setInventory(pinv);
            logger.debug(String.format("successfully attached primary storage[name:%s, uuid:%s]",
                    pinv.getName(), pinv.getUuid()));
            bus.publish(evt);
            completion.done();
        }

        @Override
        public void fail(ErrorCode errorCode) {
            extpEmitter.failToAttach(self, msg.getClusterUuid());
            evt.setError(errf.instantiateErrorCode(PrimaryStorageErrors.ATTACH_ERROR, errorCode));
            bus.publish(evt);
            completion.done();
        }
    });
}
/**
 * Best-effort bulk stop of the given VMs (used when this storage enters
 * maintenance). Failures are only logged; nothing is replied to a caller.
 *
 * Fix: the failure loop used {@code replies.indexOf(r)}, which is accidental
 * O(n^2) over the reply list and maps to the wrong message when two replies
 * compare equal; replies arrive in the same order as the sent messages, so
 * the loop index is used directly.
 *
 * @param vmUuids uuids of the VMs to stop
 */
private void stopAllVms(List<String> vmUuids) {
    final List<StopVmInstanceMsg> msgs = new ArrayList<StopVmInstanceMsg>();
    for (String vmUuid : vmUuids) {
        StopVmInstanceMsg msg = new StopVmInstanceMsg();
        msg.setVmInstanceUuid(vmUuid);
        bus.makeTargetServiceIdByResourceUuid(msg, VmInstanceConstant.SERVICE_ID, vmUuid);
        msgs.add(msg);
    }

    bus.send(msgs, new CloudBusListCallBack(null) {
        @Override
        public void run(List<MessageReply> replies) {
            StringBuilder sb = new StringBuilder();
            boolean success = true;
            // replies are positionally aligned with msgs
            for (int i = 0; i < replies.size(); i++) {
                MessageReply r = replies.get(i);
                if (!r.isSuccess()) {
                    StopVmInstanceMsg msg = msgs.get(i);
                    String err = String.format("\nfailed to stop vm[uuid:%s] on primary storage[uuid:%s], %s",
                            msg.getVmInstanceUuid(), self.getUuid(), r.getError());
                    sb.append(err);
                    success = false;
                }
            }

            if (!success) {
                logger.warn(sb.toString());
            }
        }
    });
}
/**
 * API state change (enable/disable/maintain). Entering maintenance stops all
 * VMs whose root volume lives on this storage (best effort). Extension
 * points are notified before and after the transition.
 */
protected void handle(APIChangePrimaryStorageStateMsg msg) {
    APIChangePrimaryStorageStateEvent evt = new APIChangePrimaryStorageStateEvent(msg.getId());

    PrimaryStorageState currState = self.getState();
    PrimaryStorageStateEvent event = PrimaryStorageStateEvent.valueOf(msg.getStateEvent());
    PrimaryStorageState nextState = AbstractPrimaryStorage.getNextState(currState, event);

    try {
        extpEmitter.preChange(self, event);
    } catch (PrimaryStorageException e) {
        evt.setError(errf.instantiateErrorCode(SysErrors.CHANGE_RESOURCE_STATE_ERROR, e.getMessage()));
        bus.publish(evt);
        return;
    }

    extpEmitter.beforeChange(self, event);
    if (PrimaryStorageStateEvent.maintain == event) {
        logger.warn(String.format("Primary Storage %s will enter maintenance mode, ignore unknown status VMs", msg.getPrimaryStorageUuid()));
        // stop every VM whose root volume resides on this primary storage
        List<String> vmUuids = SQL.New("select vm.uuid from VmInstanceVO vm, VolumeVO vol" +
                " where vol.primaryStorageUuid =:uuid and vol.vmInstanceUuid = vm.uuid and vol.type = :volType", String.class)
                .param("uuid", self.getUuid()).param("volType", VolumeType.Root).list();
        if ( vmUuids.size() != 0 ) {
            stopAllVms(vmUuids);
        }
    }
    changeStateHook(event, nextState);
    self.setState(nextState);
    self = dbf.updateAndRefresh(self);
    extpEmitter.afterChange(self, event, currState);
    evt.setInventory(PrimaryStorageInventory.valueOf(self));
    bus.publish(evt);
}

/**
 * API delete: marks the storage Deleting, then runs the cascade flow —
 * optional permissive check, delete (or force delete), and a final cleanup
 * (see issue #1412: cleanup must run via asyncCascadeFull to avoid leftover
 * VmInstanceEO rows). Publishes the event and the canonical deleted event
 * when the chain finishes.
 */
protected void handle(APIDeletePrimaryStorageMsg msg) {
    final APIDeletePrimaryStorageEvent evt = new APIDeletePrimaryStorageEvent(msg.getId());
    final String issuer = PrimaryStorageVO.class.getSimpleName();
    final List<PrimaryStorageInventory> ctx = PrimaryStorageInventory.valueOf(Arrays.asList(self));
    self.setState(PrimaryStorageState.Deleting);
    self = dbf.updateAndRefresh(self);
    FlowChain chain = FlowChainBuilder.newSimpleFlowChain();
    chain.setName(String.format("delete-primary-storage-%s", msg.getUuid()));
    if (msg.getDeletionMode() == APIDeleteMessage.DeletionMode.Permissive) {
        // permissive mode: run the deletion-check cascade first, then delete
        chain.then(new NoRollbackFlow() {
            @Override
            public void run(final FlowTrigger trigger, Map data) {
                casf.asyncCascade(CascadeConstant.DELETION_CHECK_CODE, issuer, ctx, new Completion(trigger) {
                    @Override
                    public void success() {
                        trigger.next();
                    }

                    @Override
                    public void fail(ErrorCode errorCode) {
                        trigger.fail(errorCode);
                    }
                });
            }
        }).then(new NoRollbackFlow() {
            @Override
            public void run(final FlowTrigger trigger, Map data) {
                casf.asyncCascade(CascadeConstant.DELETION_DELETE_CODE, issuer, ctx, new Completion(trigger) {
                    @Override
                    public void success() {
                        trigger.next();
                    }

                    @Override
                    public void fail(ErrorCode errorCode) {
                        trigger.fail(errorCode);
                    }
                });
            }
        });
    } else {
        // enforcing mode: force delete without the pre-check
        chain.then(new NoRollbackFlow() {
            @Override
            public void run(final FlowTrigger trigger, Map data) {
                casf.asyncCascade(CascadeConstant.DELETION_FORCE_DELETE_CODE, issuer, ctx, new Completion(trigger) {
                    @Override
                    public void success() {
                        trigger.next();
                    }

                    @Override
                    public void fail(ErrorCode errorCode) {
                        trigger.fail(errorCode);
                    }
                });
            }
        });
    }

    // Due to issue #1412, deleting PS asynchronously might leave VmInstanceEO in
    // database. Since eoCleanup() could be called before deleting VmInstanceVO.
    chain.then(new NoRollbackFlow() {
        @Override
        public void run(FlowTrigger trigger, Map data) {
            casf.asyncCascadeFull(CascadeConstant.DELETION_CLEANUP_CODE, issuer, ctx, new Completion(trigger) {
                @Override
                public void success() {
                    trigger.next();
                }

                @Override
                public void fail(ErrorCode errorCode) {
                    trigger.fail(errorCode);
                }
            });
        }
    });

    chain.done(new FlowDoneHandler(msg) {
        @Override
        public void handle(Map data) {
            bus.publish(evt);

            PrimaryStorageDeletedData d = new PrimaryStorageDeletedData();
            d.setPrimaryStorageUuid(self.getUuid());
            d.setInventory(PrimaryStorageInventory.valueOf(self));
            evtf.fire(PrimaryStorageCanonicalEvent.PRIMARY_STORAGE_DELETED_PATH, d);
        }
    }).error(new FlowErrorHandler(msg) {
        @Override
        public void handle(ErrorCode errCode, Map data) {
            evt.setError(errf.instantiateErrorCode(SysErrors.DELETE_RESOURCE_ERROR, errCode));
            bus.publish(evt);
        }
    }).start();
}
/** Returns true when this primary storage is not attached to any cluster. */
public boolean isUnmounted() {
    long count = Q.New(PrimaryStorageClusterRefVO.class)
            .eq(PrimaryStorageClusterRefVO_.primaryStorageUuid, this.self.getUuid()).count();

    return count == 0;
}

/**
 * Validator invoked before attaching a volume to a VM: rejects the attach
 * when the volume's primary storage is in Maintenance.
 */
@VmAttachVolumeValidatorMethod
static void vmAttachVolumeValidator(String vmUuid, String volumeUuid) {
    PrimaryStorageState state = SQL.New("select pri.state from PrimaryStorageVO pri " +
            "where pri.uuid = (select vol.primaryStorageUuid from VolumeVO vol where vol.uuid = :volUuid)", PrimaryStorageState.class)
            .param("volUuid", volumeUuid)
            .find();

    if(state == PrimaryStorageState.Maintenance){
        throw new OperationFailureException(
                operr("cannot attach volume[uuid:%s] whose primary storage is Maintenance", volumeUuid));
    }
}
}
| |
/* Copyright (c) 2001-2010, The HSQL Development Group
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* Neither the name of the HSQL Development Group nor the names of its
* contributors may be used to endorse or promote products derived from this
* software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL HSQL DEVELOPMENT GROUP, HSQLDB.ORG,
* OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.hsqldb.jdbc.pool;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import java.sql.Array;
import java.sql.CallableStatement;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;
import org.hsqldb.lib.IntValueHashMap;
/**
* An <tt>InvocationHandler</tt> that facilitates <tt>Connection</tt> and
* <tt>Statement</tt> pooling.
*
* The primary function is to avoid directly exposing close() and isClosed()
* methods on physical <tt>Connection</tt> and <tt>Statement</tt> objects that
* are participating in a pooling implementation.
*
* The secondary function is to assist the pooling mechanism by providing
* check in notification for <tt>Connection</tt> objects and
* check in / check out notification for derived <tt>Statement</tt> objects.
*
* @author boucherb@users
* @version 1.9.0
* @since 1.9.0
*/
public class WrapperInvocationHandler implements InvocationHandler {

    // First, some helper definitions

    /**
     * Uniquely identifies, by the generating invocation signature, a
     * physical <tt>Statement</tt> object or like collection thereof. <p>
     *
     * Declared {@code static}: instances reference only the generating method
     * and argument array, and they are handed to external
     * {@link StatementPool} implementations.  A non-static inner class would
     * carry a hidden reference to the creating handler, pinning the handler
     * (and its physical connection) for as long as a key lives in a pool.
     */
    public static final class StatementKey {

        // assigned in constructor
        private final Method   method;
        private final Object[] args;

        // derived; lazily computed, 0 acts as the "not yet computed" sentinel
        private int hashCode;

        /**
         * Constructs a new <tt>StatementKey</tt> from the given invocation
         * signature.
         *
         * @param method the invocation's method
         * @param args the invocation's arguments
         */
        StatementKey(Method method, Object[] args) {
            this.method = method;
            this.args   = (args == null) ? null
                                         : (Object[]) args.clone();
        }

        /**
         * Returns the hash code value for this object. <p>
         *
         * This method is supported to allow semantically correct participation
         * as a key in a keyed <tt>Collection</tt> (e.g. a <tt>Map</tt>),
         * <tt>Hashtable</tt> or as an element in a <tt>HashSet</tt>.
         *
         * @return The hash code value for this object
         * @see #equals(java.lang.Object)
         * @see java.lang.Object#hashCode()
         */
        public int hashCode() {

            if (hashCode == 0) {

                // NOTE: if the computed value happens to be 0, it is simply
                //       recomputed on each call; correctness is unaffected.
                int h = method.hashCode();

                if (args != null) {
                    for (int i = args.length - 1; i >= 0; i--) {
                        if (args[i] != null) {
                            h = 31 * h + args[i].hashCode();
                        }
                    }
                }

                hashCode = h;
            }

            return hashCode;
        }

        /**
         * Indicates whether some other object is "equal to" this one. <p>
         *
         * An object is equal to a <tt>StatementKey</tt> if it is the same
         * object or it is a different StatementKey having an equivalent
         * method and argument array.
         *
         * @param obj the reference object with which to compare.
         * @return <code>true</code> if this object is the same as the obj
         *         argument; <code>false</code> otherwise.
         * @see #hashCode()
         * @see java.lang.Object#equals(java.lang.Object)
         */
        public boolean equals(Object obj) {

            if (this == obj) {
                return true;
            } else if (obj instanceof StatementKey) {
                StatementKey other = (StatementKey) obj;

                return (this.method.equals(other.method)
                        && ((this.args == other.args)
                            || Arrays.equals(this.args, other.args)));
            } else {
                return false;
            }
        }

        /**
         * Retrieves the <tt>Method</tt> object with which this key was
         * constructed. <p>
         *
         * @return the <tt>Method</tt> object with which this key was
         *         constructed.
         */
        public Method getMethod() {
            return this.method;
        }

        /**
         * Retrieves a copy of the argument array with which this key was
         * constructed. <p>
         *
         * @return a copy of the argument array with which this key was
         *         constructed
         */
        public Object[] getArgs() {
            return (args == null) ? null
                                  : (Object[]) args.clone();
        }
    }

    /**
     * Interface required to cooperate with a <tt>Statement</tt> pool
     * implementation. <p>
     */
    public interface StatementPool {

        /**
         * Indicates to an underlying pool that the given physical
         * <tt>Statement</tt> object is no longer in use by a
         * surrogate. <p>
         *
         * @param key an object representing the invocation signature with
         *      which the surrogate's invocation handler originally generated
         *      the statement.
         * @param stmt the physical <tt>Statement</tt> object
         */
        public void checkIn(StatementKey key, Statement stmt);

        /**
         * Asks an underlying pool for a physical <tt>Statement</tt> object
         * compatible with the invocation signature represented by the given
         * key. <p>
         *
         * The pool may respond with a <tt>null</tt> value, in which case it is
         * the job of the invocation handler to delegate to the underlying
         * physical <tt>Connection</tt>.
         *
         * @param key an object representing the invocation signature used
         *      to request a statement.
         */
        public Statement checkOut(StatementKey key);

        /**
         * Retrieves whether the given physical <tt>Statement</tt> object is
         * poolable. <p>
         *
         * If it is not, then the invocation handler is free to skip checking
         * the <tt>Statement</tt> back in when its surrogate is closed,
         * closing the <tt>Statement</tt> directly, instead.  On the other
         * hand, a well-written statement pool implementation should correctly
         * handle checkin of non-poolable statements by closing them.
         */
        public boolean isPoolable(Statement stmt);
    }

    /**
     * Interface required to cooperate with a <tt>Connection</tt> pool
     * implementation. <p>
     *
     * A <tt>DataSource</tt> handles checkout of physical connections from an
     * underlying pool and exposes a <tt>WrapperInvocationHandler.ConnectionPool</tt>
     * interface to allow each physical <tt>Connection</tt> object's
     * corresponding <tt>WrapperInvocationHandler</tt> to check it back in to
     * the underlying pool when the surrogate is closed.
     */
    public interface ConnectionPool {

        /**
         * Returns a physical <tt>Connection</tt> to an underlying pool. <p>
         *
         * @param connection The physical connection object being returned to
         *      the pool.
         * @param statementPool The implementation originally provided
         *      by the pooling implementation to facilitate statement reuse
         *      against the given <tt>Connection</tt> object.
         */
        void checkIn(Connection connection, StatementPool statementPool);
    }

    // ----- Static computation of Methods that are sensitive to pooling -------

    public static final int WIH_NO_SURROGATE                          = 0;
    public static final int WIH_CLOSE_SURROGATE                       = 1;
    public static final int WIH_IS_CLOSED_SURROGATE                   = 2;
    public static final int WIH_GET_PARENT_SURROGATE                  = 3;
    public static final int WIH_GET_DATABASEMETADATA_SURROGATE        = 4;
    public static final int WIH_CREATE_OR_PREPARE_STATEMENT_SURROGATE = 5;
    public static final int WIH_GET_RESULTSET_SURROGATE               = 6;
    public static final int WIH_UNWRAP_SURROGATE                      = 7;
    public static final int WIH_GET_ARRAY_SURROGATE                   = 8;

    // Maps pooling-sensitive Method objects to the WIH_* codes above;
    // populated once in the static initializer below.
    protected static final IntValueHashMap methodMap = new IntValueHashMap();

    // ------- Interfaces having methods that are sensitive to pooling ---------

    protected static final Class[] arrayInterface = new Class[]{ Array.class };
    protected static final Class[] connectionInterface = new Class[]{
        Connection.class };
    protected static final Class[] callableStatementInterface = new Class[]{
        CallableStatement.class };
    protected static final Class[] databaseMetaDataInterface = new Class[]{
        DatabaseMetaData.class };

    //protected static final Class[] parameterMetaDataInterface
    //    = new Class[]{ParameterMetaData.class};
    protected static final Class[] preparedStatementInterface = new Class[]{
        PreparedStatement.class };

    //protected static final Class[] resultSetMetaDataInterface
    //    = new Class[]{ResultSetMetaData.class};
    protected static final Class[] resultSetInterface = new Class[]{
        ResultSet.class };
    protected static final Class[] statementInterface = new Class[]{
        Statement.class };

    // ------------------ Static Initialization Helper Methods -----------------

    /**
     * Simple test used only during static initialization.
     *
     * @param clazz reflecting the given public member method
     * @param method to test
     * @return true if close() method of poolable class
     */
    protected static boolean _isCloseSurrogateMethod(final Class clazz,
            final Method method) {

        return ((Connection.class.isAssignableFrom(
            clazz) || Statement.class.isAssignableFrom(
            clazz)) && "close".equals(method.getName()));
    }

    /**
     * Simple test used only during static initialization.
     *
     * @param clazz reflecting the given public member method
     * @param method to test
     * @return true if isClosed() method of poolable class
     */
    protected static boolean _isIsClosedSurrogateMethod(final Class clazz,
            final Method method) {

        return ((Connection.class.isAssignableFrom(
            clazz) || Statement.class.isAssignableFrom(
            clazz)) && "isClosed".equals(method.getName()));
    }

//    /**
//     *
//     * Simple test used only during static initialization.
//     *
//     * @param clazz reflecting the given public member method
//     * @param method to test
//     * @return true if isWrapperFor() method of class exposes pooling-senstive
//     *      behavior
//     */
//    protected static boolean isIsWrapperForMethod(final Method method) {
//        return "isWrapperFor".equals(method.getName());
//    }

    /**
     * Simple test used only during static initialization.
     *
     * @param method to test
     * @return true if method is an unwrap() method
     */
    protected static boolean _isUnwrapMethod(final Method method) {
        return "unwrap".equals(method.getName());
    }

    // ------------------------ Static Initialization --------------------------

    static {
        Class[] poolingSensitiveInterfaces = new Class[] {
            java.sql.Array.class, java.sql.CallableStatement.class,
            java.sql.Connection.class, java.sql.DatabaseMetaData.class,

            // unlikely to expose raw delegate of interest
            //java.sql.ParameterMetaData.class,
            java.sql.PreparedStatement.class, java.sql.ResultSet.class,

            // unlikely to expose raw delegate of interest
            //java.sql.ResultSetMetaData.class,
            java.sql.Statement.class
        };

        // Classify every public method of every pooling-sensitive interface
        // exactly once; invoke(...) then dispatches on the resulting code.
        for (int i = 0; i < poolingSensitiveInterfaces.length; i++) {
            Class    clazz   = poolingSensitiveInterfaces[i];
            Method[] methods = clazz.getMethods();

            for (int j = 0; j < methods.length; j++) {
                Method method     = methods[j];
                Class  returnType = method.getReturnType();

                if (_isCloseSurrogateMethod(clazz, method)) {
                    methodMap.put(method, WIH_CLOSE_SURROGATE);
                } else if (_isIsClosedSurrogateMethod(clazz, method)) {
                    methodMap.put(method, WIH_IS_CLOSED_SURROGATE);
                } else if (Array.class.isAssignableFrom(returnType)) {
                    methodMap.put(method, WIH_GET_ARRAY_SURROGATE);
                } else if (Connection.class.isAssignableFrom(returnType)) {
                    methodMap.put(method, WIH_GET_PARENT_SURROGATE);
                } else if (Statement.class.isAssignableFrom(returnType)) {
                    String methodName = method.getName();

                    if (methodName.startsWith("create")
                            || methodName.startsWith("prepare")) {
                        methodMap.put(
                            method, WIH_CREATE_OR_PREPARE_STATEMENT_SURROGATE);
                    } else {
                        methodMap.put(method, WIH_GET_PARENT_SURROGATE);
                    }
                } else if (ResultSet.class.isAssignableFrom(returnType)) {
                    methodMap.put(method, WIH_GET_RESULTSET_SURROGATE);
                } else if (DatabaseMetaData.class.isAssignableFrom(
                        returnType)) {
                    methodMap.put(method, WIH_GET_DATABASEMETADATA_SURROGATE);

                //} else if (ParameterMetaData.class.
                //        isAssignableFrom(returnType)) {
                // *************************************************************
                //} else if (ResultSetMetaData.class.
                //         isAssignableFrom(returnType)) {
                // *************************************************************
                //} else if (isIsWrapperForMethod(method)) {
                // *************************************************************
                } else if (_isUnwrapMethod(method)) {
                    methodMap.put(method, WIH_UNWRAP_SURROGATE);
                }
            }
        }
    }

    // ----------------------- Construction Utility Method ---------------------

    /**
     * Given a delegate, retrieves the interface that must be implemented by a
     * surrogate dynamic proxy to ensure pooling sensitive methods
     * of the delegate are not exposed directly to clients.
     *
     * @param delegate the target delegate of interest
     * @return the interface that must be implemented by a surrogate dynamic
     *      proxy to ensure pooling sensitive methods of the delegate are
     *      not exposed directly to clients
     */
    protected static Class[] _computeProxiedInterface(Object delegate) {

        // NOTE:  Order is important for XXXStatement:
        //        CallableStatement extends PreparedStatement extends
        //        Statement, so the most derived interface must be tested
        //        first.
        if (delegate instanceof Array) {
            return arrayInterface;
        } else if (delegate instanceof Connection) {
            return connectionInterface;
        } else if (delegate instanceof CallableStatement) {
            return callableStatementInterface;
        } else if (delegate instanceof DatabaseMetaData) {
            return databaseMetaDataInterface;
        } else if (delegate instanceof PreparedStatement) {
            return preparedStatementInterface;
        } else if (delegate instanceof ResultSet) {
            return resultSetInterface;
        } else if (delegate instanceof Statement) {
            return statementInterface;
        } else {
            return null;
        }
    }

    /**
     * Retrieves a numeric classification of the surrogate method type
     * corresponding to the given delegate method.
     *
     * @param method the method to test
     * @return the numeric classification
     */
    protected static int _computeSurrogateType(Method method) {
        return methodMap.get(method, WIH_NO_SURROGATE);
    }

    // ---------------------------- Instance Fields ----------------------------

    // set in constructor
    private Object                   delegate;
    private Object                   surrogate;
    private WrapperInvocationHandler parentHandler;
    private ConnectionPool           connectionPool;
    private StatementPool            statementPool;

    // derived
    private WrapperInvocationHandler dbmdHandler;
    private boolean                  surrogateClosed;
    private StatementKey             statementKey;
    private Set                      resultSets; //ResultSet invocation handlers
    private Set                      statements; //Statement invocation handlers

    // ------------------------------ Constructors ---------------------------------

    /**
     * Constructs a new invocation handler for the given <tt>Connection</tt>.
     *
     * @param connection the <tt>Connection</tt> for which to construct an
     *      invocation handler
     * @param connectionPool interface to an external connection pool; may be null
     * @param statementPool interface to an external statement pool; may be null
     * @throws java.lang.IllegalArgumentException if connection is null
     */
    public WrapperInvocationHandler(Connection connection,
                                    ConnectionPool connectionPool,
                                    StatementPool statementPool)
                                    throws IllegalArgumentException {

        this(connection, null);

        this.connectionPool = connectionPool;
        this.statementPool  = statementPool;
    }

    /**
     * Constructs a new invocation handler for the given delegate and
     * parent invocation handler. <p>
     *
     * @param delegate the delegate for which to construct the invocation handler
     * @param parent the invocation handler of the delegate's parent; may be null.
     * @throws IllegalArgumentException if delegate is null; or if its proxied
     *      interface cannot be determined; or if any of the restrictions on the
     *      parameters that may be passed to <code>Proxy.newProxyInstance</code>
     *      are violated
     */
    public WrapperInvocationHandler(Object delegate,
                                    WrapperInvocationHandler parent)
                                    throws IllegalArgumentException {

        if (delegate == null) {
            throw new IllegalArgumentException("delegate: null");
        }

        Class[] proxiedInterface = _computeProxiedInterface(delegate);

        if (proxiedInterface == null) {
            throw new IllegalArgumentException("delegate: " + delegate);
        }

        this.delegate      = delegate;
        this.parentHandler = parent;
        this.surrogate =
            Proxy.newProxyInstance(proxiedInterface[0].getClassLoader(),
                                   proxiedInterface, this);
    }

    // ----------------------- Interface Implementation ------------------------

    /**
     * Processes a method invocation on a proxy instance and returns
     * the result.  This method will be invoked on an invocation handler
     * when a method is invoked on a proxy instance that it is
     * associated with.
     *
     * @param proxy the proxy instance that the method was invoked on
     *
     * @param method the <code>Method</code> instance corresponding to
     * the interface method invoked on the proxy instance.  The declaring
     * class of the <code>Method</code> object will be the interface that
     * the method was declared in, which may be a superinterface of the
     * proxy interface that the proxy class inherits the method through.
     *
     * @param args an array of objects containing the values of the
     * arguments passed in the method invocation on the proxy instance,
     * or <code>null</code> if interface method takes no arguments.
     * Arguments of primitive types are wrapped in instances of the
     * appropriate primitive wrapper class, such as
     * <code>java.lang.Integer</code> or <code>java.lang.Boolean</code>.
     *
     * @return the value to return from the method invocation on the
     * proxy instance.  If the declared return type of the interface
     * method is a primitive type, then the value returned by
     * this method must be an instance of the corresponding primitive
     * wrapper class; otherwise, it must be a type assignable to the
     * declared return type.  If the value returned by this method is
     * <code>null</code> and the interface method's return type is
     * primitive, then a <code>NullPointerException</code> will be
     * thrown by the method invocation on the proxy instance.  If the
     * value returned by this method is otherwise not compatible with
     * the interface method's declared return type as described above,
     * a <code>ClassCastException</code> will be thrown by the method
     * invocation on the proxy instance.
     *
     * @throws Throwable the exception to throw from the method
     * invocation on the proxy instance.  The exception's type must be
     * assignable either to any of the exception types declared in the
     * <code>throws</code> clause of the interface method or to the
     * unchecked exception types <code>java.lang.RuntimeException</code>
     * or <code>java.lang.Error</code>.  If a checked exception is
     * thrown by this method that is not assignable to any of the
     * exception types declared in the <code>throws</code> clause of
     * the interface method, then undeclared {@link Throwable} containing
     * the exception that was thrown by this method will be thrown by the
     * method invocation on the proxy instance.
     *
     * @see Throwable
     */

    /**
     * @todo: - Synchronization can be made more granular if performance suffers.
     *        - Requires some private lock objects and synchronized blocks in
     *          certain methods.
     *        - This was the obvious and easy synchronization point to pick
     *          initially for prototyping purposes
     */
    public synchronized Object invoke(final Object proxy, final Method method,
                                      final Object[] args) throws Throwable {

        Object result;

        switch (_computeSurrogateType(method)) {

            case WIH_CLOSE_SURROGATE : {
                closeSurrogate();

                result = null;

                break;
            }
            case WIH_IS_CLOSED_SURROGATE : {
                result = isClosedSurrogate() ? Boolean.TRUE
                                             : Boolean.FALSE;

                break;
            }
            case WIH_GET_PARENT_SURROGATE : {
                checkSurrogateClosed();

                result = getParentSurrogate(method, args);

                break;
            }
            case WIH_GET_DATABASEMETADATA_SURROGATE : {
                checkSurrogateClosed();

                result = getDatabaseMetaDataSurrogate(method, args);

                break;
            }
            case WIH_CREATE_OR_PREPARE_STATEMENT_SURROGATE : {
                checkSurrogateClosed();

                result = getCreatedOrPreparedStatementSurrogate(method, args);

                break;
            }
            case WIH_GET_RESULTSET_SURROGATE : {
                checkSurrogateClosed();

                result = getResultSetSurrogate(method, args);

                break;
            }
            case WIH_UNWRAP_SURROGATE : {
                checkSurrogateClosed();

                result = unwrapSurrogate(method, args);

                break;
            }
            case WIH_GET_ARRAY_SURROGATE : {
                checkSurrogateClosed();

                result = getArraySurrogate(method, args);

                break;
            }
            case WIH_NO_SURROGATE :
            default : {
                checkSurrogateClosed();

                result = method.invoke(delegate, args);

                break;
            }
        }

        return result;
    }

    // --------------------- java.lang.Object overrides ------------------------

    /**
     * Ensures the identity hash code is returned for all descendents
     *
     * @return the identity hashCode for this object.
     */
    public final int hashCode() {
        return System.identityHashCode(this);
    }

    /**
     * Ensures identity equality semantics are preserved for all descendents.
     *
     * @param o the object with which to compare
     * @return true if (this == o); else false
     */
    public final boolean equals(Object o) {
        return (this == o);
    }

    // -------------------------- Internal Implementation ----------------------

    /**
     * Checks if the surrogate is closed.
     *
     * @throws java.lang.Throwable if the surrogate is closed.
     */
    protected void checkSurrogateClosed() throws Throwable {

        if (isClosedSurrogate()) {
            throw new SQLException("Surrogate Closed.");    // TODO: better msg
        }
    }

    /**
     * Effectively closes the surrogate, possibly doing work toward
     * enabling reuse of the delegate. <p>
     *
     * Child surrogates (result sets, statements, database metadata) are
     * closed first, then the delegate itself is either closed or checked
     * back in to its pool; finally, all cached references are released so
     * a closed handler retains nothing.
     *
     * @throws java.lang.Throwable if an access error occurs during work to
     *      enable reuse of the delegate
     */
    protected void closeSurrogate() throws Throwable {

        if (this.surrogateClosed) {
            return;
        }

        if (this.resultSets != null) {
            Iterator it = this.resultSets.iterator();

            // Changed to set of ResultSet invocation handlers so
            // that handler resources can be cleaned up too.
            while (it.hasNext()) {
                WrapperInvocationHandler handler =
                    (WrapperInvocationHandler) it.next();

                try {
                    ((ResultSet) handler.delegate).close();
                } catch (Exception ex) {}

                try {
                    handler.closeSurrogate();
                } catch (Exception e) {}
            }
        }

        if (this.statements != null) {
            Iterator it = this.statements.iterator();

            while (it.hasNext()) {
                WrapperInvocationHandler handler =
                    (WrapperInvocationHandler) it.next();

                try {
                    handler.closeSurrogate();
                } catch (Exception e) {}
            }
        }

        if (this.dbmdHandler != null) {
            try {
                this.dbmdHandler.closeSurrogate();
            } catch (Throwable ex) {}
        }

        Object delegate = this.delegate;

        try {
            if (delegate instanceof Connection) {
                closeConnectionSurrogate();
            } else if (delegate instanceof Statement) {
                closeStatementSurrogate();
            }
        } finally {

            // Release the child handler sets as well as the scalar fields;
            // previously resultSets and statements were left populated,
            // retaining every child handler after close.
            this.delegate        = null;
            this.surrogate       = null;
            this.dbmdHandler     = null;
            this.parentHandler   = null;
            this.statementKey    = null;
            this.statementPool   = null;
            this.connectionPool  = null;
            this.resultSets      = null;
            this.statements      = null;
            this.surrogateClosed = true;
        }
    }

    /**
     * Does work toward enabling reuse of the delegate,
     * when it is a Connection.
     *
     * @throws java.lang.Throwable the exception, if any, thrown by
     *      returning the delegate connection to the ConnectionPool
     *      designated at construction of the connection's
     *      invocation handler.
     */
    protected void closeConnectionSurrogate() throws Throwable {

        ConnectionPool connectionPool = this.connectionPool;

        if (connectionPool == null) {

            // CHECKME:  policy?
            // pool has "disappeared" or was never provided (why?): should
            // "really" close the connection since it will not be reused.
            Connection connection = (Connection) this.delegate;

            try {
                connection.close();
            } catch (SQLException ex) {}
        } else {
            Connection    connection    = (Connection) this.delegate;
            StatementPool statementPool = this.statementPool;

            connectionPool.checkIn(connection, statementPool);
        }
    }

    /**
     * Does work toward enabling reuse of the delegate,
     * when it is an instance of <tt>Statement</tt>.
     *
     * @throws java.lang.Throwable the exception, if any, thrown by
     *      returning the delegate statement to the StatementPool
     *      designated at construction of the statement's parent connection
     *      invocation handler.
     */
    protected void closeStatementSurrogate() throws Throwable {

        Statement    stmt = (Statement) this.delegate;
        StatementKey key  = this.statementKey;
        StatementPool statementPool = (this.parentHandler == null) ? null
                                                                   : this.parentHandler
                                                                       .statementPool;

        // Only statements that were generated under a key and that the pool
        // deems poolable are checked back in; everything else is closed.
        if (key == null || statementPool == null
                || !statementPool.isPoolable(stmt)) {
            try {
                stmt.close();
            } catch (Exception ex) {}
        } else {
            statementPool.checkIn(key, stmt);
        }
    }

    /**
     * Retrieves whether the surrogate is closed. <p>
     *
     * @return true if surrogate is closed; else false
     * @throws java.lang.Throwable if closing this surrogate in response to
     *      a closed parent fails
     */
    protected boolean isClosedSurrogate() throws Throwable {

        if (this.surrogateClosed) {
            return true;
        }

        // This part is overkill now, but does not introduce
        // incorrect operation.
        //
        /**
         * @todo:  Special handling to check the parent is still desirable
         *         for Array surrogates (and any other proxied delegate
         *         that has a parent whose valid lifetime is at most the
         *         life of the parent and does not expose a public close()
         *         method through the JDBC API
         */
        WrapperInvocationHandler parent = this.parentHandler;

        if (parent != null && parent.isClosedSurrogate()) {
            closeSurrogate();
        }

        return this.surrogateClosed;
    }

    /**
     * Retrieves a surrogate for the parent <tt>Statement</tt> or
     * <tt>Connection</tt> object of this handler's delegate.
     *
     * @param method that retrieves the delegate's parent object
     * @param args required for method invocation
     * @throws java.lang.Throwable the exception, if any, thrown by invoking
     *      the given method with the given arguments upon the delegate
     * @return surrogate for the underlying parent object
     */
    protected Object getParentSurrogate(final Method method,
                                        final Object[] args) throws Throwable {

        WrapperInvocationHandler parent = this.parentHandler;

        return (parent == null) ? null
                                : parent.surrogate;
    }

    /**
     * Surrogate for any method of the delegate that returns
     * an instance of <tt>DatabaseMetaData</tt> object. <p>
     *
     * @param method returning <tt>DatabaseMetaData</tt>
     * @param args to the method
     * @throws java.lang.Throwable the exception, if any, thrown by invoking
     *      the given method with the given arguments upon the delegate
     * @return surrogate for the underlying DatabaseMetaData object
     */
    protected Object getDatabaseMetaDataSurrogate(final Method method,
            final Object[] args) throws Throwable {

        // Lazily created and cached: one metadata handler per connection.
        if (this.dbmdHandler == null) {
            Object dbmd = method.invoke(this.delegate, args);

            this.dbmdHandler = new WrapperInvocationHandler(dbmd, this);
        }

        return this.dbmdHandler.surrogate;
    }

    /**
     * Surrogate for any method of the delegate that returns an instance of
     * <tt>Statement</tt>. <p>
     *
     * @param method returning instance of <tt>Statement</tt>
     * @param args to the method
     * @throws java.lang.Throwable the exception, if any, thrown by invoking
     *      the given method with the given arguments upon the delegate
     * @return surrogate for the delegate Statement object
     */
    protected Object getCreatedOrPreparedStatementSurrogate(
            final Method method, final Object[] args) throws Throwable {

        WrapperInvocationHandler handler;
        Object                   stmt = null;
        StatementKey             key  = new StatementKey(method, args);
        StatementPool            pool = this.statementPool;

        // Prefer a pooled statement; fall back to the physical connection.
        if (pool != null) {
            stmt = pool.checkOut(key);
        }

        if (stmt == null) {
            stmt = method.invoke(this.delegate, args);
        }

        handler              = new WrapperInvocationHandler(stmt, this);
        handler.statementKey = key;

        if (this.statements == null) {
            this.statements = new HashSet();
        }

        statements.add(handler);

        return handler.surrogate;
    }

    /**
     * Surrogate for any method of the delegate that returns an
     * instance of <tt>ResultSet</tt>. <p>
     *
     * @param method returning a <tt>ResultSet</tt>
     * @param args to the method
     * @throws java.lang.Throwable the exception, if any, thrown by invoking
     *      the given method with the given arguments upon the delegate
     * @return surrogate for the underlying ResultSet object
     */
    protected Object getResultSetSurrogate(final Method method,
                                           final Object[] args)
                                           throws Throwable {

        Object rs = method.invoke(this.delegate, args);
        WrapperInvocationHandler handler = new WrapperInvocationHandler(rs,
            this);

        if (resultSets == null) {
            resultSets = new HashSet();
        }

        // Changed to set of ResultSet invocation handlers so
        // that handler resources can be cleaned up too.
        resultSets.add(handler);

        return handler.surrogate;
    }

    /**
     * Surrogate for the delegate's unwrap(...) method. <p>
     *
     * If invocation of the method returns the delegate itself, then
     * the delegate's surrogate is returned instead.
     *
     * @param method the unwrap method
     * @param args the argument(s) to the unwrap method
     * @throws java.lang.Throwable the exception, if any, thrown by invoking
     *      the given method with the given arguments upon the delegate
     * @return proxy if the method returns the delegate itself; else the actual
     *      result of invoking the method upon the delegate.
     */
    protected Object unwrapSurrogate(final Method method,
                                     final Object[] args) throws Throwable {

        Object result = method.invoke(this.delegate, args);

        return (result == this.delegate) ? this.surrogate
                                         : result;
    }

    /**
     * Surrogate for any method of the delegate that returns an
     * instance of <tt>Array</tt>. <p>
     *
     * @param method returning an <tt>Array</tt>
     * @param args to the method
     * @throws java.lang.Throwable the exception, if any, thrown by invoking
     *      the given method with the given arguments upon the delegate
     * @return surrogate for the underlying Array object
     */
    protected Object getArraySurrogate(final Method method,
                                       final Object[] args)
                                       throws java.lang.Throwable {

        Object array = method.invoke(this.delegate, args);
        WrapperInvocationHandler handler = new WrapperInvocationHandler(array,
            this);

        return handler.surrogate;
    }
}
| |
package eu.dnetlib.iis.wf.report.pushgateway.process;
import eu.dnetlib.iis.common.java.PortBindings;
import eu.dnetlib.iis.common.schemas.ReportEntry;
import io.prometheus.client.CollectorRegistry;
import io.prometheus.client.Gauge;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import static org.mockito.Mockito.*;
@ExtendWith(MockitoExtension.class)
public class PushMetricsProcessTest {
@Mock
private PushMetricsProcess.MetricPusherCreatorProducer metricPusherCreatorProducer;
@Mock
private PushMetricsProcess.MetricPusherProducer metricPusherProducer;
@Mock
private PushMetricsProcess.FileSystemProducer fileSystemProducer;
@Mock
private PushMetricsProcess.ReportLocationsFinder reportLocationsFinder;
@Mock
private PushMetricsProcess.LabeledMetricConfByPatternProducer labeledMetricConfByPatternProducer;
@Mock
private PushMetricsProcess.ReportEntryReader reportEntryReader;
@Mock
private PushMetricsProcess.ReportEntryConverter reportEntryConverter;
@Mock
private PushMetricsProcess.GaugesRegistrar gaugesRegistrar;
@Mock
private PushMetricsProcess.GroupingKeyProducer groupingKeyProducer;
@InjectMocks
private PushMetricsProcess pushMetricsProcess = new PushMetricsProcess();
@Test
public void runShouldDoNothingWhenMetricPusherCreatorCreationFails() {
// given
PortBindings portBindings = mock(PortBindings.class);
Configuration conf = mock(Configuration.class);
Map<String, String> parameters = Collections.singletonMap("reportsDir", "/path/to/report");
when(metricPusherCreatorProducer.create(anyMap())).thenReturn(Optional.empty());
// when
pushMetricsProcess.run(portBindings, conf, parameters);
// then
verify(metricPusherCreatorProducer, times(1)).create(parameters);
}
@Test
public void runShouldDoNothingWhenMetricPusherCreationFails() {
// given
PortBindings portBindings = mock(PortBindings.class);
Configuration conf = mock(Configuration.class);
Map<String, String> parameters = Collections.singletonMap("reportsDirPath", "/path/to/report");
MetricPusherCreator metricPusherCreator = mock(MetricPusherCreator.class);
when(metricPusherCreatorProducer.create(parameters)).thenReturn(Optional.of(metricPusherCreator));
when(metricPusherProducer.create(any(MetricPusherCreator.class), anyMap())).thenReturn(Optional.empty());
// when
pushMetricsProcess.run(portBindings, conf, parameters);
// then
verify(metricPusherCreatorProducer, times(1)).create(parameters);
verify(metricPusherProducer, times(1)).create(metricPusherCreator, parameters);
}
@Test
public void runShouldDoNothingWhenFileSystemCreationFails() {
// given
PortBindings portBindings = new PortBindings(Collections.emptyMap(), Collections.emptyMap());
Configuration conf = new Configuration();
Map<String, String> parameters = Collections.singletonMap("reportsDirPath", "/path/to/report");
MetricPusherCreator metricPusherCreator = mock(MetricPusherCreator.class);
when(metricPusherCreatorProducer.create(parameters)).thenReturn(Optional.of(metricPusherCreator));
MetricPusher metricPusher = mock(MetricPusher.class);
when(metricPusherProducer.create(metricPusherCreator, parameters)).thenReturn(Optional.of(metricPusher));
when(fileSystemProducer.create(any(Configuration.class))).thenReturn(Optional.empty());
// when
pushMetricsProcess.run(portBindings, conf, parameters);
// then
verify(metricPusherCreatorProducer, times(1)).create(parameters);
verify(metricPusherProducer, times(1)).create(metricPusherCreator, parameters);
verify(fileSystemProducer, times(1)).create(conf);
verify(metricPusher, never()).pushSafe(any(), any(), any());
}
// Failure point: reportLocationsFinder returns Optional.empty() — the run
// must stop after the finder and never push any metrics.
@Test
public void runShouldDoNothingWhenReportLocationsFinderFails() {
// given: creator, pusher and file system are all available...
PortBindings portBindings = new PortBindings(Collections.emptyMap(), Collections.emptyMap());
Configuration conf = new Configuration();
Map<String, String> parameters = Collections.singletonMap("reportsDirPath", "/path/to/report");
MetricPusherCreator metricPusherCreator = mock(MetricPusherCreator.class);
when(metricPusherCreatorProducer.create(parameters)).thenReturn(Optional.of(metricPusherCreator));
MetricPusher metricPusher = mock(MetricPusher.class);
when(metricPusherProducer.create(metricPusherCreator, parameters)).thenReturn(Optional.of(metricPusher));
FileSystem fs = mock(FileSystem.class);
when(fileSystemProducer.create(conf)).thenReturn(Optional.of(fs));
// ...but no report locations can be found
when(reportLocationsFinder.find(anyMap())).thenReturn(Optional.empty());
// when
pushMetricsProcess.run(portBindings, conf, parameters);
// then: every stage up to and including the finder ran exactly once
verify(metricPusherCreatorProducer, times(1)).create(parameters);
verify(metricPusherProducer, times(1)).create(metricPusherCreator, parameters);
verify(fileSystemProducer, times(1)).create(conf);
verify(reportLocationsFinder, times(1)).find(parameters);
verify(metricPusher, never()).pushSafe(any(), any(), any());
}
// Failure point: labeledMetricConfByPatternProducer returns Optional.empty()
// — the run must stop after producing the metric configuration and push nothing.
@Test
public void runShouldDoNothingWhenLabelNamesByMetricNameCreatorFails() {
// given: everything up to the report locations succeeds...
PortBindings portBindings = new PortBindings(Collections.emptyMap(), Collections.emptyMap());
Configuration conf = new Configuration();
Map<String, String> parameters = Collections.singletonMap("reportsDirPath", "/path/to/report");
MetricPusherCreator metricPusherCreator = mock(MetricPusherCreator.class);
when(metricPusherCreatorProducer.create(parameters)).thenReturn(Optional.of(metricPusherCreator));
MetricPusher metricPusher = mock(MetricPusher.class);
when(metricPusherProducer.create(metricPusherCreator, parameters)).thenReturn(Optional.of(metricPusher));
FileSystem fs = mock(FileSystem.class);
when(fileSystemProducer.create(conf)).thenReturn(Optional.of(fs));
when(reportLocationsFinder.find(parameters)).thenReturn(Optional.of(Collections.singletonList("/path/to/report")));
// ...but the labeled-metric configuration cannot be created
when(labeledMetricConfByPatternProducer.create(anyMap())).thenReturn(Optional.empty());
// when
pushMetricsProcess.run(portBindings, conf, parameters);
// then: each stage up to the conf producer ran exactly once; nothing pushed
verify(metricPusherCreatorProducer, times(1)).create(parameters);
verify(metricPusherProducer, times(1)).create(metricPusherCreator, parameters);
verify(fileSystemProducer, times(1)).create(conf);
verify(reportLocationsFinder, times(1)).find(parameters);
verify(labeledMetricConfByPatternProducer, times(1)).create(parameters);
verify(metricPusher, never()).pushSafe(any(), any(), any());
}
// Failure point: reportEntryReader returns Optional.empty() for the report
// file — the run must stop after the read attempt and push nothing.
@Test
public void runShouldDoNothingWhenReportReadFails() {
// given: pipeline is fully wired down to the report location...
PortBindings portBindings = new PortBindings(Collections.emptyMap(), Collections.emptyMap());
Configuration conf = new Configuration();
Map<String, String> parameters = Collections.singletonMap("reportsDirPath", "/path/to/report");
MetricPusherCreator metricPusherCreator = mock(MetricPusherCreator.class);
when(metricPusherCreatorProducer.create(parameters)).thenReturn(Optional.of(metricPusherCreator));
MetricPusher metricPusher = mock(MetricPusher.class);
when(metricPusherProducer.create(metricPusherCreator, parameters)).thenReturn(Optional.of(metricPusher));
FileSystem fs = mock(FileSystem.class);
when(fileSystemProducer.create(conf)).thenReturn(Optional.of(fs));
when(reportLocationsFinder.find(parameters)).thenReturn(Optional.of(Collections.singletonList("/path/to/report")));
when(labeledMetricConfByPatternProducer.create(parameters)).thenReturn(Optional.of(Collections.emptyMap()));
// ...but the report cannot be read
when(reportEntryReader.read(any(FileSystem.class), any(Path.class))).thenReturn(Optional.empty());
// when
pushMetricsProcess.run(portBindings, conf, parameters);
// then: the reader was invoked once for the found location; nothing pushed
verify(metricPusherCreatorProducer, times(1)).create(parameters);
verify(metricPusherProducer, times(1)).create(metricPusherCreator, parameters);
verify(fileSystemProducer, times(1)).create(conf);
verify(reportLocationsFinder, times(1)).find(parameters);
verify(labeledMetricConfByPatternProducer, times(1)).create(parameters);
verify(reportEntryReader, times(1)).read(fs, new Path("/path/to/report"));
verify(metricPusher, never()).pushSafe(any(), any(), any());
}
// Failure point: reportEntryConverter returns Optional.empty() — the run must
// stop after attempting conversion of the read entries and push nothing.
@Test
public void runShouldDoNothingWhenReportConversionFails() {
// given: report entries are read successfully...
PortBindings portBindings = new PortBindings(Collections.emptyMap(), Collections.emptyMap());
Configuration conf = new Configuration();
Map<String, String> parameters = Collections.singletonMap("reportsDirPath", "/path/to/report");
MetricPusherCreator metricPusherCreator = mock(MetricPusherCreator.class);
when(metricPusherCreatorProducer.create(parameters)).thenReturn(Optional.of(metricPusherCreator));
MetricPusher metricPusher = mock(MetricPusher.class);
when(metricPusherProducer.create(metricPusherCreator, parameters)).thenReturn(Optional.of(metricPusher));
FileSystem fs = mock(FileSystem.class);
when(fileSystemProducer.create(conf)).thenReturn(Optional.of(fs));
when(reportLocationsFinder.find(parameters)).thenReturn(Optional.of(Collections.singletonList("/path/to/report")));
when(labeledMetricConfByPatternProducer.create(parameters)).thenReturn(Optional.of(Collections.emptyMap()));
List<ReportEntry> reportEntries = Collections.singletonList(mock(ReportEntry.class));
when(reportEntryReader.read(fs, new Path("/path/to/report"))).thenReturn(Optional.of(reportEntries));
// ...but they cannot be converted to gauges
when(reportEntryConverter.convert(anyList(), any(String.class), anyMap())).thenReturn(Optional.empty());
// when
pushMetricsProcess.run(portBindings, conf, parameters);
// then: conversion was attempted once with the read entries; nothing pushed
verify(metricPusherCreatorProducer, times(1)).create(parameters);
verify(metricPusherProducer, times(1)).create(metricPusherCreator, parameters);
verify(fileSystemProducer, times(1)).create(conf);
verify(reportLocationsFinder, times(1)).find(parameters);
verify(labeledMetricConfByPatternProducer, times(1)).create(parameters);
verify(reportEntryReader, times(1)).read(fs, new Path("/path/to/report"));
verify(reportEntryConverter, times(1)).convert(reportEntries, "/path/to/report", Collections.emptyMap());
verify(metricPusher, never()).pushSafe(any(), any(), any());
}
// Failure point: gaugesRegistrar returns Optional.empty() — the run must stop
// after attempting to register the converted gauges and push nothing.
@Test
public void runShouldDoNothingWhenGaugesRegistrationFails() {
// given: entries are read and converted into gauges...
PortBindings portBindings = new PortBindings(Collections.emptyMap(), Collections.emptyMap());
Configuration conf = new Configuration();
Map<String, String> parameters = Collections.singletonMap("reportsDirPath", "/path/to/report");
MetricPusherCreator metricPusherCreator = mock(MetricPusherCreator.class);
when(metricPusherCreatorProducer.create(parameters)).thenReturn(Optional.of(metricPusherCreator));
MetricPusher metricPusher = mock(MetricPusher.class);
when(metricPusherProducer.create(metricPusherCreator, parameters)).thenReturn(Optional.of(metricPusher));
FileSystem fs = mock(FileSystem.class);
when(fileSystemProducer.create(conf)).thenReturn(Optional.of(fs));
when(reportLocationsFinder.find(parameters)).thenReturn(Optional.of(Collections.singletonList("/path/to/report")));
when(labeledMetricConfByPatternProducer.create(parameters)).thenReturn(Optional.of(Collections.emptyMap()));
List<ReportEntry> reportEntries = Collections.singletonList(mock(ReportEntry.class));
when(reportEntryReader.read(fs, new Path("/path/to/report"))).thenReturn(Optional.of(reportEntries));
List<Gauge> gauges = Collections.singletonList(mock(Gauge.class));
when(reportEntryConverter.convert(reportEntries, "/path/to/report", Collections.emptyMap())).thenReturn(Optional.of(gauges));
// ...but the gauges cannot be registered
when(gaugesRegistrar.register(anyList())).thenReturn(Optional.empty());
// when
pushMetricsProcess.run(portBindings, conf, parameters);
// then: registration was attempted once with the converted gauges; nothing pushed
verify(metricPusherCreatorProducer, times(1)).create(parameters);
verify(metricPusherProducer, times(1)).create(metricPusherCreator, parameters);
verify(fileSystemProducer, times(1)).create(conf);
verify(reportLocationsFinder, times(1)).find(parameters);
verify(labeledMetricConfByPatternProducer, times(1)).create(parameters);
verify(reportEntryReader, times(1)).read(fs, new Path("/path/to/report"));
verify(reportEntryConverter, times(1)).convert(reportEntries, "/path/to/report", Collections.emptyMap());
verify(gaugesRegistrar, times(1)).register(gauges);
verify(metricPusher, never()).pushSafe(any(), any(), any());
}
// Failure point: groupingKeyProducer returns Optional.empty() — the very last
// prerequisite before pushing — so the pusher must never be invoked.
@Test
public void runShouldDoNothingWhenJobNameCreationFails() {
// given: gauges are registered and a CollectorRegistry is available...
PortBindings portBindings = new PortBindings(Collections.emptyMap(), Collections.emptyMap());
Configuration conf = new Configuration();
Map<String, String> parameters = Collections.singletonMap("reportsDirPath", "/path/to/report");
MetricPusherCreator metricPusherCreator = mock(MetricPusherCreator.class);
when(metricPusherCreatorProducer.create(parameters)).thenReturn(Optional.of(metricPusherCreator));
MetricPusher metricPusher = mock(MetricPusher.class);
when(metricPusherProducer.create(metricPusherCreator, parameters)).thenReturn(Optional.of(metricPusher));
FileSystem fs = mock(FileSystem.class);
when(fileSystemProducer.create(conf)).thenReturn(Optional.of(fs));
when(reportLocationsFinder.find(parameters)).thenReturn(Optional.of(Collections.singletonList("/path/to/report")));
when(labeledMetricConfByPatternProducer.create(parameters)).thenReturn(Optional.of(Collections.emptyMap()));
List<ReportEntry> reportEntries = Collections.singletonList(mock(ReportEntry.class));
when(reportEntryReader.read(fs, new Path("/path/to/report"))).thenReturn(Optional.of(reportEntries));
List<Gauge> gauges = Collections.singletonList(mock(Gauge.class));
when(reportEntryConverter.convert(reportEntries, "/path/to/report", Collections.emptyMap())).thenReturn(Optional.of(gauges));
CollectorRegistry collectorRegistry = mock(CollectorRegistry.class);
when(gaugesRegistrar.register(gauges)).thenReturn(Optional.of(collectorRegistry));
// ...but the grouping key cannot be produced
when(groupingKeyProducer.create(anyMap())).thenReturn(Optional.empty());
// when
pushMetricsProcess.run(portBindings, conf, parameters);
// then: every stage ran exactly once up to the grouping key; nothing pushed
verify(metricPusherCreatorProducer, times(1)).create(parameters);
verify(metricPusherProducer, times(1)).create(metricPusherCreator, parameters);
verify(fileSystemProducer, times(1)).create(conf);
verify(reportLocationsFinder, times(1)).find(parameters);
verify(labeledMetricConfByPatternProducer, times(1)).create(parameters);
verify(reportEntryReader, times(1)).read(fs, new Path("/path/to/report"));
verify(reportEntryConverter, times(1)).convert(reportEntries, "/path/to/report", Collections.emptyMap());
verify(gaugesRegistrar, times(1)).register(gauges);
verify(groupingKeyProducer, times(1)).create(parameters);
verify(metricPusher, never()).pushSafe(any(), any(), any());
}
// Happy path: every stage succeeds, so the registry is pushed once with the
// hard-coded job name "iis" and the produced grouping key.
@Test
public void runShouldPushUsingMetricPusher() {
// given: the full pipeline is wired to succeed
PortBindings portBindings = new PortBindings(Collections.emptyMap(), Collections.emptyMap());
Configuration conf = new Configuration();
Map<String, String> parameters = Collections.singletonMap("reportsDirPath", "/path/to/report");
MetricPusherCreator metricPusherCreator = mock(MetricPusherCreator.class);
when(metricPusherCreatorProducer.create(parameters)).thenReturn(Optional.of(metricPusherCreator));
MetricPusher metricPusher = mock(MetricPusher.class);
when(metricPusherProducer.create(metricPusherCreator, parameters)).thenReturn(Optional.of(metricPusher));
FileSystem fs = mock(FileSystem.class);
when(fileSystemProducer.create(conf)).thenReturn(Optional.of(fs));
when(reportLocationsFinder.find(parameters)).thenReturn(Optional.of(Collections.singletonList("/path/to/report")));
when(labeledMetricConfByPatternProducer.create(parameters)).thenReturn(Optional.of(Collections.emptyMap()));
List<ReportEntry> reportEntries = Collections.singletonList(mock(ReportEntry.class));
when(reportEntryReader.read(fs, new Path("/path/to/report"))).thenReturn(Optional.of(reportEntries));
List<Gauge> gauges = Collections.singletonList(mock(Gauge.class));
when(reportEntryConverter.convert(reportEntries, "/path/to/report", Collections.emptyMap())).thenReturn(Optional.of(gauges));
CollectorRegistry collectorRegistry = mock(CollectorRegistry.class);
when(gaugesRegistrar.register(gauges)).thenReturn(Optional.of(collectorRegistry));
when(groupingKeyProducer.create(parameters)).thenReturn(Optional.of(Collections.singletonMap("grouping.key", "value")));
// when
pushMetricsProcess.run(portBindings, conf, parameters);
// then: each stage ran exactly once and the push happened with the
// registered gauges, the "iis" job name and the grouping key
verify(metricPusherCreatorProducer, times(1)).create(parameters);
verify(metricPusherProducer, times(1)).create(metricPusherCreator, parameters);
verify(fileSystemProducer, times(1)).create(conf);
verify(reportLocationsFinder, times(1)).find(parameters);
verify(labeledMetricConfByPatternProducer, times(1)).create(parameters);
verify(reportEntryReader, times(1)).read(fs, new Path("/path/to/report"));
verify(reportEntryConverter, times(1)).convert(reportEntries, "/path/to/report", Collections.emptyMap());
verify(gaugesRegistrar, times(1)).register(gauges);
verify(groupingKeyProducer, times(1)).create(parameters);
verify(metricPusher, times(1)).pushSafe(collectorRegistry, "iis", Collections.singletonMap("grouping.key", "value"));
}
}
| |
package net.respectnetwork.csp.application.dao.mysql;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.math.BigDecimal;
import java.util.List;
import java.util.ArrayList;
import javax.sql.DataSource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import net.respectnetwork.csp.application.model.InviteModel;
import net.respectnetwork.csp.application.dao.InviteDAO;
import net.respectnetwork.csp.application.dao.DAOException;
/**
 * MySQL implementation of {@link InviteDAO}: persistence for invite records
 * (which inviter cloud invited which email address, via which CSP cloud, and when).
 * <p>
 * Connections come from {@link BaseDAOImpl#getConnection()} and are always
 * released in a {@code finally} block via {@code closeConnection(...)}.
 * <p>
 * Contract note: the list/get methods return {@code null} (not an empty list)
 * when nothing matches; existing callers depend on that, so it is preserved.
 */
public class InviteDAOImpl extends BaseDAOImpl implements InviteDAO
{
    private static final Logger logger = LoggerFactory.getLogger(InviteDAOImpl.class);

    public InviteDAOImpl()
    {
        super();
        logger.info("InviteDAOImpl() created");
    }

    /**
     * Maps the current row of {@code rset} to an {@link InviteModel}.
     * The SELECT must project columns in this exact order:
     * invite_id, csp_cloudname, inviter_cloudname, invited_email_address,
     * email_subject, email_message, time_created.
     */
    private InviteModel get( ResultSet rset ) throws SQLException
    {
        InviteModel inv = new InviteModel();
        inv.setInviteId           (rset.getString   (1));
        inv.setCspCloudName       (rset.getString   (2));
        inv.setInviterCloudName   (rset.getString   (3));
        inv.setInvitedEmailAddress(rset.getString   (4));
        inv.setEmailSubject       (rset.getString   (5));
        inv.setEmailMessage       (rset.getString   (6));
        inv.setTimeCreated        (rset.getTimestamp(7));
        return inv;
    }

    /**
     * Lists every invite sent by the given inviter cloud name.
     *
     * @param inviterCloudName cloud name whose invites are requested
     * @return the matching invites, or {@code null} when there are none
     * @throws DAOException if the query fails
     */
    public List<InviteModel> list( String inviterCloudName ) throws DAOException
    {
        logger.info("list() " + inviterCloudName);
        List<InviteModel> rtn = null;
        Connection conn = this.getConnection();
        PreparedStatement stmt = null;
        ResultSet rset = null;
        String sql = null;
        try
        {
            sql = "select invite_id, csp_cloudname, inviter_cloudname, invited_email_address, email_subject, email_message, time_created from invite where inviter_cloudname = ?";
            logger.info(sql + " : " + inviterCloudName);
            stmt = conn.prepareStatement(sql);
            stmt.setString(1, inviterCloudName);
            rset = stmt.executeQuery();
            while( rset.next() )
            {
                InviteModel inv = this.get(rset);
                if( rtn == null )
                {
                    // allocate lazily so an empty result yields null (see class doc)
                    rtn = new ArrayList<InviteModel>();
                }
                rtn.add(inv);
                logger.info(inv.toString());
            }
            rset.close();
            rset = null;
            stmt.close();
            stmt = null;
        }
        catch( SQLException e )
        {
            String err = "Failed to execute SQL statement - " + sql;
            logger.error(err, e);
            throw new DAOException(err, e);
        }
        finally
        {
            // releases whatever is still open (rset/stmt are nulled when
            // already closed on the success path)
            this.closeConnection(conn, stmt, rset);
        }
        if( rtn == null )
        {
            // an empty result is a normal outcome, not an error
            logger.info("No Invite found");
        }
        else
        {
            // fixed: success-path count was previously logged at ERROR level
            logger.info("Invite found = " + rtn.size());
        }
        return rtn;
    }

    /**
     * Lists invites sent by the given inviter, grouped by (lower-cased) invited
     * email address. Each returned model carries the invited address, the most
     * recent invite time, and the number of gift codes issued for that invite.
     *
     * @param inviterCloudName cloud name whose invites are requested
     * @return the grouped invites (newest first), or {@code null} when none exist
     * @throws DAOException if the query fails
     */
    public List<InviteModel> listGroupByInvited( String inviterCloudName ) throws DAOException
    {
        // fixed: log message previously said "listlistGroupByInvited()"
        logger.info("listGroupByInvited() " + inviterCloudName);
        List<InviteModel> rtn = null;
        Connection conn = this.getConnection();
        PreparedStatement stmt = null;
        ResultSet rset = null;
        String sql = null;
        try
        {
            sql = "select lower(invited_email_address), max(time_created), (select count(*) from giftcode where invite_id = p.invite_id) from invite p where inviter_cloudname = ? group by lower(invited_email_address) order by max(time_created) desc, lower(invited_email_address)";
            logger.info(sql + " : " + inviterCloudName);
            stmt = conn.prepareStatement(sql);
            stmt.setString(1, inviterCloudName);
            rset = stmt.executeQuery();
            while( rset.next() )
            {
                InviteModel inv = new InviteModel();
                inv.setInvitedEmailAddress(rset.getString   (1));
                inv.setTimeCreated        (rset.getTimestamp(2));
                inv.setGiftCardCount      (rset.getInt      (3));
                if( rtn == null )
                {
                    rtn = new ArrayList<InviteModel>();
                }
                rtn.add(inv);
                logger.info(inv.toString());
            }
            rset.close();
            rset = null;
            stmt.close();
            stmt = null;
        }
        catch( SQLException e )
        {
            String err = "Failed to execute SQL statement - " + sql;
            logger.error(err, e);
            throw new DAOException(err, e);
        }
        finally
        {
            this.closeConnection(conn, stmt, rset);
        }
        if( rtn == null )
        {
            // an empty result is a normal outcome, not an error
            logger.info("No Invite found");
        }
        else
        {
            // fixed: success-path count was previously logged at ERROR level
            logger.info("Invite found = " + rtn.size());
        }
        return rtn;
    }

    /**
     * Loads a single invite by its primary key.
     *
     * @param inviteId invite primary key
     * @return the invite, or {@code null} when no row matches
     * @throws DAOException if the query fails
     */
    public InviteModel get( String inviteId ) throws DAOException
    {
        logger.info("get() - " + inviteId);
        InviteModel rtn = null;
        Connection conn = this.getConnection();
        PreparedStatement stmt = null;
        ResultSet rset = null;
        String sql = null;
        try
        {
            sql = "select invite_id, csp_cloudname, inviter_cloudname, invited_email_address, email_subject, email_message, time_created from invite where invite_id = ?";
            logger.info(sql + " : " + inviteId);
            stmt = conn.prepareStatement(sql);
            stmt.setString(1, inviteId);
            rset = stmt.executeQuery();
            if( rset.next() )
            {
                rtn = this.get(rset);
                logger.info(rtn.toString());
            }
            rset.close();
            rset = null;
            stmt.close();
            stmt = null;
        }
        catch( SQLException e )
        {
            String err = "Failed to execute SQL statement - " + sql;
            logger.error(err, e);
            throw new DAOException(err, e);
        }
        finally
        {
            this.closeConnection(conn, stmt, rset);
        }
        if( rtn == null )
        {
            // a missing id may simply mean an expired/unknown invite; not an error
            logger.warn("Invite not found - " + inviteId);
        }
        return rtn;
    }

    /**
     * Inserts a new invite row; {@code time_created} is set by the database
     * ({@code now()}).
     *
     * @param invite fully-populated invite to persist (id assigned by caller)
     * @return the same model on success, or {@code null} when no row was inserted
     * @throws DAOException if the insert fails
     */
    public InviteModel insert( InviteModel invite ) throws DAOException
    {
        logger.info("insert() - " + invite);
        InviteModel rtn = null;
        Connection conn = this.getConnection();
        PreparedStatement stmt = null;
        String sql = null;
        try
        {
            sql = "insert into invite (invite_id, csp_cloudname, inviter_cloudname, invited_email_address, email_subject, email_message, time_created) values (?, ?, ?, ?, ?, ?, now())";
            logger.info(sql + " : " + invite);
            stmt = conn.prepareStatement(sql);
            stmt.setString(1, invite.getInviteId());
            stmt.setString(2, invite.getCspCloudName());
            stmt.setString(3, invite.getInviterCloudName());
            stmt.setString(4, invite.getInvitedEmailAddress());
            stmt.setString(5, invite.getEmailSubject());
            stmt.setString(6, invite.getEmailMessage());
            int rows = stmt.executeUpdate();
            if( rows != 1 )
            {
                logger.error(sql + " : " + invite + " return " + rows + " rows ");
            }
            else
            {
                rtn = invite;
            }
            stmt.close();
            stmt = null;
        }
        catch( SQLException e )
        {
            String err = "Failed to execute SQL statement - " + sql;
            logger.error(err, e);
            throw new DAOException(err, e);
        }
        finally
        {
            this.closeConnection(conn, stmt);
        }
        if( rtn == null )
        {
            logger.error("Invite insert failed - " + invite);
        }
        return rtn;
    }
}
| |
/*
Copyright 2014-2016 Intel Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package apple.localauthentication;
import apple.NSObject;
import apple.foundation.NSArray;
import apple.foundation.NSData;
import apple.foundation.NSError;
import apple.foundation.NSMethodSignature;
import apple.foundation.NSNumber;
import apple.foundation.NSSet;
import apple.security.opaque.SecAccessControlRef;
import org.moe.natj.c.ann.FunctionPtr;
import org.moe.natj.general.NatJ;
import org.moe.natj.general.Pointer;
import org.moe.natj.general.ann.Generated;
import org.moe.natj.general.ann.Library;
import org.moe.natj.general.ann.Mapped;
import org.moe.natj.general.ann.NInt;
import org.moe.natj.general.ann.NUInt;
import org.moe.natj.general.ann.Owned;
import org.moe.natj.general.ann.ReferenceInfo;
import org.moe.natj.general.ann.Runtime;
import org.moe.natj.general.ptr.Ptr;
import org.moe.natj.general.ptr.VoidPtr;
import org.moe.natj.objc.Class;
import org.moe.natj.objc.ObjCRuntime;
import org.moe.natj.objc.SEL;
import org.moe.natj.objc.ann.ObjCBlock;
import org.moe.natj.objc.ann.ObjCClassBinding;
import org.moe.natj.objc.ann.Selector;
import org.moe.natj.objc.map.ObjCObjectMapper;
/**
* Class that represents an authentication context.
* <p>
* This context can be used for evaluating policies.
*
* @see LAPolicy
*/
@Generated
@Library("LocalAuthentication")
@Runtime(ObjCRuntime.class)
@ObjCClassBinding
public class LAContext extends NSObject {
// --- NatJ/MOE generated boilerplate --------------------------------------
// Registers this binding class with the Objective-C runtime; required once
// per bound class before any native member may be invoked.
static {
NatJ.register();
}
// Wraps an existing native LAContext instance. Used by the binding runtime
// when marshalling Objective-C objects into Java, not by application code.
@Generated
protected LAContext(Pointer peer) {
super(peer);
}
// The static natives below are the standard NSObject class-level selectors
// re-exposed for LAContext by the binding generator; they contain no
// Java-side logic and forward directly to the Objective-C runtime.
@Generated
@Selector("accessInstanceVariablesDirectly")
public static native boolean accessInstanceVariablesDirectly();
// maps the Objective-C "alloc" selector; @Owned marks the caller as owner
@Generated
@Owned
@Selector("alloc")
public static native LAContext alloc();
@Owned
@Generated
@Selector("allocWithZone:")
public static native LAContext allocWithZone(VoidPtr zone);
@Generated
@Selector("automaticallyNotifiesObserversForKey:")
public static native boolean automaticallyNotifiesObserversForKey(String key);
@Generated
@Selector("cancelPreviousPerformRequestsWithTarget:")
public static native void cancelPreviousPerformRequestsWithTarget(@Mapped(ObjCObjectMapper.class) Object aTarget);
@Generated
@Selector("cancelPreviousPerformRequestsWithTarget:selector:object:")
public static native void cancelPreviousPerformRequestsWithTargetSelectorObject(
@Mapped(ObjCObjectMapper.class) Object aTarget, SEL aSelector,
@Mapped(ObjCObjectMapper.class) Object anArgument);
@Generated
@Selector("classFallbacksForKeyedArchiver")
public static native NSArray<String> classFallbacksForKeyedArchiver();
@Generated
@Selector("classForKeyedUnarchiver")
public static native Class classForKeyedUnarchiver();
@Generated
@Selector("debugDescription")
public static native String debugDescription_static();
@Generated
@Selector("description")
public static native String description_static();
@Generated
@Selector("hash")
@NUInt
public static native long hash_static();
@Generated
@Selector("instanceMethodForSelector:")
@FunctionPtr(name = "call_instanceMethodForSelector_ret")
public static native NSObject.Function_instanceMethodForSelector_ret instanceMethodForSelector(SEL aSelector);
@Generated
@Selector("instanceMethodSignatureForSelector:")
public static native NSMethodSignature instanceMethodSignatureForSelector(SEL aSelector);
@Generated
@Selector("instancesRespondToSelector:")
public static native boolean instancesRespondToSelector(SEL aSelector);
@Generated
@Selector("isSubclassOfClass:")
public static native boolean isSubclassOfClass(Class aClass);
@Generated
@Selector("keyPathsForValuesAffectingValueForKey:")
public static native NSSet<String> keyPathsForValuesAffectingValueForKey(String key);
// maps the Objective-C "new" selector; renamed because `new` is a Java keyword
@Generated
@Owned
@Selector("new")
public static native LAContext new_objc();
@Generated
@Selector("resolveClassMethod:")
public static native boolean resolveClassMethod(SEL sel);
@Generated
@Selector("resolveInstanceMethod:")
public static native boolean resolveInstanceMethod(SEL sel);
@Generated
@Selector("setVersion:")
public static native void setVersion_static(@NInt long aVersion);
@Generated
@Selector("superclass")
public static native Class superclass_static();
@Generated
@Selector("version")
@NInt
public static native long version_static();
/**
 * Determines if a particular policy can be evaluated.
 * <p>
 * Policies can have certain requirements which, when not satisfied, would always cause
 * the policy evaluation to fail - e.g. a passcode set, a fingerprint
 * enrolled with Touch ID or a face set up with Face ID. This method allows easy checking
 * for such conditions.
 * <p>
 * Applications should consume the returned value immediately and avoid relying on it
 * for an extensive period of time. At least, it is guaranteed to stay valid until the
 * application enters background.
 * <p>
 * [@warning] Do not call this method in the reply block of evaluatePolicy:reply: because it could
 * lead to a deadlock.
 *
 * @param policy Policy for which the preflight check should be run.
 * @param error Optional output parameter which is set to nil if the policy can be evaluated, or it
 * contains error information if policy evaluation is not possible.
 * @return YES if the policy can be evaluated, NO otherwise.
 */
// NOTE(review): @NInt marks the parameter as a native NSInteger; valid values
// are the LAPolicy constants referenced from the class Javadoc — confirm.
@Generated
@Selector("canEvaluatePolicy:error:")
public native boolean canEvaluatePolicyError(@NInt long policy,
@ReferenceInfo(type = NSError.class) Ptr<NSError> error);
/**
 * Evaluates access control object for the specified operation.
 * <p>
 * Access control evaluation may involve prompting user for various kinds of interaction
 * or authentication. Actual behavior is dependent on evaluated access control, device type,
 * and can be affected by installed configuration profiles.
 * <p>
 * Be sure to keep a strong reference to the context while the evaluation is in progress.
 * Otherwise, an evaluation would be canceled when the context is being deallocated.
 * <p>
 * The method does not block. Instead, the caller must provide a reply block to be
 * called asynchronously when evaluation finishes. The block is executed on a private
 * queue internal to the framework in an unspecified threading context. Other than that,
 * no guarantee is made about which queue, thread, or run-loop the block is executed on.
 * <p>
 * After successful access control evaluation, the LAContext can be used with keychain operations,
 * so that they do not require user to authenticate.
 * <p>
 * Access control evaluation may fail for various reasons, including user cancel, system cancel
 * and others, see LAError codes.
 * <p>
 * [@warning] localizedReason parameter is mandatory and the call will throw NSInvalidArgumentException if
 * nil or empty string is specified.
 * <p>
 * [@warning] Applications should also supply NSFaceIDUsageDescription key in the Info.plist. This key identifies
 * a string value that contains a message to be displayed to users when the app is trying to use
 * Face ID for the first time. Users can choose to allow or deny the use of Face ID by the app before
 * the first use or later in Face ID privacy settings. When the use of Face ID is denied, evaluations
 * will fail with LAErrorBiometryNotAvailable.
 *
 * @param accessControl Access control object that is typically created by SecAccessControlCreateWithFlags.
 * @param operation Type of operation the access control will be used with.
 * @param localizedReason Application reason for authentication. This string must be provided in correct
 * localization and should be short and clear. It will be eventually displayed in
 * the authentication dialog subtitle for Touch ID or passcode. The name of the
 * calling application will be displayed in title, so it should not be duplicated here.
 * <p>
 * This parameter is mostly ignored by Face ID authentication. Face ID will show
 * generic instructions unless a customized fallback title is provided in
 * localizedFallbackTitle property. For that case, it will show the authentication
 * reason so that the instructions can be made consistent with the custom button
 * title. Therefore, you should make sure that users are already aware of the need
 * and reason for Face ID authentication before they have triggered the policy evaluation.
 * @param reply Reply block that is executed when access control evaluation finishes.
 * success Reply parameter that is YES if the access control has been evaluated successfully or
 * NO if the evaluation failed.
 * error Reply parameter that is nil if the access control has been evaluated successfully, or
 * it contains error information about the evaluation failure.
 */
// Asynchronous: returns immediately; per the Javadoc above, the reply block is
// invoked later on a private framework queue (no threading guarantees).
@Generated
@Selector("evaluateAccessControl:operation:localizedReason:reply:")
public native void evaluateAccessControlOperationLocalizedReasonReply(SecAccessControlRef accessControl,
@NInt long operation, String localizedReason,
@ObjCBlock(name = "call_evaluateAccessControlOperationLocalizedReasonReply") Block_evaluateAccessControlOperationLocalizedReasonReply reply);
/**
 * Evaluates the specified policy.
 * <p>
 * Policy evaluation may involve prompting user for various kinds of interaction
 * or authentication. Actual behavior is dependent on evaluated policy, device type,
 * and can be affected by installed configuration profiles.
 * <p>
 * Be sure to keep a strong reference to the context while the evaluation is in progress.
 * Otherwise, an evaluation would be canceled when the context is being deallocated.
 * <p>
 * The method does not block. Instead, the caller must provide a reply block to be
 * called asynchronously when evaluation finishes. The block is executed on a private
 * queue internal to the framework in an unspecified threading context. Other than that,
 * no guarantee is made about which queue, thread, or run-loop the block is executed on.
 * <p>
 * Implications of successful policy evaluation are policy specific. In general, this
 * operation is not idempotent. Policy evaluation may fail for various reasons, including
 * user cancel, system cancel and others, see LAError codes.
 * <p>
 * [@warning] localizedReason parameter is mandatory and the call will throw NSInvalidArgumentException if
 * nil or empty string is specified.
 * <p>
 * [@warning] Applications should also supply NSFaceIDUsageDescription key in the Info.plist. This key identifies
 * a string value that contains a message to be displayed to users when the app is trying to use
 * Face ID for the first time. Users can choose to allow or deny the use of Face ID by the app before
 * the first use or later in Face ID privacy settings. When the use of Face ID is denied, evaluations
 * will fail with LAErrorBiometryNotAvailable.
 * <p>
 * [@li] LAErrorUserFallback if user tapped the fallback button
 * [@li] LAErrorUserCancel if user has tapped the Cancel button
 * [@li] LAErrorSystemCancel if some system event interrupted the evaluation (e.g. Home button pressed).
 *
 * @param policy Policy to be evaluated.
 * @param reply Reply block that is executed when policy evaluation finishes.
 * success Reply parameter that is YES if the policy has been evaluated successfully or
 * NO if the evaluation failed.
 * error Reply parameter that is nil if the policy has been evaluated successfully, or it
 * contains error information about the evaluation failure.
 * @param localizedReason Application reason for authentication. This string must be provided in correct
 * localization and should be short and clear. It will be eventually displayed in
 * the authentication dialog subtitle for Touch ID or passcode. The name of the
 * calling application will be displayed in title, so it should not be duplicated here.
 * <p>
 * This parameter is mostly ignored by Face ID authentication. Face ID will show
 * generic instructions unless a customized fallback title is provided in
 * localizedFallbackTitle property. For that case, it will show the authentication
 * reason so that the instructions can be made consistent with the custom button
 * title. Therefore, you should make sure that users are already aware of the need
 * and reason for Face ID authentication before they have triggered the policy evaluation.
 * @see LAError
 * <p>
 * Typical error codes returned by this call are:
 */
// Asynchronous: returns immediately; per the Javadoc above, the reply block is
// invoked later on a private framework queue (no threading guarantees).
@Generated
@Selector("evaluatePolicy:localizedReason:reply:")
public native void evaluatePolicyLocalizedReasonReply(@NInt long policy, String localizedReason,
@ObjCBlock(name = "call_evaluatePolicyLocalizedReasonReply") Block_evaluatePolicyLocalizedReasonReply reply);
/**
 * Contains policy domain state.
 * <p>
 * This property is set only when evaluatePolicy is called and succesful Touch ID or Face ID authentication
 * was performed, or when canEvaluatePolicy succeeds for a biometric policy.
 * It stays nil for all other cases.
 * If biometric database was modified (fingers or faces were removed or added), evaluatedPolicyDomainState
 * data will change. Nature of such database changes cannot be determined
 * but comparing data of evaluatedPolicyDomainState after different evaluatePolicy
 * will reveal the fact database was changed between calls.
 * <p>
 * [@warning] Please note that the value returned by this property can change exceptionally between major OS versions even if
 * the state of biometry has not changed.
 */
// Read-only property getter; per the Javadoc above, may return nil (Java null).
@Generated
@Selector("evaluatedPolicyDomainState")
public native NSData evaluatedPolicyDomainState();
// Maps the plain Objective-C "init" selector: initializes a freshly
// allocated (see alloc()) native LAContext instance.
@Generated
@Selector("init")
public native LAContext init();
    /**
     * Invalidates the context.
     * <p>
     * The context is invalidated automatically when it is (auto)released. This method
     * allows invalidating it manually while it is still in scope.
     * <p>
     * Invalidation terminates any existing policy evaluation and the respective call will
     * fail with LAErrorAppCancel. After the context has been invalidated, it can not be
     * used for policy evaluation and an attempt to do so will fail with LAErrorInvalidContext.
     * <p>
     * Invalidating a context that has been already invalidated has no effect.
     */
    @Generated
    @Selector("invalidate")
    public native void invalidate();

    /**
     * Reveals if credential was set with this context.
     *
     * @param type Type of credential we are asking for.
     * @return YES if a credential of the given type has been set on this context, NO otherwise.
     */
    @Generated
    @Selector("isCredentialSet:")
    public native boolean isCredentialSet(@NInt long type);
    /**
     * Cancel button title.
     * <p>
     * Allows cancel button title customization. A default title "Cancel" is used when
     * this property is left nil or is set to empty string.
     *
     * @return the custom cancel button title, or null when the default is in effect
     */
    @Generated
    @Selector("localizedCancelTitle")
    public native String localizedCancelTitle();

    /**
     * Fallback button title.
     * <p>
     * Allows fallback button title customization. If set to empty string, the button will be hidden.
     * A default title "Enter Password" is used when this property is left nil.
     *
     * @return the custom fallback button title, or null when the default is in effect
     */
    @Generated
    @Selector("localizedFallbackTitle")
    public native String localizedFallbackTitle();

    /**
     * This property is deprecated and setting it has no effect.
     *
     * @deprecated has no effect; kept only for API compatibility
     */
    @Generated
    @Deprecated
    @Selector("maxBiometryFailures")
    public native NSNumber maxBiometryFailures();
    /**
     * Sets a credential to this context.
     * <p>
     * Some policies allow to bind application-provided credential with them.
     * This method allows credential to be passed to the right context.
     *
     * @param credential Credential to be used with subsequent calls. Setting this parameter to nil will remove
     *                   any existing credential of the specified type.
     * @param type Type of the provided credential.
     * @return YES if the credential was set successfully, NO otherwise.
     */
    @Generated
    @Selector("setCredential:type:")
    public native boolean setCredentialType(NSData credential, @NInt long type);

    /**
     * Cancel button title.
     * <p>
     * Allows cancel button title customization. A default title "Cancel" is used when
     * this property is left nil or is set to empty string.
     *
     * @param value the custom cancel button title; null or empty string restores the default
     */
    @Generated
    @Selector("setLocalizedCancelTitle:")
    public native void setLocalizedCancelTitle(String value);

    /**
     * Fallback button title.
     * <p>
     * Allows fallback button title customization. If set to empty string, the button will be hidden.
     * A default title "Enter Password" is used when this property is left nil.
     *
     * @param value the custom fallback button title; empty string hides the button, null restores the default
     */
    @Generated
    @Selector("setLocalizedFallbackTitle:")
    public native void setLocalizedFallbackTitle(String value);

    /**
     * This property is deprecated and setting it has no effect.
     *
     * @deprecated setting this value has no effect
     */
    @Generated
    @Deprecated
    @Selector("setMaxBiometryFailures:")
    public native void setMaxBiometryFailures(NSNumber value);
    /**
     * Time interval for accepting a successful Touch ID or Face ID device unlock (on the lock screen) from the past.
     * <p>
     * This property can be set with a time interval in seconds. If the device was successfully unlocked by
     * biometry within this time interval, then biometric authentication on this context will succeed
     * automatically and the reply block will be called without prompting user for Touch ID or Face ID.
     * <p>
     * The default value is 0, meaning that no previous biometric unlock can be reused.
     * <p>
     * This property is meant only for reusing biometric matches from the device lock screen.
     * It does not allow reusing previous biometric matches in application or between applications.
     * <p>
     * The maximum supported interval is 5 minutes and setting the value beyond 5 minutes does not increase
     * the accepted interval.
     *
     * @param value accepted reuse interval in seconds (values beyond 5 minutes are not honored, see above)
     * @see LATouchIDAuthenticationMaximumAllowableReuseDuration
     */
    @Generated
    @Selector("setTouchIDAuthenticationAllowableReuseDuration:")
    public native void setTouchIDAuthenticationAllowableReuseDuration(double value);

    /**
     * Time interval for accepting a successful Touch ID or Face ID device unlock (on the lock screen) from the past.
     * <p>
     * This property can be set with a time interval in seconds. If the device was successfully unlocked by
     * biometry within this time interval, then biometric authentication on this context will succeed
     * automatically and the reply block will be called without prompting user for Touch ID or Face ID.
     * <p>
     * The default value is 0, meaning that no previous biometric unlock can be reused.
     * <p>
     * This property is meant only for reusing biometric matches from the device lock screen.
     * It does not allow reusing previous biometric matches in application or between applications.
     * <p>
     * The maximum supported interval is 5 minutes and setting the value beyond 5 minutes does not increase
     * the accepted interval.
     *
     * @return the currently accepted reuse interval in seconds
     * @see LATouchIDAuthenticationMaximumAllowableReuseDuration
     */
    @Generated
    @Selector("touchIDAuthenticationAllowableReuseDuration")
    public native double touchIDAuthenticationAllowableReuseDuration();
    /**
     * Completion handler for {@code evaluateAccessControl:operation:localizedReason:reply:}.
     * Called with {@code success == true} on successful evaluation; otherwise {@code error}
     * describes the failure.
     */
    @Runtime(ObjCRuntime.class)
    @Generated
    public interface Block_evaluateAccessControlOperationLocalizedReasonReply {
        @Generated
        void call_evaluateAccessControlOperationLocalizedReasonReply(boolean success, NSError error);
    }

    /**
     * Completion handler for {@link #evaluatePolicyLocalizedReasonReply}.
     * Called with {@code success == true} on successful evaluation; otherwise {@code error}
     * describes the failure.
     */
    @Runtime(ObjCRuntime.class)
    @Generated
    public interface Block_evaluatePolicyLocalizedReasonReply {
        @Generated
        void call_evaluatePolicyLocalizedReasonReply(boolean success, NSError error);
    }
    /**
     * Indicates the type of the biometry supported by the device.
     * <p>
     * This property is set when canEvaluatePolicy has been called for a biometric policy.
     * The default value is LABiometryTypeNone.
     *
     * @return an LABiometryType value describing the available biometry
     */
    @Generated
    @Selector("biometryType")
    @NInt
    public native long biometryType();

    /**
     * Allows running authentication in non-interactive mode.
     * <p>
     * If the context is used in a keychain query by the means of kSecUseAuthenticationContext,
     * then setting this property to YES has the same effect as passing kSecUseNoAuthenticationUI
     * in the query, i.e. the keychain call will eventually fail with errSecInteractionNotAllowed
     * instead of displaying the authentication UI.
     * <p>
     * If this property is used with a LocalAuthentication evaluation, it will eventually fail with
     * LAErrorNotInteractive instead of displaying the authentication UI.
     *
     * @return YES when non-interactive (UI-less) mode is enabled
     */
    @Generated
    @Selector("interactionNotAllowed")
    public native boolean interactionNotAllowed();

    /**
     * Allows setting the default localized authentication reason on context.
     * <p>
     * A localized string from this property is displayed in the authentication UI if the caller didn't specify
     * its own authentication reason (e.g. a keychain operation with kSecUseAuthenticationContext). This property
     * is ignored if the authentication reason was provided by caller.
     *
     * @return the default localized authentication reason, or null when none was set
     */
    @Generated
    @Selector("localizedReason")
    public native String localizedReason();

    /**
     * Allows running authentication in non-interactive mode.
     * <p>
     * If the context is used in a keychain query by the means of kSecUseAuthenticationContext,
     * then setting this property to YES has the same effect as passing kSecUseNoAuthenticationUI
     * in the query, i.e. the keychain call will eventually fail with errSecInteractionNotAllowed
     * instead of displaying the authentication UI.
     * <p>
     * If this property is used with a LocalAuthentication evaluation, it will eventually fail with
     * LAErrorNotInteractive instead of displaying the authentication UI.
     *
     * @param value YES to forbid displaying any authentication UI
     */
    @Generated
    @Selector("setInteractionNotAllowed:")
    public native void setInteractionNotAllowed(boolean value);

    /**
     * Allows setting the default localized authentication reason on context.
     * <p>
     * A localized string from this property is displayed in the authentication UI if the caller didn't specify
     * its own authentication reason (e.g. a keychain operation with kSecUseAuthenticationContext). This property
     * is ignored if the authentication reason was provided by caller.
     *
     * @param value the default reason shown when the caller provides none
     */
    @Generated
    @Selector("setLocalizedReason:")
    public native void setLocalizedReason(String value);
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.artemis.jms.client;
import javax.jms.JMSException;
import javax.jms.MessageEOFException;
import javax.jms.MessageFormatException;
import javax.jms.StreamMessage;
import org.apache.activemq.artemis.api.core.ActiveMQBuffer;
import org.apache.activemq.artemis.api.core.Message;
import org.apache.activemq.artemis.api.core.Pair;
import org.apache.activemq.artemis.api.core.client.ClientMessage;
import org.apache.activemq.artemis.api.core.client.ClientSession;
import org.apache.activemq.artemis.core.client.impl.ClientMessageImpl;
import org.apache.activemq.artemis.utils.DataConstants;
import static org.apache.activemq.artemis.reader.StreamMessageUtil.streamReadBoolean;
import static org.apache.activemq.artemis.reader.StreamMessageUtil.streamReadByte;
import static org.apache.activemq.artemis.reader.StreamMessageUtil.streamReadBytes;
import static org.apache.activemq.artemis.reader.StreamMessageUtil.streamReadChar;
import static org.apache.activemq.artemis.reader.StreamMessageUtil.streamReadDouble;
import static org.apache.activemq.artemis.reader.StreamMessageUtil.streamReadFloat;
import static org.apache.activemq.artemis.reader.StreamMessageUtil.streamReadInteger;
import static org.apache.activemq.artemis.reader.StreamMessageUtil.streamReadLong;
import static org.apache.activemq.artemis.reader.StreamMessageUtil.streamReadObject;
import static org.apache.activemq.artemis.reader.StreamMessageUtil.streamReadShort;
import static org.apache.activemq.artemis.reader.StreamMessageUtil.streamReadString;
/**
* ActiveMQ Artemis implementation of a JMS StreamMessage.
*/
public final class ActiveMQStreamMessage extends ActiveMQMessage implements StreamMessage {
public static final byte TYPE = Message.STREAM_TYPE;
protected ActiveMQStreamMessage(final ClientSession session) {
super(ActiveMQStreamMessage.TYPE, session);
}
protected ActiveMQStreamMessage(final ClientMessage message, final ClientSession session) {
super(message, session);
}
public ActiveMQStreamMessage(final StreamMessage foreign, final ClientSession session) throws JMSException {
super(foreign, ActiveMQStreamMessage.TYPE, session);
foreign.reset();
try {
while (true) {
Object obj = foreign.readObject();
writeObject(obj);
}
} catch (MessageEOFException e) {
// Ignore
}
}
// For testing only
public ActiveMQStreamMessage() {
message = new ClientMessageImpl((byte) 0, false, 0, 0, (byte) 4, 1500, null);
}
// Public --------------------------------------------------------
@Override
public byte getType() {
return ActiveMQStreamMessage.TYPE;
}
// StreamMessage implementation ----------------------------------
@Override
public boolean readBoolean() throws JMSException {
checkRead();
try {
return streamReadBoolean(message.getBodyBuffer());
} catch (IllegalStateException e) {
throw new MessageFormatException(e.getMessage());
} catch (IndexOutOfBoundsException e) {
throw new MessageEOFException("");
}
}
@Override
public byte readByte() throws JMSException {
checkRead();
try {
return streamReadByte(message.getBodyBuffer());
} catch (IllegalStateException e) {
throw new MessageFormatException(e.getMessage());
} catch (IndexOutOfBoundsException e) {
throw new MessageEOFException("");
}
}
@Override
public short readShort() throws JMSException {
checkRead();
try {
return streamReadShort(message.getBodyBuffer());
} catch (IllegalStateException e) {
throw new MessageFormatException(e.getMessage());
} catch (IndexOutOfBoundsException e) {
throw new MessageEOFException("");
}
}
@Override
public char readChar() throws JMSException {
checkRead();
try {
return streamReadChar(message.getBodyBuffer());
} catch (IllegalStateException e) {
throw new MessageFormatException(e.getMessage());
} catch (IndexOutOfBoundsException e) {
throw new MessageEOFException("");
}
}
@Override
public int readInt() throws JMSException {
checkRead();
try {
return streamReadInteger(message.getBodyBuffer());
} catch (IllegalStateException e) {
throw new MessageFormatException(e.getMessage());
} catch (IndexOutOfBoundsException e) {
throw new MessageEOFException("");
}
}
@Override
public long readLong() throws JMSException {
checkRead();
try {
return streamReadLong(message.getBodyBuffer());
} catch (IllegalStateException e) {
throw new MessageFormatException(e.getMessage());
} catch (IndexOutOfBoundsException e) {
throw new MessageEOFException("");
}
}
@Override
public float readFloat() throws JMSException {
checkRead();
try {
return streamReadFloat(message.getBodyBuffer());
} catch (IllegalStateException e) {
throw new MessageFormatException(e.getMessage());
} catch (IndexOutOfBoundsException e) {
throw new MessageEOFException("");
}
}
@Override
public double readDouble() throws JMSException {
checkRead();
try {
return streamReadDouble(message.getBodyBuffer());
} catch (IllegalStateException e) {
throw new MessageFormatException(e.getMessage());
} catch (IndexOutOfBoundsException e) {
throw new MessageEOFException("");
}
}
@Override
public String readString() throws JMSException {
checkRead();
try {
return streamReadString(message.getBodyBuffer());
} catch (IllegalStateException e) {
throw new MessageFormatException(e.getMessage());
} catch (IndexOutOfBoundsException e) {
throw new MessageEOFException("");
}
}
/**
* len here is used to control how many more bytes to read
*/
private int len = 0;
@Override
public int readBytes(final byte[] value) throws JMSException {
checkRead();
try {
Pair<Integer, Integer> pairRead = streamReadBytes(message.getBodyBuffer(), len, value);
len = pairRead.getA();
return pairRead.getB();
} catch (IllegalStateException e) {
throw new MessageFormatException(e.getMessage());
} catch (IndexOutOfBoundsException e) {
throw new MessageEOFException("");
}
}
@Override
public Object readObject() throws JMSException {
checkRead();
try {
return streamReadObject(message.getBodyBuffer());
} catch (IllegalStateException e) {
throw new MessageFormatException(e.getMessage());
} catch (IndexOutOfBoundsException e) {
throw new MessageEOFException("");
}
}
@Override
public void writeBoolean(final boolean value) throws JMSException {
checkWrite();
getBuffer().writeByte(DataConstants.BOOLEAN);
getBuffer().writeBoolean(value);
}
@Override
public void writeByte(final byte value) throws JMSException {
checkWrite();
getBuffer().writeByte(DataConstants.BYTE);
getBuffer().writeByte(value);
}
@Override
public void writeShort(final short value) throws JMSException {
checkWrite();
getBuffer().writeByte(DataConstants.SHORT);
getBuffer().writeShort(value);
}
@Override
public void writeChar(final char value) throws JMSException {
checkWrite();
getBuffer().writeByte(DataConstants.CHAR);
getBuffer().writeShort((short) value);
}
@Override
public void writeInt(final int value) throws JMSException {
checkWrite();
getBuffer().writeByte(DataConstants.INT);
getBuffer().writeInt(value);
}
@Override
public void writeLong(final long value) throws JMSException {
checkWrite();
getBuffer().writeByte(DataConstants.LONG);
getBuffer().writeLong(value);
}
@Override
public void writeFloat(final float value) throws JMSException {
checkWrite();
getBuffer().writeByte(DataConstants.FLOAT);
getBuffer().writeInt(Float.floatToIntBits(value));
}
@Override
public void writeDouble(final double value) throws JMSException {
checkWrite();
getBuffer().writeByte(DataConstants.DOUBLE);
getBuffer().writeLong(Double.doubleToLongBits(value));
}
@Override
public void writeString(final String value) throws JMSException {
checkWrite();
getBuffer().writeByte(DataConstants.STRING);
getBuffer().writeNullableString(value);
}
@Override
public void writeBytes(final byte[] value) throws JMSException {
checkWrite();
getBuffer().writeByte(DataConstants.BYTES);
getBuffer().writeInt(value.length);
getBuffer().writeBytes(value);
}
@Override
public void writeBytes(final byte[] value, final int offset, final int length) throws JMSException {
checkWrite();
getBuffer().writeByte(DataConstants.BYTES);
getBuffer().writeInt(length);
getBuffer().writeBytes(value, offset, length);
}
@Override
public void writeObject(final Object value) throws JMSException {
if (value instanceof String) {
writeString((String) value);
} else if (value instanceof Boolean) {
writeBoolean((Boolean) value);
} else if (value instanceof Byte) {
writeByte((Byte) value);
} else if (value instanceof Short) {
writeShort((Short) value);
} else if (value instanceof Integer) {
writeInt((Integer) value);
} else if (value instanceof Long) {
writeLong((Long) value);
} else if (value instanceof Float) {
writeFloat((Float) value);
} else if (value instanceof Double) {
writeDouble((Double) value);
} else if (value instanceof byte[]) {
writeBytes((byte[]) value);
} else if (value instanceof Character) {
writeChar((Character) value);
} else if (value == null) {
writeString(null);
} else {
throw new MessageFormatException("Invalid object type: " + value.getClass());
}
}
@Override
public void reset() throws JMSException {
if (!readOnly) {
readOnly = true;
}
getBuffer().resetReaderIndex();
}
// ActiveMQRAMessage overrides ----------------------------------------
@Override
public void clearBody() throws JMSException {
super.clearBody();
getBuffer().clear();
}
@Override
public void doBeforeSend() throws Exception {
reset();
}
private ActiveMQBuffer getBuffer() {
return message.getBodyBuffer();
}
@Override
public boolean isBodyAssignableTo(Class c) {
return false;
}
}
| |
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.updateSettings.impl;
import com.intellij.diagnostic.IdeErrorsDialog;
import com.intellij.ide.IdeBundle;
import com.intellij.ide.plugins.*;
import com.intellij.ide.util.PropertiesComponent;
import com.intellij.notification.*;
import com.intellij.openapi.application.*;
import com.intellij.openapi.application.ex.ApplicationInfoEx;
import com.intellij.openapi.diagnostic.IdeaLoggingEvent;
import com.intellij.openapi.diagnostic.LogUtil;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.extensions.PluginId;
import com.intellij.openapi.progress.EmptyProgressIndicator;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.progress.Task;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.util.*;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.util.Function;
import com.intellij.util.PlatformUtils;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.io.HttpRequests;
import com.intellij.util.io.URLUtil;
import com.intellij.util.ui.UIUtil;
import com.intellij.xml.util.XmlStringUtil;
import org.apache.http.client.utils.URIBuilder;
import org.jdom.JDOMException;
import org.jetbrains.annotations.Contract;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.event.HyperlinkEvent;
import java.io.File;
import java.io.IOException;
import java.net.URISyntaxException;
import java.net.URL;
import java.util.*;
/**
* See XML file by {@link ApplicationInfoEx#getUpdateUrls()} for reference.
*
* @author mike
* @since Oct 31, 2002
*/
public final class UpdateChecker {
  private static final Logger LOG = Logger.getInstance("#com.intellij.openapi.updateSettings.impl.UpdateChecker");

  public static final NotificationGroup NOTIFICATIONS =
    new NotificationGroup(IdeBundle.message("update.notifications.group"), NotificationDisplayType.STICKY_BALLOON, true);

  // PropertiesComponent key under which the anonymous installation UID is stored.
  @NonNls private static final String INSTALLATION_UID = "installation.uid";
  // File name holding the ids of plugins the user excluded from updates.
  @NonNls private static final String DISABLED_UPDATE = "disabled_update.txt";

  // Lazily loaded set of plugin ids excluded from update checks.
  private static Set<String> ourDisabledToUpdatePlugins;
  // Extra query parameters appended to the update-check URL (see addUpdateRequestParameter).
  private static final Map<String, String> ourAdditionalRequestOptions = ContainerUtil.newHashMap();
  // Downloaders already prepared in this session, keyed by plugin id, reused across checks.
  private static final Map<String, PluginDownloader> ourUpdatedPlugins = ContainerUtil.newHashMap();

  // Initialization-on-demand holder: defers reading the update URLs from ApplicationInfo until first use.
  private static class Holder {
    private static final String UPDATE_URL = ApplicationInfoEx.getInstanceEx().getUpdateUrls().getCheckingUrl();
    private static final String PATCHES_URL = ApplicationInfoEx.getInstanceEx().getUpdateUrls().getPatchesUrl();
  }

  private UpdateChecker() { }
private static String getUpdateUrl() {
String url = System.getProperty("idea.updates.url");
return url != null ? url : Holder.UPDATE_URL;
}
private static String getPatchesUrl() {
String url = System.getProperty("idea.patches.url");
return url != null ? url : Holder.PATCHES_URL;
}
/**
* For scheduled update checks.
*/
public static ActionCallback updateAndShowResult() {
final ActionCallback callback = new ActionCallback();
ApplicationManager.getApplication().executeOnPooledThread(new Runnable() {
@Override
public void run() {
doUpdateAndShowResult(null, true, false, UpdateSettings.getInstance(), null, callback);
}
});
return callback;
}
  /**
   * For manual update checks (Help | Check for Updates, Settings | Updates | Check Now)
   * (the latter action may pass customised update settings).
   */
  public static void updateAndShowResult(@Nullable Project project, @Nullable UpdateSettings customSettings) {
    final UpdateSettings settings = customSettings != null ? customSettings : UpdateSettings.getInstance();
    // Non-null custom settings mean the check was started from the Updates settings page.
    final boolean fromSettings = customSettings != null;

    ProgressManager.getInstance().run(new Task.Backgroundable(project, IdeBundle.message("updates.checking.progress"), true) {
      @Override
      public void run(@NotNull ProgressIndicator indicator) {
        // manualCheck=true: errors are shown as dialogs and results are always displayed;
        // the settings link is suppressed for settings-page checks (enableLink=!fromSettings).
        doUpdateAndShowResult(getProject(), !fromSettings, true, settings, indicator, null);
      }

      @Override
      public boolean isConditionalModal() {
        // Settings-page checks run modally; menu-triggered checks do not.
        return fromSettings;
      }

      @Override
      public boolean shouldStartInBackground() {
        return !fromSettings;
      }
    });
  }
  /**
   * Runs the actual check: platform first, then plugins (against the proposed platform build),
   * and finally shows the combined result on the EDT.
   *
   * @param project        project used for notifications/dialogs, or null for an application-level check
   * @param enableLink     whether the result dialogs should offer the settings link
   * @param manualCheck    true when the user explicitly asked for the check; errors are then shown
   *                       as dialogs and results are always displayed
   * @param updateSettings settings to check against (possibly customized by the caller)
   * @param indicator      progress indicator to report phases to, or null
   * @param callback       set to done after the results were shown, or null when nobody waits
   */
  private static void doUpdateAndShowResult(@Nullable final Project project,
                                            final boolean enableLink,
                                            final boolean manualCheck,
                                            @NotNull final UpdateSettings updateSettings,
                                            @Nullable ProgressIndicator indicator,
                                            @Nullable final ActionCallback callback) {
    // check platform update

    if (indicator != null) {
      indicator.setText(IdeBundle.message("updates.checking.platform"));
    }

    final CheckForUpdateResult result = checkPlatformUpdate(updateSettings);

    if (manualCheck && result.getState() == UpdateStrategy.State.LOADED) {
      // Record the successful check and the channels seen in the persistent settings.
      UpdateSettings settings = UpdateSettings.getInstance();
      settings.saveLastCheckedInfo();
      settings.setKnownChannelIds(result.getAllChannelsIds());
    }
    else if (result.getState() == UpdateStrategy.State.CONNECTION_ERROR) {
      Exception e = result.getError();
      if (e != null) LOG.debug(e);
      String cause = e != null ? e.getMessage() : "internal error";
      showErrorMessage(manualCheck, IdeBundle.message("updates.error.connection.failed", cause));
      // Abort: without platform info the plugin check is skipped as well.
      return;
    }

    // check plugins update (with regard to potential platform update)

    if (indicator != null) {
      indicator.setText(IdeBundle.message("updates.checking.plugins"));
    }

    final Collection<PluginDownloader> updatedPlugins;
    final Collection<IdeaPluginDescriptor> incompatiblePlugins;

    if (newChannelReady(result.getChannelToPropose())) {
      // A whole new channel will be proposed instead; skip the plugin pass entirely.
      updatedPlugins = null;
      incompatiblePlugins = null;
    }
    else {
      BuildNumber buildNumber = null;
      UpdateChannel updatedChannel = result.getUpdatedChannel();
      if (updatedChannel != null) {
        BuildInfo latestBuild = updatedChannel.getLatestBuild();
        if (latestBuild != null) {
          buildNumber = latestBuild.getNumber();
        }
      }
      // Incompatible plugins are collected only when checking against a concrete new build.
      incompatiblePlugins = buildNumber != null ? new HashSet<IdeaPluginDescriptor>() : null;
      updatedPlugins = checkPluginsUpdate(manualCheck, updateSettings, indicator, incompatiblePlugins, buildNumber);
    }

    // show result

    ApplicationManager.getApplication().invokeLater(new Runnable() {
      @Override
      public void run() {
        showUpdateResult(project, result, updateSettings, updatedPlugins, incompatiblePlugins, enableLink, manualCheck);
        if (callback != null) {
          callback.setDone();
        }
      }
    }, ModalityState.NON_MODAL);
  }
  /**
   * Downloads and parses the platform updates XML, then asks UpdateStrategy what (if anything)
   * to propose. Network and URI problems are returned as a CONNECTION_ERROR result, not thrown.
   */
  @NotNull
  private static CheckForUpdateResult checkPlatformUpdate(@NotNull UpdateSettings settings) {
    UpdatesInfo info;
    try {
      URIBuilder uriBuilder = new URIBuilder(getUpdateUrl());
      // Tracking/request parameters are only appended for non-file schemes
      // (file:// URLs are presumably used for local testing — confirm).
      if (!URLUtil.FILE_PROTOCOL.equals(uriBuilder.getScheme())) {
        prepareUpdateCheckArgs(uriBuilder);
      }
      String updateUrl = uriBuilder.toString();
      LogUtil.debug(LOG, "load update xml (UPDATE_URL='%s')", updateUrl);

      info = HttpRequests.request(updateUrl).forceHttps(settings.canUseSecureConnection()).connect(new HttpRequests.RequestProcessor<UpdatesInfo>() {
        @Override
        public UpdatesInfo process(@NotNull HttpRequests.Request request) throws IOException {
          try {
            return new UpdatesInfo(JDOMUtil.load(request.getReader()));
          }
          catch (JDOMException e) {
            // corrupted content, don't bother telling user
            LOG.info(e);
            return null;
          }
        }
      });
    }
    catch (URISyntaxException e) {
      return new CheckForUpdateResult(UpdateStrategy.State.CONNECTION_ERROR, e);
    }
    catch (IOException e) {
      return new CheckForUpdateResult(UpdateStrategy.State.CONNECTION_ERROR, e);
    }

    if (info == null) {
      // Either nothing was returned or the XML was corrupted (see above).
      return new CheckForUpdateResult(UpdateStrategy.State.NOTHING_LOADED);
    }

    ApplicationInfo appInfo = ApplicationInfo.getInstance();
    int majorVersion = Integer.parseInt(appInfo.getMajorVersion());
    UpdateStrategy strategy = new UpdateStrategy(majorVersion, appInfo.getBuild(), info, settings);
    return strategy.checkForUpdates();
  }
  /**
   * Checks all configured plugin repositories for updates of non-bundled (and previously imported)
   * plugins, optionally against a specific platform build.
   *
   * @param buildNumber target platform build when a platform update is pending, or null for the current build
   * @return prepared downloaders for updatable plugins, or null when there is nothing to update
   */
  private static Collection<PluginDownloader> checkPluginsUpdate(boolean manualCheck,
                                                                 @NotNull UpdateSettings updateSettings,
                                                                 @Nullable ProgressIndicator indicator,
                                                                 @Nullable Collection<IdeaPluginDescriptor> incompatiblePlugins,
                                                                 @Nullable BuildNumber buildNumber) {
    // collect installed plugins and plugins imported from a previous installation
    Map<PluginId, IdeaPluginDescriptor> updateable = ContainerUtil.newTroveMap();

    for (IdeaPluginDescriptor descriptor : PluginManagerCore.getPlugins()) {
      if (!descriptor.isBundled()) {
        updateable.put(descriptor.getPluginId(), descriptor);
      }
    }

    File onceInstalled = new File(PathManager.getConfigPath(), PluginManager.INSTALLED_TXT);
    if (onceInstalled.isFile()) {
      try {
        for (String line : FileUtil.loadLines(onceInstalled)) {
          PluginId id = PluginId.getId(line.trim());
          if (!updateable.containsKey(id)) {
            // Imported plugins have no local descriptor yet — tracked by id only.
            updateable.put(id, null);
          }
        }
      }
      catch (IOException e) {
        LOG.error(onceInstalled.getPath(), e);
      }

      // The import marker file is one-shot; remove it when the IDE exits.
      onceInstalled.deleteOnExit();
    }

    if (updateable.isEmpty()) return null;

    // check custom repositories and the main one for updates
    Map<PluginId, PluginDownloader> toUpdate = ContainerUtil.newTroveMap();

    List<String> hosts = RepositoryHelper.getPluginHosts();
    InstalledPluginsState state = InstalledPluginsState.getInstance();

    outer:
    for (String host : hosts) {
      try {
        // A null host apparently denotes the main (built-in) repository — see the error branch below.
        boolean forceHttps = host == null && updateSettings.canUseSecureConnection();
        List<IdeaPluginDescriptor> list = RepositoryHelper.loadPlugins(host, buildNumber, forceHttps, indicator);
        for (IdeaPluginDescriptor descriptor : list) {
          PluginId id = descriptor.getPluginId();
          if (updateable.containsKey(id)) {
            updateable.remove(id);
            state.onDescriptorDownload(descriptor);
            PluginDownloader downloader = PluginDownloader.createDownloader(descriptor, host, buildNumber);
            downloader.setForceHttps(forceHttps);
            checkAndPrepareToInstall(downloader, state, toUpdate, incompatiblePlugins, indicator);
            if (updateable.isEmpty()) {
              // Every candidate was handled; no need to query the remaining hosts.
              break outer;
            }
          }
        }
      }
      catch (IOException e) {
        LOG.debug(e);
        if (host != null) {
          // Custom-host failures are logged only; a main-repository failure is surfaced to the user.
          LOG.info("failed to load plugin descriptions from " + host + ": " + e.getMessage());
        }
        else {
          showErrorMessage(manualCheck, IdeBundle.message("updates.error.connection.failed", e.getMessage()));
        }
      }
    }

    return toUpdate.isEmpty() ? null : toUpdate.values();
  }
  /**
   * Decides whether the plugin served by {@code downloader} is a real update of the installed
   * plugin and, if so, records it in {@code toUpdate}; also collects installed plugins that
   * would be incompatible with the target build.
   *
   * @throws IOException if preparing the download fails
   */
  public static void checkAndPrepareToInstall(PluginDownloader downloader,
                                              InstalledPluginsState state,
                                              Map<PluginId, PluginDownloader> toUpdate,
                                              Collection<IdeaPluginDescriptor> incompatiblePlugins,
                                              @Nullable ProgressIndicator indicator) throws IOException {
    String pluginId = downloader.getPluginId();
    // Plugins the user disabled are never offered updates.
    if (PluginManagerCore.getDisabledPlugins().contains(pluginId)) return;

    String pluginVersion = downloader.getPluginVersion();
    IdeaPluginDescriptor installedPlugin = PluginManager.getPlugin(PluginId.getId(pluginId));
    if (installedPlugin == null || pluginVersion == null || PluginDownloader.compareVersionsSkipBroken(installedPlugin, pluginVersion) > 0) {
      IdeaPluginDescriptor descriptor;

      PluginDownloader oldDownloader = ourUpdatedPlugins.get(pluginId);
      if (oldDownloader == null || StringUtil.compareVersionNumbers(pluginVersion, oldDownloader.getPluginVersion()) > 0) {
        descriptor = downloader.getDescriptor();
        if (descriptor instanceof PluginNode && ((PluginNode)descriptor).isIncomplete()) {
          // An incomplete descriptor must be fetched before its metadata can be trusted.
          if (downloader.prepareToInstall(indicator == null ? new EmptyProgressIndicator() : indicator)) {
            descriptor = downloader.getDescriptor();
          }
          // NOTE(review): the downloader is cached only on this (incomplete-descriptor) path;
          // confirm whether complete descriptors were also meant to be cached in ourUpdatedPlugins.
          ourUpdatedPlugins.put(pluginId, downloader);
        }
      }
      else {
        // A downloader with an equal or newer version was already prepared earlier — reuse it.
        downloader = oldDownloader;
        descriptor = oldDownloader.getDescriptor();
      }

      if (descriptor != null &&
          !PluginManagerCore.isIncompatible(descriptor, downloader.getBuildNumber()) &&
          !state.wasUpdated(descriptor.getPluginId())) {
        toUpdate.put(PluginId.getId(pluginId), downloader);
      }
    }

    //collect plugins which were not updated and would be incompatible with new version
    if (incompatiblePlugins != null &&
        installedPlugin != null && installedPlugin.isEnabled() && !toUpdate.containsKey(installedPlugin.getPluginId()) &&
        PluginManagerCore.isIncompatible(installedPlugin, downloader.getBuildNumber())) {
      incompatiblePlugins.add(installedPlugin);
    }
  }
private static void showErrorMessage(boolean showDialog, final String message) {
LOG.info(message);
if (showDialog) {
UIUtil.invokeLaterIfNeeded(new Runnable() {
@Override
public void run() {
Messages.showErrorDialog(message, IdeBundle.message("updates.error.connection.title"));
}
});
}
}
@Contract("null -> false")
private static boolean newChannelReady(@Nullable UpdateChannel channelToPropose) {
return channelToPropose != null && channelToPropose.getLatestBuild() != null;
}
  /**
   * Presents the outcome of a check, in order of precedence: a new channel proposal, a platform
   * update, plugin updates, or (when {@code alwaysShowResults}) a "no updates" dialog.
   * When results are not forced, a notification is shown instead; clicking it opens the dialog.
   */
  private static void showUpdateResult(@Nullable final Project project,
                                       final CheckForUpdateResult checkForUpdateResult,
                                       final UpdateSettings updateSettings,
                                       final Collection<PluginDownloader> updatedPlugins,
                                       final Collection<IdeaPluginDescriptor> incompatiblePlugins,
                                       final boolean enableLink,
                                       final boolean alwaysShowResults) {
    final UpdateChannel channelToPropose = checkForUpdateResult.getChannelToPropose();
    final UpdateChannel updatedChannel = checkForUpdateResult.getUpdatedChannel();

    if (newChannelReady(channelToPropose)) {
      // A new channel (new product version) takes precedence over everything else.
      Runnable runnable = new Runnable() {
        @Override
        public void run() {
          new NewChannelDialog(channelToPropose).show();
        }
      };

      if (alwaysShowResults) {
        runnable.run();
      }
      else {
        String message = IdeBundle.message("updates.new.version.available", ApplicationNamesInfo.getInstance().getFullProductName());
        showNotification(project, message, false, runnable);
      }
    }
    else if (updatedChannel != null) {
      // Platform update within the current channel; the dialog also lists plugin updates.
      Runnable runnable = new Runnable() {
        @Override
        public void run() {
          new UpdateInfoDialog(updatedChannel, enableLink, updateSettings.canUseSecureConnection(), updatedPlugins, incompatiblePlugins).show();
        }
      };

      if (alwaysShowResults) {
        runnable.run();
      }
      else {
        String message = IdeBundle.message("updates.ready.message", ApplicationNamesInfo.getInstance().getFullProductName());
        showNotification(project, message, false, runnable);
      }
    }
    else if (updatedPlugins != null && !updatedPlugins.isEmpty()) {
      // Plugin-only updates.
      Runnable runnable = new Runnable() {
        @Override
        public void run() {
          new PluginUpdateInfoDialog(updatedPlugins, enableLink).show();
        }
      };

      if (alwaysShowResults) {
        runnable.run();
      }
      else {
        String plugins = StringUtil.join(updatedPlugins, new Function<PluginDownloader, String>() {
          @Override
          public String fun(PluginDownloader downloader) {
            return downloader.getPluginName();
          }
        }, ", ");
        String message = IdeBundle.message("updates.plugins.ready.message", updatedPlugins.size(), plugins);
        showNotification(project, message, false, runnable);
      }
    }
    else if (alwaysShowResults) {
      new NoUpdatesDialog(enableLink).show();
    }
  }
/**
 * Posts a balloon notification in the standard updates group.
 * When {@code runnable} is non-null, clicking any hyperlink in the notification
 * expires it and runs the action.
 */
private static void showNotification(@Nullable Project project, String message, boolean error, @Nullable final Runnable runnable) {
  NotificationListener listener = runnable == null ? null : new NotificationListener() {
    @Override
    public void hyperlinkUpdate(@NotNull Notification notification, @NotNull HyperlinkEvent event) {
      notification.expire();
      runnable.run();
    }
  };
  NotificationType type = error ? NotificationType.ERROR : NotificationType.INFORMATION;
  String title = IdeBundle.message("update.notifications.title");
  NOTIFICATIONS.createNotification(title, XmlStringUtil.wrapInHtml(message), type, listener).notify(project);
}
/**
 * Registers an extra name/value pair to be appended to every subsequent update-check
 * request URL (consumed by {@code prepareUpdateCheckArgs}). Re-registering a name
 * overwrites its previous value.
 */
public static void addUpdateRequestParameter(@NotNull String name, @NotNull String value) {
ourAdditionalRequestOptions.put(name, value);
}
/**
 * Adds the standard update-check query parameters (build, installation uid, os,
 * optional eap marker) plus any caller-registered extras to the given URI builder.
 * Empty extra values are sent as value-less parameters.
 */
private static void prepareUpdateCheckArgs(URIBuilder uriBuilder) {
  addUpdateRequestParameter("build", ApplicationInfo.getInstance().getBuild().asString());
  addUpdateRequestParameter("uid", getInstallationUID(PropertiesComponent.getInstance()));
  addUpdateRequestParameter("os", SystemInfo.OS_NAME + ' ' + SystemInfo.OS_VERSION);
  if (ApplicationInfoEx.getInstanceEx().isEAP()) {
    addUpdateRequestParameter("eap", "");
  }
  // Iterate entries directly instead of keySet() + get(): same behavior, one map
  // lookup per parameter instead of two. Fully-qualified to avoid relying on an
  // (unseen) java.util.Map import in this file's header.
  for (java.util.Map.Entry<String, String> entry : ourAdditionalRequestOptions.entrySet()) {
    String value = entry.getValue();
    uriBuilder.addParameter(entry.getKey(), StringUtil.isEmpty(value) ? null : value);
  }
}
/**
 * Returns a stable per-installation identifier. On Windows a machine-wide id under
 * %APPDATA%/JetBrains is preferred; otherwise the id is read from (or generated and
 * stored into) the given properties component.
 */
public static String getInstallationUID(final PropertiesComponent propertiesComponent) {
  if (SystemInfo.isWindows) {
    final String windowsUid = getInstallationUIDOnWindows(propertiesComponent);
    if (windowsUid != null) {
      return windowsUid;
    }
  }
  String uid = propertiesComponent.getValue(INSTALLATION_UID);
  if (uid != null) {
    return uid;
  }
  uid = generateUUID();
  propertiesComponent.setValue(INSTALLATION_UID, uid);
  return uid;
}
/**
 * Windows-specific installation id, shared across IDE installs of the same user:
 * reads (or creates) %APPDATA%/JetBrains/PermanentUserId. Returns null when APPDATA
 * is unset, the directory cannot be created, or file I/O fails (best-effort by design).
 */
@Nullable
private static String getInstallationUIDOnWindows(PropertiesComponent propertiesComponent) {
  String appdata = System.getenv("APPDATA");
  if (appdata == null) {
    return null;
  }
  File jetBrainsDir = new File(appdata, "JetBrains");
  if (!jetBrainsDir.exists() && !jetBrainsDir.mkdirs()) {
    return null;
  }
  File permanentIdFile = new File(jetBrainsDir, "PermanentUserId");
  try {
    if (permanentIdFile.exists()) {
      return FileUtil.loadFile(permanentIdFile).trim();
    }
    // Seed the machine-wide file from this install's existing uid if there is one,
    // so the identity stays stable across the migration.
    String uuid = propertiesComponent.getValue(INSTALLATION_UID);
    if (uuid == null) {
      uuid = generateUUID();
    }
    FileUtil.writeToFile(permanentIdFile, uuid);
    return uuid;
  }
  catch (IOException ignored) { }  // deliberately best-effort: fall back to the non-Windows path
  return null;
}
/**
 * Produces a random UUID string, or the empty string if UUID generation fails
 * (swallowed deliberately — an id is nice-to-have, never fatal).
 */
private static String generateUUID() {
  String uuid = "";
  try {
    uuid = UUID.randomUUID().toString();
  }
  catch (Exception ignored) { }
  catch (InternalError ignored) { }  // SecureRandom can fail with InternalError on some JVMs — TODO confirm
  return uuid;
}
/**
 * Downloads and stages the platform patch under a modal progress dialog.
 *
 * @throws IOException if the download or staging fails
 */
public static void installPlatformUpdate(final PatchInfo patch, final BuildNumber toBuild, final boolean forceHttps) throws IOException {
  final ThrowableComputable<Void, IOException> download = new ThrowableComputable<Void, IOException>() {
    @Override
    public Void compute() throws IOException {
      ProgressIndicator indicator = ProgressManager.getInstance().getProgressIndicator();
      downloadAndInstallPatch(patch, toBuild, forceHttps, indicator);
      return null;
    }
  };
  ProgressManager.getInstance().runProcessWithProgressSynchronously(
    download, IdeBundle.message("update.downloading.patch.progress.title"), true, null);
}
/**
 * Downloads the platform patch jar for the transition current-build -> {@code toBuild}
 * and copies it into the temp directory under a fixed, platform-prefixed name
 * (presumably picked up by the external updater on restart — TODO confirm).
 *
 * @param patch      patch descriptor supplying source build and OS suffix
 * @param toBuild    target build number
 * @param forceHttps whether the download must use a secure connection
 * @param indicator  progress indicator updated while downloading
 * @throws IOException if the download or the final copy fails
 */
private static void downloadAndInstallPatch(PatchInfo patch, BuildNumber toBuild, boolean forceHttps, final ProgressIndicator indicator) throws IOException {
  String productCode = ApplicationInfo.getInstance().getBuild().getProductCode();
  String fromBuildNumber = patch.getFromBuild().asStringWithoutProductCode();
  String toBuildNumber = toBuild.asStringWithoutProductCode();
  // Patch file names differ when a JDK is bundled with the product.
  String bundledJdk = "";
  String jdkMacRedist = System.getProperty("idea.java.redist");
  // String.contains() replaces the original lastIndexOf(...) >= 0 — identical semantics, clearer intent.
  if (jdkMacRedist != null && jdkMacRedist.contains("jdk-bundled")) {
    bundledJdk = "jdk-bundled".equals(jdkMacRedist) ? "-jdk-bundled" : "-custom-jdk-bundled";
  }
  String osSuffix = "-" + patch.getOSSuffix();
  String fileName = productCode + "-" + fromBuildNumber + "-" + toBuildNumber + "-patch" + bundledJdk + osSuffix + ".jar";
  String url = new URL(new URL(getPatchesUrl()), fileName).toString();
  File tempFile = HttpRequests.request(url).gzip(false).forceHttps(forceHttps).connect(new HttpRequests.RequestProcessor<File>() {
    @Override
    public File process(@NotNull HttpRequests.Request request) throws IOException {
      return request.saveToFile(FileUtil.createTempFile("ij.platform.", ".patch", true), indicator);
    }
  });
  String patchFileName = ("jetbrains.patch.jar." + PlatformUtils.getPlatformPrefix()).toLowerCase(Locale.ENGLISH);
  File patchFile = new File(FileUtil.getTempDirectory(), patchFileName);
  FileUtil.copy(tempFile, patchFile);
  FileUtil.delete(tempFile);
}
/**
 * Downloads and installs every updatable plugin, skipping those the user disabled
 * for updates. Per-plugin IO failures are logged and do not abort the loop.
 *
 * @return true if at least one plugin was installed
 */
public static boolean installPluginUpdates(@NotNull Collection<PluginDownloader> downloaders, @NotNull ProgressIndicator indicator) {
  boolean installed = false;
  final Set<String> disabledToUpdate = getDisabledToUpdatePlugins();
  for (PluginDownloader downloader : downloaders) {
    if (disabledToUpdate.contains(downloader.getPluginId())) {
      continue;
    }
    try {
      // Only install when the download succeeded AND produced a descriptor
      // (same two checks as before, folded into one condition).
      if (downloader.prepareToInstall(indicator) && downloader.getDescriptor() != null) {
        downloader.install();
        installed = true;
      }
    }
    catch (IOException e) {
      LOG.info(e);
    }
  }
  return installed;
}
/**
 * Lazily loads (outside unit-test mode) the set of plugin ids the user excluded
 * from updates, from the whitespace-separated DISABLED_UPDATE file in the config
 * directory. The cached set is returned on subsequent calls.
 */
public static Set<String> getDisabledToUpdatePlugins() {
  if (ourDisabledToUpdatePlugins != null) {
    return ourDisabledToUpdatePlugins;
  }
  ourDisabledToUpdatePlugins = new TreeSet<String>();
  if (ApplicationManager.getApplication().isUnitTestMode()) {
    return ourDisabledToUpdatePlugins;
  }
  try {
    final File file = new File(PathManager.getConfigPath(), DISABLED_UPDATE);
    if (file.isFile()) {
      for (String id : FileUtil.loadFile(file).split("[\\s]")) {
        if (id != null && id.trim().length() > 0) {
          ourDisabledToUpdatePlugins.add(id.trim());
        }
      }
    }
  }
  catch (IOException e) {
    LOG.error(e);
  }
  return ourDisabledToUpdatePlugins;
}
/**
 * Persists the disabled-for-update plugin ids back to the DISABLED_UPDATE file;
 * IO failures are logged, not propagated.
 */
public static void saveDisabledToUpdatePlugins() {
  try {
    final File pluginsFile = new File(PathManager.getConfigPath(), DISABLED_UPDATE);
    PluginManagerCore.savePluginsList(getDisabledToUpdatePlugins(), false, pluginsFile);
  }
  catch (IOException e) {
    LOG.error(e);
  }
}
// Latch: once a non-bundled plugin failure triggered an update check, don't trigger again.
private static boolean ourHasFailedPlugins = false;
/**
 * Called on a logged error: if it originates from a non-bundled plugin (and update
 * checks are enabled), kicks off a one-time update check in the hope a newer plugin
 * version fixes the failure.
 */
public static void checkForUpdate(IdeaLoggingEvent event) {
  if (ourHasFailedPlugins || !UpdateSettings.getInstance().isCheckNeeded()) {
    return;
  }
  final Throwable throwable = event.getThrowable();
  final IdeaPluginDescriptor pluginDescriptor = PluginManager.getPlugin(IdeErrorsDialog.findPluginId(throwable));
  if (pluginDescriptor != null && !pluginDescriptor.isBundled()) {
    ourHasFailedPlugins = true;
    updateAndShowResult();
  }
}
}
| |
/*
* Copyright 2005 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.guvnor.client.modeldriven.ui;
import java.util.List;
import org.drools.guvnor.client.common.DirtyableComposite;
import org.drools.guvnor.client.common.FormStylePopup;
import org.drools.guvnor.client.common.InfoPopup;
import org.drools.guvnor.client.common.SmallLabel;
import org.drools.guvnor.client.common.ValueChanged;
import org.drools.guvnor.client.messages.Constants;
import org.drools.ide.common.client.modeldriven.DropDownData;
import org.drools.ide.common.client.modeldriven.SuggestionCompletionEngine;
import org.drools.ide.common.client.modeldriven.brl.FactPattern;
import org.drools.ide.common.client.modeldriven.brl.BaseSingleFieldConstraint;
import org.drools.ide.common.client.modeldriven.brl.RuleModel;
import org.drools.ide.common.client.modeldriven.brl.SingleFieldConstraint;
import org.drools.ide.common.client.factconstraints.customform.CustomFormConfiguration;
import com.google.gwt.core.client.GWT;
import com.google.gwt.event.dom.client.ChangeEvent;
import com.google.gwt.event.dom.client.ChangeHandler;
import com.google.gwt.event.dom.client.ClickEvent;
import com.google.gwt.event.dom.client.ClickHandler;
import com.google.gwt.user.client.Command;
import com.google.gwt.user.client.Window;
import com.google.gwt.user.client.ui.Button;
import com.google.gwt.user.client.ui.HTML;
import com.google.gwt.user.client.ui.HasVerticalAlignment;
import com.google.gwt.user.client.ui.HorizontalPanel;
import com.google.gwt.user.client.ui.Image;
import com.google.gwt.user.client.ui.ListBox;
import com.google.gwt.user.client.ui.Panel;
import com.google.gwt.user.client.ui.SimplePanel;
import com.google.gwt.user.client.ui.TextBox;
import com.google.gwt.user.client.ui.Widget;
import org.drools.guvnor.client.packages.WorkingSetManager;
import org.drools.guvnor.client.resources.Images;
/**
* This is an editor for constraint values.
* How this behaves depends on the constraint value type.
* When the constraint value has no type, it will allow the user to choose the first time.
*/
public class ConstraintValueEditor extends DirtyableComposite {
// i18n message bundle and shared image bundle (GWT deferred binding).
private Constants constants = ((Constants) GWT.create( Constants.class ));
private static Images images = GWT.create( Images.class );
// Pattern/field whose constraint value is being edited.
private final FactPattern pattern;
private final String fieldName;
private final SuggestionCompletionEngine sce;
private final BaseSingleFieldConstraint constraint;
// Root container; cleared and repopulated by refreshEditor().
private final Panel panel;
private final RuleModel model;
private final RuleModeller modeller;
// True when the resolved field type is TYPE_NUMERIC.
private final boolean numericValue;
// Drop-down choices: enum values from the engine, or a synthetic true/false pair for booleans; null when neither applies.
private DropDownData dropDownData;
// Field type as resolved from the suggestion-completion engine in the constructor.
private String fieldType;
private boolean readOnly;
// Optional external callback fired whenever the user changes the value.
private Command onValueChangeCommand;
// True when dropDownData holds real enum data (as opposed to the synthetic boolean pair).
private boolean isDropDownDataEnum;
/**
 * Builds an editor for the given constraint and renders the initial widget.
 *
 * @param pattern   fact pattern the constraint belongs to
 * @param fieldName name of the constrained field
 * @param con       the constraint being edited
 * @param modeller  enclosing modeller, source of the completion engine and rule model
 * @param valueType declared value type — NOTE(review): immediately overwritten below by
 *                  the engine lookup, so the argument's value is never used; confirm
 *                  whether callers still need to pass it
 * @param readOnly  true to render non-editable labels instead of inputs
 */
public ConstraintValueEditor(FactPattern pattern,
String fieldName,
BaseSingleFieldConstraint con,
RuleModeller modeller,
String valueType,
boolean readOnly) {
this.pattern = pattern;
this.fieldName = fieldName;
this.sce = modeller.getSuggestionCompletions();
this.constraint = con;
this.panel = new SimplePanel();
this.model = modeller.getModel();
this.modeller = modeller;
// Resolve the authoritative field type from the engine (discards the parameter value).
valueType = sce.getFieldType( pattern.getFactType(),
fieldName );
this.fieldType = valueType;
this.numericValue = SuggestionCompletionEngine.TYPE_NUMERIC.equals( valueType );
this.readOnly = readOnly;
if ( SuggestionCompletionEngine.TYPE_BOOLEAN.equals( valueType ) ) {
// Booleans get a fixed true/false drop-down rather than engine-provided enums.
this.dropDownData = DropDownData.create( new String[]{"true", "false"} ); //NON-NLS
isDropDownDataEnum = false;
} else {
this.dropDownData = sce.getEnums( pattern,
fieldName );
isDropDownDataEnum = true;
}
refreshEditor();
initWidget( panel );
}
/** @return the constraint this editor operates on */
public BaseSingleFieldConstraint getConstraint() {
return constraint;
}
/**
 * Rebuilds the editor widget from the constraint's current value type:
 * undefined -> "choose a type" pencil icon; literal/enum -> custom form, enum
 * drop-down, date picker, or plain literal editor; retval/expression/variable/
 * template -> the corresponding specialized editor.
 */
private void refreshEditor() {
panel.clear();
Widget constraintWidget = null;
if ( constraint.getConstraintValueType() == SingleFieldConstraint.TYPE_UNDEFINED ) {
// No type chosen yet: show an edit icon that opens the type-choice popup.
Image clickme = new Image( images.edit() );
clickme.addClickHandler( new ClickHandler() {
public void onClick(ClickEvent event) {
showTypeChoice( (Widget) event.getSource(),
constraint );
}
} );
constraintWidget = clickme;
} else {
switch ( constraint.getConstraintValueType() ) {
case SingleFieldConstraint.TYPE_LITERAL :
case SingleFieldConstraint.TYPE_ENUM :
if ( this.constraint instanceof SingleFieldConstraint ) {
final SingleFieldConstraint con = (SingleFieldConstraint) this.constraint;
// A working-set custom form, when configured for this fact/field, takes precedence.
CustomFormConfiguration customFormConfiguration = WorkingSetManager.getInstance().getCustomFormConfiguration( modeller.getAsset().getMetaData().getPackageName(),
pattern.getFactType(),
fieldName );
if ( customFormConfiguration != null ) {
constraintWidget = new Button( con.getValue(),
new ClickHandler() {
public void onClick(ClickEvent event) {
showTypeChoice( (Widget) event.getSource(),
constraint );
}
} );
((Button)constraintWidget).setEnabled(!this.readOnly);
break;
}
}
if ( this.dropDownData != null ) {
// Enum (or boolean) values available: render a drop-down label.
constraintWidget = new EnumDropDownLabel( this.pattern,
this.fieldName,
this.sce,
this.constraint, !this.readOnly);
if (!this.readOnly){
((EnumDropDownLabel) constraintWidget).setOnValueChangeCommand( new Command() {
public void execute() {
executeOnValueChangeCommand();
}
} );
}
} else if ( SuggestionCompletionEngine.TYPE_DATE.equals( this.fieldType ) ) {
DatePickerLabel datePicker = new DatePickerLabel( constraint.getValue() );
// Seed the constraint with the picker's initial date string.
this.constraint.setValue( datePicker.getDateString() );
if ( !this.readOnly ) {
datePicker.addValueChanged( new ValueChanged() {
public void valueChanged(String newValue) {
executeOnValueChangeCommand();
constraint.setValue( newValue );
}
} );
constraintWidget = datePicker;
} else {
constraintWidget = new SmallLabel( this.constraint.getValue() );
}
} else {
// Plain literal: editable text widget, or a label when read-only.
if ( !this.readOnly ) {
constraintWidget = new DefaultLiteralEditor( this.constraint,
this.numericValue );
((DefaultLiteralEditor) constraintWidget).setOnValueChangeCommand( new Command() {
public void execute() {
executeOnValueChangeCommand();
}
} );
} else {
constraintWidget = new SmallLabel( this.constraint.getValue() );
}
}
break;
case SingleFieldConstraint.TYPE_RET_VALUE :
constraintWidget = returnValueEditor();
break;
case SingleFieldConstraint.TYPE_EXPR_BUILDER_VALUE :
constraintWidget = expressionEditor();
break;
case SingleFieldConstraint.TYPE_VARIABLE :
constraintWidget = variableEditor();
break;
case BaseSingleFieldConstraint.TYPE_TEMPLATE :
constraintWidget = new DefaultLiteralEditor( this.constraint,
false );
break;
default :
break;
}
}
panel.add( constraintWidget );
}
/**
 * Editor for a bound-variable value: a list box of in-scope variables whose type
 * matches the field type (or, for collections, the parametric element type).
 * Read-only mode renders a plain label instead.
 */
private Widget variableEditor() {
if ( this.readOnly ) {
return new SmallLabel( this.constraint.getValue() );
}
List<String> vars = this.model.getBoundVariablesInScope( this.constraint );
final ListBox box = new ListBox();
if ( this.constraint.getValue() == null ) {
box.addItem( constants.Choose() );
}
// j tracks the index of items actually added, so the current value can be preselected.
int j = 0;
for ( String var : vars ) {
FactPattern f = model.getBoundFact( var );
String fv = model.getBindingType( var );
if ( (f != null && f.getFactType().equals( this.fieldType )) || (fv != null && fv.equals( this.fieldType )) ) {
box.addItem( var );
if ( this.constraint.getValue() != null && this.constraint.getValue().equals( var ) ) {
box.setSelectedIndex( j );
}
j++;
} else {
// For collections, also offer variables bound to the parametric (element) type.
String factCollectionType = sce.getParametricFieldType( pattern.getFactType(),
this.fieldName );
if ( (f != null && factCollectionType != null && f.getFactType().equals( factCollectionType )) || (factCollectionType != null && factCollectionType.equals( fv )) ) {
box.addItem( var );
if ( this.constraint.getValue() != null && this.constraint.getValue().equals( var ) ) {
box.setSelectedIndex( j );
}
j++;
}
}
}
box.addChangeHandler( new ChangeHandler() {
public void onChange(ChangeEvent event) {
executeOnValueChangeCommand();
constraint.setValue( box.getItemText( box.getSelectedIndex() ) );
}
} );
return box;
}
/**
 * An editor for the retval "formula" (expression): a bound text box plus a hint
 * icon; read-only mode renders the current text as a label.
 */
private Widget returnValueEditor() {
TextBox box = new BoundTextBox( constraint );
if ( this.readOnly ) {
return new SmallLabel( box.getText() );
}
String msg = constants.FormulaEvaluateToAValue();
Image img = new Image( images.functionAssets() );
img.setTitle( msg );
box.setTitle( msg );
box.addChangeHandler( new ChangeHandler() {
public void onChange(ChangeEvent event) {
executeOnValueChangeCommand();
}
} );
Widget ed = widgets( img,
box );
return ed;
}
/**
 * Editor for an expression-builder value. Only valid for SingleFieldConstraint;
 * anything else is a programming error and throws.
 */
private Widget expressionEditor() {
if ( !(this.constraint instanceof SingleFieldConstraint) ) {
throw new IllegalArgumentException( "Expected SingleFieldConstraint, but " + constraint.getClass().getName() + " found." );
}
ExpressionBuilder builder = new ExpressionBuilder( this.modeller,
((SingleFieldConstraint) this.constraint).getExpressionValue(), this.readOnly );
builder.addExpressionTypeChangeHandler( new ExpressionTypeChangeHandler() {
public void onExpressionTypeChanged(ExpressionTypeChangeEvent event) {
// NOTE(review): debug leftover — prints to stdout on every type change; consider removing or logging.
System.out.println( "type changed: " + event.getOldType() + " -> " + event.getNewType() );
}
} );
builder.addOnModifiedCommand( new Command() {
public void execute() {
executeOnValueChangeCommand();
}
} );
Widget ed = widgets( new HTML( "&nbsp;&nbsp;" ),
builder );
return ed;
}
/**
 * Show a list of possibilities for the value type: a custom form when one is
 * configured for this fact/field, otherwise a popup offering literal, template key
 * (template assets only), bound variable (when one matches), formula, and
 * expression-editor choices.
 */
private void showTypeChoice(Widget w,
final BaseSingleFieldConstraint con) {
CustomFormConfiguration customFormConfiguration = WorkingSetManager.getInstance().getCustomFormConfiguration( modeller.getAsset().getMetaData().getPackageName(),
pattern.getFactType(),
fieldName );
if ( customFormConfiguration != null ) {
if ( !(con instanceof SingleFieldConstraint) ) {
Window.alert( "Unexpected constraint type!" );
return;
}
final CustomFormPopUp customFormPopUp = new CustomFormPopUp( images.newexWiz(),
constants.FieldValue(),
customFormConfiguration );
final SingleFieldConstraint sfc = (SingleFieldConstraint) con;
customFormPopUp.addOkButtonHandler( new ClickHandler() {
public void onClick(ClickEvent event) {
// Custom forms always store their result as a literal with an id.
sfc.setConstraintValueType( SingleFieldConstraint.TYPE_LITERAL );
sfc.setId( customFormPopUp.getFormId() );
sfc.setValue( customFormPopUp.getFormValue() );
doTypeChosen( customFormPopUp );
}
} );
customFormPopUp.show( sfc.getId(),
sfc.getValue() );
return;
}
final FormStylePopup form = new FormStylePopup( images.newexWiz(),
constants.FieldValue() );
Button lit = new Button( constants.LiteralValue() );
lit.addClickHandler( new ClickHandler() {
public void onClick(ClickEvent event) {
// Choose ENUM when real enum data is available, plain LITERAL otherwise.
con.setConstraintValueType( isDropDownDataEnum && dropDownData != null ? SingleFieldConstraint.TYPE_ENUM : SingleFieldConstraint.TYPE_LITERAL );
doTypeChosen( form );
}
} );
form.addAttribute( constants.LiteralValue() + ":",
widgets( lit,
new InfoPopup( constants.LiteralValue(),
constants.LiteralValTip() ) ) );
if ( modeller.isTemplate() ) {
String templateKeyLabel = constants.TemplateKey();
Button templateKeyButton = new Button( templateKeyLabel );
templateKeyButton.addClickHandler( new ClickHandler() {
public void onClick(ClickEvent event) {
con.setConstraintValueType( BaseSingleFieldConstraint.TYPE_TEMPLATE );
doTypeChosen( form );
}
} );
form.addAttribute( templateKeyLabel + ":",
widgets( templateKeyButton,
new InfoPopup( templateKeyLabel,
constants.LiteralValTip() ) ) );
}
form.addRow( new HTML( "<hr/>" ) );
form.addRow( new SmallLabel( constants.AdvancedOptions() ) );
// Only offer the bound-variable option when at least one in-scope variable could match.
if ( this.model.getBoundVariablesInScope( this.constraint ).size() > 0 || SuggestionCompletionEngine.TYPE_COLLECTION.equals( this.fieldType ) ) {
List<String> vars = this.model.getBoundFacts();
boolean foundABouncVariableThatMatches = false;
for ( String var : vars ) {
FactPattern f = model.getBoundFact( var );
String fieldConstraint = model.getBindingType( var );
if ( (f != null && f.getFactType() != null && f.getFactType().equals( this.fieldType )) || (this.fieldType != null && this.fieldType.equals( fieldConstraint )) ) {
foundABouncVariableThatMatches = true;
break;
} else {
// For collections, a variable bound to the parametric (element) type also matches.
String factCollectionType = sce.getParametricFieldType( pattern.getFactType(),
this.fieldName );
if ( (f != null && factCollectionType != null && f.getFactType().equals( factCollectionType )) || (factCollectionType != null && factCollectionType.equals( fieldConstraint )) ) {
foundABouncVariableThatMatches = true;
break;
}
}
}
if ( foundABouncVariableThatMatches ) {
Button variable = new Button( constants.BoundVariable() );
variable.addClickHandler( new ClickHandler() {
public void onClick(ClickEvent event) {
con.setConstraintValueType( SingleFieldConstraint.TYPE_VARIABLE );
doTypeChosen( form );
}
} );
form.addAttribute( constants.AVariable(),
widgets( variable,
new InfoPopup( constants.ABoundVariable(),
constants.BoundVariableTip() ) ) );
}
}
Button formula = new Button( constants.NewFormula() );
formula.addClickHandler( new ClickHandler() {
public void onClick(ClickEvent event) {
con.setConstraintValueType( SingleFieldConstraint.TYPE_RET_VALUE );
doTypeChosen( form );
}
} );
form.addAttribute( constants.AFormula() + ":",
widgets( formula,
new InfoPopup( constants.AFormula(),
constants.FormulaExpressionTip() ) ) );
Button expression = new Button( constants.ExpressionEditor() );
expression.addClickHandler( new ClickHandler() {
public void onClick(ClickEvent event) {
con.setConstraintValueType( SingleFieldConstraint.TYPE_EXPR_BUILDER_VALUE );
doTypeChosen( form );
}
} );
form.addAttribute( constants.ExpressionEditor() + ":",
widgets( expression,
new InfoPopup( constants.ExpressionEditor(),
constants.ExpressionEditor() ) ) );
form.show();
}
// Common epilogue for every type choice: notify listeners, re-render, close the popup.
private void doTypeChosen(final FormStylePopup form) {
executeOnValueChangeCommand();
refreshEditor();
form.hide();
}
// Lays out two widgets side by side, vertically centered, full width.
private Panel widgets(Widget left,
Widget right) {
HorizontalPanel panel = new HorizontalPanel();
panel.setVerticalAlignment( HasVerticalAlignment.ALIGN_MIDDLE );
panel.add( left );
panel.add( right );
panel.setWidth( "100%" );
return panel;
}
// Fires the externally registered change callback, if any.
private void executeOnValueChangeCommand() {
if ( this.onValueChangeCommand != null ) {
this.onValueChangeCommand.execute();
}
}
// NOTE(review): delegates straight to the superclass — override appears redundant; confirm before removing.
public boolean isDirty() {
return super.isDirty();
}
/** Registers a callback invoked whenever the user changes the constraint value. */
public void setOnValueChangeCommand(Command onValueChangeCommand) {
this.onValueChangeCommand = onValueChangeCommand;
}
}
| |
/*
* Copyright 2017 StreamSets Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.streamsets.datacollector.creation;
import com.streamsets.datacollector.config.AmazonEMRConfig;
import com.streamsets.datacollector.config.DeliveryGuaranteeChooserValues;
import com.streamsets.datacollector.config.ErrorHandlingChooserValues;
import com.streamsets.datacollector.config.ErrorRecordPolicy;
import com.streamsets.datacollector.config.ErrorRecordPolicyChooserValues;
import com.streamsets.datacollector.config.ExecutionModeChooserValues;
import com.streamsets.datacollector.config.LogLevel;
import com.streamsets.datacollector.config.LogLevelChooserValues;
import com.streamsets.datacollector.config.PipelineGroups;
import com.streamsets.datacollector.config.PipelineLifecycleStageChooserValues;
import com.streamsets.datacollector.config.PipelineState;
import com.streamsets.datacollector.config.PipelineStateChooserValues;
import com.streamsets.datacollector.config.PipelineTestStageChooserValues;
import com.streamsets.datacollector.config.PipelineWebhookConfig;
import com.streamsets.datacollector.config.StatsTargetChooserValues;
import com.streamsets.pipeline.api.ConfigDef;
import com.streamsets.pipeline.api.ConfigDefBean;
import com.streamsets.pipeline.api.ConfigGroups;
import com.streamsets.pipeline.api.DeliveryGuarantee;
import com.streamsets.pipeline.api.Dependency;
import com.streamsets.pipeline.api.ExecutionMode;
import com.streamsets.pipeline.api.GenerateResourceBundle;
import com.streamsets.pipeline.api.ListBeanModel;
import com.streamsets.pipeline.api.MultiValueChooserModel;
import com.streamsets.pipeline.api.Stage;
import com.streamsets.pipeline.api.StageDef;
import com.streamsets.pipeline.api.ValueChooserModel;
import java.util.Collections;
import java.util.List;
import java.util.Map;
// we are using the annotation for reference purposes only.
// the annotation processor does not work on this maven project
// we have a hardcoded 'datacollector-resource-bundles.json' file in resources
@GenerateResourceBundle
@StageDef(
version = PipelineConfigBean.VERSION,
label = "Pipeline",
upgrader = PipelineConfigUpgrader.class,
onlineHelpRefUrl = "not applicable"
)
@ConfigGroups(PipelineGroups.class)
public class PipelineConfigBean implements Stage {
public static final int VERSION = 12;
public static final String DEFAULT_STATS_AGGREGATOR_LIBRARY_NAME = "streamsets-datacollector-basic-lib";
public static final String DEFAULT_STATS_AGGREGATOR_STAGE_NAME =
"com_streamsets_pipeline_stage_destination_devnull_StatsDpmDirectlyDTarget";
public static final String DEFAULT_STATS_AGGREGATOR_STAGE_VERSION = "1";
public static final String STATS_DPM_DIRECTLY_TARGET = DEFAULT_STATS_AGGREGATOR_LIBRARY_NAME + "::" +
DEFAULT_STATS_AGGREGATOR_STAGE_NAME + "::" + DEFAULT_STATS_AGGREGATOR_STAGE_VERSION;
public static final String STATS_AGGREGATOR_DEFAULT = "streamsets-datacollector-basic-lib" +
"::com_streamsets_pipeline_stage_destination_devnull_StatsNullDTarget::1";
private static final String TRASH_TARGET = "streamsets-datacollector-basic-lib" +
"::com_streamsets_pipeline_stage_destination_devnull_ToErrorNullDTarget::1";
public static final String DEFAULT_TEST_ORIGIN_LIBRARY_NAME = "streamsets-datacollector-dev-lib";
public static final String DEFAULT_TEST_ORIGIN_STAGE_NAME =
"com_streamsets_pipeline_stage_devtest_rawdata_RawDataDSource";
public static final String DEFAULT_TEST_ORIGIN_STAGE_VERSION = "3";
public static final String RAW_DATA_ORIGIN = DEFAULT_TEST_ORIGIN_LIBRARY_NAME + "::" +
DEFAULT_TEST_ORIGIN_STAGE_NAME + "::" + DEFAULT_TEST_ORIGIN_STAGE_VERSION;
public static final String EDGE_HTTP_URL_DEFAULT = "http://localhost:18633";
@ConfigDef(
required = true,
type = ConfigDef.Type.MODEL,
label = "Execution Mode",
defaultValue= "STANDALONE",
displayPosition = 10
)
@ValueChooserModel(ExecutionModeChooserValues.class)
public ExecutionMode executionMode;
@ConfigDef(
required = true,
type = ConfigDef.Type.STRING,
label = "Data Collector Edge URL",
defaultValue = EDGE_HTTP_URL_DEFAULT,
displayPosition = 15,
dependsOn = "executionMode",
triggeredByValue = {"EDGE"}
)
public String edgeHttpUrl = EDGE_HTTP_URL_DEFAULT;
@ConfigDef(
required = true,
type = ConfigDef.Type.MODEL,
defaultValue="AT_LEAST_ONCE",
label = "Delivery Guarantee",
displayPosition = 20,
dependsOn = "executionMode",
triggeredByValue = {"STANDALONE", "CLUSTER_BATCH", "CLUSTER_YARN_STREAMING", "CLUSTER_MESOS_STREAMING", "EDGE"}
)
@ValueChooserModel(DeliveryGuaranteeChooserValues.class)
public DeliveryGuarantee deliveryGuarantee;
@ConfigDef(
required = false,
type = ConfigDef.Type.MODEL,
label = "Test Origin",
description = "Stage used for testing in preview mode.",
defaultValue = RAW_DATA_ORIGIN,
displayPosition = 21,
dependsOn = "executionMode",
triggeredByValue = {"STANDALONE", "CLUSTER_BATCH", "CLUSTER_YARN_STREAMING", "CLUSTER_MESOS_STREAMING", "EDGE"}
)
@ValueChooserModel(PipelineTestStageChooserValues.class)
public String testOriginStage;
@ConfigDef(
required = false,
type = ConfigDef.Type.MODEL,
label = "Start Event",
description = "Stage that should handle pipeline start event.",
defaultValue = TRASH_TARGET,
displayPosition = 23,
dependsOn = "executionMode",
triggeredByValue = {"STANDALONE", "CLUSTER_BATCH", "CLUSTER_YARN_STREAMING", "CLUSTER_MESOS_STREAMING"}
)
@ValueChooserModel(PipelineLifecycleStageChooserValues.class)
public String startEventStage;
@ConfigDef(
required = false,
type = ConfigDef.Type.MODEL,
label = "Stop Event",
description = "Stage that should handle pipeline stop event.",
defaultValue = TRASH_TARGET,
displayPosition = 26,
dependsOn = "executionMode",
triggeredByValue = {"STANDALONE", "CLUSTER_BATCH", "CLUSTER_YARN_STREAMING", "CLUSTER_MESOS_STREAMING"}
)
@ValueChooserModel(PipelineLifecycleStageChooserValues.class)
public String stopEventStage;
@ConfigDef(
required = true,
type = ConfigDef.Type.BOOLEAN,
defaultValue = "true",
label = "Retry Pipeline on Error",
displayPosition = 30,
dependsOn = "executionMode",
triggeredByValue = {"STANDALONE", "CLUSTER_BATCH", "CLUSTER_YARN_STREAMING", "CLUSTER_MESOS_STREAMING"}
)
public boolean shouldRetry;
@ConfigDef(
required = false,
type = ConfigDef.Type.NUMBER,
defaultValue = "-1",
label = "Retry Attempts",
dependsOn = "shouldRetry",
triggeredByValue = "true",
description = "Max no of retries. To retry indefinitely, use -1. The wait time between retries starts at 15 seconds"
+ " and doubles until reaching 5 minutes.",
displayPosition = 30
)
public int retryAttempts;
@ConfigDef(
required = false,
type = ConfigDef.Type.MODEL,
defaultValue = "[\"RUN_ERROR\", \"STOPPED\", \"FINISHED\"]",
label = "Notify on Pipeline State Changes",
description = "Notifies via email when pipeline gets to the specified states",
displayPosition = 75,
group = "NOTIFICATIONS",
dependsOn = "executionMode",
triggeredByValue = {"STANDALONE", "CLUSTER_BATCH", "CLUSTER_YARN_STREAMING", "CLUSTER_MESOS_STREAMING"}
)
@MultiValueChooserModel(PipelineStateChooserValues.class)
public List<PipelineState> notifyOnStates;
@ConfigDef(
required = false,
type = ConfigDef.Type.LIST,
defaultValue = "[]",
label = "Email IDs",
description = "Email Addresses",
displayPosition = 76,
group = "NOTIFICATIONS",
dependsOn = "executionMode",
triggeredByValue = {"STANDALONE", "CLUSTER_BATCH", "CLUSTER_YARN_STREAMING", "CLUSTER_MESOS_STREAMING"}
)
public List<String> emailIDs;
// Pipeline parameters map (field historically named "constants"); empty by default.
@ConfigDef(
required = false,
defaultValue = "{}",
type = ConfigDef.Type.MAP,
label = "Parameters",
displayPosition = 80,
group = "PARAMETERS"
)
public Map<String, Object> constants;
// Stage that receives error records; shown for all listed execution modes including EDGE.
@ConfigDef(
required = true,
type = ConfigDef.Type.MODEL,
label = "Error Records",
displayPosition = 90,
group = "BAD_RECORDS",
dependsOn = "executionMode",
triggeredByValue = {"STANDALONE", "CLUSTER_BATCH", "CLUSTER_YARN_STREAMING", "CLUSTER_MESOS_STREAMING", "EDGE"}
)
@ValueChooserModel(ErrorHandlingChooserValues.class)
public String badRecordsHandling;
// Which variant of a failed record (original vs. stage output) is sent to the error stream.
@ConfigDef(
required = true,
type = ConfigDef.Type.MODEL,
defaultValue="ORIGINAL_RECORD",
label = "Error Record Policy",
description = "Determines which variation of the record is sent to error.",
displayPosition = 93,
group = "BAD_RECORDS",
dependsOn = "executionMode",
triggeredByValue = {"STANDALONE", "CLUSTER_BATCH", "CLUSTER_YARN_STREAMING", "CLUSTER_MESOS_STREAMING", "EDGE"}
)
@ValueChooserModel(ErrorRecordPolicyChooserValues.class)
public ErrorRecordPolicy errorRecordPolicy = ErrorRecordPolicy.ORIGINAL_RECORD;
// Target stage for pipeline statistics; defaults to writing to DPM directly.
@ConfigDef(
required = false,
type = ConfigDef.Type.MODEL,
label = "Statistics Aggregator",
defaultValue = STATS_DPM_DIRECTLY_TARGET,
displayPosition = 95,
group = "STATS",
dependsOn = "executionMode",
triggeredByValue = {"STANDALONE", "CLUSTER_BATCH", "CLUSTER_YARN_STREAMING", "CLUSTER_MESOS_STREAMING", "EDGE"}
)
@ValueChooserModel(StatsTargetChooserValues.class)
public String statsAggregatorStage = STATS_DPM_DIRECTLY_TARGET;
// Cluster-streaming only: 0 means "one worker per Kafka partition".
@ConfigDef(
required = true,
type = ConfigDef.Type.NUMBER,
label = "Worker Count",
description = "Number of workers. 0 to start as many workers as Kafka partitions for topic.",
defaultValue = "0",
min = 0,
displayPosition = 100,
group = "CLUSTER",
dependsOn = "executionMode",
triggeredByValue = {"CLUSTER_YARN_STREAMING"}
)
public long workerCount;
// Per-worker memory allocation in MB for cluster execution modes.
// NOTE(review): displayPosition 150 collides with "Checkpoint Configuration Directory"
// below; likely intended to be between 100 and 110 — confirm against the UI ordering.
@ConfigDef(
required = true,
type = ConfigDef.Type.NUMBER,
label = "Worker Memory (MB)",
defaultValue = "2048",
displayPosition = 150,
group = "CLUSTER",
dependsOn = "executionMode",
triggeredByValue = {"CLUSTER_BATCH", "CLUSTER_YARN_STREAMING", "EMR_BATCH"}
)
public long clusterSlaveMemory;
// JVM options passed to each cluster worker process.
@ConfigDef(
required = true,
type = ConfigDef.Type.STRING,
label = "Worker Java Options",
defaultValue = "-XX:+UseConcMarkSweepGC -XX:+UseParNewGC -Dlog4j.debug",
description = "Add properties as needed. Changes to default settings are not recommended.",
displayPosition = 110,
group = "CLUSTER",
dependsOn = "executionMode",
triggeredByValue = {"CLUSTER_BATCH", "CLUSTER_YARN_STREAMING", "EMR_BATCH"}
)
public String clusterSlaveJavaOpts;
// Extra environment variables injected into the cluster launcher process.
@ConfigDef(
required = false,
type = ConfigDef.Type.MAP,
defaultValue = "{}",
label = "Launcher ENV",
description = "Sets additional environment variables for the cluster launcher",
displayPosition = 120,
group = "CLUSTER",
dependsOn = "executionMode",
triggeredByValue = {"CLUSTER_BATCH", "CLUSTER_YARN_STREAMING"}
)
public Map<String, String> clusterLauncherEnv;
// Mesos-only: endpoint of the dispatcher service that launches the framework.
@ConfigDef(
required = true,
type = ConfigDef.Type.STRING,
label = "Mesos Dispatcher URL",
description = "URL for service which launches Mesos framework",
displayPosition = 130,
group = "CLUSTER",
dependsOn = "executionMode",
triggeredByValue = {"CLUSTER_MESOS_STREAMING"}
)
public String mesosDispatcherURL;
// EMR-only log verbosity for the launched job.
@ConfigDef(
required = true,
type = ConfigDef.Type.MODEL,
defaultValue = "INFO",
label = "Log level",
displayPosition = 140,
group = "CLUSTER",
dependsOn = "executionMode",
triggeredByValue = {"EMR_BATCH"}
)
@ValueChooserModel(LogLevelChooserValues.class)
public LogLevel logLevel;
// Mesos-only: directory (or symlink) holding core-site.xml / hdfs-site.xml for checkpointing.
@ConfigDef(
required = true,
type = ConfigDef.Type.STRING,
label = "Checkpoint Configuration Directory",
description = "An SDC resource directory or symbolic link with HDFS/S3 configuration files core-site.xml and hdfs-site.xml",
displayPosition = 150,
group = "CLUSTER",
dependsOn = "executionMode",
triggeredByValue = {"CLUSTER_MESOS_STREAMING"}
)
public String hdfsS3ConfDir;
// Throttle on pipeline intake; 0 (the default) means unlimited.
@ConfigDef(
required = false,
type = ConfigDef.Type.NUMBER,
defaultValue = "0",
label = "Rate Limit (records / sec)",
description = "Maximum number of records per second that should be accepted into the pipeline. " +
"Rate is not limited if this is not set, or is set to 0",
displayPosition = 180,
dependsOn = "executionMode",
triggeredByValue = {"STANDALONE", "CLUSTER_BATCH", "CLUSTER_YARN_STREAMING", "CLUSTER_MESOS_STREAMING"}
)
public long rateLimit;
// Cap on concurrent pipeline runners; 0 means no limit.
@ConfigDef(
required = false,
type = ConfigDef.Type.NUMBER,
defaultValue = "0",
label = "Max runners",
description = "Maximum number of runners that should be created for this pipeline. Use 0 to not impose limit.",
min = 0,
displayPosition = 190,
dependsOn = "executionMode",
triggeredByValue = {"STANDALONE", "CLUSTER_BATCH", "CLUSTER_YARN_STREAMING", "CLUSTER_MESOS_STREAMING"}
)
public int maxRunners = 0;
// Standalone-only: capture unprocessed records as a partial snapshot when the
// pipeline dies with an unrecoverable exception.
// FIX: the two description fragments were concatenated without a separating
// space, rendering as "...will attempt to createpartial snapshot..." in the UI.
@ConfigDef(
    required = true,
    type = ConfigDef.Type.BOOLEAN,
    defaultValue = "true",
    label = "Create Failure Snapshot",
    description = "When selected and the pipeline execution fails with unrecoverable exception, SDC will attempt to create " +
        "partial snapshot with records that have not been processed yet.",
    dependencies = @Dependency(
        configName = "executionMode", triggeredByValues = "STANDALONE"
    ),
    displayPosition = 200
)
public boolean shouldCreateFailureSnapshot;
// Standalone-only: run an empty batch through idle runners after this many
// seconds so event/time-driven stage logic still fires; -1 disables it.
// NOTE(review): the field name has a typo ("TIme"). It is a public config
// field, so renaming it would break existing pipeline definitions that
// reference it by name — left as-is deliberately.
@ConfigDef(
required = true,
type = ConfigDef.Type.NUMBER,
defaultValue = "60",
label = "Runner Idle Time (sec)",
description = "When pipeline runners are idle for at least this time, run an empty batch through the runner to" +
" process any events or other time-driven functionality. Value -1 will disable this functionality completely.",
dependencies = @Dependency(
configName = "executionMode", triggeredByValues = "STANDALONE"
),
displayPosition = 210
)
public long runnerIdleTIme = 60;
// Webhook notifications fired on pipeline state changes; empty list by default.
@ConfigDef(required = true,
type = ConfigDef.Type.MODEL,
defaultValue = "[]",
label = "Webhooks",
description = "Webhooks",
displayPosition = 210,
group = "NOTIFICATIONS",
dependsOn = "executionMode",
triggeredByValue = {"STANDALONE", "CLUSTER_BATCH", "CLUSTER_YARN_STREAMING", "CLUSTER_MESOS_STREAMING"}
)
@ListBeanModel
public List<PipelineWebhookConfig> webhookConfigs = Collections.emptyList();
// Additional key/value pairs forwarded to spark-submit as "--conf <key>=<value>".
@ConfigDef(
required = false,
type = ConfigDef.Type.MAP,
defaultValue = "{}",
label = "Extra Spark Configuration",
description = "Additional Spark Configuration to pass to the spark-submit script, the parameters will be passed " +
"as --conf <key>=<value>",
displayPosition = 220,
group = "CLUSTER",
dependsOn = "executionMode",
triggeredByValue = {"CLUSTER_BATCH", "CLUSTER_YARN_STREAMING", "BATCH", "STREAMING"}
)
public Map<String, String> sparkConfigs;
// Nested Amazon EMR configuration bean (its own @ConfigDef fields live in AmazonEMRConfig).
@ConfigDefBean
public AmazonEMRConfig amazonEMRConfig;
@Override
public List<ConfigIssue> init(Info info, Context context) {
    // This bean performs no validation of its own, so it never reports issues.
    final List<ConfigIssue> noIssues = Collections.emptyList();
    return noIssues;
}
@Override
public void destroy() {
// Intentionally empty: init() acquires no resources, so there is nothing to release.
}
}
| |
/*
* #%L
* SparkCommerce Open Admin Platform
* %%
* Copyright (C) 2009 - 2013 Spark Commerce
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package org.sparkcommerce.openadmin.server.service.persistence.datasource;
import org.apache.commons.pool.impl.GenericObjectPool;
import org.springframework.util.Assert;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.sql.Array;
import java.sql.Blob;
import java.sql.CallableStatement;
import java.sql.Clob;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.NClob;
import java.sql.PreparedStatement;
import java.sql.SQLClientInfoException;
import java.sql.SQLException;
import java.sql.SQLWarning;
import java.sql.SQLXML;
import java.sql.Savepoint;
import java.sql.Statement;
import java.sql.Struct;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.Executor;
/**
 * Pooled wrapper around a physical JDBC {@link Connection}.
 *
 * <p>Every JDBC call is forwarded to the wrapped {@code delegate}. The one
 * exception is {@link #close()}, which returns this wrapper to its owning
 * commons-pool {@link GenericObjectPool} instead of closing the underlying
 * connection, so the physical connection can be reused.
 *
 * <p>The JDBC 4.1 additions ({@code setSchema}, {@code getSchema},
 * {@code abort}, {@code setNetworkTimeout}, {@code getNetworkTimeout}) are
 * invoked reflectively so this class still loads against pre-JDBC-4.1
 * drivers; when the driver does not implement them the call silently
 * becomes a no-op (see {@link #invokeDelegateMethod}).
 */
public class SandBoxConnection implements Connection {

    /** The physical connection that all JDBC calls are forwarded to. */
    private Connection delegate;

    /** The pool that owns this wrapper; {@link #close()} returns the wrapper to it. */
    private GenericObjectPool connectionPool;

    public SandBoxConnection(Connection delegate, GenericObjectPool connectionPool) {
        this.delegate = delegate;
        this.connectionPool = connectionPool;
    }

    @Override
    @SuppressWarnings("unchecked")
    public <T> T unwrap(Class<T> iface) throws SQLException {
        Assert.notNull(iface, "Interface argument must not be null");
        if (!Connection.class.equals(iface)) {
            // FIX: the original message was missing the terminating "]".
            throw new SQLException("Connection of type [" + getClass().getName() +
                    "] can only be unwrapped as [java.sql.Connection], not as [" + iface.getName() + "]");
        }
        return (T) delegate;
    }

    @Override
    public boolean isWrapperFor(Class<?> iface) throws SQLException {
        return Connection.class.equals(iface);
    }

    @Override
    public Statement createStatement() throws SQLException {
        return delegate.createStatement();
    }

    @Override
    public PreparedStatement prepareStatement(String sql) throws SQLException {
        return delegate.prepareStatement(sql);
    }

    @Override
    public CallableStatement prepareCall(String sql) throws SQLException {
        return delegate.prepareCall(sql);
    }

    @Override
    public String nativeSQL(String sql) throws SQLException {
        return delegate.nativeSQL(sql);
    }

    @Override
    public void setAutoCommit(boolean autoCommit) throws SQLException {
        delegate.setAutoCommit(autoCommit);
    }

    @Override
    public boolean getAutoCommit() throws SQLException {
        return delegate.getAutoCommit();
    }

    @Override
    public void commit() throws SQLException {
        delegate.commit();
    }

    @Override
    public void rollback() throws SQLException {
        delegate.rollback();
    }

    /**
     * Returns this wrapper to the pool rather than closing the physical
     * connection. Any pool failure is rethrown as a {@link SQLException}.
     */
    @Override
    public void close() throws SQLException {
        try {
            connectionPool.returnObject(this);
        } catch (Exception e) {
            throw new SQLException(e);
        }
    }

    @Override
    public boolean isClosed() throws SQLException {
        return delegate.isClosed();
    }

    @Override
    public DatabaseMetaData getMetaData() throws SQLException {
        return delegate.getMetaData();
    }

    @Override
    public void setReadOnly(boolean readOnly) throws SQLException {
        delegate.setReadOnly(readOnly);
    }

    @Override
    public boolean isReadOnly() throws SQLException {
        return delegate.isReadOnly();
    }

    @Override
    public void setCatalog(String catalog) throws SQLException {
        delegate.setCatalog(catalog);
    }

    @Override
    public String getCatalog() throws SQLException {
        return delegate.getCatalog();
    }

    @Override
    public void setTransactionIsolation(int level) throws SQLException {
        delegate.setTransactionIsolation(level);
    }

    @Override
    public int getTransactionIsolation() throws SQLException {
        return delegate.getTransactionIsolation();
    }

    @Override
    public SQLWarning getWarnings() throws SQLException {
        return delegate.getWarnings();
    }

    @Override
    public void clearWarnings() throws SQLException {
        delegate.clearWarnings();
    }

    @Override
    public Statement createStatement(int resultSetType, int resultSetConcurrency)
            throws SQLException {
        return delegate.createStatement(resultSetType, resultSetConcurrency);
    }

    @Override
    public PreparedStatement prepareStatement(String sql, int resultSetType,
            int resultSetConcurrency) throws SQLException {
        return delegate.prepareStatement(sql, resultSetType,
                resultSetConcurrency);
    }

    @Override
    public CallableStatement prepareCall(String sql, int resultSetType,
            int resultSetConcurrency) throws SQLException {
        return delegate.prepareCall(sql, resultSetType, resultSetConcurrency);
    }

    @Override
    public Map<String, Class<?>> getTypeMap() throws SQLException {
        return delegate.getTypeMap();
    }

    @Override
    public void setTypeMap(Map<String, Class<?>> map) throws SQLException {
        delegate.setTypeMap(map);
    }

    @Override
    public void setHoldability(int holdability) throws SQLException {
        delegate.setHoldability(holdability);
    }

    @Override
    public int getHoldability() throws SQLException {
        return delegate.getHoldability();
    }

    @Override
    public Savepoint setSavepoint() throws SQLException {
        return delegate.setSavepoint();
    }

    @Override
    public Savepoint setSavepoint(String name) throws SQLException {
        return delegate.setSavepoint(name);
    }

    @Override
    public void rollback(Savepoint savepoint) throws SQLException {
        delegate.rollback(savepoint);
    }

    @Override
    public void releaseSavepoint(Savepoint savepoint) throws SQLException {
        delegate.releaseSavepoint(savepoint);
    }

    @Override
    public Statement createStatement(int resultSetType,
            int resultSetConcurrency, int resultSetHoldability)
            throws SQLException {
        return delegate.createStatement(resultSetType, resultSetConcurrency,
                resultSetHoldability);
    }

    @Override
    public PreparedStatement prepareStatement(String sql, int resultSetType,
            int resultSetConcurrency, int resultSetHoldability)
            throws SQLException {
        return delegate.prepareStatement(sql, resultSetType,
                resultSetConcurrency, resultSetHoldability);
    }

    @Override
    public CallableStatement prepareCall(String sql, int resultSetType,
            int resultSetConcurrency, int resultSetHoldability)
            throws SQLException {
        return delegate.prepareCall(sql, resultSetType, resultSetConcurrency,
                resultSetHoldability);
    }

    @Override
    public PreparedStatement prepareStatement(String sql, int autoGeneratedKeys)
            throws SQLException {
        return delegate.prepareStatement(sql, autoGeneratedKeys);
    }

    @Override
    public PreparedStatement prepareStatement(String sql, int[] columnIndexes)
            throws SQLException {
        return delegate.prepareStatement(sql, columnIndexes);
    }

    @Override
    public PreparedStatement prepareStatement(String sql, String[] columnNames)
            throws SQLException {
        return delegate.prepareStatement(sql, columnNames);
    }

    @Override
    public Clob createClob() throws SQLException {
        return delegate.createClob();
    }

    @Override
    public Blob createBlob() throws SQLException {
        return delegate.createBlob();
    }

    @Override
    public NClob createNClob() throws SQLException {
        return delegate.createNClob();
    }

    @Override
    public SQLXML createSQLXML() throws SQLException {
        return delegate.createSQLXML();
    }

    @Override
    public boolean isValid(int timeout) throws SQLException {
        return delegate.isValid(timeout);
    }

    @Override
    public void setClientInfo(String name, String value)
            throws SQLClientInfoException {
        delegate.setClientInfo(name, value);
    }

    @Override
    public void setClientInfo(Properties properties)
            throws SQLClientInfoException {
        delegate.setClientInfo(properties);
    }

    @Override
    public String getClientInfo(String name) throws SQLException {
        return delegate.getClientInfo(name);
    }

    @Override
    public Properties getClientInfo() throws SQLException {
        return delegate.getClientInfo();
    }

    @Override
    public Array createArrayOf(String typeName, Object[] elements)
            throws SQLException {
        return delegate.createArrayOf(typeName, elements);
    }

    @Override
    public Struct createStruct(String typeName, Object[] attributes)
            throws SQLException {
        return delegate.createStruct(typeName, attributes);
    }

    /**
     * Reflectively invokes a JDBC 4.1 method on the delegate, preserving the
     * original best-effort behavior: if the driver predates JDBC 4.1 (or the
     * method is otherwise inaccessible) the call is silently ignored and
     * {@code null} is returned. Consolidates five identical copies of this
     * catch-and-ignore boilerplate.
     *
     * @param methodName     name of the delegate method to invoke
     * @param parameterTypes formal parameter types of the method
     * @param args           actual arguments to pass
     * @return the method's return value, or {@code null} if the invocation
     *         could not be performed (or the method returned {@code null})
     */
    private Object invokeDelegateMethod(String methodName, Class<?>[] parameterTypes, Object[] args) {
        try {
            Method method = delegate.getClass().getMethod(methodName, parameterTypes);
            return method.invoke(delegate, args);
        } catch (SecurityException e) {
            // ignore exceptions - best-effort call, see javadoc
        } catch (NoSuchMethodException e) {
            // ignore exceptions - driver predates JDBC 4.1
        } catch (IllegalArgumentException e) {
            // ignore exceptions
        } catch (IllegalAccessException e) {
            // ignore exceptions
        } catch (InvocationTargetException e) {
            // ignore exceptions
        }
        return null;
    }

    /** JDBC 4.1: sets the schema on the delegate if the driver supports it; otherwise a no-op. */
    public void setSchema(String schema) throws SQLException {
        invokeDelegateMethod("setSchema", new Class<?>[] {String.class}, new Object[] {schema});
    }

    /**
     * JDBC 4.1: returns the delegate's current schema, or {@code null} when the
     * driver does not support the call or reports no schema.
     * FIX: the original called {@code toString()} on the raw reflective result,
     * throwing a {@link NullPointerException} when the driver returned null.
     */
    public String getSchema() throws SQLException {
        Object schema = invokeDelegateMethod("getSchema", new Class<?>[0], new Object[0]);
        return schema == null ? null : schema.toString();
    }

    /** JDBC 4.1: aborts the delegate connection if the driver supports it; otherwise a no-op. */
    public void abort(Executor executor) throws SQLException {
        invokeDelegateMethod("abort", new Class<?>[] {Executor.class}, new Object[] {executor});
    }

    /** JDBC 4.1: sets the network timeout on the delegate if the driver supports it; otherwise a no-op. */
    public void setNetworkTimeout(Executor executor, int milliseconds) throws SQLException {
        invokeDelegateMethod("setNetworkTimeout",
                new Class<?>[] {Executor.class, int.class},
                new Object[] {executor, milliseconds});
    }

    /**
     * JDBC 4.1: returns the delegate's network timeout in milliseconds, or 0
     * when the driver does not support the call.
     * FIX: null-safe; the original parsed {@code toString()} of the raw result
     * and would NPE on a null return.
     */
    public int getNetworkTimeout() throws SQLException {
        Object timeout = invokeDelegateMethod("getNetworkTimeout", new Class<?>[0], new Object[0]);
        return timeout instanceof Number ? ((Number) timeout).intValue() : 0;
    }
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.
package com.azure.ai.metricsadvisor.implementation.models;
import com.azure.core.annotation.Fluent;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.annotation.JsonTypeName;
import java.time.OffsetDateTime;
import java.util.List;
import java.util.UUID;
/** The DataFeedDetail model. */
@JsonTypeInfo(
use = JsonTypeInfo.Id.NAME,
include = JsonTypeInfo.As.PROPERTY,
property = "dataSourceType",
defaultImpl = DataFeedDetail.class)
@JsonTypeName("DataFeedDetail")
@JsonSubTypes({
@JsonSubTypes.Type(name = "AzureApplicationInsights", value = AzureApplicationInsightsDataFeed.class),
@JsonSubTypes.Type(name = "AzureBlob", value = AzureBlobDataFeed.class),
@JsonSubTypes.Type(name = "AzureCosmosDB", value = AzureCosmosDBDataFeed.class),
@JsonSubTypes.Type(name = "AzureDataExplorer", value = AzureDataExplorerDataFeed.class),
@JsonSubTypes.Type(name = "AzureDataLakeStorageGen2", value = AzureDataLakeStorageGen2DataFeed.class),
@JsonSubTypes.Type(name = "AzureEventHubs", value = AzureEventHubsDataFeed.class),
@JsonSubTypes.Type(name = "AzureLogAnalytics", value = AzureLogAnalyticsDataFeed.class),
@JsonSubTypes.Type(name = "AzureTable", value = AzureTableDataFeed.class),
@JsonSubTypes.Type(name = "InfluxDB", value = InfluxDBDataFeed.class),
@JsonSubTypes.Type(name = "MySql", value = MySqlDataFeed.class),
@JsonSubTypes.Type(name = "PostgreSql", value = PostgreSqlDataFeed.class),
@JsonSubTypes.Type(name = "SqlServer", value = SQLServerDataFeed.class),
@JsonSubTypes.Type(name = "MongoDB", value = MongoDBDataFeed.class)
})
@Fluent
public class DataFeedDetail {
/*
 * data feed unique id (server-assigned; not sent on create/update)
 */
@JsonProperty(value = "dataFeedId", access = JsonProperty.Access.WRITE_ONLY)
private UUID dataFeedId;
/*
 * data feed name
 */
@JsonProperty(value = "dataFeedName", required = true)
private String dataFeedName;
/*
 * data feed description
 */
@JsonProperty(value = "dataFeedDescription")
private String dataFeedDescription;
/*
 * granularity of the time series
 */
@JsonProperty(value = "granularityName", required = true)
private Granularity granularityName;
/*
 * if granularity is custom, it is required.
 */
@JsonProperty(value = "granularityAmount")
private Integer granularityAmount;
/*
 * measure list
 */
@JsonProperty(value = "metrics", required = true)
private List<DataFeedMetric> metrics;
/*
 * dimension list
 */
@JsonProperty(value = "dimension")
private List<DataFeedDimension> dimension;
/*
 * user-defined timestamp column. if timestampColumn is null, start time of
 * every time slice will be used as default value.
 */
@JsonProperty(value = "timestampColumn")
private String timestampColumn;
/*
 * ingestion start time
 */
@JsonProperty(value = "dataStartFrom", required = true)
private OffsetDateTime dataStartFrom;
/*
 * the time that the beginning of data ingestion task will delay for every
 * data slice according to this offset.
 */
@JsonProperty(value = "startOffsetInSeconds")
private Long startOffsetInSeconds;
/*
 * the max concurrency of data ingestion queries against user data source.
 * 0 means no limitation.
 */
@JsonProperty(value = "maxConcurrency")
private Integer maxConcurrency;
/*
 * the min retry interval for failed data ingestion tasks.
 */
@JsonProperty(value = "minRetryIntervalInSeconds")
private Long minRetryIntervalInSeconds;
/*
 * stop retry data ingestion after the data slice first schedule time in
 * seconds.
 */
@JsonProperty(value = "stopRetryAfterInSeconds")
private Long stopRetryAfterInSeconds;
/*
 * mark whether the data feed needs rollup
 */
@JsonProperty(value = "needRollup")
private NeedRollupEnum needRollup;
/*
 * roll up method
 */
@JsonProperty(value = "rollUpMethod")
private RollUpMethod rollUpMethod;
/*
 * roll up columns
 */
@JsonProperty(value = "rollUpColumns")
private List<String> rollUpColumns;
/*
 * the identification value for the row of the calculated all-up value.
 */
@JsonProperty(value = "allUpIdentification")
private String allUpIdentification;
/*
 * the type of fill missing point for anomaly detection
 */
@JsonProperty(value = "fillMissingPointType")
private FillMissingPointType fillMissingPointType;
/*
 * the value of fill missing point for anomaly detection
 */
@JsonProperty(value = "fillMissingPointValue")
private Double fillMissingPointValue;
/*
 * data feed access mode, default is Private
 */
@JsonProperty(value = "viewMode")
private ViewMode viewMode;
/*
 * data feed administrators
 */
@JsonProperty(value = "admins")
private List<String> admins;
/*
 * data feed viewers
 */
@JsonProperty(value = "viewers")
private List<String> viewers;
/*
 * whether the querying user is one of the data feed administrators
 * (server-assigned; not sent on create/update)
 */
@JsonProperty(value = "isAdmin", access = JsonProperty.Access.WRITE_ONLY)
private Boolean isAdmin;
/*
 * data feed creator (server-assigned; not sent on create/update)
 */
@JsonProperty(value = "creator", access = JsonProperty.Access.WRITE_ONLY)
private String creator;
/*
 * data feed status (server-assigned; not sent on create/update)
 */
@JsonProperty(value = "status", access = JsonProperty.Access.WRITE_ONLY)
private EntityStatus status;
/*
 * data feed created time (server-assigned; not sent on create/update)
 */
@JsonProperty(value = "createdTime", access = JsonProperty.Access.WRITE_ONLY)
private OffsetDateTime createdTime;
/*
 * action link for alert
 */
@JsonProperty(value = "actionLinkTemplate")
private String actionLinkTemplate;
/*
 * authentication type for corresponding data source
 */
@JsonProperty(value = "authenticationType")
private AuthenticationTypeEnum authenticationType;
/*
 * The credential entity id
 */
@JsonProperty(value = "credentialId")
private String credentialId;
/**
 * Get the dataFeedId property: data feed unique id (assigned by the service).
 *
 * @return the dataFeedId value.
 */
public UUID getDataFeedId() {
return this.dataFeedId;
}
/**
 * Get the dataFeedName property: data feed name.
 *
 * @return the dataFeedName value.
 */
public String getDataFeedName() {
return this.dataFeedName;
}
/**
 * Set the dataFeedName property: data feed name.
 *
 * @param dataFeedName the dataFeedName value to set.
 * @return the DataFeedDetail object itself.
 */
public DataFeedDetail setDataFeedName(String dataFeedName) {
this.dataFeedName = dataFeedName;
return this;
}
/**
 * Get the dataFeedDescription property: data feed description.
 *
 * @return the dataFeedDescription value.
 */
public String getDataFeedDescription() {
return this.dataFeedDescription;
}
/**
 * Set the dataFeedDescription property: data feed description.
 *
 * @param dataFeedDescription the dataFeedDescription value to set.
 * @return the DataFeedDetail object itself.
 */
public DataFeedDetail setDataFeedDescription(String dataFeedDescription) {
this.dataFeedDescription = dataFeedDescription;
return this;
}
/**
 * Get the granularityName property: granularity of the time series.
 *
 * @return the granularityName value.
 */
public Granularity getGranularityName() {
return this.granularityName;
}
/**
 * Set the granularityName property: granularity of the time series.
 *
 * @param granularityName the granularityName value to set.
 * @return the DataFeedDetail object itself.
 */
public DataFeedDetail setGranularityName(Granularity granularityName) {
this.granularityName = granularityName;
return this;
}
/**
 * Get the granularityAmount property: required if granularity is custom.
 *
 * @return the granularityAmount value.
 */
public Integer getGranularityAmount() {
return this.granularityAmount;
}
/**
 * Set the granularityAmount property: required if granularity is custom.
 *
 * @param granularityAmount the granularityAmount value to set.
 * @return the DataFeedDetail object itself.
 */
public DataFeedDetail setGranularityAmount(Integer granularityAmount) {
this.granularityAmount = granularityAmount;
return this;
}
/**
 * Get the metrics property: measure list.
 *
 * @return the metrics value.
 */
public List<DataFeedMetric> getMetrics() {
return this.metrics;
}
/**
 * Set the metrics property: measure list.
 *
 * @param metrics the metrics value to set.
 * @return the DataFeedDetail object itself.
 */
public DataFeedDetail setMetrics(List<DataFeedMetric> metrics) {
this.metrics = metrics;
return this;
}
/**
 * Get the dimension property: dimension list.
 *
 * @return the dimension value.
 */
public List<DataFeedDimension> getDimension() {
return this.dimension;
}
/**
 * Set the dimension property: dimension list.
 *
 * @param dimension the dimension value to set.
 * @return the DataFeedDetail object itself.
 */
public DataFeedDetail setDimension(List<DataFeedDimension> dimension) {
this.dimension = dimension;
return this;
}
/**
 * Get the timestampColumn property: user-defined timestamp column. if timestampColumn is null, start time of every
 * time slice will be used as default value.
 *
 * @return the timestampColumn value.
 */
public String getTimestampColumn() {
return this.timestampColumn;
}
/**
 * Set the timestampColumn property: user-defined timestamp column. if timestampColumn is null, start time of every
 * time slice will be used as default value.
 *
 * @param timestampColumn the timestampColumn value to set.
 * @return the DataFeedDetail object itself.
 */
public DataFeedDetail setTimestampColumn(String timestampColumn) {
this.timestampColumn = timestampColumn;
return this;
}
/**
 * Get the dataStartFrom property: ingestion start time.
 *
 * @return the dataStartFrom value.
 */
public OffsetDateTime getDataStartFrom() {
return this.dataStartFrom;
}
/**
 * Set the dataStartFrom property: ingestion start time.
 *
 * @param dataStartFrom the dataStartFrom value to set.
 * @return the DataFeedDetail object itself.
 */
public DataFeedDetail setDataStartFrom(OffsetDateTime dataStartFrom) {
this.dataStartFrom = dataStartFrom;
return this;
}
/**
 * Get the startOffsetInSeconds property: the time that the beginning of data ingestion task will delay for every
 * data slice according to this offset.
 *
 * @return the startOffsetInSeconds value.
 */
public Long getStartOffsetInSeconds() {
return this.startOffsetInSeconds;
}
/**
 * Set the startOffsetInSeconds property: the time that the beginning of data ingestion task will delay for every
 * data slice according to this offset.
 *
 * @param startOffsetInSeconds the startOffsetInSeconds value to set.
 * @return the DataFeedDetail object itself.
 */
public DataFeedDetail setStartOffsetInSeconds(Long startOffsetInSeconds) {
this.startOffsetInSeconds = startOffsetInSeconds;
return this;
}
/**
 * Get the maxConcurrency property: the max concurrency of data ingestion queries against user data source. 0 means
 * no limitation.
 *
 * @return the maxConcurrency value.
 */
public Integer getMaxConcurrency() {
return this.maxConcurrency;
}
/**
 * Set the maxConcurrency property: the max concurrency of data ingestion queries against user data source. 0 means
 * no limitation.
 *
 * @param maxConcurrency the maxConcurrency value to set.
 * @return the DataFeedDetail object itself.
 */
public DataFeedDetail setMaxConcurrency(Integer maxConcurrency) {
this.maxConcurrency = maxConcurrency;
return this;
}
/**
 * Get the minRetryIntervalInSeconds property: the min retry interval for failed data ingestion tasks.
 *
 * @return the minRetryIntervalInSeconds value.
 */
public Long getMinRetryIntervalInSeconds() {
return this.minRetryIntervalInSeconds;
}
/**
 * Set the minRetryIntervalInSeconds property: the min retry interval for failed data ingestion tasks.
 *
 * @param minRetryIntervalInSeconds the minRetryIntervalInSeconds value to set.
 * @return the DataFeedDetail object itself.
 */
public DataFeedDetail setMinRetryIntervalInSeconds(Long minRetryIntervalInSeconds) {
this.minRetryIntervalInSeconds = minRetryIntervalInSeconds;
return this;
}
/**
 * Get the stopRetryAfterInSeconds property: stop retry data ingestion after the data slice first schedule time in
 * seconds.
 *
 * @return the stopRetryAfterInSeconds value.
 */
public Long getStopRetryAfterInSeconds() {
return this.stopRetryAfterInSeconds;
}
/**
 * Set the stopRetryAfterInSeconds property: stop retry data ingestion after the data slice first schedule time in
 * seconds.
 *
 * @param stopRetryAfterInSeconds the stopRetryAfterInSeconds value to set.
 * @return the DataFeedDetail object itself.
 */
public DataFeedDetail setStopRetryAfterInSeconds(Long stopRetryAfterInSeconds) {
this.stopRetryAfterInSeconds = stopRetryAfterInSeconds;
return this;
}
/**
 * Get the needRollup property: mark whether the data feed needs rollup.
 *
 * @return the needRollup value.
 */
public NeedRollupEnum getNeedRollup() {
return this.needRollup;
}
/**
 * Set the needRollup property: mark whether the data feed needs rollup.
 *
 * @param needRollup the needRollup value to set.
 * @return the DataFeedDetail object itself.
 */
public DataFeedDetail setNeedRollup(NeedRollupEnum needRollup) {
this.needRollup = needRollup;
return this;
}
/**
 * Get the rollUpMethod property: roll-up method.
 *
 * @return the rollUpMethod value.
 */
public RollUpMethod getRollUpMethod() {
return this.rollUpMethod;
}
/**
 * Set the rollUpMethod property: roll-up method.
 *
 * @param rollUpMethod the rollUpMethod value to set.
 * @return the DataFeedDetail object itself.
 */
public DataFeedDetail setRollUpMethod(RollUpMethod rollUpMethod) {
this.rollUpMethod = rollUpMethod;
return this;
}
/**
 * Get the rollUpColumns property: roll-up columns.
 *
 * @return the rollUpColumns value.
 */
public List<String> getRollUpColumns() {
return this.rollUpColumns;
}
/**
 * Set the rollUpColumns property: roll-up columns.
 *
 * @param rollUpColumns the rollUpColumns value to set.
 * @return the DataFeedDetail object itself.
 */
public DataFeedDetail setRollUpColumns(List<String> rollUpColumns) {
this.rollUpColumns = rollUpColumns;
return this;
}
/**
 * Get the allUpIdentification property: the identification value for the row of the calculated all-up value.
 *
 * @return the allUpIdentification value.
 */
public String getAllUpIdentification() {
return this.allUpIdentification;
}
/**
 * Set the allUpIdentification property: the identification value for the row of the calculated all-up value.
 *
 * @param allUpIdentification the allUpIdentification value to set.
 * @return the DataFeedDetail object itself.
 */
public DataFeedDetail setAllUpIdentification(String allUpIdentification) {
this.allUpIdentification = allUpIdentification;
return this;
}
/**
 * Get the fillMissingPointType property: the type of fill missing point for anomaly detection.
 *
 * @return the fillMissingPointType value.
 */
public FillMissingPointType getFillMissingPointType() {
return this.fillMissingPointType;
}
/**
 * Set the fillMissingPointType property: the type of fill missing point for anomaly detection.
 *
 * @param fillMissingPointType the fillMissingPointType value to set.
 * @return the DataFeedDetail object itself.
 */
public DataFeedDetail setFillMissingPointType(FillMissingPointType fillMissingPointType) {
this.fillMissingPointType = fillMissingPointType;
return this;
}
/**
 * Get the fillMissingPointValue property: the value of fill missing point for anomaly detection.
 *
 * @return the fillMissingPointValue value.
 */
public Double getFillMissingPointValue() {
return this.fillMissingPointValue;
}
/**
 * Set the fillMissingPointValue property: the value of fill missing point for anomaly detection.
 *
 * @param fillMissingPointValue the fillMissingPointValue value to set.
 * @return the DataFeedDetail object itself.
 */
public DataFeedDetail setFillMissingPointValue(Double fillMissingPointValue) {
this.fillMissingPointValue = fillMissingPointValue;
return this;
}
/**
 * Get the viewMode property: data feed access mode, default is Private.
 *
 * @return the viewMode value.
 */
public ViewMode getViewMode() {
return this.viewMode;
}
/**
 * Set the viewMode property: data feed access mode, default is Private.
 *
 * @param viewMode the viewMode value to set.
 * @return the DataFeedDetail object itself.
 */
public DataFeedDetail setViewMode(ViewMode viewMode) {
this.viewMode = viewMode;
return this;
}
/**
* Get the admins property: data feed administrator.
*
* @return the admins value.
*/
public List<String> getAdmins() {
return this.admins;
}
/**
* Set the admins property: data feed administrator.
*
* @param admins the admins value to set.
* @return the DataFeedDetail object itself.
*/
public DataFeedDetail setAdmins(List<String> admins) {
this.admins = admins;
return this;
}
/**
* Get the viewers property: data feed viewer.
*
* @return the viewers value.
*/
public List<String> getViewers() {
return this.viewers;
}
/**
* Set the viewers property: data feed viewer.
*
* @param viewers the viewers value to set.
* @return the DataFeedDetail object itself.
*/
public DataFeedDetail setViewers(List<String> viewers) {
this.viewers = viewers;
return this;
}
/**
* Get the isAdmin property: the query user is one of data feed administrator or not.
*
* @return the isAdmin value.
*/
public Boolean isAdmin() {
return this.isAdmin;
}
/**
* Get the creator property: data feed creator.
*
* @return the creator value.
*/
public String getCreator() {
return this.creator;
}
/**
* Get the status property: data feed status.
*
* @return the status value.
*/
public EntityStatus getStatus() {
return this.status;
}
/**
* Get the createdTime property: data feed created time.
*
* @return the createdTime value.
*/
public OffsetDateTime getCreatedTime() {
return this.createdTime;
}
/**
* Get the actionLinkTemplate property: action link for alert.
*
* @return the actionLinkTemplate value.
*/
public String getActionLinkTemplate() {
return this.actionLinkTemplate;
}
/**
* Set the actionLinkTemplate property: action link for alert.
*
* @param actionLinkTemplate the actionLinkTemplate value to set.
* @return the DataFeedDetail object itself.
*/
public DataFeedDetail setActionLinkTemplate(String actionLinkTemplate) {
this.actionLinkTemplate = actionLinkTemplate;
return this;
}
/**
* Get the authenticationType property: authentication type for corresponding data source.
*
* @return the authenticationType value.
*/
public AuthenticationTypeEnum getAuthenticationType() {
return this.authenticationType;
}
/**
* Set the authenticationType property: authentication type for corresponding data source.
*
* @param authenticationType the authenticationType value to set.
* @return the DataFeedDetail object itself.
*/
public DataFeedDetail setAuthenticationType(AuthenticationTypeEnum authenticationType) {
this.authenticationType = authenticationType;
return this;
}
/**
* Get the credentialId property: The credential entity id.
*
* @return the credentialId value.
*/
public String getCredentialId() {
return this.credentialId;
}
/**
* Set the credentialId property: The credential entity id.
*
* @param credentialId the credentialId value to set.
* @return the DataFeedDetail object itself.
*/
public DataFeedDetail setCredentialId(String credentialId) {
this.credentialId = credentialId;
return this;
}
}
| |
/*
* Copyright (C) 2007 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package android.app;
import android.content.Context;
import android.os.Handler;
import android.os.IBinder;
import android.os.RemoteException;
import android.os.ServiceManager;
import android.os.UserHandle;
import android.util.Log;
/**
* Class to notify the user of events that happen. This is how you tell
* the user that something has happened in the background. {@more}
*
* Notifications can take different forms:
* <ul>
* <li>A persistent icon that goes in the status bar and is accessible
* through the launcher, (when the user selects it, a designated Intent
* can be launched),</li>
* <li>Turning on or flashing LEDs on the device, or</li>
* <li>Alerting the user by flashing the backlight, playing a sound,
* or vibrating.</li>
* </ul>
*
* <p>
* Each of the notify methods takes an int id parameter and optionally a
* {@link String} tag parameter, which may be {@code null}. These parameters
* are used to form a pair (tag, id), or ({@code null}, id) if tag is
* unspecified. This pair identifies this notification from your app to the
* system, so that pair should be unique within your app. If you call one
* of the notify methods with a (tag, id) pair that is currently active and
* a new set of notification parameters, it will be updated. For example,
* if you pass a new status bar icon, the old icon in the status bar will
* be replaced with the new one. This is also the same tag and id you pass
* to the {@link #cancel(int)} or {@link #cancel(String, int)} method to clear
* this notification.
*
* <p>
* You do not instantiate this class directly; instead, retrieve it through
* {@link android.content.Context#getSystemService}.
*
* <div class="special reference">
* <h3>Developer Guides</h3>
* <p>For a guide to creating notifications, read the
* <a href="{@docRoot}guide/topics/ui/notifiers/notifications.html">Status Bar Notifications</a>
* developer guide.</p>
* </div>
*
* @see android.app.Notification
* @see android.content.Context#getSystemService
*/
public class NotificationManager
{
    // Constants were mutable statics; they are never reassigned, so make them final.
    private static final String TAG = "NotificationManager";
    private static final boolean localLOGV = false;

    private static INotificationManager sService;

    /** @hide */
    static public INotificationManager getService()
    {
        // Lazily resolved binder proxy for the system notification service.
        // NOTE(review): the lazy init is unsynchronized; concurrent first calls may
        // both perform the lookup — presumably harmless since asInterface() returns
        // equivalent proxies, but confirm this matches framework expectations.
        if (sService != null) {
            return sService;
        }
        IBinder b = ServiceManager.getService("notification");
        sService = INotificationManager.Stub.asInterface(b);
        return sService;
    }

    /*package*/ NotificationManager(Context context, Handler handler)
    {
        mContext = context;
    }

    /** {@hide} */
    public static NotificationManager from(Context context) {
        return (NotificationManager) context.getSystemService(Context.NOTIFICATION_SERVICE);
    }

    /**
     * Post a notification to be shown in the status bar. If a notification with
     * the same id has already been posted by your application and has not yet been canceled, it
     * will be replaced by the updated information.
     *
     * @param id An identifier for this notification unique within your
     *        application.
     * @param notification A {@link Notification} object describing what to show the user. Must not
     *        be null.
     */
    public void notify(int id, Notification notification)
    {
        notify(null, id, notification);
    }

    /**
     * Post a notification to be shown in the status bar. If a notification with
     * the same tag and id has already been posted by your application and has not yet been
     * canceled, it will be replaced by the updated information.
     *
     * @param tag A string identifier for this notification. May be {@code null}.
     * @param id An identifier for this notification. The pair (tag, id) must be unique
     *        within your application.
     * @param notification A {@link Notification} object describing what to
     *        show the user. Must not be null.
     */
    public void notify(String tag, int id, Notification notification)
    {
        notifyForUser(tag, id, notification, UserHandle.myUserId());
    }

    /**
     * @hide
     */
    public void notifyAsUser(String tag, int id, Notification notification, UserHandle user)
    {
        notifyForUser(tag, id, notification, user.getIdentifier());
    }

    /**
     * Shared implementation of {@link #notify(String, int, Notification)} and
     * {@link #notifyAsUser}: canonicalizes the sound URI, enqueues the notification
     * with the service for the given user id, and warns if the id came back corrupted.
     * (The two public entry points previously duplicated this body verbatim.)
     */
    private void notifyForUser(String tag, int id, Notification notification, int userId)
    {
        int[] idOut = new int[1];
        INotificationManager service = getService();
        String pkg = mContext.getPackageName();
        if (notification.sound != null) {
            notification.sound = notification.sound.getCanonicalUri();
        }
        if (localLOGV) Log.v(TAG, pkg + ": notify(" + id + ", " + notification + ")");
        try {
            service.enqueueNotificationWithTag(pkg, tag, id, notification, idOut, userId);
            if (id != idOut[0]) {
                Log.w(TAG, "notify: id corrupted: sent " + id + ", got back " + idOut[0]);
            }
        } catch (RemoteException e) {
            // Intentionally ignored: the notification service lives in the system
            // server; if that process has died there is nothing the caller can do.
        }
    }

    /**
     * Cancel a previously shown notification. If it's transient, the view
     * will be hidden. If it's persistent, it will be removed from the status
     * bar.
     */
    public void cancel(int id)
    {
        cancel(null, id);
    }

    /**
     * Cancel a previously shown notification. If it's transient, the view
     * will be hidden. If it's persistent, it will be removed from the status
     * bar.
     */
    public void cancel(String tag, int id)
    {
        cancelForUser(tag, id, UserHandle.myUserId());
    }

    /**
     * @hide
     */
    public void cancelAsUser(String tag, int id, UserHandle user)
    {
        cancelForUser(tag, id, user.getIdentifier());
    }

    /**
     * Shared implementation of {@link #cancel(String, int)} and {@link #cancelAsUser}.
     */
    private void cancelForUser(String tag, int id, int userId)
    {
        INotificationManager service = getService();
        String pkg = mContext.getPackageName();
        if (localLOGV) Log.v(TAG, pkg + ": cancel(" + id + ")");
        try {
            service.cancelNotificationWithTag(pkg, tag, id, userId);
        } catch (RemoteException e) {
            // Intentionally ignored; see notifyForUser().
        }
    }

    /**
     * Cancel all previously shown notifications. See {@link #cancel} for the
     * detailed behavior.
     */
    public void cancelAll()
    {
        INotificationManager service = getService();
        String pkg = mContext.getPackageName();
        if (localLOGV) Log.v(TAG, pkg + ": cancelAll()");
        try {
            service.cancelAllNotifications(pkg, UserHandle.myUserId());
        } catch (RemoteException e) {
            // Intentionally ignored; see notifyForUser().
        }
    }

    private Context mContext;
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* @author Alexander Y. Kleymenov
* @version $Revision$
*/
package javax.crypto;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.security.Key;
import java.util.Arrays;
import javax.crypto.Cipher;
import javax.crypto.KeyGenerator;
import javax.crypto.NullCipher;
import javax.crypto.spec.IvParameterSpec;
import javax.crypto.spec.SecretKeySpec;
import junit.framework.TestCase;
/**
 * Unit tests for {@link javax.crypto.SealedObject}: construction, serialization
 * round-trips, and object retrieval via Key, Cipher, and (Key, provider).
 */
public class SealedObjectTest extends TestCase {

    /**
     * readObject(ObjectInputStream s) method testing. Tests if the
     * serialization/deserialization works correctly: object is serialized,
     * deserialized, the content of the deserialized object equals the
     * content of the initial object.
     */
    public void testReadObject() throws Exception {
        String secret = "secret string";
        // NullCipher leaves the payload unencrypted, so the round-trip can be
        // compared directly against the original secret.
        SealedObject so = new SealedObject(secret, new NullCipher());
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        ObjectOutputStream oos = new ObjectOutputStream(bos);
        oos.writeObject(so);
        ObjectInputStream ois = new ObjectInputStream(new ByteArrayInputStream(
                bos.toByteArray()));
        SealedObject so_des = (SealedObject) ois.readObject();
        assertEquals("The secret content of deserialized object "
                + "should be equal to the secret content of initial object",
                secret, so_des.getObject(new NullCipher()));
        assertEquals("The value returned by getAlgorithm() method of "
                + "deserialized object should be equal to the value returned "
                + "by getAlgorithm() method of initial object", so
                .getAlgorithm(), so_des.getAlgorithm());
    }

    /**
     * SealedObject(Serializable object, Cipher c) method testing. Tests if the
     * NullPointerException is thrown in the case of a null cipher.
     */
    public void testSealedObject1() throws Exception {
        String secret = "secret string";
        try {
            new SealedObject(secret, null);
            fail("NullPointerException should be thrown in the case "
                    + "of null cipher.");
        } catch (NullPointerException e) {
            // expected
        }
    }

    /**
     * SealedObject(SealedObject so) copy-constructor testing. Tests if the
     * NullPointerException is thrown in the case of a null SealedObject, and
     * that a copy preserves both the sealed content and the algorithm name.
     */
    public void testSealedObject2() throws Exception {
        try {
            new SealedObject(null);
            fail("NullPointerException should be thrown in the case "
                    + "of null SealedObject.");
        } catch (NullPointerException e) {
            // expected
        }
        String secret = "secret string";
        Cipher cipher = new NullCipher();
        SealedObject so1 = new SealedObject(secret, cipher);
        SealedObject so2 = new SealedObject(so1);
        assertEquals("The secret content of the object should equals "
                + "to the secret content of initial object.", secret, so2
                .getObject(cipher));
        assertEquals("The algorithm which was used to seal the object "
                + "should be the same as the algorithm used to seal the "
                + "initial object", so1.getAlgorithm(), so2.getAlgorithm());
    }

    /**
     * getAlgorithm() method testing. Tests if the returned value equals the
     * algorithm of the Cipher the object was sealed with.
     */
    public void testGetAlgorithm() throws Exception {
        String secret = "secret string";
        String algorithm = "DES";
        KeyGenerator kg = KeyGenerator.getInstance(algorithm);
        Key key = kg.generateKey();
        Cipher cipher = Cipher.getInstance(algorithm);
        cipher.init(Cipher.ENCRYPT_MODE, key);
        SealedObject so = new SealedObject(secret, cipher);
        assertEquals("The algorithm name should be the same as used "
                + "in cipher.", algorithm, so.getAlgorithm());
    }

    /**
     * getObject(Key key) method testing. Tests if an object sealed with an
     * encryption algorithm and specific parameters (here an IV) can be
     * retrieved by specifying only the cryptographic key, and that the
     * algorithm parameters were recorded in the sealed object.
     */
    public void testGetObject1() throws Exception {
        KeyGenerator kg = KeyGenerator.getInstance("DES");
        Key key = kg.generateKey();
        IvParameterSpec ips = new IvParameterSpec(new byte[] { 1, 2, 3, 4, 5,
                6, 7, 8 });
        Cipher cipher = Cipher.getInstance("DES/CBC/PKCS5Padding");
        cipher.init(Cipher.ENCRYPT_MODE, key, ips);
        String secret = "secret string";
        SealedObject so = new SealedObject(secret, cipher);
        assertEquals("The returned object does not equals to the "
                + "original object.", secret, so.getObject(key));
        // encodedParams is package-accessible here because this test lives in
        // the javax.crypto package.
        assertTrue("The encodedParams field of SealedObject object "
                + "should contain the encoded algorithm parameters.", Arrays
                .equals(so.encodedParams, cipher.getParameters().getEncoded()));
    }

    /**
     * getObject(Cipher c) method testing. Tests if the proper exception is
     * thrown in the case of incorrect input parameters and if an object sealed
     * with an encryption algorithm and specific parameters can be retrieved by
     * supplying a Cipher initialized for decryption.
     */
    public void testGetObject2() throws Exception {
        try {
            new SealedObject("secret string", new NullCipher())
                    .getObject((Cipher) null);
            fail("NullPointerException should be thrown in the case of "
                    + "null cipher.");
        } catch (NullPointerException e) {
            // expected
        }
        KeyGenerator kg = KeyGenerator.getInstance("DES");
        Key key = kg.generateKey();
        IvParameterSpec ips = new IvParameterSpec(new byte[] { 1, 2, 3, 4, 5,
                6, 7, 8 });
        Cipher cipher = Cipher.getInstance("DES/CBC/PKCS5Padding");
        cipher.init(Cipher.ENCRYPT_MODE, key, ips);
        String secret = "secret string";
        SealedObject so = new SealedObject(secret, cipher);
        // Re-initialize the same cipher for decryption with identical key/IV.
        cipher.init(Cipher.DECRYPT_MODE, key, ips);
        assertEquals("The returned object does not equals to the "
                + "original object.", secret, so.getObject(cipher));
    }

    /**
     * getObject(Key key, String provider) method testing. Tests if the proper
     * exception is thrown in the case of incorrect input parameters (null or
     * empty provider) and if an object sealed with an encryption algorithm can
     * be retrieved by specifying the cryptographic key and provider name.
     */
    public void testGetObject3() throws Exception {
        try {
            new SealedObject("secret string", new NullCipher()).getObject(
                    new SecretKeySpec(new byte[] { 0, 0, 0 }, "algorithm"),
                    null);
            fail("IllegalArgumentException should be thrown in the case of "
                    + "null provider.");
        } catch (IllegalArgumentException e) {
            // expected
        }
        try {
            new SealedObject("secret string", new NullCipher()).getObject(
                    new SecretKeySpec(new byte[] { 0, 0, 0 }, "algorithm"), "");
            fail("IllegalArgumentException should be thrown in the case of "
                    + "empty provider.");
        } catch (IllegalArgumentException e) {
            // expected
        }
        KeyGenerator kg = KeyGenerator.getInstance("DES");
        Key key = kg.generateKey();
        Cipher cipher = Cipher.getInstance("DES");
        String provider = cipher.getProvider().getName();
        cipher.init(Cipher.ENCRYPT_MODE, key);
        String secret = "secret string";
        SealedObject so = new SealedObject(secret, cipher);
        cipher.init(Cipher.DECRYPT_MODE, key);
        assertEquals("The returned object does not equals to the "
                + "original object.", secret, so.getObject(key, provider));
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.artemis.tests.integration.client;
import org.junit.Before;
import org.junit.Test;
import java.util.concurrent.CountDownLatch;
import javax.jms.BytesMessage;
import javax.jms.Connection;
import javax.jms.ConnectionFactory;
import javax.jms.DeliveryMode;
import javax.jms.JMSException;
import javax.jms.MessageProducer;
import javax.jms.Session;
import javax.jms.Topic;
import javax.jms.TopicSubscriber;
import org.apache.activemq.artemis.tests.integration.IntegrationTestLogger;
import org.apache.activemq.artemis.tests.util.JMSTestBase;
/**
 * Flow-control integration test: one transacted producer publishes large
 * (150 KiB) messages to a topic consumed by several durable subscribers; each
 * side verifies it sent/received exactly {@code TOTAL_MESSAGES_COUNT} messages
 * in order.
 */
public class FlowControlOnIgnoreLargeMessageBodyTest extends JMSTestBase {

    IntegrationTestLogger log = IntegrationTestLogger.LOGGER;

    private Topic topic;

    // Config constants were mutable statics; they are never reassigned, so make them final.
    private static final int TOTAL_MESSAGES_COUNT = 20000;

    private static final int MSG_SIZE = 150 * 1024;

    private final int CONSUMERS_COUNT = 5;

    // Property name intentionally kept as the historical (misspelled) value:
    // consumers look it up by this exact string.
    private static final String ATTR_MSG_COUNTER = "msgIdex";

    protected int receiveTimeout = 10000;

    // Shared failure flag: set by any producer/consumer thread, read by all.
    private volatile boolean error = false;

    @Override
    @Before
    public void setUp() throws Exception {
        super.setUp();
        jmsServer.createTopic(true, "topicIn", "/topic/topicIn");
        topic = (Topic) namingContext.lookup("/topic/topicIn");
    }

    @Override
    protected boolean usePersistence() {
        return false;
    }

    /**
     * LoadProducer: publishes {@code messagesCount} large BytesMessages to the
     * topic in a transacted session, committing every 10 messages.
     */
    class LoadProducer extends Thread {

        private final ConnectionFactory cf;

        private final Topic topic;

        private final int messagesCount;

        private volatile boolean requestForStop = false;

        private volatile boolean stopped = false;

        private int sentMessages = 0;

        LoadProducer(final String name,
                     final Topic topic,
                     final ConnectionFactory cf,
                     final int messagesCount) throws Exception {
            super(name);
            this.cf = cf;
            this.topic = topic;
            this.messagesCount = messagesCount;
        }

        public void sendStopRequest() {
            stopped = false;
            requestForStop = true;
        }

        public boolean isStopped() {
            return stopped;
        }

        @Override
        public void run() {
            stopped = false;
            Connection connection = null;
            Session session = null;
            MessageProducer prod;
            log.info("Starting producer for " + topic + " - " + getName());
            try {
                connection = cf.createConnection();
                session = connection.createSession(true, Session.SESSION_TRANSACTED);
                prod = session.createProducer(topic);
                prod.setDeliveryMode(DeliveryMode.PERSISTENT);
                for (int i = 1; i <= messagesCount && !requestForStop; i++) {
                    if (error) {
                        // Some other thread already failed; abort early.
                        break;
                    }
                    sentMessages++;
                    BytesMessage msg = session.createBytesMessage();
                    msg.setIntProperty(FlowControlOnIgnoreLargeMessageBodyTest.ATTR_MSG_COUNTER, i);
                    msg.writeBytes(new byte[FlowControlOnIgnoreLargeMessageBodyTest.MSG_SIZE]);
                    prod.send(msg);
                    if (i % 10 == 0) {
                        session.commit();
                    }
                    if (i % 100 == 0) {
                        log.info("Address " + topic + " sent " + i + " messages");
                    }
                }
                System.out.println("Ending producer for " + topic + " - " + getName() + " messages " + sentMessages);
            }
            catch (Exception e) {
                error = true;
                e.printStackTrace();
            }
            finally {
                // Fix: guard against NPE when createConnection()/createSession()
                // failed — session/connection may still be null here (the
                // consumer's finally block already had these guards).
                if (session != null) {
                    try {
                        session.commit();
                    }
                    catch (Exception e) {
                        e.printStackTrace();
                    }
                }
                if (connection != null) {
                    try {
                        connection.close();
                    }
                    catch (Exception e) {
                        e.printStackTrace();
                    }
                }
            }
            stopped = true;
        }

        public int getSentMessages() {
            return sentMessages;
        }
    }

    /**
     * LoadConsumer: durable subscriber that receives {@code numberOfMessages}
     * messages in a transacted session, verifying the per-message counter
     * property arrives in strict order and committing every 10 messages.
     */
    class LoadConsumer extends Thread {

        private final ConnectionFactory cf;

        private final Topic topic;

        private volatile boolean requestForStop = false;

        private volatile boolean stopped = false;

        private volatile int receivedMessages = 0;

        private final int numberOfMessages;

        private int receiveTimeout = 0;

        // Counted down once the durable subscription exists, so the test can
        // delay the producer until every consumer is ready.
        private final CountDownLatch consumerCreated;

        LoadConsumer(final CountDownLatch consumerCreated,
                     final String name,
                     final Topic topic,
                     final ConnectionFactory cf,
                     final int receiveTimeout,
                     final int numberOfMessages) {
            super(name);
            this.cf = cf;
            this.topic = topic;
            this.receiveTimeout = receiveTimeout;
            this.numberOfMessages = numberOfMessages;
            this.consumerCreated = consumerCreated;
        }

        public void sendStopRequest() {
            stopped = false;
            requestForStop = true;
        }

        public boolean isStopped() {
            return stopped;
        }

        @Override
        public void run() {
            Connection connection = null;
            Session session = null;
            stopped = false;
            requestForStop = false;
            System.out.println("Starting consumer for " + topic + " - " + getName());
            try {
                connection = cf.createConnection();
                connection.setClientID(getName());
                connection.start();
                session = connection.createSession(true, Session.SESSION_TRANSACTED);
                TopicSubscriber subscriber = session.createDurableSubscriber(topic, getName());
                consumerCreated.countDown();
                int counter = 0;
                while (counter < numberOfMessages && !requestForStop && !error) {
                    if (counter == 0) {
                        System.out.println("Starting to consume for " + topic + " - " + getName());
                    }
                    BytesMessage msg = (BytesMessage) subscriber.receive(receiveTimeout);
                    if (msg == null) {
                        System.out.println("Cannot get message in specified timeout: " + topic + " - " + getName());
                        error = true;
                    }
                    else {
                        counter++;
                        // Messages must arrive in exactly the order they were sent.
                        if (msg.getIntProperty(FlowControlOnIgnoreLargeMessageBodyTest.ATTR_MSG_COUNTER) != counter) {
                            error = true;
                        }
                    }
                    if (counter % 10 == 0) {
                        session.commit();
                    }
                    if (counter % 100 == 0) {
                        log.info("## " + getName() + " " + topic + " received " + counter);
                    }
                    receivedMessages = counter;
                }
                session.commit();
            }
            catch (Exception e) {
                System.out.println("Exception in consumer " + getName() + " : " + e.getMessage());
                e.printStackTrace();
            }
            finally {
                if (session != null) {
                    try {
                        session.close();
                    }
                    catch (JMSException e) {
                        System.err.println("Cannot close session " + e.getMessage());
                    }
                }
                if (connection != null) {
                    try {
                        connection.close();
                    }
                    catch (JMSException e) {
                        System.err.println("Cannot close connection " + e.getMessage());
                    }
                }
            }
            stopped = true;
            System.out.println("Stopping consumer for " + topic +
                                  " - " +
                                  getName() +
                                  ", received " +
                                  getReceivedMessages());
        }

        public int getReceivedMessages() {
            return receivedMessages;
        }
    }

    @Test
    public void testFlowControl() {
        try {
            LoadProducer producer = new LoadProducer("producer", topic, cf, FlowControlOnIgnoreLargeMessageBodyTest.TOTAL_MESSAGES_COUNT);
            LoadConsumer[] consumers = new LoadConsumer[CONSUMERS_COUNT];
            CountDownLatch latch = new CountDownLatch(CONSUMERS_COUNT);
            for (int i = 0; i < consumers.length; i++) {
                consumers[i] = new LoadConsumer(latch, "consumer " + i, topic, cf, receiveTimeout, FlowControlOnIgnoreLargeMessageBodyTest.TOTAL_MESSAGES_COUNT);
            }
            for (LoadConsumer consumer : consumers) {
                consumer.start();
            }
            // Wait until every durable subscription exists before producing,
            // otherwise early messages would be lost to late subscribers.
            waitForLatch(latch);
            producer.start();
            producer.join();
            for (LoadConsumer consumer : consumers) {
                consumer.join();
            }
            String errorMessage = null;
            if (producer.getSentMessages() != FlowControlOnIgnoreLargeMessageBodyTest.TOTAL_MESSAGES_COUNT) {
                errorMessage = "Producer did not send defined count of messages";
            }
            else {
                for (LoadConsumer consumer : consumers) {
                    if (consumer.getReceivedMessages() != FlowControlOnIgnoreLargeMessageBodyTest.TOTAL_MESSAGES_COUNT) {
                        errorMessage = "Consumer did not send defined count of messages";
                        break;
                    }
                }
            }
            if (errorMessage != null) {
                System.err.println(" ERROR  ERROR ERROR ERROR ERROR ERROR ERROR ERROR ERROR ");
                System.err.println(errorMessage);
            }
            else {
                System.out.println(" OK ");
            }
            assertFalse(error);
            assertNull(errorMessage);
        }
        catch (Exception e) {
            log.warn(e.getMessage(), e);
            // Fix: previously the exception was only logged, so the test would
            // silently PASS after an unexpected failure. Fail explicitly instead.
            fail("Unexpected exception: " + e.getMessage());
        }
    }
}
| |
/*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.bulenkov.darcula.ui;
import com.bulenkov.iconloader.util.GraphicsConfig;
import com.bulenkov.iconloader.util.GraphicsUtil;
import com.intellij.util.ui.JBDimension;
import com.intellij.util.ui.JBUI;
import org.jetbrains.annotations.NotNull;
import javax.swing.*;
import javax.swing.plaf.ComponentUI;
import javax.swing.plaf.basic.BasicSliderUI;
import java.awt.*;
import java.awt.geom.Area;
import java.awt.geom.Ellipse2D;
import java.awt.geom.Rectangle2D;
import java.awt.geom.RoundRectangle2D;
/**
* @author Konstantin Bulenkov
*/
public class DarculaSliderUI extends BasicSliderUI {
public DarculaSliderUI(JSlider b) {
super(b);
}
@SuppressWarnings({"MethodOverridesStaticMethodOfSuperclass", "UnusedDeclaration"})
public static ComponentUI createUI(JComponent c) {
return new DarculaSliderUI((JSlider) c);
}
@Override
public void paintFocus(Graphics g) {
super.paintFocus(g);
}
@Override
public void paintTrack(Graphics g2d) {
Graphics2D g = (Graphics2D) g2d;
Rectangle trackBounds = trackRect;
final int arc = JBUI.scale(6);
int trackSize = JBUI.scale(6);
final GraphicsConfig config = GraphicsUtil.setupAAPainting(g);
final Color bg = getTrackBackground();
final Color selection = getThumbColor();
if (slider.getOrientation() == JSlider.HORIZONTAL) {
int cy = (trackBounds.height / 2) - trackSize / 2;
int cw = trackBounds.width;
g.translate(trackBounds.x, trackBounds.y + cy);
final Area shape = new Area(new RoundRectangle2D.Double(0, 0, cw, trackSize, arc, arc));
g.setColor(bg);
g.fill(shape);
int x = thumbRect.x;
shape.intersect(new Area(new Rectangle2D.Double(0, 0, x, trackSize)));
g.setColor(selection);
g.fill(shape);
g.translate(-trackBounds.x, -(trackBounds.y + cy));
} else {
int cx = (trackBounds.width / 2) - trackSize / 2;
int ch = trackBounds.height;
g.translate(trackBounds.x + cx, trackBounds.y);
final Area shape = new Area(new RoundRectangle2D.Double(0, 0, cx, ch, arc, arc));
g.setColor(bg);
g.fill(shape);
int y = thumbRect.y;
shape.intersect(new Area(new Rectangle2D.Double(0, y, cx, ch)));
g.setColor(selection);
g.fill(shape);
g.translate(-(trackBounds.x + cx), -trackBounds.y);
}
config.restore();
}
@Override
protected Dimension getThumbSize() {
if (isPlainThumb()) {
return new Dimension(JBUI.scale(20), JBUI.scale(20));
}
return slider.getOrientation() == JSlider.HORIZONTAL ? new JBDimension(12, 20) : new JBDimension(20, 12);
}
@NotNull
protected Color getTrackBackground() {
return UIManager.getColor("Slider.trackBackground");
}
@NotNull
protected Color getSelectedTrackColor() {
return UIManager.getColor("Slider.selectedTrackColor");
}
@NotNull
protected Color getDisabledTickColor() {
return UIManager.getColor("Slider.disabledTickColor");
}
@Override
protected void paintMinorTickForHorizSlider(Graphics g, Rectangle tickBounds, int x) {
checkDisabled(g);
super.paintMinorTickForHorizSlider(g, tickBounds, x);
}
private void checkDisabled(Graphics g) {
if (!slider.isEnabled()) {
g.setColor(getDisabledTickColor());
}
}
@Override
protected void paintMajorTickForHorizSlider(Graphics g, Rectangle tickBounds, int x) {
checkDisabled(g);
super.paintMajorTickForHorizSlider(g, tickBounds, x);
}
@Override
protected void paintMinorTickForVertSlider(Graphics g, Rectangle tickBounds, int y) {
checkDisabled(g);
super.paintMinorTickForVertSlider(g, tickBounds, y);
}
@Override
protected void paintMajorTickForVertSlider(Graphics g, Rectangle tickBounds, int y) {
checkDisabled(g);
super.paintMajorTickForVertSlider(g, tickBounds, y);
}
@Override
public void paintLabels(Graphics g) {
checkDisabled(g);
super.paintLabels(g);
}
@Override
protected void paintHorizontalLabel(Graphics g, int value, Component label) {
checkDisabled(g);
super.paintHorizontalLabel(g, value, label);
}
@Override
protected void paintVerticalLabel(Graphics g, int value, Component label) {
checkDisabled(g);
super.paintVerticalLabel(g, value, label);
}
@Override
public void paintThumb(Graphics g) {
final GraphicsConfig config = GraphicsUtil.setupAAPainting(g);
Rectangle knobBounds = thumbRect;
int w = knobBounds.width;
int h = knobBounds.height;
g.translate(knobBounds.x, knobBounds.y);
if (slider.isEnabled()) {
g.setColor(slider.getBackground());
} else {
g.setColor(slider.getBackground().darker());
}
if (isPlainThumb()) {
double r = slider.getOrientation() == JSlider.HORIZONTAL ? h : w;
final Ellipse2D.Double thumb = new Ellipse2D.Double(0, 0, r, r);
final Ellipse2D.Double innerThumb = new Ellipse2D.Double(1, 1, r - 2, r - 2);
g.setColor(getThumbBorderColor());
((Graphics2D) g).fill(thumb);
g.setColor(getThumbColor());
((Graphics2D) g).fill(innerThumb);
} else if (slider.getOrientation() == JSlider.HORIZONTAL) {
int cw = w / 2;
g.setColor(getThumbBorderColor());
Polygon p = new Polygon(); //border
p.addPoint(0, 0);
p.addPoint(w - 1, 0);
p.addPoint(w - 1, h - cw);
p.addPoint(cw, h - 1);
p.addPoint(0, h - cw);
g.fillPolygon(p);
g.setColor(getThumbColor());
p = new Polygon();
p.addPoint(1, 1);
p.addPoint(w - 2, 1);
p.addPoint(w - 2, h - cw - 1);
p.addPoint(cw, h - 2);
p.addPoint(1, h - cw - 1);
g.fillPolygon(p);
} else { // vertical
int cw = h / 2;
if (slider.getComponentOrientation().isLeftToRight()) {
g.setColor(getThumbBorderColor());
Polygon p = new Polygon(); //border
p.addPoint(0, 0);
p.addPoint(w - cw, 0);
p.addPoint(w - 1, h - cw);
p.addPoint(w - cw, h - 1);
p.addPoint(0, h - 1);
g.fillPolygon(p);
g.setColor(getThumbColor());
p = new Polygon();
p.addPoint(1, 1);
p.addPoint(w - cw, 1);
p.addPoint(w - 2, h - cw);
p.addPoint(w - cw, h - 2);
p.addPoint(1, h - 2);
g.fillPolygon(p);
} else {
g.setColor(getThumbBorderColor());
Polygon p = new Polygon(); //border
p.addPoint(w - 1, 0);
p.addPoint(cw, 0);
p.addPoint(0, h - cw);
p.addPoint(cw, h - 1);
p.addPoint(w - 1 , h - 1);
g.fillPolygon(p);
g.setColor(getThumbColor());
p = new Polygon();
p.addPoint(w - 2, 1);
p.addPoint(cw + 1, 1);
p.addPoint(1, h - cw);
p.addPoint(cw + 1, h - 2);
p.addPoint(w - 2, h - 2);
g.fillPolygon(p);
}
}
g.translate(-knobBounds.x, -knobBounds.y);
config.restore();
}
@NotNull
protected Color getThumbColor() {
return slider.isEnabled() ? getSelectedTrackColor() : getDisabledTickColor();
}
@NotNull
protected Color getThumbBorderColor() {
return slider.isEnabled() ? UIManager.getColor("Slider.thumbBorderColor") : UIManager.getColor("Slider.thumbBorderColorDisabled");
}
protected boolean isPlainThumb() {
    // A "plain" (round) thumb is painted when no tick marks are shown and the
    // client property does not ask for the arrow-shaped thumb explicitly.
    Boolean paintThumbArrowShape = (Boolean) slider.getClientProperty("Slider.paintThumbArrowShape");
    if (paintThumbArrowShape == null) {
        return !slider.getPaintTicks();
    }
    // NOTE: reference comparison against Boolean.FALSE is preserved from the
    // original; a non-canonical Boolean instance is treated as "not plain".
    return paintThumbArrowShape == Boolean.FALSE;
}
}
| |
package edu.stanford.nlp.util;
import java.io.*;
import java.util.*;
import java.util.concurrent.Semaphore;
/**
* An Index is a collection that maps between an Object vocabulary and a
* contiguous non-negative integer index series beginning (inclusively) at 0.
* It supports constant-time lookup in
 * both directions (via <code>get(int)</code> and <code>indexOf(E)</code>).
* The <code>indexOf(E)</code> method compares objects by
* <code>equals</code>, as other Collections.
* <p/>
* The typical usage would be:
* <p><code>Index index = new Index(collection);</code>
* <p> followed by
* <p><code>int i = index.indexOf(object);</code>
* <p> or
* <p><code>Object o = index.get(i);</code>
* <p>The source contains a concrete example of use as the main method.
* <p>An Index can be locked or unlocked: a locked index cannot have new
* items added to it.
*
* @author <a href="mailto:klein@cs.stanford.edu">Dan Klein</a>
* @version 1.0
* @see AbstractCollection
* @since 1.0
* @author <a href="mailto:yeh1@stanford.edu">Eric Yeh</a> (added write to/load from buffer)
*/
public class HashIndex<E> extends AbstractCollection<E> implements Index<E>, RandomAccess {
  // these variables are also used in IntArrayIndex
  ArrayList<E> objects = new ArrayList<E>();             // position i holds the object whose index is i
  HashMap<E,Integer> indexes = new HashMap<E,Integer>(); // inverse mapping: object -> its index
  boolean locked; // = false;  while true, add() leaves the Index unchanged and returns false

  /**
   * Clears this Index.
   */
  @Override
  public void clear() {
    objects.clear();
    indexes.clear();
  }

  /**
   * Returns the index of each elem in a Collection, in iteration order.
   * Elements that are not in the Index are reported as -1; they are not added
   * (see {@link #indexOf(Object)}).
   *
   * @param elems The list of items
   * @return An array of indices
   */
  public int[] indices(Collection<E> elems) {
    int[] indices = new int[elems.size()];
    int i = 0;
    for (E elem : elems) {
      indices[i++] = indexOf(elem);
    }
    return indices;
  }

  /**
   * Looks up the objects corresponding to an array of indices, and returns them in a {@link Collection}.
   * This collection is not a copy, but accesses the data structures of the Index.
   *
   * @param indices An array of indices
   * @return a {@link Collection} of the objects corresponding to the indices argument.
   */
  public Collection<E> objects(final int[] indices) {
    return new AbstractList<E>() {
      @Override
      public E get(int index) {
        return objects.get(indices[index]);
      }
      @Override
      public int size() {
        return indices.length;
      }
    };
  }

  /**
   * Returns the number of indexed objects.
   *
   * @return the number of indexed objects.
   */
  @Override
  public int size() {
    return objects.size();
  }

  /**
   * Gets the object whose index is the integer argument.
   *
   * @param i the integer index to be queried for the corresponding argument
   * @return the object whose index is the integer argument.
   * @throws ArrayIndexOutOfBoundsException if i is outside [0, size())
   */
  public E get(int i) {
    if (i < 0 || i >= objects.size())
      throw new ArrayIndexOutOfBoundsException("Index " + i +
                                               " outside the bounds [0," +
                                               size() + ")");
    return objects.get(i);
  }

  /**
   * Returns a complete {@link List} of indexed objects, in the order of their indices. <b>DANGER!</b>
   * The current implementation returns the actual index list, not a defensive copy. Messing with this List
   * can seriously screw up the state of the Index. (perhaps this method needs to be eliminated? I don't think it's
   * ever used in ways that we couldn't use the Index itself for directly. --Roger, 12/29/04)
   *
   * @return a complete {@link List} of indexed objects
   */
  public List<E> objectsList() {
    return objects;
  }

  /**
   * Queries the Index for whether it's locked or not.
   * @return whether or not the Index is locked
   */
  public boolean isLocked() {
    return locked;
  }

  /** Locks the Index. A locked index cannot have new elements added to it (calls to {@link #add} will
   * leave the Index unchanged and return <code>false</code>).*/
  public void lock() {
    locked = true;
  }

  /** Unlocks the Index. A locked index cannot have new elements added to it (calls to {@link #add} will
   * leave the Index unchanged and return <code>false</code>).*/
  public void unlock() {
    locked = false;
  }

  /**
   * Returns the integer index of the Object in the Index or -1 if the Object is not already in the Index.
   * @param o the Object whose index is desired.
   * @return the index of the Object argument. Returns -1 if the object is not in the index.
   */
  public int indexOf(E o) {
    return indexOf(o, false);
  }

  /**
   * Takes an Object and returns the integer index of the Object,
   * perhaps adding it to the index first.
   * Returns -1 if the Object is not in the Index.
   * <p>
   * <i>Notes:</i> The method indexOf(x, true) is the direct replacement for
   * the number(x) method in the old Numberer class. This method now uses a
   * Semaphore object to make the index safe for concurrent multithreaded
   * usage. (CDM: Is this better than using a synchronized block?)
   * NOTE(review): the initial {@code indexes.get(o)} below is performed outside
   * the semaphore; full thread-safety for concurrent add/lookup presumably
   * requires callers to coordinate — verify against actual usage.
   *
   * @param o the Object whose index is desired.
   * @param add Whether it is okay to add new items to the index
   * @return The index of the Object argument. Returns -1 if the object is not in the index.
   */
  public int indexOf(E o, boolean add) {
    Integer index = indexes.get(o);
    if (index == null) {
      if (add && ! locked) {
        try {
          semaphore.acquire();
          // re-check under the semaphore: another thread may have added o
          index = indexes.get(o);
          if (index == null) {
            index = objects.size();
            objects.add(o);
            indexes.put(o, index);
          }
          semaphore.release();
        } catch (InterruptedException e) {
          throw new RuntimeException(e);
        }
      } else {
        return -1;
      }
    }
    return index;
  }

  /** Guards the check-then-add sequence in {@link #indexOf(Object, boolean)}. */
  private final Semaphore semaphore = new Semaphore(1);

  // TODO: delete this because we can leach off of Abstract Collection
  /**
   * Adds every member of Collection to the Index. Does nothing for members already in the Index.
   *
   * @return true if at least one item was newly added to the index; false if every
   *         item was already present or the index is locked
   */
  @Override
  public boolean addAll(Collection<? extends E> c) {
    boolean changed = false;
    for (E element: c) {
      changed |= add(element);
      //changed &= add(element);
    }
    return changed;
  }

  /**
   * Adds an object to the Index. If it was already in the Index,
   * then nothing is done. If it is not in the Index, then it is
   * added iff the Index hasn't been locked.
   *
   * @return true if the item was added to the index and false if the
   * item was already in the index or if the index is locked
   */
  @Override
  public boolean add(E o) {
    Integer index = indexes.get(o);
    if (index == null && ! locked) {
      index = objects.size();
      objects.add(o);
      indexes.put(o, index);
      return true;
    }
    return false;
  }

  /**
   * Checks whether an Object already has an index in the Index
   * @param o the object to be queried.
   * @return true iff there is an index for the queried object.
   */
  @SuppressWarnings({"SuspiciousMethodCalls"})
  @Override
  public boolean contains(Object o) {
    return indexes.containsKey(o);
  }

  /**
   * Creates a new Index.
   */
  public HashIndex() {
    super();
  }

  /**
   * Creates a new Index.
   * @param capacity Initial capacity of Index.
   */
  public HashIndex(int capacity) {
    super();
    objects = new ArrayList<E>(capacity);
    indexes = new HashMap<E,Integer>(capacity);
  }

  /**
   * Creates a new Index and adds every member of c to it.
   * @param c A collection of objects
   */
  public HashIndex(Collection<? extends E> c) {
    this();
    addAll(c);
  }

  /**
   * Creates a new Index containing every object of the given Index, in the same order.
   * @param index An existing Index to copy
   */
  public HashIndex(Index<? extends E> index) {
    this();
    addAll(index.objectsList());
  }

  /**
   * Saves the contents of this Index to a file, one "i=value" pair per line.
   * IOExceptions are caught and printed, not propagated.
   *
   * @param file Path of the file to write
   */
  public void saveToFilename(String file) {
    BufferedWriter bw = null;
    try {
      bw = new BufferedWriter(new FileWriter(file));
      for (int i = 0, sz = size(); i < sz; i++) {
        bw.write(i + "=" + get(i) + '\n');
      }
      // close happens in the finally block (previously closed twice)
    } catch (IOException e) {
      e.printStackTrace();
    } finally {
      if (bw != null) {
        try {
          bw.close();
        } catch (IOException ioe) {
          // give up
        }
      }
    }
  }

  /**
   * This assumes each line is of the form (number=value) and it adds each value in order of the lines in the file.
   * Lines without a '=' (or with nothing after it) are skipped.
   *
   * @param file Path of the file to read
   * @return An Index built from the file's values
   */
  public static Index<String> loadFromFilename(String file) {
    Index<String> index = new HashIndex<String>();
    BufferedReader br = null;
    try {
      br = new BufferedReader(new FileReader(file));
      for (String line; (line = br.readLine()) != null; ) {
        int start = line.indexOf('=');
        if (start == -1 || start == line.length() - 1) {
          continue; // malformed line: no '=' or nothing after it
        }
        index.add(line.substring(start + 1));
      }
      // close happens in the finally block (previously closed twice)
    } catch (Exception e) {
      e.printStackTrace();
    } finally {
      if (br != null) {
        try {
          br.close();
        } catch (IOException ioe) {
          // forget it
        }
      }
    }
    return index;
  }

  /**
   * This saves the contents of this index into string form, as part of a larger
   * text-serialization. This is not intended to act as a standalone routine,
   * instead being called from the text-serialization routine for a component
   * that makes use of an Index, so everything can be stored in one file. This is
   * similar to <code>saveToFileName</code>.
   * @param bw Writer to save to.
   * @throws IOException Exception thrown if cannot save.
   */
  public void saveToWriter(Writer bw) throws IOException {
    for (int i = 0, sz = size(); i < sz; i++) {
      bw.write(i + "=" + get(i) + '\n');
    }
  }

  /**
   * This is the analogue of <code>loadFromFilename</code>, and is intended to be included in a routine
   * that unpacks a text-serialized form of an object that incorporates an Index.
   * NOTE: presumes that the next readLine() will read in the first line of the
   * portion of the text file representing the saved Index. Currently reads until it
   * encounters a blank line, consuming that line and returning the Index.
   * TODO: figure out how best to terminate: currently a blank line is considered to be a terminator.
   * Lines without a '=' (or with nothing after it) are skipped.
   * @param br The Reader to read the index from
   * @return An Index read from a file
   */
  public static Index<String> loadFromReader(BufferedReader br) throws IOException {
    HashIndex<String> index = new HashIndex<String>();
    String line;
    // terminate if EOF reached, or if a blank line is encountered.
    // BUGFIX: the previous version used "continue" on a malformed line without
    // reading the next line, which looped forever on such input.
    while ((line = br.readLine()) != null && line.length() > 0) {
      int start = line.indexOf('=');
      if (start != -1 && start != line.length() - 1) {
        index.add(line.substring(start + 1));
      }
    }
    return index;
  }

  /** Returns a readable version of the Index contents
   *
   * @return A String showing the full index contents
   */
  @Override
  public String toString() {
    return toString(Integer.MAX_VALUE);
  }

  /** Returns the full index contents, one entry per line. */
  public String toStringOneEntryPerLine() {
    return toStringOneEntryPerLine(Integer.MAX_VALUE);
  }

  /** Returns a readable version of at least part of the Index contents.
   *
   * @param n Show the first <i>n</i> items in the Index
   * @return A String showing some of the index contents
   */
  public String toString(int n) {
    StringBuilder buff = new StringBuilder("[");
    int sz = objects.size();
    if (n > sz) {
      n = sz;
    }
    int i;
    for (i = 0; i < n; i++) {
      E e = objects.get(i);
      buff.append(i).append('=').append(e);
      if (i < (sz-1)) buff.append(',');
    }
    if (i < sz) buff.append("..."); // indicate truncation
    buff.append(']');
    return buff.toString();
  }

  /** Returns at least part of the Index contents, one entry per line.
   *
   * @param n Show the first <i>n</i> items in the Index
   * @return A String showing some of the index contents
   */
  public String toStringOneEntryPerLine(int n) {
    StringBuilder buff = new StringBuilder();
    int sz = objects.size();
    if (n > sz) {
      n = sz;
    }
    int i;
    for (i = 0; i < n; i++) {
      E e = objects.get(i);
      buff.append(e);
      if (i < (sz-1)) buff.append('\n');
    }
    if (i < sz) buff.append("..."); // indicate truncation
    return buff.toString();
  }

  private static final long serialVersionUID = 5398562825928375260L;

  /**
   * Returns an iterator over the elements of the collection.
   * @return An iterator over the objects indexed
   */
  @Override
  public Iterator<E> iterator() {
    return objects.iterator();
  }

  /**
   * Returns an unmodifiable view of the Index. It is just
   * a locked index that cannot be unlocked, so if you
   * try to add something, nothing will happen (it won't throw
   * an exception). Trying to unlock it will throw an
   * UnsupportedOperationException. If the
   * underlying Index is modified, the change will
   * "write-through" to the view.
   *
   * @return An unmodifiable view of the Index
   */
  public HashIndex<E> unmodifiableView() {
    HashIndex<E> newIndex = new HashIndex<E>() {
      @Override
      public void unlock() { throw new UnsupportedOperationException("This is an unmodifiable view!"); }
      private static final long serialVersionUID = 3415903369787491736L;
    };
    // share the underlying structures so changes write through
    newIndex.objects = objects;
    newIndex.indexes = indexes;
    newIndex.lock();
    return newIndex;
  }

  /** Removal is not supported by this Index implementation. */
  @Override
  public boolean remove(Object o){
    throw new UnsupportedOperationException();
  }

  /** Removal is not supported by this Index implementation. */
  @Override
  public boolean removeAll(Collection<?> e){
    throw new UnsupportedOperationException();
  }

  /**
   * This assumes each line is one value and creates index by adding values in the order of the lines in the file
   * @param file Path of the file to read
   * @return An Index built from the trimmed lines of the file
   */
  public static Index<String> loadFromFileWithList(String file) {
    Index<String> index = new HashIndex<String>();
    BufferedReader br = null;
    try {
      br = new BufferedReader(new FileReader(file));
      for (String line; (line = br.readLine()) != null; ) {
        index.add(line.trim());
      }
      // close happens in the finally block (previously closed twice)
    } catch (Exception e) {
      e.printStackTrace();
    } finally {
      if (br != null) {
        try {
          br.close();
        } catch (IOException ioe) {
          // forget it
        }
      }
    }
    return index;
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.cache.query.continuous;
import java.io.NotSerializableException;
import java.util.concurrent.Callable;
import java.util.concurrent.ThreadLocalRandom;
import javax.cache.processor.EntryProcessor;
import javax.cache.processor.EntryProcessorException;
import javax.cache.processor.MutableEntry;
import org.apache.ignite.Ignite;
import org.apache.ignite.IgniteCache;
import org.apache.ignite.cache.CacheWriteSynchronizationMode;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.configuration.NearCacheConfiguration;
import org.apache.ignite.internal.IgniteEx;
import org.apache.ignite.marshaller.optimized.OptimizedMarshaller;
import org.apache.ignite.spi.communication.tcp.TcpCommunicationSpi;
import org.apache.ignite.spi.discovery.tcp.TcpDiscoverySpi;
import org.apache.ignite.spi.discovery.tcp.ipfinder.TcpDiscoveryIpFinder;
import org.apache.ignite.spi.discovery.tcp.ipfinder.vm.TcpDiscoveryVmIpFinder;
import org.apache.ignite.testframework.GridTestUtils;
import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
import org.apache.ignite.transactions.Transaction;
import org.apache.ignite.transactions.TransactionConcurrency;
import org.apache.ignite.transactions.TransactionIsolation;
import static org.apache.ignite.cache.CacheAtomicityMode.TRANSACTIONAL;
import static org.apache.ignite.cache.CacheWriteSynchronizationMode.FULL_SYNC;
import static org.apache.ignite.cache.CacheWriteSynchronizationMode.PRIMARY_SYNC;
import static org.apache.ignite.transactions.TransactionConcurrency.OPTIMISTIC;
import static org.apache.ignite.transactions.TransactionConcurrency.PESSIMISTIC;
import static org.apache.ignite.transactions.TransactionIsolation.READ_COMMITTED;
import static org.apache.ignite.transactions.TransactionIsolation.REPEATABLE_READ;
import static org.apache.ignite.transactions.TransactionIsolation.SERIALIZABLE;
/**
*
*/
public class CacheEntryProcessorNonSerializableTest extends GridCommonAbstractTest {
    /** Value every key must hold after a failed (rolled-back) invoke. */
    private static final int EXPECTED_VALUE = 42;

    /** Value written inside a transaction that is expected to be rolled back. */
    private static final int WRONG_VALUE = -1;

    /** Shared IP finder for all nodes in the test grid. */
    private static TcpDiscoveryIpFinder ipFinder = new TcpDiscoveryVmIpFinder(true);

    /** Number of server nodes. */
    private static final int NODES = 3;

    /** Number of iterations of each scenario. */
    public static final int ITERATION_CNT = 1;

    /** The single key used by putKeys/checkKeys (kept plural for compatibility). */
    public static final int KEYS = 10;

    /** Whether the next started node is a client node. */
    private boolean client;

    /** {@inheritDoc} */
    @Override protected IgniteConfiguration getConfiguration(String igniteInstanceName) throws Exception {
        IgniteConfiguration cfg = super.getConfiguration(igniteInstanceName);

        ((TcpDiscoverySpi)cfg.getDiscoverySpi()).setIpFinder(ipFinder);
        ((TcpCommunicationSpi)cfg.getCommunicationSpi()).setSharedMemoryPort(-1);

        cfg.setClientMode(client);

        // OptimizedMarshaller requires Serializable entry processors,
        // which is the failure these tests exercise.
        cfg.setMarshaller(new OptimizedMarshaller());

        return cfg;
    }

    /** {@inheritDoc} */
    @Override protected void beforeTestsStarted() throws Exception {
        super.beforeTestsStarted();

        startGridsMultiThreaded(getServerNodeCount());

        // The last grid (index == getServerNodeCount()) is started as a client.
        client = true;

        startGrid(getServerNodeCount());
    }

    /** {@inheritDoc} */
    @Override protected void afterTestsStopped() throws Exception {
        stopAllGrids();

        super.afterTestsStopped();
    }

    /**
     * @return Server nodes.
     */
    private int getServerNodeCount() {
        return NODES;
    }

    /**
     * @throws Exception If failed.
     */
    public void testPessimisticOnePhaseCommit() throws Exception {
        CacheConfiguration ccfg = cacheConfiguration(PRIMARY_SYNC, 1);

        doTestInvokeTest(ccfg, PESSIMISTIC, READ_COMMITTED);
        doTestInvokeTest(ccfg, PESSIMISTIC, REPEATABLE_READ);
        doTestInvokeTest(ccfg, PESSIMISTIC, SERIALIZABLE);
    }

    /**
     * @throws Exception If failed.
     */
    public void testPessimisticOnePhaseCommitWithNearCache() throws Exception {
        CacheConfiguration ccfg = cacheConfiguration(PRIMARY_SYNC, 1)
            .setNearConfiguration(new NearCacheConfiguration());

        doTestInvokeTest(ccfg, PESSIMISTIC, READ_COMMITTED);
        doTestInvokeTest(ccfg, PESSIMISTIC, REPEATABLE_READ);
        doTestInvokeTest(ccfg, PESSIMISTIC, SERIALIZABLE);
    }

    /**
     * @throws Exception If failed.
     */
    public void testPessimisticOnePhaseCommitFullSync() throws Exception {
        CacheConfiguration ccfg = cacheConfiguration(FULL_SYNC, 1);

        doTestInvokeTest(ccfg, PESSIMISTIC, READ_COMMITTED);
        doTestInvokeTest(ccfg, PESSIMISTIC, REPEATABLE_READ);
        doTestInvokeTest(ccfg, PESSIMISTIC, SERIALIZABLE);
    }

    /**
     * @throws Exception If failed.
     */
    public void testPessimisticOnePhaseCommitFullSyncWithNearCache() throws Exception {
        CacheConfiguration ccfg = cacheConfiguration(FULL_SYNC, 1)
            .setNearConfiguration(new NearCacheConfiguration());

        doTestInvokeTest(ccfg, PESSIMISTIC, READ_COMMITTED);
        doTestInvokeTest(ccfg, PESSIMISTIC, REPEATABLE_READ);
        doTestInvokeTest(ccfg, PESSIMISTIC, SERIALIZABLE);
    }

    /**
     * @throws Exception If failed.
     */
    public void testPessimistic() throws Exception {
        CacheConfiguration ccfg = cacheConfiguration(PRIMARY_SYNC, 2);

        doTestInvokeTest(ccfg, PESSIMISTIC, READ_COMMITTED);
        doTestInvokeTest(ccfg, PESSIMISTIC, REPEATABLE_READ);
        doTestInvokeTest(ccfg, PESSIMISTIC, SERIALIZABLE);
    }

    /**
     * @throws Exception If failed.
     */
    public void testPessimisticWithNearCache() throws Exception {
        CacheConfiguration ccfg = cacheConfiguration(PRIMARY_SYNC, 2)
            .setNearConfiguration(new NearCacheConfiguration());

        doTestInvokeTest(ccfg, PESSIMISTIC, READ_COMMITTED);
        doTestInvokeTest(ccfg, PESSIMISTIC, REPEATABLE_READ);
        doTestInvokeTest(ccfg, PESSIMISTIC, SERIALIZABLE);
    }

    /**
     * @throws Exception If failed.
     */
    public void testPessimisticFullSync() throws Exception {
        CacheConfiguration ccfg = cacheConfiguration(FULL_SYNC, 2);

        doTestInvokeTest(ccfg, PESSIMISTIC, READ_COMMITTED);
        doTestInvokeTest(ccfg, PESSIMISTIC, REPEATABLE_READ);
        doTestInvokeTest(ccfg, PESSIMISTIC, SERIALIZABLE);
    }

    /**
     * @throws Exception If failed.
     */
    public void testOptimisticOnePhaseCommit() throws Exception {
        CacheConfiguration ccfg = cacheConfiguration(PRIMARY_SYNC, 1);

        doTestInvokeTest(ccfg, OPTIMISTIC, READ_COMMITTED);
        doTestInvokeTest(ccfg, OPTIMISTIC, REPEATABLE_READ);
        doTestInvokeTest(ccfg, OPTIMISTIC, SERIALIZABLE);
    }

    /**
     * @throws Exception If failed.
     */
    public void testOptimisticOnePhaseCommitFullSync() throws Exception {
        CacheConfiguration ccfg = cacheConfiguration(FULL_SYNC, 1);

        doTestInvokeTest(ccfg, OPTIMISTIC, READ_COMMITTED);
        doTestInvokeTest(ccfg, OPTIMISTIC, REPEATABLE_READ);
        doTestInvokeTest(ccfg, OPTIMISTIC, SERIALIZABLE);
    }

    /**
     * @throws Exception If failed.
     */
    public void testOptimisticOnePhaseCommitFullSyncWithNearCache() throws Exception {
        CacheConfiguration ccfg = cacheConfiguration(FULL_SYNC, 1)
            .setNearConfiguration(new NearCacheConfiguration());

        doTestInvokeTest(ccfg, OPTIMISTIC, READ_COMMITTED);
        doTestInvokeTest(ccfg, OPTIMISTIC, REPEATABLE_READ);
        doTestInvokeTest(ccfg, OPTIMISTIC, SERIALIZABLE);
    }

    /**
     * @throws Exception If failed.
     */
    public void testOptimistic() throws Exception {
        CacheConfiguration ccfg = cacheConfiguration(PRIMARY_SYNC, 2);

        doTestInvokeTest(ccfg, OPTIMISTIC, READ_COMMITTED);
        doTestInvokeTest(ccfg, OPTIMISTIC, REPEATABLE_READ);
        doTestInvokeTest(ccfg, OPTIMISTIC, SERIALIZABLE);
    }

    /**
     * @throws Exception If failed.
     */
    public void testOptimisticFullSync() throws Exception {
        CacheConfiguration ccfg = cacheConfiguration(FULL_SYNC, 2);

        doTestInvokeTest(ccfg, OPTIMISTIC, READ_COMMITTED);
        doTestInvokeTest(ccfg, OPTIMISTIC, REPEATABLE_READ);
        doTestInvokeTest(ccfg, OPTIMISTIC, SERIALIZABLE);
    }

    /**
     * @throws Exception If failed.
     */
    public void testOptimisticFullSyncWithNearCache() throws Exception {
        // Fixed: previously no near-cache configuration was set, making this a
        // duplicate of testOptimisticFullSync() despite its name; all other
        // "...WithNearCache" tests configure one.
        CacheConfiguration ccfg = cacheConfiguration(FULL_SYNC, 2)
            .setNearConfiguration(new NearCacheConfiguration());

        doTestInvokeTest(ccfg, OPTIMISTIC, READ_COMMITTED);
        doTestInvokeTest(ccfg, OPTIMISTIC, REPEATABLE_READ);
        doTestInvokeTest(ccfg, OPTIMISTIC, SERIALIZABLE);
    }

    /**
     * Runs the invoke-with-non-serializable-processor scenario from the client
     * node, from a random server node, and via an implicit transaction, and
     * verifies every attempt fails with NotSerializableException while the
     * original value survives the rollback.
     *
     * @param ccfg Cache configuration.
     * @param txConcurrency Transaction concurrency mode.
     * @param txIsolation Transaction isolation level.
     * @throws Exception If failed.
     */
    private void doTestInvokeTest(CacheConfiguration ccfg, TransactionConcurrency txConcurrency,
        TransactionIsolation txIsolation) throws Exception {
        IgniteEx cln = grid(getServerNodeCount());

        grid(0).createCache(ccfg);

        IgniteCache clnCache;

        if (ccfg.getNearConfiguration() != null)
            clnCache = cln.createNearCache(ccfg.getName(), ccfg.getNearConfiguration());
        else
            clnCache = cln.cache(ccfg.getName());

        putKeys(clnCache, EXPECTED_VALUE);

        try {
            // Explicit tx.
            for (int i = 0; i < ITERATION_CNT; i++) {
                try (final Transaction tx = cln.transactions().txStart(txConcurrency, txIsolation)) {
                    putKeys(clnCache, WRONG_VALUE);

                    clnCache.invoke(KEYS, new NonSerialazibleEntryProcessor());

                    GridTestUtils.assertThrowsWithCause(new Callable<Object>() {
                        @Override public Object call() throws Exception {
                            tx.commit();

                            return null;
                        }
                    }, NotSerializableException.class);
                }

                checkKeys(clnCache, EXPECTED_VALUE);
            }

            // From affinity node.
            Ignite grid = grid(ThreadLocalRandom.current().nextInt(NODES));

            final IgniteCache cache = grid.cache(ccfg.getName());

            // Explicit tx.
            for (int i = 0; i < ITERATION_CNT; i++) {
                try (final Transaction tx = grid.transactions().txStart(txConcurrency, txIsolation)) {
                    putKeys(cache, WRONG_VALUE);

                    cache.invoke(KEYS, new NonSerialazibleEntryProcessor());

                    GridTestUtils.assertThrowsWithCause(new Callable<Object>() {
                        @Override public Object call() throws Exception {
                            tx.commit();

                            return null;
                        }
                    }, NotSerializableException.class);
                }

                checkKeys(cache, EXPECTED_VALUE);
            }

            final IgniteCache clnCache0 = clnCache;

            // Implicit tx.
            for (int i = 0; i < ITERATION_CNT; i++) {
                GridTestUtils.assertThrowsWithCause(new Callable<Object>() {
                    @Override public Object call() throws Exception {
                        clnCache0.invoke(KEYS, new NonSerialazibleEntryProcessor());

                        return null;
                    }
                }, NotSerializableException.class);
            }

            checkKeys(clnCache, EXPECTED_VALUE);
        }
        finally {
            // Always drop the cache so the next scenario starts clean.
            grid(0).destroyCache(ccfg.getName());
        }
    }

    /**
     * @param cache Cache.
     * @param val Value.
     */
    private void putKeys(IgniteCache cache, int val) {
        cache.put(KEYS, val);
    }

    /**
     * @param cache Cache.
     * @param expVal Expected value.
     */
    private void checkKeys(IgniteCache cache, int expVal) {
        assertEquals(expVal, cache.get(KEYS));
    }

    /**
     * @param wrMode Write synchronization mode.
     * @param backup Number of backups.
     * @return Cache configuration.
     */
    private CacheConfiguration cacheConfiguration(CacheWriteSynchronizationMode wrMode, int backup) {
        return new CacheConfiguration("test-cache-" + wrMode + "-" + backup)
            .setAtomicityMode(TRANSACTIONAL)
            // Fixed: wrMode was previously ignored (FULL_SYNC hard-coded), so the
            // PRIMARY_SYNC test variants never exercised what their names claim.
            .setWriteSynchronizationMode(wrMode)
            .setBackups(backup);
    }

    /**
     * Entry processor that is deliberately NOT Serializable, so marshalling it
     * for remote execution must fail with NotSerializableException.
     */
    private static class NonSerialazibleEntryProcessor implements EntryProcessor<Integer, Integer, Integer> {
        /** {@inheritDoc} */
        @Override public Integer process(MutableEntry<Integer, Integer> entry, Object... arguments)
            throws EntryProcessorException {
            entry.setValue(42);

            return null;
        }
    }
}
| |
/**
* Copyright 2011 Noa Resare
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.bushstar.htmlcoinj.core;
import com.bushstar.htmlcoinj.params.MainNetParams;
import org.junit.Test;
import org.spongycastle.util.encoders.Hex;
import java.io.ByteArrayOutputStream;
import java.nio.BufferUnderflowException;
import java.nio.ByteBuffer;
import java.util.Arrays;
import static org.junit.Assert.*;
public class HTMLcoinSerializerTest {
private final byte[] addrMessage = Hex.decode("f9beb4d96164647200000000000000001f000000" +
"ed52399b01e215104d010000000000000000000000000000000000ffff0a000001208d");
private final byte[] txMessage = Hex.decode(
"F9 BE B4 D9 74 78 00 00 00 00 00 00 00 00 00 00" +
"02 01 00 00 E2 93 CD BE 01 00 00 00 01 6D BD DB" +
"08 5B 1D 8A F7 51 84 F0 BC 01 FA D5 8D 12 66 E9" +
"B6 3B 50 88 19 90 E4 B4 0D 6A EE 36 29 00 00 00" +
"00 8B 48 30 45 02 21 00 F3 58 1E 19 72 AE 8A C7" +
"C7 36 7A 7A 25 3B C1 13 52 23 AD B9 A4 68 BB 3A" +
"59 23 3F 45 BC 57 83 80 02 20 59 AF 01 CA 17 D0" +
"0E 41 83 7A 1D 58 E9 7A A3 1B AE 58 4E DE C2 8D" +
"35 BD 96 92 36 90 91 3B AE 9A 01 41 04 9C 02 BF" +
"C9 7E F2 36 CE 6D 8F E5 D9 40 13 C7 21 E9 15 98" +
"2A CD 2B 12 B6 5D 9B 7D 59 E2 0A 84 20 05 F8 FC" +
"4E 02 53 2E 87 3D 37 B9 6F 09 D6 D4 51 1A DA 8F" +
"14 04 2F 46 61 4A 4C 70 C0 F1 4B EF F5 FF FF FF" +
"FF 02 40 4B 4C 00 00 00 00 00 19 76 A9 14 1A A0" +
"CD 1C BE A6 E7 45 8A 7A BA D5 12 A9 D9 EA 1A FB" +
"22 5E 88 AC 80 FA E9 C7 00 00 00 00 19 76 A9 14" +
"0E AB 5B EA 43 6A 04 84 CF AB 12 48 5E FD A0 B7" +
"8B 4E CC 52 88 AC 00 00 00 00");
@Test
public void testAddr() throws Exception {
HTMLcoinSerializer bs = new HTMLcoinSerializer(MainNetParams.get());
// the actual data from https://en.htmlcoin.it/wiki/Protocol_specification#addr
AddressMessage a = (AddressMessage)bs.deserialize(ByteBuffer.wrap(addrMessage));
assertEquals(1, a.getAddresses().size());
PeerAddress pa = a.getAddresses().get(0);
assertEquals(8333, pa.getPort());
assertEquals("10.0.0.1", pa.getAddr().getHostAddress());
ByteArrayOutputStream bos = new ByteArrayOutputStream(addrMessage.length);
bs.serialize(a, bos);
//this wont be true due to dynamic timestamps.
//assertTrue(LazyParseByteCacheTest.arrayContains(bos.toByteArray(), addrMessage));
}
@Test
public void testLazyParsing() throws Exception {
HTMLcoinSerializer bs = new HTMLcoinSerializer(MainNetParams.get(), true, false);
Transaction tx = (Transaction)bs.deserialize(ByteBuffer.wrap(txMessage));
assertNotNull(tx);
assertEquals(false, tx.isParsed());
assertEquals(true, tx.isCached());
tx.getInputs();
assertEquals(true, tx.isParsed());
ByteArrayOutputStream bos = new ByteArrayOutputStream();
bs.serialize(tx, bos);
assertEquals(true, Arrays.equals(txMessage, bos.toByteArray()));
}
@Test
public void testCachedParsing() throws Exception {
testCachedParsing(true);
testCachedParsing(false);
}
private void testCachedParsing(boolean lazy) throws Exception {
HTMLcoinSerializer bs = new HTMLcoinSerializer(MainNetParams.get(), lazy, true);
//first try writing to a fields to ensure uncaching and children are not affected
Transaction tx = (Transaction)bs.deserialize(ByteBuffer.wrap(txMessage));
assertNotNull(tx);
assertEquals(!lazy, tx.isParsed());
assertEquals(true, tx.isCached());
tx.setLockTime(1);
//parent should have been uncached
assertEquals(false, tx.isCached());
//child should remain cached.
assertEquals(true, tx.getInputs().get(0).isCached());
ByteArrayOutputStream bos = new ByteArrayOutputStream();
bs.serialize(tx, bos);
assertEquals(true, !Arrays.equals(txMessage, bos.toByteArray()));
//now try writing to a child to ensure uncaching is propagated up to parent but not to siblings
tx = (Transaction)bs.deserialize(ByteBuffer.wrap(txMessage));
assertNotNull(tx);
assertEquals(!lazy, tx.isParsed());
assertEquals(true, tx.isCached());
tx.getInputs().get(0).setSequenceNumber(1);
//parent should have been uncached
assertEquals(false, tx.isCached());
//so should child
assertEquals(false, tx.getInputs().get(0).isCached());
bos = new ByteArrayOutputStream();
bs.serialize(tx, bos);
assertEquals(true, !Arrays.equals(txMessage, bos.toByteArray()));
//deserialize/reserialize to check for equals.
tx = (Transaction)bs.deserialize(ByteBuffer.wrap(txMessage));
assertNotNull(tx);
assertEquals(!lazy, tx.isParsed());
assertEquals(true, tx.isCached());
bos = new ByteArrayOutputStream();
bs.serialize(tx, bos);
assertEquals(true, Arrays.equals(txMessage, bos.toByteArray()));
//deserialize/reserialize to check for equals. Set a field to it's existing value to trigger uncache
tx = (Transaction)bs.deserialize(ByteBuffer.wrap(txMessage));
assertNotNull(tx);
assertEquals(!lazy, tx.isParsed());
assertEquals(true, tx.isCached());
tx.getInputs().get(0).setSequenceNumber(tx.getInputs().get(0).getSequenceNumber());
bos = new ByteArrayOutputStream();
bs.serialize(tx, bos);
assertEquals(true, Arrays.equals(txMessage, bos.toByteArray()));
}
/**
* Get 1 header of the block number 1 (the first one is 0) in the chain
*/
@Test
public void testHeaders1() throws Exception {
HTMLcoinSerializer bs = new HTMLcoinSerializer(MainNetParams.get());
HeadersMessage hm = (HeadersMessage) bs.deserialize(ByteBuffer.wrap(Hex.decode("f9beb4d9686561" +
"646572730000000000520000005d4fab8101010000006fe28c0ab6f1b372c1a6a246ae6" +
"3f74f931e8365e15a089c68d6190000000000982051fd1e4ba744bbbe680e1fee14677b" +
"a1a3c3540bf7b1cdb606e857233e0e61bc6649ffff001d01e3629900")));
// The first block after the genesis
// http://blockexplorer.com/b/1
Block block = hm.getBlockHeaders().get(0);
String hash = block.getHashAsString();
assertEquals(hash, "00000000839a8e6886ab5951d76f411475428afc90947ee320161bbf18eb6048");
assertNull(block.transactions);
assertEquals(Utils.bytesToHexString(block.getMerkleRoot().getBytes()),
"0e3e2357e806b6cdb1f70b54c3a3a17b6714ee1f0e68bebb44a74b1efd512098");
}
/**
 * Gets 6 headers of blocks 1-6 in the chain and checks the hash and nonce of
 * the first and the fourth header against the block explorer values.
 *
 * Fixes from review: Javadoc moved before the annotation, and assertEquals
 * arguments put in JUnit's (expected, actual) order so failure messages read
 * correctly.
 */
@Test
public void testHeaders2() throws Exception {
    HTMLcoinSerializer bs = new HTMLcoinSerializer(MainNetParams.get());
    // Serialized "headers" message carrying the first six main-chain block headers.
    HeadersMessage hm = (HeadersMessage) bs.deserialize(ByteBuffer.wrap(Hex.decode("f9beb4d96865616465" +
        "72730000000000e701000085acd4ea06010000006fe28c0ab6f1b372c1a6a246ae63f74f931e" +
        "8365e15a089c68d6190000000000982051fd1e4ba744bbbe680e1fee14677ba1a3c3540bf7b1c" +
        "db606e857233e0e61bc6649ffff001d01e3629900010000004860eb18bf1b1620e37e9490fc8a" +
        "427514416fd75159ab86688e9a8300000000d5fdcc541e25de1c7a5addedf24858b8bb665c9f36" +
        "ef744ee42c316022c90f9bb0bc6649ffff001d08d2bd610001000000bddd99ccfda39da1b108ce1" +
        "a5d70038d0a967bacb68b6b63065f626a0000000044f672226090d85db9a9f2fbfe5f0f9609b387" +
        "af7be5b7fbb7a1767c831c9e995dbe6649ffff001d05e0ed6d00010000004944469562ae1c2c74" +
        "d9a535e00b6f3e40ffbad4f2fda3895501b582000000007a06ea98cd40ba2e3288262b28638cec" +
        "5337c1456aaf5eedc8e9e5a20f062bdf8cc16649ffff001d2bfee0a9000100000085144a84488e" +
        "a88d221c8bd6c059da090e88f8a2c99690ee55dbba4e00000000e11c48fecdd9e72510ca84f023" +
        "370c9a38bf91ac5cae88019bee94d24528526344c36649ffff001d1d03e4770001000000fc33f5" +
        "96f822a0a1951ffdbf2a897b095636ad871707bf5d3162729b00000000379dfb96a5ea8c81700ea4" +
        "ac6b97ae9a9312b2d4301a29580e924ee6761a2520adc46649ffff001d189c4c9700")));
    int nBlocks = hm.getBlockHeaders().size();
    assertEquals(6, nBlocks);
    // index 0 block is the number 1 block in the block chain
    // http://blockexplorer.com/b/1
    Block zeroBlock = hm.getBlockHeaders().get(0);
    String zeroBlockHash = zeroBlock.getHashAsString();
    assertEquals("00000000839a8e6886ab5951d76f411475428afc90947ee320161bbf18eb6048",
        zeroBlockHash);
    assertEquals(2573394689L, zeroBlock.getNonce());
    Block thirdBlock = hm.getBlockHeaders().get(3);
    String thirdBlockHash = thirdBlock.getHashAsString();
    // index 3 block is the number 4 block in the block chain
    // http://blockexplorer.com/b/4
    assertEquals("000000004ebadb55ee9096c9a2f8880e09da59c0d68b1c228da88e48844a1485",
        thirdBlockHash);
    assertEquals(2850094635L, thirdBlock.getNonce());
}
/**
 * Verifies that malformed packet headers are rejected: a buffer too short to
 * contain a header, and a header whose declared message size is wrong.
 */
@Test
public void testHTMLcoinPacketHeader() {
// A single zero byte cannot possibly contain a full packet header.
try {
new HTMLcoinSerializer.HTMLcoinPacketHeader(ByteBuffer.wrap(new byte[]{0}));
fail();
} catch (BufferUnderflowException e) {
// expected: the buffer runs out before a complete header is read
}
// Message with a Message size which is 1 too big, in little endian format.
byte[] wrongMessageLength = Hex.decode("000000000000000000000000010000020000000000");
try {
new HTMLcoinSerializer.HTMLcoinPacketHeader(ByteBuffer.wrap(wrongMessageLength));
fail();
} catch (ProtocolException e) {
// expected
}
}
/**
 * Seeking for the magic bytes over a stream that has data but no magic
 * sequence must exhaust the buffer and fail with BufferUnderflowException.
 */
@Test
public void testSeekPastMagicBytes() {
    // Some bytes are present, but none of them start the magic sequence.
    byte[] noMagicBytes = Hex.decode("000000");
    HTMLcoinSerializer serializer = new HTMLcoinSerializer(MainNetParams.get());
    try {
        serializer.seekPastMagicBytes(ByteBuffer.wrap(noMagicBytes));
        fail();
    } catch (BufferUnderflowException e) {
        // expected: buffer exhausted without locating the magic bytes
    }
}
/**
 * Tests serialization of an unknown message: the serializer is expected to
 * throw when asked to serialize a message type it has no mapping for.
 *
 * Bug fix: the original {@code catch (Throwable e)} also swallowed the
 * {@link AssertionError} raised by {@code fail()}, so the test passed even
 * when {@code serialize} did NOT throw. The AssertionError is now rethrown.
 */
@Test
public void testSerializeUnknownMessage() {
    HTMLcoinSerializer bs = new HTMLcoinSerializer(MainNetParams.get());
    UnknownMessage a = new UnknownMessage();
    ByteArrayOutputStream bos = new ByteArrayOutputStream(addrMessage.length);
    try {
        bs.serialize(a, bos);
        fail();
    } catch (AssertionError e) {
        // Propagate fail(); without this the broad catch below would swallow it.
        throw e;
    } catch (Throwable e) {
        // expected: serializing an unknown message type must throw
    }
}
/**
 * Unknown message for testSerializeUnknownMessage.
 * Both parse hooks are deliberate no-ops: the type carries no payload and
 * exists only so the serializer is handed a message it does not recognize.
 */
class UnknownMessage extends Message {
// No payload to parse.
@Override
void parse() throws ProtocolException {
}
// Lightweight parse is a no-op as well.
@Override
protected void parseLite() throws ProtocolException {
}
}
}
| |
package org.motechproject.openmrs19.service.impl;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang.Validate;
import org.motechproject.event.MotechEvent;
import org.motechproject.event.listener.EventRelay;
import org.motechproject.openmrs19.domain.OpenMRSConcept;
import org.motechproject.openmrs19.domain.OpenMRSFacility;
import org.motechproject.openmrs19.domain.OpenMRSPatient;
import org.motechproject.openmrs19.domain.OpenMRSPerson;
import org.motechproject.openmrs19.exception.HttpException;
import org.motechproject.openmrs19.exception.OpenMRSException;
import org.motechproject.openmrs19.exception.PatientNotFoundException;
import org.motechproject.openmrs19.helper.EventHelper;
import org.motechproject.openmrs19.resource.PatientResource;
import org.motechproject.openmrs19.resource.model.Identifier;
import org.motechproject.openmrs19.resource.model.Patient;
import org.motechproject.openmrs19.resource.model.PatientListResult;
import org.motechproject.openmrs19.service.EventKeys;
import org.motechproject.openmrs19.service.OpenMRSFacilityService;
import org.motechproject.openmrs19.service.OpenMRSPatientService;
import org.motechproject.openmrs19.util.ConverterUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@Service("patientService")
public class OpenMRSPatientServiceImpl implements OpenMRSPatientService {

    private static final Logger LOGGER = LoggerFactory.getLogger(OpenMRSPatientServiceImpl.class);

    private final PatientResource patientResource;
    private final OpenMRSPersonServiceImpl personAdapter;
    private final OpenMRSFacilityService facilityAdapter;
    private final EventRelay eventRelay;

    @Autowired
    public OpenMRSPatientServiceImpl(PatientResource patientResource, OpenMRSPersonServiceImpl personAdapter,
            OpenMRSFacilityService facilityAdapter, EventRelay eventRelay) {
        this.patientResource = patientResource;
        this.personAdapter = personAdapter;
        this.facilityAdapter = facilityAdapter;
        this.eventRelay = eventRelay;
    }

    /**
     * Looks up a patient by MOTECH id.
     *
     * @param motechId the MOTECH identifier to search by; must not be empty
     * @return the matching patient, or {@code null} when the query fails or nothing
     *         matches; if more than one patient matches, the first result is used
     *         and a warning is logged
     */
    @Override
    public OpenMRSPatient getPatientByMotechId(String motechId) {
        Validate.notEmpty(motechId, "Motech Id cannot be empty");
        PatientListResult patientList;
        try {
            patientList = patientResource.queryForPatient(motechId);
        } catch (HttpException e) {
            LOGGER.error("Failed search for patient by MoTeCH Id: " + motechId);
            return null;
        }
        if (patientList.getResults().isEmpty()) {
            return null;
        } else if (patientList.getResults().size() > 1) {
            LOGGER.warn("Search for patient by id returned more than 1 result");
        }
        return getPatientByUuid(patientList.getResults().get(0).getUuid());
    }

    /**
     * Looks up a patient by OpenMRS UUID and converts it to the MOTECH representation.
     *
     * @param uuid the OpenMRS patient UUID; must not be empty
     * @return the patient, or {@code null} when the patient or the MOTECH identifier
     *         type UUID cannot be retrieved from the server
     */
    @Override
    public OpenMRSPatient getPatientByUuid(String uuid) {
        Validate.notEmpty(uuid, "Patient Id cannot be empty");
        Patient patient;
        try {
            patient = patientResource.getPatientById(uuid);
        } catch (HttpException e) {
            LOGGER.error("Failed to get patient by id: " + uuid);
            return null;
        }
        String motechIdentifierUuid;
        try {
            motechIdentifierUuid = patientResource.getMotechPatientIdentifierUuid();
        } catch (HttpException e) {
            LOGGER.error("There was an exception retrieving the MoTeCH Identifier Type UUID");
            return null;
        }
        String motechId = null;
        OpenMRSFacility facility = null;
        Identifier motechIdentifier = patient.getIdentifierByIdentifierType(motechIdentifierUuid);
        if (motechIdentifier == null) {
            LOGGER.warn("No MoTeCH Id found on Patient with id: " + patient.getUuid());
        } else {
            // The facility comes from the location attached to the MOTECH identifier, if any.
            if (motechIdentifier.getLocation() != null) {
                facility = facilityAdapter.getFacilityByUuid(motechIdentifier.getLocation().getUuid());
            }
            motechId = motechIdentifier.getIdentifier();
        }
        List<Identifier> supportedIdentifierTypeList = getSupportedIdentifierTypeList(patient.getIdentifiers(), motechIdentifierUuid);
        return ConverterUtils.toOpenMRSPatient(patient, facility, motechId, supportedIdentifierTypeList);
    }

    /**
     * Creates a patient in OpenMRS. A person record is created first unless the
     * given patient already references an existing person (non-null person id).
     *
     * @param patient the patient to create; must carry a MOTECH id, a person and a facility
     * @return the saved patient, or {@code null} when the OpenMRS call fails
     */
    @Override
    public OpenMRSPatient createPatient(OpenMRSPatient patient) {
        validatePatientBeforeSave(patient);
        OpenMRSPatient openMRSPatient = ConverterUtils.copyPatient(patient);
        OpenMRSPerson person;
        if (patient.getPerson().getId() == null) {
            person = personAdapter.createPerson(patient.getPerson());
        } else {
            person = patient.getPerson();
        }
        Map<String, String> parsedPatientIdentifiers = parsePatientIdentifiers(openMRSPatient.getIdentifiers());
        Patient converted = ConverterUtils.toPatient(openMRSPatient, person, getMotechPatientIdentifierTypeUuid(), parsedPatientIdentifiers);
        try {
            OpenMRSPatient savedPatient = ConverterUtils.toOpenMRSPatient(patientResource.createPatient(converted));
            eventRelay.sendEventMessage(new MotechEvent(EventKeys.CREATED_NEW_PATIENT_SUBJECT, EventHelper.patientParameters(savedPatient)));
            return savedPatient;
        } catch (HttpException e) {
            LOGGER.error("Failed to create a patient in OpenMRS with MoTeCH Id: " + patient.getMotechId());
            return null;
        }
    }

    /** Validates the invariants required before a patient can be saved. */
    private void validatePatientBeforeSave(OpenMRSPatient patient) {
        Validate.notNull(patient, "Patient cannot be null");
        Validate.isTrue(StringUtils.isNotEmpty(patient.getMotechId()), "You must provide a motech id to save a patient");
        Validate.notNull(patient.getPerson(), "Person cannot be null when saving a patient");
        Validate.notNull(patient.getFacility(), "Facility cannot be null when saving a patient");
    }

    /**
     * Searches for patients by name, optionally narrowing the results by MOTECH id.
     *
     * @param name the name to search by; must not be empty
     * @param id   optional MOTECH id fragment; when non-null, only patients whose
     *             MOTECH id contains it are returned
     * @return the matching patients sorted by MOTECH id, or an empty list when the query fails
     */
    @Override
    public List<OpenMRSPatient> search(String name, String id) {
        Validate.notEmpty(name, "Name cannot be empty");
        PatientListResult result;
        try {
            result = patientResource.queryForPatient(name);
        } catch (HttpException e) {
            LOGGER.error("Failed search for patient name: " + name);
            return Collections.emptyList();
        }
        List<OpenMRSPatient> patients = new ArrayList<>();
        for (Patient partialPatient : result.getResults()) {
            // Each search hit is only a partial record, so fetch the full patient.
            OpenMRSPatient patient = getPatientByUuid(partialPatient.getUuid());
            if (id == null) {
                patients.add(patient);
            } else {
                if (patient.getMotechId() != null && patient.getMotechId().contains(id)) {
                    patients.add(patient);
                }
            }
        }
        if (!patients.isEmpty()) {
            sortResults(patients);
        }
        return patients;
    }

    /**
     * Updates a patient, first verifying that a changed MOTECH id does not collide
     * with another existing patient.
     *
     * @throws OpenMRSException when the new MOTECH id is already taken by another patient
     */
    @Override
    public OpenMRSPatient updatePatient(OpenMRSPatient patient, String currentMotechId) {
        if (!patient.getMotechId().equals(currentMotechId) && getPatientByMotechId(patient.getMotechId()) != null) {
            // Message spacing fixed: the id used to be concatenated without separators.
            throw new OpenMRSException("Patient with Motech ID " + patient.getMotechId() + " already exists.");
        }
        return updatePatient(patient);
    }

    /**
     * Updates the given patient's person data, attributes and MOTECH id in OpenMRS
     * and fires an update event.
     *
     * @param patient the patient to update; must carry a non-empty patient id
     * @return the updated patient
     * @throws OpenMRSException when the MOTECH id update call fails
     */
    @Override
    public OpenMRSPatient updatePatient(OpenMRSPatient patient) {
        Validate.notNull(patient, "Patient cannot be null");
        Validate.notEmpty(patient.getPatientId(), "Patient Id may not be empty");
        OpenMRSPatient openMRSPatient = ConverterUtils.copyPatient(patient);
        OpenMRSPerson person = ConverterUtils.copyPerson(patient.getPerson());
        personAdapter.updatePerson(person);
        // the openmrs web service requires an explicit delete request to remove
        // attributes. delete all previous attributes, and then
        // create any attributes attached to the patient
        personAdapter.deleteAllAttributes(person);
        personAdapter.saveAttributesForPerson(person);
        try {
            patientResource.updatePatientMotechId(patient.getPatientId(), patient.getMotechId());
        } catch (HttpException e) {
            throw new OpenMRSException("Failed to update OpenMRS patient with id: " + patient.getPatientId(), e);
        }
        OpenMRSPatient updatedPatient = new OpenMRSPatient(openMRSPatient.getPatientId(), patient.getMotechId(), person, openMRSPatient.getFacility(), null);
        eventRelay.sendEventMessage(new MotechEvent(EventKeys.UPDATED_PATIENT_SUBJECT, EventHelper.patientParameters(updatedPatient)));
        return updatedPatient;
    }

    /**
     * Marks a patient as deceased and fires a deceased event.
     *
     * @param motechId     the MOTECH id of the patient; must not be empty
     * @param causeOfDeath the concept describing the cause of death
     * @param dateOfDeath  the date of death
     * @param comment      accepted for interface compatibility but not used by this implementation
     * @throws PatientNotFoundException when no patient exists for the given MOTECH id
     */
    @Override
    public void deceasePatient(String motechId, OpenMRSConcept causeOfDeath, Date dateOfDeath, String comment)
            throws PatientNotFoundException {
        Validate.notEmpty(motechId, "MoTeCh id cannot be empty");
        OpenMRSPatient patient = getPatientByMotechId(motechId);
        if (patient == null) {
            throw new PatientNotFoundException("Cannot decease patient because no patient found with Motech Id: " + motechId);
        }
        personAdapter.savePersonCauseOfDeath(patient.getPatientId(), dateOfDeath, ConverterUtils.toConcept(causeOfDeath));
        eventRelay.sendEventMessage(new MotechEvent(EventKeys.PATIENT_DECEASED_SUBJECT, EventHelper.patientParameters(patient)));
    }

    /**
     * Deletes the patient with the given UUID and fires a deletion event.
     *
     * @throws PatientNotFoundException when the delete call fails
     */
    @Override
    public void deletePatient(String uuid) throws PatientNotFoundException {
        try {
            patientResource.deletePatient(uuid);
            eventRelay.sendEventMessage(new MotechEvent(EventKeys.DELETED_PATIENT_SUBJECT, EventHelper.patientParameters(new OpenMRSPatient(uuid))));
        } catch (HttpException e) {
            throw new PatientNotFoundException(e);
        }
    }

    /**
     * Retrieves the UUID of the MOTECH patient identifier type configured in OpenMRS.
     *
     * @return the UUID, or {@code null} when the call fails or the type is not configured
     */
    private String getMotechPatientIdentifierTypeUuid() {
        String motechPatientIdentifierTypeUuid;
        try {
            motechPatientIdentifierTypeUuid = patientResource.getMotechPatientIdentifierUuid();
        } catch (HttpException e) {
            LOGGER.error("There was an exception retrieving the MoTeCH Identifier Type UUID");
            return null;
        }
        if (StringUtils.isBlank(motechPatientIdentifierTypeUuid)) {
            LOGGER.error("Cannot save a patient until a MoTeCH Patient Identifier Type is created in the OpenMRS");
            return null;
        }
        return motechPatientIdentifierTypeUuid;
    }

    /** Sorts patients in place by MOTECH id; patients without one sort last. */
    private void sortResults(List<OpenMRSPatient> searchResults) {
        Collections.sort(searchResults, new Comparator<OpenMRSPatient>() {
            @Override
            public int compare(OpenMRSPatient patient1, OpenMRSPatient patient2) {
                if (StringUtils.isNotEmpty(patient1.getMotechId()) && StringUtils.isNotEmpty(patient2.getMotechId())) {
                    return patient1.getMotechId().compareTo(patient2.getMotechId());
                } else if (StringUtils.isNotEmpty(patient1.getMotechId())) {
                    return -1;
                } else if (StringUtils.isNotEmpty(patient2.getMotechId())) {
                    return 1;
                }
                return 0;
            }
        });
    }

    /**
     * Filters the patient's identifiers down to the types supported by MOTECH,
     * resolving each identifier type's display name from OpenMRS.
     *
     * @param patientIdentifierList the raw identifiers of the patient
     * @param motechIdentifierUuid  the MOTECH identifier type UUID, which is skipped
     *                              because it is stored differently
     * @return the supported identifiers, or {@code null} when a type lookup fails
     */
    private List<Identifier> getSupportedIdentifierTypeList(List<Identifier> patientIdentifierList, String motechIdentifierUuid) {
        List<Identifier> supportedIdentifierTypeList = new ArrayList<>();
        for (Identifier identifier : patientIdentifierList) {
            String identifierTypeUuid = identifier.getIdentifierType().getUuid();
            // we omit motechIdentifier, because this identifier is stored differently
            if (!StringUtils.equals(identifierTypeUuid, motechIdentifierUuid)) {
                try {
                    String identifierTypeName = patientResource.getPatientIdentifierTypeNameByUuid(identifierTypeUuid);
                    if (identifierTypeName == null) {
                        LOGGER.warn("The identifier type with UUID {} is not supported", identifierTypeUuid);
                    } else {
                        identifier.getIdentifierType().setName(identifierTypeName);
                        supportedIdentifierTypeList.add(identifier);
                    }
                } catch (HttpException e) {
                    LOGGER.error("There was an exception retrieving the identifier type with UUID {}", identifierTypeUuid);
                    return null;
                }
            }
        }
        return supportedIdentifierTypeList;
    }

    /**
     * Parses patient identifiers. This method checks if the given identifier type name is supported by MOTECH
     * and swaps identifier type name for identifier type uuid.
     *
     * @param identifiers the identifiers of patient, key - identifier type name, value - identifier number
     * @return parsed patient identifiers, key - identifier type uuid, value - identifier number,
     *         or {@code null} when a type lookup fails
     */
    private Map<String, String> parsePatientIdentifiers(Map<String, String> identifiers) {
        Map<String, String> parsedIdentifiers = new HashMap<>();
        // Iterate entries instead of keySet()+get() to avoid a second lookup per key.
        for (Map.Entry<String, String> identifierEntry : identifiers.entrySet()) {
            String identifierTypeName = identifierEntry.getKey();
            try {
                String identifierTypeUuid = patientResource.getPatientIdentifierTypeUuidByName(identifierTypeName);
                if (identifierTypeUuid == null) {
                    LOGGER.warn("The identifier type with name {} is not supported", identifierTypeName);
                } else {
                    parsedIdentifiers.put(identifierTypeUuid, identifierEntry.getValue());
                }
            } catch (HttpException e) {
                LOGGER.error("There was an exception retrieving the identifier type with name {}", identifierTypeName);
                return null;
            }
        }
        return parsedIdentifiers;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.cache.distributed.dht;
import java.io.Externalizable;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.BitSet;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.internal.GridDirectCollection;
import org.apache.ignite.internal.GridDirectTransient;
import org.apache.ignite.internal.processors.affinity.AffinityTopologyVersion;
import org.apache.ignite.internal.processors.cache.GridCacheContext;
import org.apache.ignite.internal.processors.cache.GridCacheSharedContext;
import org.apache.ignite.internal.processors.cache.KeyCacheObject;
import org.apache.ignite.internal.processors.cache.distributed.GridDistributedLockRequest;
import org.apache.ignite.internal.processors.cache.version.GridCacheVersion;
import org.apache.ignite.internal.util.GridLeanMap;
import org.apache.ignite.internal.util.tostring.GridToStringExclude;
import org.apache.ignite.internal.util.tostring.GridToStringInclude;
import org.apache.ignite.internal.util.typedef.internal.S;
import org.apache.ignite.lang.IgniteUuid;
import org.apache.ignite.plugin.extensions.communication.MessageCollectionItemType;
import org.apache.ignite.plugin.extensions.communication.MessageReader;
import org.apache.ignite.plugin.extensions.communication.MessageWriter;
import org.apache.ignite.transactions.TransactionIsolation;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
/**
* DHT lock request.
*
* NOTE(review): this message takes part in Ignite's direct-marshalling protocol.
* The field order in writeTo()/readFrom() together with directType() and
* fieldsCount() defines the wire format and must stay in sync on both sides.
*/
public class GridDhtLockRequest extends GridDistributedLockRequest {
/** Serialization version. */
private static final long serialVersionUID = 0L;
/** Near keys to lock. */
@GridToStringInclude
@GridDirectCollection(KeyCacheObject.class)
private List<KeyCacheObject> nearKeys;
/** Invalidate reader flags. */
private BitSet invalidateEntries;
/** Mini future ID. */
private IgniteUuid miniId;
/** Owner mapped version, if any. Transient: marshalled via ownedKeys/ownedValues. */
@GridToStringInclude
@GridDirectTransient
private Map<KeyCacheObject, GridCacheVersion> owned;
/** Array of keys from {@link #owned}. Used during marshalling and unmarshalling. */
@GridToStringExclude
private KeyCacheObject[] ownedKeys;
/** Array of values from {@link #owned}. Used during marshalling and unmarshalling. */
@GridToStringExclude
private GridCacheVersion[] ownedValues;
/** Topology version. */
private AffinityTopologyVersion topVer;
/** Subject ID. */
private UUID subjId;
/** Task name hash. */
private int taskNameHash;
/** Indexes of keys needed to be preloaded. */
private BitSet preloadKeys;
/** TTL for read operation. */
private long accessTtl;
/**
* Empty constructor required for {@link Externalizable}.
*/
public GridDhtLockRequest() {
// No-op.
}
/**
* @param cacheId Cache ID.
* @param nodeId Node ID.
* @param nearXidVer Near transaction ID.
* @param threadId Thread ID.
* @param futId Future ID.
* @param miniId Mini future ID.
* @param lockVer Cache version.
* @param topVer Topology version.
* @param isInTx {@code True} if implicit transaction lock.
* @param isRead Indicates whether implicit lock is for read or write operation.
* @param isolation Transaction isolation.
* @param isInvalidate Invalidation flag.
* @param timeout Lock timeout.
* @param dhtCnt DHT count.
* @param nearCnt Near count.
* @param txSize Expected transaction size.
* @param subjId Subject ID.
* @param taskNameHash Task name hash code.
* @param accessTtl TTL for read operation.
* @param skipStore Skip store flag.
* @param storeUsed Cache store used flag.
* @param keepBinary Keep binary flag.
* @param addDepInfo Deployment info flag.
*/
public GridDhtLockRequest(
int cacheId,
UUID nodeId,
GridCacheVersion nearXidVer,
long threadId,
IgniteUuid futId,
IgniteUuid miniId,
GridCacheVersion lockVer,
@NotNull AffinityTopologyVersion topVer,
boolean isInTx,
boolean isRead,
TransactionIsolation isolation,
boolean isInvalidate,
long timeout,
int dhtCnt,
int nearCnt,
int txSize,
@Nullable UUID subjId,
int taskNameHash,
long accessTtl,
boolean skipStore,
boolean storeUsed,
boolean keepBinary,
boolean addDepInfo
) {
super(cacheId,
nodeId,
nearXidVer,
threadId,
futId,
lockVer,
isInTx,
isRead,
isolation,
isInvalidate,
timeout,
dhtCnt == 0 ? nearCnt : dhtCnt,
txSize,
skipStore,
keepBinary,
addDepInfo);
this.topVer = topVer;
storeUsed(storeUsed);
// Pre-size collections; the DHT count falls back to the near count when zero.
nearKeys = nearCnt == 0 ? Collections.<KeyCacheObject>emptyList() : new ArrayList<KeyCacheObject>(nearCnt);
invalidateEntries = new BitSet(dhtCnt == 0 ? nearCnt : dhtCnt);
assert miniId != null;
this.miniId = miniId;
this.subjId = subjId;
this.taskNameHash = taskNameHash;
this.accessTtl = accessTtl;
}
/**
* @return Near node ID.
*/
public UUID nearNodeId() {
return nodeId();
}
/**
* @return Subject ID.
*/
public UUID subjectId() {
return subjId;
}
/**
* @return Task name hash.
*/
public int taskNameHash() {
return taskNameHash;
}
/**
* @return Topology version.
*/
@Override public AffinityTopologyVersion topologyVersion() {
return topVer;
}
/**
* Adds a Near key.
*
* Note: the {@code ctx} parameter is not used by this implementation.
*
* @param key Key.
* @param ctx Context.
* @throws IgniteCheckedException If failed.
*/
public void addNearKey(KeyCacheObject key, GridCacheSharedContext ctx)
throws IgniteCheckedException {
nearKeys.add(key);
}
/**
* @return Near keys.
*/
public List<KeyCacheObject> nearKeys() {
return nearKeys == null ? Collections.<KeyCacheObject>emptyList() : nearKeys;
}
/**
* Adds a DHT key.
*
* @param key Key.
* @param invalidateEntry Flag indicating whether node should attempt to invalidate reader.
* @param ctx Context.
* @throws IgniteCheckedException If failed.
*/
public void addDhtKey(
KeyCacheObject key,
boolean invalidateEntry,
GridCacheContext ctx
) throws IgniteCheckedException {
// NOTE(review): 'idx' is inherited state, presumably the running key counter
// advanced by addKeyBytes() below — the flag is recorded at the position of the
// key being added. Confirm against GridDistributedLockRequest.
invalidateEntries.set(idx, invalidateEntry);
addKeyBytes(key, false, ctx);
}
/**
* Marks last added key for preloading.
*/
public void markLastKeyForPreload() {
assert idx > 0;
if (preloadKeys == null)
preloadKeys = new BitSet();
// idx - 1 is the index of the most recently added key (see addDhtKey()).
preloadKeys.set(idx - 1, true);
}
/**
* @param idx Key index.
* @return {@code True} if need to preload key with given index.
*/
public boolean needPreloadKey(int idx) {
return preloadKeys != null && preloadKeys.get(idx);
}
/**
* Sets owner and its mapped version.
*
* @param key Key.
* @param ownerMapped Owner mapped version.
*/
public void owned(KeyCacheObject key, GridCacheVersion ownerMapped) {
if (owned == null)
owned = new GridLeanMap<>(3);
owned.put(key, ownerMapped);
}
/**
* @param key Key.
* @return Owner and its mapped versions.
*/
@Nullable public GridCacheVersion owned(KeyCacheObject key) {
return owned == null ? null : owned.get(key);
}
/**
* @param idx Entry index to check.
* @return {@code True} if near entry should be invalidated.
*/
public boolean invalidateNearEntry(int idx) {
return invalidateEntries.get(idx);
}
/**
* @return Mini ID.
*/
public IgniteUuid miniId() {
return miniId;
}
/**
* @return TTL for read operation.
*/
public long accessTtl() {
return accessTtl;
}
/** {@inheritDoc} */
@Override public void prepareMarshal(GridCacheSharedContext ctx) throws IgniteCheckedException {
super.prepareMarshal(ctx);
prepareMarshalCacheObjects(nearKeys, ctx.cacheContext(cacheId));
// 'owned' is @GridDirectTransient, so it is flattened into the parallel
// ownedKeys/ownedValues arrays which are what actually goes over the wire.
if (owned != null && ownedKeys == null) {
ownedKeys = new KeyCacheObject[owned.size()];
ownedValues = new GridCacheVersion[ownedKeys.length];
int i = 0;
for (Map.Entry<KeyCacheObject, GridCacheVersion> entry : owned.entrySet()) {
ownedKeys[i] = entry.getKey();
ownedValues[i] = entry.getValue();
i++;
}
}
}
/** {@inheritDoc} */
@Override public void finishUnmarshal(GridCacheSharedContext ctx, ClassLoader ldr) throws IgniteCheckedException {
super.finishUnmarshal(ctx, ldr);
finishUnmarshalCacheObjects(nearKeys, ctx.cacheContext(cacheId), ldr);
// Rebuild the transient 'owned' map from the parallel arrays, then drop them.
if (ownedKeys != null) {
owned = new GridLeanMap<>(ownedKeys.length);
for (int i = 0; i < ownedKeys.length; i++) {
ownedKeys[i].finishUnmarshal(ctx.cacheContext(cacheId).cacheObjectContext(), ldr);
owned.put(ownedKeys[i], ownedValues[i]);
}
ownedKeys = null;
ownedValues = null;
}
}
/** {@inheritDoc} */
@Override public boolean writeTo(ByteBuffer buf, MessageWriter writer) {
writer.setBuffer(buf);
if (!super.writeTo(buf, writer))
return false;
if (!writer.isHeaderWritten()) {
if (!writer.writeHeader(directType(), fieldsCount()))
return false;
writer.onHeaderWritten();
}
// Intentional fall-through: each case writes one field and advances the state;
// when the buffer fills up we return false and resume from the saved state on
// the next invocation.
switch (writer.state()) {
case 20:
if (!writer.writeLong("accessTtl", accessTtl))
return false;
writer.incrementState();
case 21:
if (!writer.writeBitSet("invalidateEntries", invalidateEntries))
return false;
writer.incrementState();
case 22:
if (!writer.writeIgniteUuid("miniId", miniId))
return false;
writer.incrementState();
case 23:
if (!writer.writeCollection("nearKeys", nearKeys, MessageCollectionItemType.MSG))
return false;
writer.incrementState();
case 24:
if (!writer.writeObjectArray("ownedKeys", ownedKeys, MessageCollectionItemType.MSG))
return false;
writer.incrementState();
case 25:
if (!writer.writeObjectArray("ownedValues", ownedValues, MessageCollectionItemType.MSG))
return false;
writer.incrementState();
case 26:
if (!writer.writeBitSet("preloadKeys", preloadKeys))
return false;
writer.incrementState();
case 27:
if (!writer.writeUuid("subjId", subjId))
return false;
writer.incrementState();
case 28:
if (!writer.writeInt("taskNameHash", taskNameHash))
return false;
writer.incrementState();
case 29:
if (!writer.writeMessage("topVer", topVer))
return false;
writer.incrementState();
}
return true;
}
/** {@inheritDoc} */
@Override public boolean readFrom(ByteBuffer buf, MessageReader reader) {
reader.setBuffer(buf);
if (!reader.beforeMessageRead())
return false;
if (!super.readFrom(buf, reader))
return false;
// Intentional fall-through mirroring writeTo(): fields are read in the same
// order and the reader resumes from its saved state on partial reads.
switch (reader.state()) {
case 20:
accessTtl = reader.readLong("accessTtl");
if (!reader.isLastRead())
return false;
reader.incrementState();
case 21:
invalidateEntries = reader.readBitSet("invalidateEntries");
if (!reader.isLastRead())
return false;
reader.incrementState();
case 22:
miniId = reader.readIgniteUuid("miniId");
if (!reader.isLastRead())
return false;
reader.incrementState();
case 23:
nearKeys = reader.readCollection("nearKeys", MessageCollectionItemType.MSG);
if (!reader.isLastRead())
return false;
reader.incrementState();
case 24:
ownedKeys = reader.readObjectArray("ownedKeys", MessageCollectionItemType.MSG, KeyCacheObject.class);
if (!reader.isLastRead())
return false;
reader.incrementState();
case 25:
ownedValues = reader.readObjectArray("ownedValues", MessageCollectionItemType.MSG, GridCacheVersion.class);
if (!reader.isLastRead())
return false;
reader.incrementState();
case 26:
preloadKeys = reader.readBitSet("preloadKeys");
if (!reader.isLastRead())
return false;
reader.incrementState();
case 27:
subjId = reader.readUuid("subjId");
if (!reader.isLastRead())
return false;
reader.incrementState();
case 28:
taskNameHash = reader.readInt("taskNameHash");
if (!reader.isLastRead())
return false;
reader.incrementState();
case 29:
topVer = reader.readMessage("topVer");
if (!reader.isLastRead())
return false;
reader.incrementState();
}
return reader.afterMessageRead(GridDhtLockRequest.class);
}
/** {@inheritDoc} Unique message type id — part of the wire protocol. */
@Override public short directType() {
return 30;
}
/** {@inheritDoc} Total field count including the superclass fields. */
@Override public byte fieldsCount() {
return 30;
}
/** {@inheritDoc} */
@Override public String toString() {
return S.toString(GridDhtLockRequest.class, this, "super", super.toString());
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.transport;
import org.elasticsearch.client.Client;
import org.elasticsearch.core.internal.io.IOUtils;
import org.elasticsearch.Version;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.OriginalIndices;
import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsRequest;
import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsResponse;
import org.elasticsearch.action.support.GroupedActionListener;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.action.support.PlainActionFuture;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.Booleans;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.CountDown;
import org.elasticsearch.threadpool.ThreadPool;
import java.io.Closeable;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.BiFunction;
import java.util.function.Function;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import static org.elasticsearch.common.settings.Setting.boolSetting;
/**
* Basic service for accessing remote clusters via gateway nodes
*/
public final class RemoteClusterService extends RemoteClusterAware implements Closeable {
/**
* The maximum number of connections that will be established to a remote cluster. For instance if there is only a single
* seed node, other nodes will be discovered up to the given number of nodes in this setting. The default is 3.
*/
public static final Setting<Integer> REMOTE_CONNECTIONS_PER_CLUSTER = Setting.intSetting("search.remote.connections_per_cluster",
3, 1, Setting.Property.NodeScope);
/**
* The initial connect timeout for remote cluster connections
*/
public static final Setting<TimeValue> REMOTE_INITIAL_CONNECTION_TIMEOUT_SETTING =
Setting.positiveTimeSetting("search.remote.initial_connect_timeout", TimeValue.timeValueSeconds(30), Setting.Property.NodeScope);
/**
* The name of a node attribute to select nodes that should be connected to in the remote cluster.
* For instance a node can be configured with <tt>node.attr.gateway: true</tt> in order to be eligible as a gateway node between
* clusters. In that case <tt>search.remote.node.attr: gateway</tt> can be used to filter out other nodes in the remote cluster.
* The value of the setting is expected to be a boolean, <tt>true</tt> for nodes that can become gateways, <tt>false</tt> otherwise.
*/
public static final Setting<String> REMOTE_NODE_ATTRIBUTE = Setting.simpleString("search.remote.node.attr",
Setting.Property.NodeScope);
/**
* If <code>true</code> connecting to remote clusters is supported on this node. If <code>false</code> this node will not establish
* connections to any remote clusters configured. Search requests executed against this node (where this node is the coordinating node)
* will fail if remote cluster syntax is used as an index pattern. The default is <code>true</code>
*/
public static final Setting<Boolean> ENABLE_REMOTE_CLUSTERS = Setting.boolSetting("search.remote.connect", true,
Setting.Property.NodeScope);
/** Per-cluster dynamic flag: skip a remote cluster when it is unavailable instead of failing the request. */
public static final Setting.AffixSetting<Boolean> REMOTE_CLUSTER_SKIP_UNAVAILABLE =
Setting.affixKeySetting("search.remote.", "skip_unavailable",
key -> boolSetting(key, false, Setting.Property.NodeScope, Setting.Property.Dynamic), REMOTE_CLUSTERS_SEEDS);
/** Transport service used to open connections to remote clusters. */
private final TransportService transportService;
/** Max connections to establish per remote cluster (from {@link #REMOTE_CONNECTIONS_PER_CLUSTER}). */
private final int numRemoteConnections;
// Volatile map published wholesale by updateRemoteClusters(); readers see an immutable snapshot.
private volatile Map<String, RemoteClusterConnection> remoteClusters = Collections.emptyMap();
/**
* Creates the service.
*
* @param settings the node settings, used to resolve the remote cluster configuration
* @param transportService transport used to connect to remote clusters
*/
public RemoteClusterService(Settings settings, TransportService transportService) {
super(settings);
this.transportService = transportService;
numRemoteConnections = REMOTE_CONNECTIONS_PER_CLUSTER.get(settings);
}
/**
* This method updates the list of remote clusters. It's intended to be used as an update consumer on the settings infrastructure
* @param seeds a cluster alias to discovery node mapping representing the remote clusters seeds nodes
* @param connectionListener a listener invoked once every configured cluster has been connected to
*/
private synchronized void updateRemoteClusters(Map<String, List<DiscoveryNode>> seeds, ActionListener<Void> connectionListener) {
if (seeds.containsKey(LOCAL_CLUSTER_GROUP_KEY)) {
throw new IllegalArgumentException("remote clusters must not have the empty string as its key");
}
// Work on a mutable copy; the field is only swapped once everything is set up.
Map<String, RemoteClusterConnection> remoteClusters = new HashMap<>();
if (seeds.isEmpty()) {
connectionListener.onResponse(null);
} else {
// Completes the listener after every cluster reported back (or on first failure via fastForward()).
CountDown countDown = new CountDown(seeds.size());
Predicate<DiscoveryNode> nodePredicate = (node) -> Version.CURRENT.isCompatible(node.getVersion());
if (REMOTE_NODE_ATTRIBUTE.exists(settings)) {
// nodes can be tagged with node.attr.remote_gateway: true to allow a node to be a gateway node for
// cross cluster search
String attribute = REMOTE_NODE_ATTRIBUTE.get(settings);
nodePredicate = nodePredicate.and((node) -> Booleans.parseBoolean(node.getAttributes().getOrDefault(attribute, "false")));
}
remoteClusters.putAll(this.remoteClusters);
for (Map.Entry<String, List<DiscoveryNode>> entry : seeds.entrySet()) {
RemoteClusterConnection remote = this.remoteClusters.get(entry.getKey());
if (entry.getValue().isEmpty()) { // with no seed nodes we just remove the connection
try {
IOUtils.close(remote);
} catch (IOException e) {
logger.warn("failed to close remote cluster connections for cluster: " + entry.getKey(), e);
}
remoteClusters.remove(entry.getKey());
continue;
}
if (remote == null) { // this is a new cluster we have to add a new representation
remote = new RemoteClusterConnection(settings, entry.getKey(), entry.getValue(), transportService, numRemoteConnections,
nodePredicate);
remoteClusters.put(entry.getKey(), remote);
}
// now update the seed nodes no matter if it's new or already existing
RemoteClusterConnection finalRemote = remote;
remote.updateSeedNodes(entry.getValue(), ActionListener.wrap(
response -> {
if (countDown.countDown()) {
connectionListener.onResponse(response);
}
},
exception -> {
// fastForward() short-circuits the countdown so the listener fails at most once.
if (countDown.fastForward()) {
connectionListener.onFailure(exception);
}
// Failures on an already-closed connection are expected and not worth logging.
if (finalRemote.isClosed() == false) {
logger.warn("failed to update seed list for cluster: " + entry.getKey(), exception);
}
}));
}
}
// Publish the new snapshot atomically via the volatile field.
this.remoteClusters = Collections.unmodifiableMap(remoteClusters);
}
/**
 * Returns <code>true</code> if at least one remote cluster is configured on this node.
 */
public boolean isCrossClusterSearchEnabled() {
    return !remoteClusters.isEmpty();
}
/**
 * Returns <code>true</code> if the given node is currently connected for the given remote cluster.
 *
 * @param remoteCluster the alias of the remote cluster
 * @param node the node to check the connection for
 * @throws IllegalArgumentException if no remote cluster is registered under the given alias
 */
public boolean isRemoteNodeConnected(final String remoteCluster, final DiscoveryNode node) {
    RemoteClusterConnection connection = remoteClusters.get(remoteCluster);
    if (connection == null) {
        // be consistent with getConnection/ensureConnected: fail with a descriptive
        // IllegalArgumentException instead of an opaque NullPointerException
        throw new IllegalArgumentException("no such remote cluster: " + remoteCluster);
    }
    return connection.isNodeConnected(node);
}
/**
 * Groups the given index expressions by cluster alias. Remote expressions are grouped under their
 * cluster alias; everything else goes under {@code LOCAL_CLUSTER_GROUP_KEY}. When cross cluster
 * search is enabled an entry for the local cluster is always present (possibly with no indices).
 */
public Map<String, OriginalIndices> groupIndices(IndicesOptions indicesOptions, String[] indices, Predicate<String> indexExists) {
    final Map<String, OriginalIndices> result = new HashMap<>();
    if (isCrossClusterSearchEnabled() == false) {
        // no remote clusters registered: everything resolves against the local cluster
        result.put(LOCAL_CLUSTER_GROUP_KEY, new OriginalIndices(indices, indicesOptions));
        return result;
    }
    for (Map.Entry<String, List<String>> group : groupClusterIndices(indices, indexExists).entrySet()) {
        final List<String> clusterIndices = group.getValue();
        result.put(group.getKey(),
            new OriginalIndices(clusterIndices.toArray(new String[clusterIndices.size()]), indicesOptions));
    }
    // guarantee a (possibly empty) local entry so callers can rely on its presence
    result.putIfAbsent(LOCAL_CLUSTER_GROUP_KEY, new OriginalIndices(Strings.EMPTY_ARRAY, indicesOptions));
    return result;
}
/**
 * Returns <code>true</code> iff a remote cluster is registered under the given alias, otherwise <code>false</code>.
 */
public boolean isRemoteClusterRegistered(String clusterName) {
    return remoteClusters.keySet().contains(clusterName);
}
/**
 * Fans out a {@link ClusterSearchShardsRequest} to every remote cluster in {@code remoteIndicesByCluster}
 * and notifies {@code listener} with the per-cluster responses once all clusters have answered.
 * If any cluster fails, the listener's onFailure is invoked with the first failure; later failures
 * are attached as suppressed exceptions.
 *
 * @throws IllegalArgumentException if one of the given cluster aliases is not registered.
 *         NOTE(review): if this is thrown after some fetches were already dispatched, the listener
 *         is never completed for the in-flight requests — confirm callers validate aliases up front.
 */
public void collectSearchShards(IndicesOptions indicesOptions, String preference, String routing,
                                Map<String, OriginalIndices> remoteIndicesByCluster,
                                ActionListener<Map<String, ClusterSearchShardsResponse>> listener) {
    final CountDown responsesCountDown = new CountDown(remoteIndicesByCluster.size());
    // concurrent: responses arrive on transport threads from multiple clusters in parallel
    final Map<String, ClusterSearchShardsResponse> searchShardsResponses = new ConcurrentHashMap<>();
    final AtomicReference<RemoteTransportException> transportException = new AtomicReference<>();
    for (Map.Entry<String, OriginalIndices> entry : remoteIndicesByCluster.entrySet()) {
        final String clusterName = entry.getKey();
        RemoteClusterConnection remoteClusterConnection = remoteClusters.get(clusterName);
        if (remoteClusterConnection == null) {
            throw new IllegalArgumentException("no such remote cluster: " + clusterName);
        }
        final String[] indices = entry.getValue().indices();
        // local(true): the remote coordinating node answers from its own cluster state
        ClusterSearchShardsRequest searchShardsRequest = new ClusterSearchShardsRequest(indices)
            .indicesOptions(indicesOptions).local(true).preference(preference)
            .routing(routing);
        remoteClusterConnection.fetchSearchShards(searchShardsRequest,
            new ActionListener<ClusterSearchShardsResponse>() {
                @Override
                public void onResponse(ClusterSearchShardsResponse clusterSearchShardsResponse) {
                    searchShardsResponses.put(clusterName, clusterSearchShardsResponse);
                    if (responsesCountDown.countDown()) {
                        // last response in: report either the collected responses or a
                        // failure recorded by another cluster's onFailure
                        RemoteTransportException exception = transportException.get();
                        if (exception == null) {
                            listener.onResponse(searchShardsResponses);
                        } else {
                            listener.onFailure(transportException.get());
                        }
                    }
                }
                @Override
                public void onFailure(Exception e) {
                    RemoteTransportException exception = new RemoteTransportException("error while communicating with remote cluster ["
                        + clusterName + "]", e);
                    // first failure wins the CAS; later failures are folded in as suppressed
                    if (transportException.compareAndSet(null, exception) == false) {
                        exception = transportException.accumulateAndGet(exception, (previous, current) -> {
                            current.addSuppressed(previous);
                            return current;
                        });
                    }
                    if (responsesCountDown.countDown()) {
                        listener.onFailure(exception);
                    }
                }
            });
    }
}
/**
 * Returns a connection to the given node on the given remote cluster
 * @throws IllegalArgumentException if the remote cluster is unknown
 */
public Transport.Connection getConnection(DiscoveryNode node, String cluster) {
    final RemoteClusterConnection remote = remoteClusters.get(cluster);
    if (remote == null) {
        throw new IllegalArgumentException("no such remote cluster: " + cluster);
    }
    return remote.getConnection(node);
}
/**
 * Ensures that the given cluster alias is connected. If the cluster is connected this operation
 * will invoke the listener immediately.
 * @throws IllegalArgumentException if the remote cluster is unknown
 */
public void ensureConnected(String clusterAlias, ActionListener<Void> listener) {
    final RemoteClusterConnection connection = remoteClusters.get(clusterAlias);
    if (connection == null) {
        throw new IllegalArgumentException("no such remote cluster: " + clusterAlias);
    }
    connection.ensureConnected(listener);
}
/**
 * Returns a connection to the given remote cluster (to an arbitrary connected node).
 * @throws IllegalArgumentException if the remote cluster is unknown
 */
public Transport.Connection getConnection(String cluster) {
    final RemoteClusterConnection remote = remoteClusters.get(cluster);
    if (remote == null) {
        throw new IllegalArgumentException("no such remote cluster: " + cluster);
    }
    return remote.getConnection();
}
@Override
protected Set<String> getRemoteClusterNames() {
    // key set view over the current (immutable) snapshot of registered clusters
    return remoteClusters.keySet();
}
@Override
public void listenForUpdates(ClusterSettings clusterSettings) {
    // let the superclass register its own settings consumers first
    super.listenForUpdates(clusterSettings);
    // forward dynamic per-cluster "skip_unavailable" changes to the matching connection;
    // the second argument is the validator, which intentionally accepts any value
    clusterSettings.addAffixUpdateConsumer(REMOTE_CLUSTER_SKIP_UNAVAILABLE, this::updateSkipUnavailable,
        (clusterAlias, value) -> {});
}
/**
 * Pushes a new "skip_unavailable" value to the connection registered under the given alias.
 * A missing alias is silently ignored (the cluster may have been removed concurrently).
 */
public synchronized void updateSkipUnavailable(String clusterAlias, Boolean skipUnavailable) {
    final RemoteClusterConnection connection = this.remoteClusters.get(clusterAlias);
    if (connection == null) {
        return; // nothing registered under this alias — nothing to update
    }
    connection.updateSkipUnavailable(skipUnavailable);
}
/**
 * Updates the seed addresses of the given remote cluster, ignoring the outcome of the connection attempt.
 */
public void updateRemoteCluster(String clusterAlias, List<InetSocketAddress> addresses) {
    final ActionListener<Void> noopListener = ActionListener.wrap((x) -> {}, (x) -> {});
    updateRemoteCluster(clusterAlias, addresses, noopListener);
}
/**
 * Updates the seed addresses of the given remote cluster and notifies the listener once
 * the connection attempt has completed.
 *
 * @param clusterAlias the alias the remote cluster is registered under
 * @param addresses the seed addresses of the remote cluster
 * @param connectionListener invoked when the cluster has been connected to (or failed to connect)
 */
public void updateRemoteCluster(
    final String clusterAlias,
    final List<InetSocketAddress> addresses,
    final ActionListener<Void> connectionListener) {
    // seed nodes are synthetic: the id encodes the alias and address, and we assume the
    // minimum compatible version until a real handshake tells us otherwise
    final Version version = Version.CURRENT.minimumCompatibilityVersion();
    final List<DiscoveryNode> nodes = addresses.stream()
        .map(TransportAddress::new)
        .map(transportAddress -> new DiscoveryNode(clusterAlias + "#" + transportAddress.toString(), transportAddress, version))
        .collect(Collectors.toList());
    updateRemoteClusters(Collections.singletonMap(clusterAlias, nodes), connectionListener);
}
/**
 * Connects to all remote clusters in a blocking fashion. This should be called on node startup to establish an initial connection
 * to all configured seed nodes.
 */
public void initializeRemoteClusters() {
    final TimeValue connectTimeout = REMOTE_INITIAL_CONNECTION_TIMEOUT_SETTING.get(settings);
    final PlainActionFuture<Void> connectFuture = new PlainActionFuture<>();
    updateRemoteClusters(RemoteClusterAware.buildRemoteClustersSeeds(settings), connectFuture);
    try {
        connectFuture.get(connectTimeout.millis(), TimeUnit.MILLISECONDS);
    } catch (InterruptedException interrupted) {
        Thread.currentThread().interrupt(); // preserve the interrupt flag for the caller
    } catch (TimeoutException timeout) {
        // not fatal: connections keep being retried in the background
        logger.warn("failed to connect to remote clusters within {}", connectTimeout.toString());
    } catch (Exception e) {
        throw new IllegalStateException("failed to connect to remote clusters", e);
    }
}
@Override
public void close() throws IOException {
    // close every registered remote connection
    final Collection<RemoteClusterConnection> connections = remoteClusters.values();
    IOUtils.close(connections);
}
/**
 * Collects the {@link RemoteConnectionInfo} of every registered remote cluster and passes
 * them to the listener once all clusters have reported.
 */
public void getRemoteConnectionInfos(ActionListener<Collection<RemoteConnectionInfo>> listener) {
    // read the volatile field once so the size and the iteration see the same snapshot
    final Map<String, RemoteClusterConnection> connections = this.remoteClusters;
    if (connections.isEmpty() == false) {
        final GroupedActionListener<RemoteConnectionInfo> groupListener = new GroupedActionListener<>(listener,
            connections.size(), Collections.emptyList());
        connections.values().forEach(connection -> connection.getConnectionInfo(groupListener));
    } else {
        listener.onResponse(Collections.emptyList());
    }
}
/**
 * Collects all nodes of the given clusters and returns / passes a (clusterAlias, nodeId) to {@link DiscoveryNode}
 * function on success.
 */
public void collectNodes(Set<String> clusters, ActionListener<BiFunction<String, String, DiscoveryNode>> listener) {
    Map<String, RemoteClusterConnection> remoteClusters = this.remoteClusters;
    // validate all aliases up front so we fail fast before dispatching any request
    for (String cluster : clusters) {
        if (remoteClusters.containsKey(cluster) == false) {
            listener.onFailure(new IllegalArgumentException("no such remote cluster: [" + cluster + "]"));
            return;
        }
    }
    // per-cluster node lookup functions, filled in from transport threads (guarded by its own lock below)
    final Map<String, Function<String, DiscoveryNode>> clusterMap = new HashMap<>();
    CountDown countDown = new CountDown(clusters.size());
    // fallback for aliases without a collected lookup: resolve every node id to null
    Function<String, DiscoveryNode> nullFunction = s -> null;
    for (final String cluster : clusters) {
        RemoteClusterConnection connection = remoteClusters.get(cluster);
        connection.collectNodes(new ActionListener<Function<String, DiscoveryNode>>() {
            @Override
            public void onResponse(Function<String, DiscoveryNode> nodeLookup) {
                synchronized (clusterMap) {
                    clusterMap.put(cluster, nodeLookup);
                }
                if (countDown.countDown()) {
                    // all clusters responded: expose a combined (alias, nodeId) -> node lookup
                    listener.onResponse((clusterAlias, nodeId)
                        -> clusterMap.getOrDefault(clusterAlias, nullFunction).apply(nodeId));
                }
            }
            @Override
            public void onFailure(Exception e) {
                if (countDown.fastForward()) { // we need to check if it's true since we could have multiple failures
                    listener.onFailure(e);
                }
            }
        });
    }
}
/**
 * Returns a client to the remote cluster if the given cluster alias exists.
 * @param threadPool the {@link ThreadPool} for the client
 * @param clusterAlias the cluster alias the remote cluster is registered under
 *
 * @throws IllegalArgumentException if the given clusterAlias doesn't exist
 */
public Client getRemoteClusterClient(ThreadPool threadPool, String clusterAlias) {
    final Set<String> registeredAliases = transportService.getRemoteClusterService().getRemoteClusterNames();
    if (registeredAliases.contains(clusterAlias) == false) {
        throw new IllegalArgumentException("unknown cluster alias [" + clusterAlias + "]");
    }
    return new RemoteClusterAwareClient(settings, threadPool, transportService, clusterAlias);
}
}
| |
package org.docksidestage.sqlserver.dbflute.allcommon;
import java.util.Collections;
import java.util.Map;
import java.util.HashMap;
import java.lang.reflect.Method;
import org.dbflute.Entity;
import org.dbflute.dbmeta.DBMeta;
import org.dbflute.dbmeta.DBMetaProvider;
import org.dbflute.exception.DBMetaNotFoundException;
import org.dbflute.helper.StringKeyMap;
import org.dbflute.util.DfAssertUtil;
/**
* The handler of the instance of DB meta.
* @author DBFlute(AutoGenerator)
*/
public class DBMetaInstanceHandler implements DBMetaProvider {
    // ===================================================================================
    //                                                                        Resource Map
    //                                                                        ============
    /** The map of DB meta instance by key 'table DB-name'. (NotNull, LazyLoaded) */
    protected static final Map<String, DBMeta> _tableDbNameInstanceMap = newHashMap();
    /** The map of DB meta instance by key 'entity type'. (NotNull, LazyLoaded) */
    protected static final Map<Class<?>, DBMeta> _entityTypeInstanceMap = newHashMap();
    /** The map of table DB name and DB meta class name. (NotNull) */
    protected static final Map<String, String> _tableDbNameClassNameMap;
    static {
        // generated mapping from table DB-name to the DBMeta class describing it;
        // the DBMeta instances themselves are created lazily (by reflection) on first lookup
        final Map<String, String> tmpMap = newHashMap();
        tmpMap.put("MEMBER", "org.docksidestage.sqlserver.dbflute.bsentity.dbmeta.MemberDbm");
        tmpMap.put("MEMBER_ADDRESS", "org.docksidestage.sqlserver.dbflute.bsentity.dbmeta.MemberAddressDbm");
        tmpMap.put("MEMBER_LOGIN", "org.docksidestage.sqlserver.dbflute.bsentity.dbmeta.MemberLoginDbm");
        tmpMap.put("MEMBER_SECURITY", "org.docksidestage.sqlserver.dbflute.bsentity.dbmeta.MemberSecurityDbm");
        tmpMap.put("MEMBER_SERVICE", "org.docksidestage.sqlserver.dbflute.bsentity.dbmeta.MemberServiceDbm");
        tmpMap.put("MEMBER_STATUS", "org.docksidestage.sqlserver.dbflute.bsentity.dbmeta.MemberStatusDbm");
        tmpMap.put("MEMBER_WITHDRAWAL", "org.docksidestage.sqlserver.dbflute.bsentity.dbmeta.MemberWithdrawalDbm");
        tmpMap.put("PRODUCT", "org.docksidestage.sqlserver.dbflute.bsentity.dbmeta.ProductDbm");
        tmpMap.put("PRODUCT_CATEGORY", "org.docksidestage.sqlserver.dbflute.bsentity.dbmeta.ProductCategoryDbm");
        tmpMap.put("PRODUCT_STATUS", "org.docksidestage.sqlserver.dbflute.bsentity.dbmeta.ProductStatusDbm");
        tmpMap.put("PURCHASE", "org.docksidestage.sqlserver.dbflute.bsentity.dbmeta.PurchaseDbm");
        tmpMap.put("REGION", "org.docksidestage.sqlserver.dbflute.bsentity.dbmeta.RegionDbm");
        tmpMap.put("SERVICE_RANK", "org.docksidestage.sqlserver.dbflute.bsentity.dbmeta.ServiceRankDbm");
        tmpMap.put("SUMMARY_PRODUCT", "org.docksidestage.sqlserver.dbflute.bsentity.dbmeta.SummaryProductDbm");
        tmpMap.put("SUMMARY_WITHDRAWAL", "org.docksidestage.sqlserver.dbflute.bsentity.dbmeta.SummaryWithdrawalDbm");
        tmpMap.put("VENDOR_CHECK", "org.docksidestage.sqlserver.dbflute.bsentity.dbmeta.VendorCheckDbm");
        tmpMap.put("VENDOR_SYMMETRIC", "org.docksidestage.sqlserver.dbflute.bsentity.dbmeta.VendorSymmetricDbm");
        tmpMap.put("WHITE_DELIMITER", "org.docksidestage.sqlserver.dbflute.bsentity.dbmeta.WhiteDelimiterDbm");
        tmpMap.put("WITHDRAWAL_REASON", "org.docksidestage.sqlserver.dbflute.bsentity.dbmeta.WithdrawalReasonDbm");
        _tableDbNameClassNameMap = Collections.unmodifiableMap(tmpMap);
    }
    /** The flexible map of table DB name for conversion in finding process. (NotNull) */
    protected static final Map<String, String> _tableDbNameFlexibleMap = StringKeyMap.createAsFlexible();
    static {
        // each canonical table name maps to itself; the flexible map then tolerates
        // case/naming-style variations on lookup and resolves them back to the canonical name
        for (String tableDbName : _tableDbNameClassNameMap.keySet()) {
            _tableDbNameFlexibleMap.put(tableDbName, tableDbName);
        }
    }
    /**
     * Get the unmodifiable map of DB meta. map:{tableDbName = DBMeta}
     * @return The unmodifiable map that contains all instances of DB meta. (NotNull, NotEmpty)
     */
    public static Map<String, DBMeta> getUnmodifiableDBMetaMap() {
        initializeDBMetaMap();
        synchronized (_tableDbNameInstanceMap) {
            return Collections.unmodifiableMap(_tableDbNameInstanceMap);
        }
    }
    /**
     * Initialize the map of DB meta.
     */
    protected static void initializeDBMetaMap() {
        if (isInitialized()) {
            return; // every DBMeta was already instantiated
        }
        synchronized (_tableDbNameInstanceMap) {
            // eagerly instantiate every DBMeta so the map is complete from here on
            for (String tableDbName : _tableDbNameClassNameMap.keySet()) {
                findDBMeta(tableDbName); // initialize
            }
            if (!isInitialized()) {
                String msg = "Failed to initialize tableDbNameInstanceMap: " + _tableDbNameInstanceMap;
                throw new IllegalStateException(msg);
            }
        }
    }
    /** @return Whether every registered table already has an instantiated DB meta. */
    protected static boolean isInitialized() {
        return _tableDbNameInstanceMap.size() == _tableDbNameClassNameMap.size();
    }
    // ===================================================================================
    //                                                                  Provider Singleton
    //                                                                  ==================
    /** The singleton provider instance handed out to the DBFlute runtime. */
    protected static final DBMetaProvider _provider = new DBMetaInstanceHandler();
    /** @return The singleton {@link DBMetaProvider}. (NotNull) */
    public static DBMetaProvider getProvider() {
        return _provider;
    }
    /** {@inheritDoc} Returns null when not found (unchecked variant). */
    public DBMeta provideDBMeta(String tableFlexibleName) {
        return byTableFlexibleName(tableFlexibleName);
    }
    /** {@inheritDoc} Returns null when not found (unchecked variant). */
    public DBMeta provideDBMeta(Class<?> entityType) {
        return byEntityType(entityType);
    }
    /** {@inheritDoc} Throws when not found (checked variant). */
    public DBMeta provideDBMetaChecked(String tableFlexibleName) {
        return findDBMeta(tableFlexibleName);
    }
    /** {@inheritDoc} Throws when not found (checked variant). */
    public DBMeta provideDBMetaChecked(Class<?> entityType) {
        return findDBMeta(entityType);
    }
    // ===================================================================================
    //                                                                         Find DBMeta
    //                                                                         ===========
    /**
     * Find DB meta by table flexible name. (accept quoted name and schema prefix)
     * @param tableFlexibleName The flexible name of table. (NotNull)
     * @return The instance of DB meta. (NotNull)
     * @throws org.dbflute.exception.DBMetaNotFoundException When the DB meta is not found.
     */
    public static DBMeta findDBMeta(String tableFlexibleName) {
        DBMeta dbmeta = byTableFlexibleName(tableFlexibleName);
        if (dbmeta == null) {
            String msg = "The DB meta was not found by the table flexible name: key=" + tableFlexibleName;
            throw new DBMetaNotFoundException(msg);
        }
        return dbmeta;
    }
    /**
     * Find DB meta by entity type.
     * @param entityType The entity type of table, which should implement the {@link Entity} interface. (NotNull)
     * @return The instance of DB meta. (NotNull)
     * @throws org.dbflute.exception.DBMetaNotFoundException When the DB meta is not found.
     */
    public static DBMeta findDBMeta(Class<?> entityType) {
        DBMeta dbmeta = byEntityType(entityType);
        if (dbmeta == null) {
            String msg = "The DB meta was not found by the entity type: key=" + entityType;
            throw new DBMetaNotFoundException(msg);
        }
        return dbmeta;
    }
    // ===================================================================================
    //                                                                       by Table Name
    //                                                                       =============
    /**
     * @param tableFlexibleName The flexible name of table. (NotNull)
     * @return The instance of DB meta. (NullAllowed: If the DB meta is not found, it returns null)
     */
    protected static DBMeta byTableFlexibleName(String tableFlexibleName) {
        assertStringNotNullAndNotTrimmedEmpty("tableFlexibleName", tableFlexibleName);
        String tableDbName = _tableDbNameFlexibleMap.get(tableFlexibleName);
        if (tableDbName == null) {
            // the raw name missed: retry after stripping quotes and a schema prefix
            tableDbName = retryByNormalizedName(tableFlexibleName);
        }
        return tableDbName != null ? byTableDbName(tableDbName) : null;
    }
    /**
     * Retry the flexible lookup with a normalized (unquoted, schema-stripped) name.
     * @param tableFlexibleName The flexible name of table. (NotNull)
     * @return The canonical table DB name. (NullAllowed: when still not found)
     */
    protected static String retryByNormalizedName(String tableFlexibleName) {
        String tableDbName = null;
        String pureName = normalizeTableFlexibleName(tableFlexibleName);
        String schema = extractSchemaIfExists(tableFlexibleName);
        if (schema != null) { // first, find by qualified name
            tableDbName = _tableDbNameFlexibleMap.get(schema + "." + pureName);
        }
        if (tableDbName == null) { // next, find by pure name
            tableDbName = _tableDbNameFlexibleMap.get(pureName);
        }
        return tableDbName;
    }
    /** Strip schema prefix and surrounding quote characters from the given name. */
    protected static String normalizeTableFlexibleName(String tableFlexibleName) {
        return removeQuoteIfExists(removeSchemaIfExists(tableFlexibleName));
    }
    /** Remove surrounding double quotes or SQLServer-style brackets, if present. */
    protected static String removeQuoteIfExists(String name) {
        if (name.startsWith("\"") && name.endsWith("\"")) {
            return strip(name);
        } else if (name.startsWith("[") && name.endsWith("]")) {
            return strip(name);
        }
        return name;
    }
    /** Remove everything up to (and including) the last dot, i.e. a schema prefix. */
    protected static String removeSchemaIfExists(String name) {
        int dotLastIndex = name.lastIndexOf(".");
        return dotLastIndex >= 0 ? name.substring(dotLastIndex + ".".length()) : name;
    }
    /** @return The schema prefix before the last dot. (NullAllowed: when no dot exists) */
    protected static String extractSchemaIfExists(String name) {
        int dotLastIndex = name.lastIndexOf(".");
        return dotLastIndex >= 0 ? name.substring(0, dotLastIndex) : null;
    }
    /** Drop the first and last character (used to unwrap quote characters). */
    protected static String strip(String name) {
        return name.substring(1, name.length() - 1);
    }
    /**
     * @param tableDbName The DB name of table. (NotNull)
     * @return The instance of DB meta. (NullAllowed: If the DB meta is not found, it returns null)
     */
    protected static DBMeta byTableDbName(String tableDbName) {
        assertStringNotNullAndNotTrimmedEmpty("tableDbName", tableDbName);
        return getCachedDBMeta(tableDbName);
    }
    // ===================================================================================
    //                                                                      by Entity Type
    //                                                                      ==============
    /**
     * @param entityType The entity type of table, which should implement the entity interface. (NotNull)
     * @return The instance of DB meta. (NullAllowed: If the DB meta is not found, it returns null)
     */
    protected static DBMeta byEntityType(Class<?> entityType) {
        assertObjectNotNull("entityType", entityType);
        return getCachedDBMeta(entityType);
    }
    // ===================================================================================
    //                                                                       Cached DBMeta
    //                                                                       =============
    protected static DBMeta getCachedDBMeta(String tableDbName) { // lazy-load (thank you koyak!)
        // fast path: unsynchronized read; if it races with a concurrent write the
        // synchronized re-check below recovers (see the comment inside)
        DBMeta dbmeta = _tableDbNameInstanceMap.get(tableDbName);
        if (dbmeta != null) {
            return dbmeta;
        }
        synchronized (_tableDbNameInstanceMap) {
            dbmeta = _tableDbNameInstanceMap.get(tableDbName);
            if (dbmeta != null) {
                // an other thread might have initialized
                // or reading might failed by same-time writing
                return dbmeta;
            }
            String dbmetaName = _tableDbNameClassNameMap.get(tableDbName);
            if (dbmetaName == null) {
                return null; // unknown table: not an error at this level
            }
            _tableDbNameInstanceMap.put(tableDbName, toDBMetaInstance(dbmetaName));
            return _tableDbNameInstanceMap.get(tableDbName);
        }
    }
    /**
     * Instantiate the DB meta singleton by reflection (every DBMeta class exposes getInstance()).
     * @param dbmetaName The fully-qualified class name of the DB meta. (NotNull)
     * @return The singleton instance of the DB meta. (NotNull)
     */
    protected static DBMeta toDBMetaInstance(String dbmetaName) {
        try {
            Class<?> dbmetaType = Class.forName(dbmetaName);
            Method method = dbmetaType.getMethod("getInstance", (Class[])null);
            Object result = method.invoke(null, (Object[])null);
            return (DBMeta)result;
        } catch (Exception e) {
            String msg = "Failed to get the instance: " + dbmetaName;
            throw new IllegalStateException(msg, e);
        }
    }
    protected static DBMeta getCachedDBMeta(Class<?> entityType) { // lazy-load same as by-name
        DBMeta dbmeta = _entityTypeInstanceMap.get(entityType);
        if (dbmeta != null) {
            return dbmeta;
        }
        synchronized (_entityTypeInstanceMap) {
            dbmeta = _entityTypeInstanceMap.get(entityType);
            if (dbmeta != null) {
                // an other thread might have initialized
                // or reading might failed by same-time writing
                return dbmeta;
            }
            if (Entity.class.isAssignableFrom(entityType)) { // required
                // resolve via a throw-away entity instance: its table DB name keys the by-name cache
                Entity entity = newEntity(entityType);
                dbmeta = getCachedDBMeta(entity.asTableDbName());
            }
            if (dbmeta == null) {
                return null;
            }
            _entityTypeInstanceMap.put(entityType, dbmeta);
            return _entityTypeInstanceMap.get(entityType);
        }
    }
    /**
     * Create a new entity instance via its no-arg constructor (used only to ask its table name).
     * @param entityType The entity type to instantiate. (NotNull)
     * @return The new entity instance. (NotNull)
     */
    protected static Entity newEntity(Class<?> entityType) {
        try {
            return (Entity)entityType.getDeclaredConstructor().newInstance();
        } catch (Exception e) {
            String msg = "Failed to new the instance: " + entityType;
            throw new IllegalStateException(msg, e);
        }
    }
    // ===================================================================================
    //                                                                      General Helper
    //                                                                      ==============
    protected static <KEY, VALUE> HashMap<KEY, VALUE> newHashMap() {
        return new HashMap<KEY, VALUE>();
    }
    // -----------------------------------------------------
    //                                         Assert Object
    //                                         -------------
    protected static void assertObjectNotNull(String variableName, Object value) {
        DfAssertUtil.assertObjectNotNull(variableName, value);
    }
    // -----------------------------------------------------
    //                                         Assert String
    //                                         -------------
    protected static void assertStringNotNullAndNotTrimmedEmpty(String variableName, String value) {
        DfAssertUtil.assertStringNotNullAndNotTrimmedEmpty(variableName, value);
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.