repo_name stringlengths 5 108 | path stringlengths 6 333 | size stringlengths 1 6 | content stringlengths 4 977k | license stringclasses 15 values |
|---|---|---|---|---|
gwittel/platform | stats/src/test/java/com/proofpoint/stats/TestQuantileDigest.java | 26892 | package com.proofpoint.stats;
import com.google.common.base.Ticker;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import com.proofpoint.testing.TestingTicker;
import io.airlift.slice.Slice;
import org.testng.annotations.Test;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import java.util.stream.LongStream;
import static com.google.common.collect.ImmutableList.toImmutableList;
import static java.lang.String.format;
import static java.util.Arrays.asList;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertTrue;
/**
 * Unit tests for {@link QuantileDigest}, covering value insertion, tree
 * compression, quantile and histogram queries (including error bounds),
 * exponential decay behavior, digest merging, equivalence checks, and
 * serialization round-trips.
 */
public class TestQuantileDigest
{
    @Test
    public void testSingleAdd()
    {
        QuantileDigest digest = new QuantileDigest(1);
        digest.add(0);
        digest.validate();
        // should have no compressions with so few values and the allowed error
        assertEquals(digest.getConfidenceFactor(), 0.0);
        assertEquals(digest.getCount(), (double) 1);
        assertEquals(digest.getNodeCount(), 1);
    }

    @Test
    public void testNegativeValues()
    {
        QuantileDigest digest = new QuantileDigest(1);
        addAll(digest, asList(-1, -2, -3, -4, -5, 0, 1, 2, 3, 4, 5));
        assertEquals(digest.getCount(), (double) 11);
    }

    @Test
    public void testRepeatedValue()
    {
        QuantileDigest digest = new QuantileDigest(1);
        digest.add(0);
        digest.add(0);
        digest.validate();
        // should have no compressions with so few values and the allowed error
        assertEquals(digest.getConfidenceFactor(), 0.0);
        assertEquals(digest.getCount(), (double) 2);
        assertEquals(digest.getNodeCount(), 1);
    }

    @Test
    public void testTwoDistinctValues()
    {
        QuantileDigest digest = new QuantileDigest(1);
        digest.add(0);
        digest.add(Long.MAX_VALUE);
        digest.validate();
        // should have no compressions with so few values and the allowed error
        assertEquals(digest.getConfidenceFactor(), 0.0);
        assertEquals(digest.getCount(), (double) 2);
        assertEquals(digest.getNodeCount(), 3);
    }

    @Test
    public void testTreeBuilding()
    {
        QuantileDigest digest = new QuantileDigest(1);
        List<Integer> values = asList(0, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 4, 5, 6, 7);
        addAll(digest, values);
        assertEquals(digest.getCount(), (double) values.size());
    }

    @Test
    public void testTreeBuildingReverse()
    {
        QuantileDigest digest = new QuantileDigest(1);
        List<Integer> values = asList(0, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 4, 5, 6, 7);
        addAll(digest, Lists.reverse(values));
        assertEquals(digest.getCount(), (double) values.size());
    }

    @Test
    public void testBasicCompression()
    {
        // maxError = 0.8 so that we get compression factor = 5 with the data below
        QuantileDigest digest = new QuantileDigest(0.8, 0, new TestingTicker());
        List<Integer> values = asList(0, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 4, 5, 6, 7);
        addAll(digest, values);
        digest.compress();
        digest.validate();
        assertEquals(digest.getCount(), (double) values.size());
        assertEquals(digest.getNodeCount(), 7);
        assertEquals(digest.getConfidenceFactor(), 0.2);
    }

    @Test
    public void testCompression()
    {
        QuantileDigest digest = new QuantileDigest(1, 0, new TestingTicker());
        // repeated insert + compress cycles must keep the tree internally consistent
        for (int loop = 0; loop < 2; ++loop) {
            addRange(digest, 0, 15);
            digest.compress();
            digest.validate();
        }
    }

    @Test
    public void testQuantile()
    {
        QuantileDigest digest = new QuantileDigest(1);
        addAll(digest, asList(0, 1, 2, 3, 4, 5, 6, 7, 8, 9));
        // should have no compressions with so few values and the allowed error
        assertEquals(digest.getConfidenceFactor(), 0.0);
        assertEquals(digest.getQuantile(0.0), 0);
        assertEquals(digest.getQuantile(0.1), 1);
        assertEquals(digest.getQuantile(0.2), 2);
        assertEquals(digest.getQuantile(0.3), 3);
        assertEquals(digest.getQuantile(0.4), 4);
        assertEquals(digest.getQuantile(0.5), 5);
        assertEquals(digest.getQuantile(0.6), 6);
        assertEquals(digest.getQuantile(0.7), 7);
        assertEquals(digest.getQuantile(0.8), 8);
        assertEquals(digest.getQuantile(0.9), 9);
        assertEquals(digest.getQuantile(1), 9);
    }

    @Test
    public void testQuantileLowerBound()
    {
        QuantileDigest digest = new QuantileDigest(0.5);
        addRange(digest, 1, 100);
        assertEquals(digest.getQuantileLowerBound(0.0), 1);
        for (int i = 1; i <= 10; i++) {
            // lower bound can never exceed the true quantile value
            assertTrue(digest.getQuantileLowerBound(i / 10.0) <= i * 10);
            if (i > 5) {
                // with maxError = 0.5 the bound is within 50% of the rank
                assertTrue(digest.getQuantileLowerBound(i / 10.0) >= (i - 5) * 10);
            }
        }
        // batch query must agree with the individual queries
        assertEquals(
                digest.getQuantilesLowerBound(ImmutableList.of(0.0, 0.1, 0.2)),
                ImmutableList.of(digest.getQuantileLowerBound(0.0), digest.getQuantileLowerBound(0.1), digest.getQuantileLowerBound(0.2)));
    }

    @Test
    public void testQuantileUpperBound()
    {
        QuantileDigest digest = new QuantileDigest(0.5);
        addRange(digest, 1, 100);
        assertEquals(digest.getQuantileUpperBound(1.0), 99);
        for (int i = 0; i < 10; i++) {
            // upper bound can never undershoot the true quantile value
            assertTrue(digest.getQuantileUpperBound(i / 10.0) >= i * 10);
            if (i < 5) {
                // with maxError = 0.5 the bound is within 50% of the rank
                assertTrue(digest.getQuantileUpperBound(i / 10.0) <= (i + 5) * 10);
            }
        }
        // batch query must agree with the individual queries
        assertEquals(
                digest.getQuantilesUpperBound(ImmutableList.of(0.8, 0.9, 1.0)),
                ImmutableList.of(digest.getQuantileUpperBound(0.8), digest.getQuantileUpperBound(0.9), digest.getQuantileUpperBound(1.0)));
    }

    @Test
    public void testWeightedValues()
    {
        QuantileDigest digest = new QuantileDigest(1);
        digest.add(0, 3);
        digest.add(2, 1);
        digest.add(4, 5);
        digest.add(5, 1);
        digest.validate();
        // should have no compressions with so few values and the allowed error
        assertEquals(digest.getConfidenceFactor(), 0.0);
        assertEquals(digest.getQuantile(0.0), 0);
        assertEquals(digest.getQuantile(0.1), 0);
        assertEquals(digest.getQuantile(0.2), 0);
        assertEquals(digest.getQuantile(0.3), 2);
        assertEquals(digest.getQuantile(0.4), 4);
        assertEquals(digest.getQuantile(0.5), 4);
        assertEquals(digest.getQuantile(0.6), 4);
        assertEquals(digest.getQuantile(0.7), 4);
        assertEquals(digest.getQuantile(0.8), 4);
        assertEquals(digest.getQuantile(0.9), 5);
        assertEquals(digest.getQuantile(1), 5);
    }

    @Test
    public void testBatchQuantileQuery()
    {
        QuantileDigest digest = new QuantileDigest(1);
        addAll(digest, asList(0, 1, 2, 3, 4, 5, 6, 7, 8, 9));
        // should have no compressions with so few values and the allowed error
        assertEquals(digest.getConfidenceFactor(), 0.0);
        assertEquals(digest.getQuantiles(asList(0.0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0)),
                asList(0L, 1L, 2L, 3L, 4L, 5L, 6L, 7L, 8L, 9L, 9L));
    }

    @Test
    public void testHistogramQuery()
    {
        QuantileDigest digest = new QuantileDigest(1);
        addAll(digest, asList(0, 1, 2, 3, 4, 5, 6, 7, 8, 9));
        // should have no compressions with so few values and the allowed error
        assertEquals(digest.getConfidenceFactor(), 0.0);
        assertEquals(digest.getHistogram(asList(0L, 1L, 2L, 3L, 4L, 5L, 6L, 7L, 8L, 9L, 10L)),
                asList(new QuantileDigest.Bucket(0, Double.NaN),
                        new QuantileDigest.Bucket(1, 0),
                        new QuantileDigest.Bucket(1, 1),
                        new QuantileDigest.Bucket(1, 2),
                        new QuantileDigest.Bucket(1, 3),
                        new QuantileDigest.Bucket(1, 4),
                        new QuantileDigest.Bucket(1, 5),
                        new QuantileDigest.Bucket(1, 6),
                        new QuantileDigest.Bucket(1, 7),
                        new QuantileDigest.Bucket(1, 8),
                        new QuantileDigest.Bucket(1, 9)));
        assertEquals(digest.getHistogram(asList(7L, 10L)),
                asList(new QuantileDigest.Bucket(7, 3),
                        new QuantileDigest.Bucket(3, 8)));
        // test some edge conditions
        assertEquals(digest.getHistogram(asList(0L)), asList(new QuantileDigest.Bucket(0, Double.NaN)));
        assertEquals(digest.getHistogram(asList(9L)), asList(new QuantileDigest.Bucket(9, 4)));
        assertEquals(digest.getHistogram(asList(10L)), asList(new QuantileDigest.Bucket(10, 4.5)));
        assertEquals(digest.getHistogram(asList(Long.MAX_VALUE)),
                asList(new QuantileDigest.Bucket(10, 4.5)));
    }

    @Test
    public void testHistogramOfDoublesQuery()
    {
        QuantileDigest digest = new QuantileDigest(1);
        // insert doubles -10..9 encoded as sortable longs
        LongStream.range(-10, 10)
                .map(TestQuantileDigest::doubleToSortableLong)
                .boxed()
                .forEach(digest::add);
        assertEquals(digest.getConfidenceFactor(), 0.0);
        List<Long> bucketUpperBounds = LongStream.range(-10, 10)
                .map(TestQuantileDigest::doubleToSortableLong)
                .boxed()
                .collect(toImmutableList());
        QuantileDigest.MiddleFunction middleFunction = (lowerBound, upperBound) -> {
            // qdigest will put the range at the top of the tree as the entire set of long values. Sortable long values
            // which equal Long.MIN_VALUE or Long.MAX_VALUE are NaN values in IEEE 754 standard, therefore they can't
            // be accurately represented as floating point numbers. Because NaN values cannot be used in the middle
            // calculation, treat them as Double.MIN_VALUE when the min is encountered, and Double.MAX_VALUE when the max
            // is encountered.
            double left = lowerBound > Long.MIN_VALUE ? sortableLongToDouble(lowerBound) : -1 * Double.MAX_VALUE;
            double right = upperBound < Long.MAX_VALUE ? sortableLongToDouble(upperBound) : Double.MAX_VALUE;
            return left + (right - left) / 2;
        };
        List<QuantileDigest.Bucket> expected = LongStream.range(-9, 10)
                .boxed()
                .map(i -> new QuantileDigest.Bucket(1, i - 1))
                .collect(Collectors.toList());
        expected.add(0, new QuantileDigest.Bucket(0, Double.NaN));
        assertEquals(digest.getHistogram(bucketUpperBounds, middleFunction),
                expected);
        assertEquals(digest.getHistogram(asList(doubleToSortableLong(7), doubleToSortableLong(10)), middleFunction),
                asList(new QuantileDigest.Bucket(17, -2.0),
                        new QuantileDigest.Bucket(3, 8)));
        // edge cases
        assertEquals(digest.getHistogram(asList(doubleToSortableLong(-1 * Double.MAX_VALUE)), middleFunction),
                asList(new QuantileDigest.Bucket(0, Double.NaN)));
        assertEquals(digest.getHistogram(asList(doubleToSortableLong(-1 * Double.MAX_VALUE), doubleToSortableLong(-1 * Double.MAX_VALUE + 1)), middleFunction),
                asList(new QuantileDigest.Bucket(0, Double.NaN), new QuantileDigest.Bucket(0, Double.NaN)));
        assertEquals(digest.getHistogram(asList(doubleToSortableLong(0)), middleFunction),
                asList(new QuantileDigest.Bucket(10.0, -5.5)));
        assertEquals(digest.getHistogram(asList(doubleToSortableLong(9)), middleFunction),
                asList(new QuantileDigest.Bucket(19, -1.0)));
        assertEquals(digest.getHistogram(asList(doubleToSortableLong(10)), middleFunction),
                asList(new QuantileDigest.Bucket(20, -0.5)));
        assertEquals(digest.getHistogram(asList(doubleToSortableLong(Double.MAX_VALUE)), middleFunction),
                asList(new QuantileDigest.Bucket(20, -0.5)));
    }

    @Test
    public void testHistogramQueryAfterCompression()
    {
        QuantileDigest digest = new QuantileDigest(0.1);
        int total = 10000;
        addRange(digest, 0, total);
        // compression should've run at this error rate and count
        assertTrue(digest.getConfidenceFactor() > 0.0);
        double actualMaxError = digest.getConfidenceFactor();
        for (long value = 0; value < total; ++value) {
            QuantileDigest.Bucket bucket = digest.getHistogram(asList(value)).get(0);
            // estimated count should have an absolute error smaller than 2 * maxError * N
            assertTrue(Math.abs(bucket.getCount() - value) < 2 * actualMaxError * total);
        }
    }

    @Test
    public void testQuantileQueryError()
    {
        double maxError = 0.1;
        QuantileDigest digest = new QuantileDigest(maxError);
        int count = 10000;
        addRange(digest, 0, count);
        // compression should've run at this error rate and count
        assertTrue(digest.getConfidenceFactor() > 0);
        assertTrue(digest.getConfidenceFactor() < maxError);
        for (int value = 0; value < count; ++value) {
            double quantile = value * 1.0 / count;
            long estimatedValue = digest.getQuantile(quantile);
            // true rank of estimatedValue is == estimatedValue because
            // we've inserted a list of ordered numbers starting at 0
            double error = Math.abs(estimatedValue - quantile * count) * 1.0 / count;
            assertTrue(error < maxError);
        }
    }

    @Test
    public void testDecayedQuantiles()
    {
        TestingTicker ticker = new TestingTicker();
        QuantileDigest digest = new QuantileDigest(1, ExponentialDecay.computeAlpha(0.5, 60), ticker);
        addAll(digest, asList(0, 1, 2, 3, 4, 5, 6, 7, 8, 9));
        // should have no compressions with so few values and the allowed error
        assertEquals(digest.getConfidenceFactor(), 0.0);
        ticker.elapseTime(60, TimeUnit.SECONDS);
        addAll(digest, asList(10, 11, 12, 13, 14, 15, 16, 17, 18, 19));
        // Considering that the first 10 values now have a weight of 0.5 per the alpha factor, they only contributed a count
        // of 5 to rank computations. Therefore, the 50th percentile is equivalent to a weighted rank of (5 + 10) / 2 = 7.5,
        // which corresponds to value 12
        assertEquals(digest.getQuantile(0.5), 12);
    }

    @Test
    public void testDecayedCounts()
    {
        TestingTicker ticker = new TestingTicker();
        QuantileDigest digest = new QuantileDigest(1, ExponentialDecay.computeAlpha(0.5, 60), ticker);
        addAll(digest, asList(0, 1, 2, 3, 4, 5, 6, 7, 8, 9));
        // should have no compressions with so few values and the allowed error
        assertEquals(digest.getConfidenceFactor(), 0.0);
        ticker.elapseTime(60, TimeUnit.SECONDS);
        addAll(digest, asList(10, 11, 12, 13, 14, 15, 16, 17, 18, 19));
        // first 10 values decayed to weight 0.5 each -> 5 + 10 = 15
        assertEquals(digest.getCount(), 15.0);
    }

    @Test
    public void testDecayedCountsWithClockIncrementSmallerThanRescaleThreshold()
    {
        int targetAgeInSeconds = (int) (QuantileDigest.RESCALE_THRESHOLD_SECONDS - 1);
        TestingTicker ticker = new TestingTicker();
        QuantileDigest digest = new QuantileDigest(1,
                ExponentialDecay.computeAlpha(0.5, targetAgeInSeconds), ticker);
        addAll(digest, asList(0, 1, 2, 3, 4, 5, 6, 7, 8, 9));
        ticker.elapseTime(targetAgeInSeconds, TimeUnit.SECONDS);
        addAll(digest, asList(10, 11, 12, 13, 14, 15, 16, 17, 18, 19));
        assertEquals(digest.getCount(), 15.0);
    }

    @Test
    public void testMinMax()
    {
        QuantileDigest digest = new QuantileDigest(0.01, 0, new TestingTicker());
        int from = 500;
        int to = 700;
        addRange(digest, from, to + 1);
        assertEquals(digest.getMin(), from);
        assertEquals(digest.getMax(), to);
    }

    @Test
    public void testMinMaxWithDecay()
    {
        TestingTicker ticker = new TestingTicker();
        QuantileDigest digest = new QuantileDigest(0.01,
                ExponentialDecay.computeAlpha(QuantileDigest.ZERO_WEIGHT_THRESHOLD, 60), ticker);
        addRange(digest, 1, 10);
        ticker.elapseTime(1000, TimeUnit.SECONDS); // TODO: tighter bounds?
        int from = 4;
        int to = 7;
        addRange(digest, from, to + 1);
        digest.validate();
        // old values should have decayed away; min/max reflect only the new range
        assertEquals(digest.getMin(), from);
        assertEquals(digest.getMax(), to);
    }

    @Test
    public void testRescaleWithDecayKeepsCompactTree()
    {
        TestingTicker ticker = new TestingTicker();
        int targetAgeInSeconds = (int) (QuantileDigest.RESCALE_THRESHOLD_SECONDS);
        QuantileDigest digest = new QuantileDigest(0.01,
                ExponentialDecay.computeAlpha(QuantileDigest.ZERO_WEIGHT_THRESHOLD / 2, targetAgeInSeconds),
                ticker);
        for (int i = 0; i < 10; ++i) {
            digest.add(i);
            digest.validate();
            // bump the clock to make all previous values decay to ~0
            ticker.elapseTime(targetAgeInSeconds, TimeUnit.SECONDS);
        }
        assertEquals(digest.getNodeCount(), 1);
    }

    @Test
    public void testEquivalenceEmpty()
    {
        QuantileDigest a = new QuantileDigest(0.01);
        QuantileDigest b = new QuantileDigest(0.01);
        assertTrue(a.equivalent(b));
    }

    @Test
    public void testEquivalenceSingle()
    {
        QuantileDigest a = new QuantileDigest(0.01);
        QuantileDigest b = new QuantileDigest(0.01);
        a.add(1);
        b.add(1);
        assertTrue(a.equivalent(b));
    }

    @Test
    public void testEquivalenceSingleDifferent()
    {
        QuantileDigest a = new QuantileDigest(0.01);
        QuantileDigest b = new QuantileDigest(0.01);
        a.add(1);
        b.add(2);
        assertFalse(a.equivalent(b));
    }

    @Test
    public void testEquivalenceComplex()
    {
        QuantileDigest a = new QuantileDigest(0.01);
        QuantileDigest b = new QuantileDigest(0.01);
        addAll(a, asList(0, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 4, 5, 6, 7));
        addAll(b, asList(0, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 4, 5, 6, 7));
        assertTrue(a.equivalent(b));
    }

    @Test
    public void testEquivalenceComplexDifferent()
    {
        QuantileDigest a = new QuantileDigest(0.01);
        QuantileDigest b = new QuantileDigest(0.01);
        addAll(a, asList(0, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 4, 5, 6, 7));
        addAll(b, asList(0, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 4, 5, 6, 7, 8));
        assertFalse(a.equivalent(b));
    }

    @Test
    public void testMergeEmpty()
    {
        QuantileDigest a = new QuantileDigest(0.01);
        QuantileDigest b = new QuantileDigest(0.01);
        QuantileDigest pristineB = new QuantileDigest(0.01);
        a.merge(b);
        a.validate();
        b.validate();
        // merge must not modify its argument
        assertTrue(b.equivalent(pristineB));
        assertEquals(a.getCount(), 0.0);
        assertEquals(a.getNodeCount(), 0);
        assertEquals(b.getCount(), 0.0);
        assertEquals(b.getNodeCount(), 0);
    }

    @Test
    public void testMergeIntoEmpty()
    {
        QuantileDigest a = new QuantileDigest(0.01);
        QuantileDigest b = new QuantileDigest(0.01);
        QuantileDigest pristineB = new QuantileDigest(0.01);
        b.add(1);
        pristineB.add(1);
        a.merge(b);
        a.validate();
        b.validate();
        // merge must not modify its argument
        assertTrue(b.equivalent(pristineB));
        assertEquals(a.getCount(), 1.0);
        assertEquals(a.getNodeCount(), 1);
        assertEquals(b.getCount(), 1.0);
        assertEquals(b.getNodeCount(), 1);
    }

    @Test
    public void testMergeWithEmpty()
    {
        QuantileDigest a = new QuantileDigest(0.01);
        QuantileDigest b = new QuantileDigest(0.01);
        QuantileDigest pristineB = new QuantileDigest(0.01);
        a.add(1);
        a.merge(b);
        a.validate();
        b.validate();
        // merge must not modify its argument
        assertTrue(b.equivalent(pristineB));
        assertEquals(a.getCount(), 1.0);
        assertEquals(a.getNodeCount(), 1);
        assertEquals(b.getCount(), 0.0);
        assertEquals(b.getNodeCount(), 0);
    }

    @Test
    public void testMergeSample()
    {
        QuantileDigest a = new QuantileDigest(0.01);
        QuantileDigest b = new QuantileDigest(0.01);
        a.add(1);
        addAll(b, asList(2, 3));
        a.merge(b);
        a.validate();
        assertEquals(a.getCount(), 3.0);
        assertEquals(a.getNodeCount(), 5);
    }

    @Test
    public void testMergeSeparateBranches()
    {
        QuantileDigest a = new QuantileDigest(0.01);
        QuantileDigest b = new QuantileDigest(0.01);
        QuantileDigest pristineB = new QuantileDigest(0.01);
        a.add(1);
        b.add(2);
        pristineB.add(2);
        a.merge(b);
        // merge must not modify its argument
        assertTrue(b.equivalent(pristineB));
        assertEquals(a.getCount(), 2.0);
        assertEquals(a.getNodeCount(), 3);
        assertEquals(b.getCount(), 1.0);
        assertEquals(b.getNodeCount(), 1);
    }

    @Test
    public void testMergeWithLowerLevel()
    {
        QuantileDigest a = new QuantileDigest(1, 0, Ticker.systemTicker());
        QuantileDigest b = new QuantileDigest(1, 0, Ticker.systemTicker());
        QuantileDigest pristineB = new QuantileDigest(1, 0, Ticker.systemTicker());
        a.add(6);
        a.compress();
        List<Integer> values = asList(0, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 4, 5);
        addAll(b, values);
        b.compress();
        addAll(pristineB, values);
        pristineB.compress();
        a.merge(b);
        // merge must not modify its argument
        assertTrue(b.equivalent(pristineB));
        assertEquals(a.getCount(), 14.0);
        assertEquals(b.getCount(), 13.0);
    }

    @Test
    public void testMergeWithHigherLevel()
    {
        QuantileDigest a = new QuantileDigest(1, 0, Ticker.systemTicker());
        QuantileDigest b = new QuantileDigest(1, 0, Ticker.systemTicker());
        QuantileDigest pristineB = new QuantileDigest(1, 0, Ticker.systemTicker());
        addAll(a, asList(0, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 4, 5));
        a.compress();
        addAll(b, asList(6, 7));
        addAll(pristineB, asList(6, 7));
        a.merge(b);
        // merge must not modify its argument
        assertTrue(b.equivalent(pristineB));
        assertEquals(a.getCount(), 15.0);
        assertEquals(a.getNodeCount(), 7);
        assertEquals(b.getCount(), 2.0);
        assertEquals(b.getNodeCount(), 3);
    }

    // test merging two digests that have a node at the highest level to make sure
    // we handle boundary conditions properly
    @Test
    public void testMergeMaxLevel()
    {
        QuantileDigest a = new QuantileDigest(0.01);
        QuantileDigest b = new QuantileDigest(0.01);
        QuantileDigest pristineB = new QuantileDigest(0.01);
        addAll(a, asList(-1, 1));
        addAll(b, asList(-2, 2));
        addAll(pristineB, asList(-2, 2));
        a.merge(b);
        a.validate();
        b.validate();
        // merge must not modify its argument
        assertTrue(b.equivalent(pristineB));
        assertEquals(a.getCount(), 4.0);
        assertEquals(a.getNodeCount(), 7);
    }

    @Test
    public void testMergeSameLevel()
    {
        QuantileDigest a = new QuantileDigest(1, 0, Ticker.systemTicker());
        QuantileDigest b = new QuantileDigest(1, 0, Ticker.systemTicker());
        QuantileDigest pristineB = new QuantileDigest(1, 0, Ticker.systemTicker());
        a.add(0);
        b.add(0);
        pristineB.add(0);
        a.merge(b);
        // merge must not modify its argument
        assertTrue(b.equivalent(pristineB));
        assertEquals(a.getCount(), 2.0);
        assertEquals(a.getNodeCount(), 1);
        assertEquals(b.getCount(), 1.0);
        assertEquals(b.getNodeCount(), 1);
    }

    @Test
    public void testSerializationEmpty()
            throws Exception
    {
        QuantileDigest digest = new QuantileDigest(0.01);
        QuantileDigest deserialized = deserialize(digest.serialize());
        assertTrue(digest.equivalent(deserialized));
    }

    @Test
    public void testSerializationSingle()
            throws Exception
    {
        QuantileDigest digest = new QuantileDigest(0.01);
        digest.add(1);
        assertTrue(digest.equivalent(deserialize(digest.serialize())));
    }

    @Test
    public void testSerializationComplex()
            throws Exception
    {
        QuantileDigest digest = new QuantileDigest(1);
        addAll(digest, asList(0, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 4, 5, 6, 7));
        assertTrue(digest.equivalent(deserialize(digest.serialize())));
        // round-trip must also work after the tree has been compressed
        digest.compress();
        assertTrue(digest.equivalent(deserialize(digest.serialize())));
    }

    @Test
    public void testSerializationWithExtremeEndsOfLong()
            throws Exception
    {
        QuantileDigest digest = new QuantileDigest(1);
        digest.add(Long.MIN_VALUE);
        digest.add(Long.MAX_VALUE);
        assertTrue(digest.equivalent(deserialize(digest.serialize())));
        // round-trip must also work after the tree has been compressed
        digest.compress();
        assertTrue(digest.equivalent(deserialize(digest.serialize())));
    }

    @Test(invocationCount = 1000)
    public void testSerializationRandom()
            throws Exception
    {
        QuantileDigest digest = new QuantileDigest(1);
        List<Integer> values = new ArrayList<>();
        for (int i = 0; i < 1000; i++) {
            values.add(ThreadLocalRandom.current().nextInt(Integer.MAX_VALUE));
        }
        addAll(digest, values);
        // include the input in the failure message so random failures are reproducible
        assertTrue(digest.equivalent(deserialize(digest.serialize())), format("Serialization roundtrip failed for input: %s", values));
    }

    /**
     * Deserializes a digest from its serialized form and validates its internal
     * consistency before returning it.
     */
    private static QuantileDigest deserialize(Slice serialized)
    {
        QuantileDigest result = new QuantileDigest(serialized);
        result.validate();
        return result;
    }

    /**
     * Adds every value to the digest, then validates the digest's internal consistency.
     */
    private static void addAll(QuantileDigest digest, List<Integer> values)
    {
        for (int value : values) {
            digest.add(value);
        }
        digest.validate();
    }

    /**
     * Adds the values {@code from} (inclusive) to {@code to} (exclusive) to the
     * digest, then validates the digest's internal consistency.
     */
    private static void addRange(QuantileDigest digest, int from, int to)
    {
        for (int i = from; i < to; ++i) {
            digest.add(i);
        }
        digest.validate();
    }

    /**
     * Encodes a double as a long whose natural ordering matches the ordering of
     * the original double values (flips all bits except the sign for negatives).
     */
    private static long doubleToSortableLong(double value)
    {
        long bits = Double.doubleToLongBits(value);
        return bits ^ ((bits >> 63) & Long.MAX_VALUE);
    }

    /**
     * Inverse of {@link #doubleToSortableLong(double)}.
     */
    private static double sortableLongToDouble(long value)
    {
        value = value ^ ((value >> 63) & Long.MAX_VALUE);
        return Double.longBitsToDouble(value);
    }
}
| apache-2.0 |
a186/pentaho-kettle | engine/src/org/pentaho/di/pan/Pan.java | 25579 | /*! ******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2013 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.pan;
import java.io.InputStream;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.KettleClientEnvironment;
import org.pentaho.di.core.KettleEnvironment;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.logging.FileLoggingEventListener;
import org.pentaho.di.core.logging.KettleLogStore;
import org.pentaho.di.core.logging.LogChannel;
import org.pentaho.di.core.logging.LogChannelInterface;
import org.pentaho.di.core.logging.LogLevel;
import org.pentaho.di.core.parameters.NamedParams;
import org.pentaho.di.core.parameters.NamedParamsDefault;
import org.pentaho.di.core.plugins.PluginRegistry;
import org.pentaho.di.core.plugins.RepositoryPluginType;
import org.pentaho.di.core.xml.XMLHandler;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.kitchen.Kitchen;
import org.pentaho.di.metastore.MetaStoreConst;
import org.pentaho.di.repository.RepositoriesMeta;
import org.pentaho.di.repository.Repository;
import org.pentaho.di.repository.RepositoryDirectoryInterface;
import org.pentaho.di.repository.RepositoryMeta;
import org.pentaho.di.repository.RepositoryOperation;
import org.pentaho.di.trans.Trans;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.version.BuildVersion;
import org.pentaho.metastore.stores.delegate.DelegatingMetaStore;
import org.w3c.dom.Document;
public class Pan {
private static Class<?> PKG = Pan.class; // for i18n purposes, needed by Translator2!!
private static final String STRING_PAN = "Pan";
private static FileLoggingEventListener fileLoggingEventListener;
public static void main( String[] a ) throws Exception {
KettleEnvironment.init();
KettleClientEnvironment.getInstance().setClient( KettleClientEnvironment.ClientType.PAN );
List<String> args = new ArrayList<String>();
for ( int i = 0; i < a.length; i++ ) {
if ( a[i].length() > 0 ) {
args.add( a[i] );
}
}
DelegatingMetaStore metaStore = new DelegatingMetaStore();
metaStore.addMetaStore( MetaStoreConst.openLocalPentahoMetaStore() );
metaStore.setActiveMetaStoreName( metaStore.getName() );
RepositoryMeta repositoryMeta = null;
Trans trans = null;
// The options:
StringBuffer optionRepname, optionUsername, optionPassword, optionTransname, optionDirname;
StringBuffer optionFilename, optionLoglevel, optionLogfile, optionLogfileOld, optionListdir;
StringBuffer optionListtrans, optionListrep, optionExprep, optionNorep, optionSafemode;
StringBuffer optionVersion, optionJarFilename, optionListParam, optionMetrics;
NamedParams optionParams = new NamedParamsDefault();
CommandLineOption maxLogLinesOption =
new CommandLineOption(
"maxloglines", BaseMessages.getString( PKG, "Pan.CmdLine.MaxLogLines" ), new StringBuffer() );
CommandLineOption maxLogTimeoutOption =
new CommandLineOption(
"maxlogtimeout", BaseMessages.getString( PKG, "Pan.CmdLine.MaxLogTimeout" ), new StringBuffer() );
CommandLineOption[] options =
new CommandLineOption[] {
new CommandLineOption( "rep", BaseMessages.getString( PKG, "Pan.ComdLine.RepName" ), optionRepname =
new StringBuffer() ),
new CommandLineOption(
"user", BaseMessages.getString( PKG, "Pan.ComdLine.RepUsername" ), optionUsername =
new StringBuffer() ),
new CommandLineOption(
"pass", BaseMessages.getString( PKG, "Pan.ComdLine.RepPassword" ), optionPassword =
new StringBuffer() ),
new CommandLineOption(
"trans", BaseMessages.getString( PKG, "Pan.ComdLine.TransName" ), optionTransname =
new StringBuffer() ),
new CommandLineOption( "dir", BaseMessages.getString( PKG, "Pan.ComdLine.RepDir" ), optionDirname =
new StringBuffer() ),
new CommandLineOption(
"file", BaseMessages.getString( PKG, "Pan.ComdLine.XMLTransFile" ), optionFilename =
new StringBuffer() ),
new CommandLineOption(
"level", BaseMessages.getString( PKG, "Pan.ComdLine.LogLevel" ), optionLoglevel =
new StringBuffer() ),
new CommandLineOption(
"logfile", BaseMessages.getString( PKG, "Pan.ComdLine.LogFile" ), optionLogfile =
new StringBuffer() ),
new CommandLineOption(
"log", BaseMessages.getString( PKG, "Pan.ComdLine.LogOldFile" ), optionLogfileOld =
new StringBuffer(), false, true ),
new CommandLineOption(
"listdir", BaseMessages.getString( PKG, "Pan.ComdLine.ListDirRep" ), optionListdir =
new StringBuffer(), true, false ),
new CommandLineOption(
"listtrans", BaseMessages.getString( PKG, "Pan.ComdLine.ListTransDir" ), optionListtrans =
new StringBuffer(), true, false ),
new CommandLineOption(
"listrep", BaseMessages.getString( PKG, "Pan.ComdLine.ListReps" ), optionListrep =
new StringBuffer(), true, false ),
new CommandLineOption(
"exprep", BaseMessages.getString( PKG, "Pan.ComdLine.ExpObjectsXML" ), optionExprep =
new StringBuffer(), true, false ),
new CommandLineOption( "norep", BaseMessages.getString( PKG, "Pan.ComdLine.NoRep" ), optionNorep =
new StringBuffer(), true, false ),
new CommandLineOption(
"safemode", BaseMessages.getString( PKG, "Pan.ComdLine.SafeMode" ), optionSafemode =
new StringBuffer(), true, false ),
new CommandLineOption(
"version", BaseMessages.getString( PKG, "Pan.ComdLine.Version" ), optionVersion =
new StringBuffer(), true, false ),
new CommandLineOption(
"jarfile", BaseMessages.getString( PKG, "Pan.ComdLine.JarFile" ), optionJarFilename =
new StringBuffer(), false, true ),
new CommandLineOption(
"param", BaseMessages.getString( PKG, "Pan.ComdLine.Param" ), optionParams, false ),
new CommandLineOption(
"listparam", BaseMessages.getString( PKG, "Pan.ComdLine.ListParam" ), optionListParam =
new StringBuffer(), true, false ),
new CommandLineOption(
"metrics", BaseMessages.getString( PKG, "Pan.ComdLine.Metrics" ), optionMetrics =
new StringBuffer(), true, false ), maxLogLinesOption, maxLogTimeoutOption };
if ( args.size() == 0 ) {
CommandLineOption.printUsage( options );
exitJVM( 9 );
}
LogChannelInterface log = new LogChannel( STRING_PAN );
// Parse the options...
if ( !CommandLineOption.parseArguments( args, options, log ) ) {
log.logError( BaseMessages.getString( PKG, "Pan.Error.CommandLineError" ) );
exitJVM( 8 );
}
Kitchen.configureLogging( maxLogLinesOption, maxLogTimeoutOption );
String kettleRepname = Const.getEnvironmentVariable( "KETTLE_REPOSITORY", null );
String kettleUsername = Const.getEnvironmentVariable( "KETTLE_USER", null );
String kettlePassword = Const.getEnvironmentVariable( "KETTLE_PASSWORD", null );
if ( kettleRepname != null && kettleRepname.length() > 0 ) {
optionRepname = new StringBuffer( kettleRepname );
}
if ( kettleUsername != null && kettleUsername.length() > 0 ) {
optionUsername = new StringBuffer( kettleUsername );
}
if ( kettlePassword != null && kettlePassword.length() > 0 ) {
optionPassword = new StringBuffer( kettlePassword );
}
if ( Const.isEmpty( optionLogfile ) && !Const.isEmpty( optionLogfileOld ) ) {
// if the old style of logging name is filled in, and the new one is not
// overwrite the new by the old
optionLogfile = optionLogfileOld;
}
if ( !Const.isEmpty( optionLogfile ) ) {
fileLoggingEventListener = new FileLoggingEventListener( optionLogfile.toString(), true );
KettleLogStore.getAppender().addLoggingEventListener( fileLoggingEventListener );
} else {
fileLoggingEventListener = null;
}
if ( !Const.isEmpty( optionLoglevel ) ) {
log.setLogLevel( LogLevel.getLogLevelForCode( optionLoglevel.toString() ) );
log.logMinimal( BaseMessages.getString( PKG, "Pan.Log.Loglevel", log.getLogLevel().getDescription() ) );
}
if ( !Const.isEmpty( optionVersion ) ) {
BuildVersion buildVersion = BuildVersion.getInstance();
if ( log.isBasic() ) {
log.logBasic( BaseMessages.getString(
PKG, "Pan.Log.KettleVersion", buildVersion.getVersion(), buildVersion.getRevision(), buildVersion
.getBuildDate() ) );
}
if ( a.length == 1 ) {
exitJVM( 6 );
}
}
// ///////////////////////////////////////////////////////////////////////////////////////////////////
// This is where the action starts.
// Print the options before we start processing when running in Debug or
// Rowlevel
//
if ( log.isDebug() ) {
System.out.println( "Arguments:" );
for ( int i = 0; i < options.length; i++ ) {
/* if (!options[i].isHiddenOption()) */
System.out.println( Const.rightPad( options[i].getOption(), 12 ) + " : " + options[i].getArgument() );
}
System.out.println( "" );
}
// ///////////////////////////////////////////////////////////////////////////////////////////////////
log.logMinimal( BaseMessages.getString( PKG, "Pan.Log.StartingToRun" ) );
Date start, stop;
Calendar cal;
SimpleDateFormat df = new SimpleDateFormat( "yyyy/MM/dd HH:mm:ss.SSS" );
cal = Calendar.getInstance();
start = cal.getTime();
if ( log.isDebug() ) {
log.logDebug( BaseMessages.getString( PKG, "Pan.Log.AllocatteNewTrans" ) );
}
TransMeta transMeta = new TransMeta();
// In case we use a repository...
Repository rep = null;
try {
if ( log.isDebug() ) {
log.logDebug( BaseMessages.getString( PKG, "Pan.Log.StartingToLookOptions" ) );
}
// Read kettle transformation specified on command-line?
if ( !Const.isEmpty( optionRepname )
|| !Const.isEmpty( optionFilename ) || !Const.isEmpty( optionJarFilename ) ) {
if ( log.isDebug() ) {
log.logDebug( BaseMessages.getString( PKG, "Pan.Log.ParsingCommandline" ) );
}
if ( !Const.isEmpty( optionRepname ) && !"Y".equalsIgnoreCase( optionNorep.toString() ) ) {
if ( log.isDebug() ) {
log.logDebug( BaseMessages.getString( PKG, "Pan.Log.LoadingAvailableRep" ) );
}
RepositoriesMeta repsinfo = new RepositoriesMeta();
try {
repsinfo.readData();
} catch ( Exception e ) {
throw new KettleException( BaseMessages.getString( PKG, "Pan.Error.NoRepsDefined" ), e );
}
if ( log.isDebug() ) {
log.logDebug( BaseMessages.getString( PKG, "Pan.Log.FindingRep", "" + optionRepname ) );
}
repositoryMeta = repsinfo.findRepository( optionRepname.toString() );
if ( repositoryMeta != null ) {
// Define and connect to the repository...
if ( log.isDebug() ) {
log.logDebug( BaseMessages.getString( PKG, "Pan.Log.Allocate&ConnectRep" ) );
}
rep =
PluginRegistry.getInstance().loadClass(
RepositoryPluginType.class, repositoryMeta, Repository.class );
rep.init( repositoryMeta );
rep.connect( optionUsername != null ? optionUsername.toString() : null, optionPassword != null
? optionPassword.toString() : null );
rep.getSecurityProvider().validateAction( RepositoryOperation.EXECUTE_TRANSFORMATION );
// Default is the root directory
//
RepositoryDirectoryInterface directory = rep.loadRepositoryDirectoryTree();
// Add the IMetaStore of the repository to our delegation
//
if ( rep.getMetaStore() != null ) {
metaStore.addMetaStore( rep.getMetaStore() );
}
// Find the directory name if one is specified...
if ( !Const.isEmpty( optionDirname ) ) {
directory = directory.findDirectory( optionDirname.toString() );
}
if ( directory != null ) {
// Check username, password
if ( log.isDebug() ) {
log.logDebug( BaseMessages.getString( PKG, "Pan.Log.CheckSuppliedUserPass" ) );
}
// Load a transformation
if ( !Const.isEmpty( optionTransname ) ) {
if ( log.isDebug() ) {
log.logDebug( BaseMessages.getString( PKG, "Pan.Log.LoadTransInfo" ) );
}
transMeta = rep.loadTransformation( optionTransname.toString(), directory, null, true, null );
if ( log.isDebug() ) {
log.logDebug( BaseMessages.getString( PKG, "Pan.Log.AllocateTrans" ) );
}
trans = new Trans( transMeta );
trans.setRepository( rep );
trans.setMetaStore( metaStore );
} else if ( "Y".equalsIgnoreCase( optionListtrans.toString() ) ) {
// List the transformations in the repository
if ( log.isDebug() ) {
log
.logDebug( BaseMessages.getString( PKG, "Pan.Log.GettingListTransDirectory", "" + directory ) );
}
String[] transnames = rep.getTransformationNames( directory.getObjectId(), false );
for ( int i = 0; i < transnames.length; i++ ) {
System.out.println( transnames[i] );
}
} else if ( "Y".equalsIgnoreCase( optionListdir.toString() ) ) {
// List the directories in the repository
String[] dirnames = rep.getDirectoryNames( directory.getObjectId() );
for ( int i = 0; i < dirnames.length; i++ ) {
System.out.println( dirnames[i] );
}
} else if ( !Const.isEmpty( optionExprep ) ) {
// Export the repository
System.out.println( BaseMessages.getString( PKG, "Pan.Log.ExportingObjectsRepToFile", ""
+ optionExprep ) );
rep.getExporter().exportAllObjects( null, optionExprep.toString(), directory, "all" );
System.out.println( BaseMessages.getString( PKG, "Pan.Log.FinishedExportObjectsRepToFile", ""
+ optionExprep ) );
} else {
System.out.println( BaseMessages.getString( PKG, "Pan.Error.NoTransNameSupplied" ) );
}
} else {
System.out.println( BaseMessages.getString( PKG, "Pan.Error.CanNotFindSpecifiedDirectory", ""
+ optionDirname ) );
repositoryMeta = null;
}
} else {
System.out.println( BaseMessages.getString( PKG, "Pan.Error.NoRepProvided" ) );
}
}
// Try to load the transformation from file, even if it failed to load
// from the repository
// You could implement some fail-over mechanism this way.
//
if ( trans == null && !Const.isEmpty( optionFilename ) ) {
if ( log.isDetailed() ) {
log.logDetailed( BaseMessages.getString( PKG, "Pan.Log.LoadingTransXML", "" + optionFilename ) );
}
transMeta = new TransMeta( optionFilename.toString() );
trans = new Trans( transMeta );
}
// Try to load the transformation from a jar file
//
if ( trans == null && !Const.isEmpty( optionJarFilename ) ) {
try {
if ( log.isDetailed() ) {
log.logDetailed( BaseMessages.getString( PKG, "Pan.Log.LoadingTransJar", "" + optionJarFilename ) );
}
InputStream inputStream = Pan.class.getResourceAsStream( optionJarFilename.toString() );
StringBuffer xml = new StringBuffer();
int c;
while ( ( c = inputStream.read() ) != -1 ) {
xml.append( (char) c );
}
inputStream.close();
Document document = XMLHandler.loadXMLString( xml.toString() );
transMeta = new TransMeta( XMLHandler.getSubNode( document, "transformation" ), null );
trans = new Trans( transMeta );
} catch ( Exception e ) {
System.out.println( BaseMessages.getString( PKG, "Pan.Error.ReadingJar", e.toString() ) );
System.out.println( Const.getStackTracker( e ) );
throw e;
}
}
}
if ( "Y".equalsIgnoreCase( optionListrep.toString() ) ) {
if ( log.isDebug() ) {
log.logDebug( BaseMessages.getString( PKG, "Pan.Log.GettingListReps" ) );
}
RepositoriesMeta ri = new RepositoriesMeta();
try {
ri.readData();
} catch ( Exception e ) {
throw new KettleException( BaseMessages.getString( PKG, "Pan.Error.UnableReadXML" ), e );
}
System.out.println( BaseMessages.getString( PKG, "Pan.Log.ListReps" ) );
for ( int i = 0; i < ri.nrRepositories(); i++ ) {
RepositoryMeta rinfo = ri.getRepository( i );
System.out.println( BaseMessages.getString(
PKG, "Pan.Log.RepNameDesc", "" + ( i + 1 ), rinfo.getName(), rinfo.getDescription() ) );
}
}
} catch ( Exception e ) {
trans = null;
transMeta = null;
if ( rep != null ) {
rep.disconnect();
}
System.out.println( BaseMessages.getString( PKG, "Pan.Error.ProcessStopError", e.getMessage() ) );
e.printStackTrace();
exitJVM( 1 );
}
if ( trans == null ) {
if ( rep != null ) {
rep.disconnect();
}
if ( !"Y".equalsIgnoreCase( optionListtrans.toString() )
&& !"Y".equalsIgnoreCase( optionListdir.toString() ) && !"Y".equalsIgnoreCase( optionListrep.toString() )
&& Const.isEmpty( optionExprep ) ) {
System.out.println( BaseMessages.getString( PKG, "Pan.Error.CanNotLoadTrans" ) );
exitJVM( 7 );
} else {
exitJVM( 0 );
}
}
try {
trans.initializeVariablesFrom( null );
trans.getTransMeta().setInternalKettleVariables( trans );
trans.setLogLevel( log.getLogLevel() );
// Map the command line named parameters to the actual named parameters.
// Skip for
// the moment any extra command line parameter not known in the
// transformation.
String[] transParams = trans.listParameters();
for ( String param : transParams ) {
String value = optionParams.getParameterValue( param );
if ( value != null ) {
trans.setParameterValue( param, value );
}
}
// Put the parameters over the already defined variable space. Parameters
// get priority.
trans.activateParameters();
// See if we want to run in safe mode:
if ( "Y".equalsIgnoreCase( optionSafemode.toString() ) ) {
trans.setSafeModeEnabled( true );
}
// Enable kettle metric gathering if required:
if ( "Y".equalsIgnoreCase( optionMetrics.toString() ) ) {
trans.setGatheringMetrics( true );
}
// List the parameters defined in this transformation
// Then simply exit...
//
if ( "Y".equalsIgnoreCase( optionListParam.toString() ) ) {
for ( String parameterName : trans.listParameters() ) {
String value = trans.getParameterValue( parameterName );
String deflt = trans.getParameterDefault( parameterName );
String descr = trans.getParameterDescription( parameterName );
if ( deflt != null ) {
System.out.println( "Parameter: "
+ parameterName + "=" + Const.NVL( value, "" ) + ", default=" + deflt + " : "
+ Const.NVL( descr, "" ) );
} else {
System.out.println( "Parameter: "
+ parameterName + "=" + Const.NVL( value, "" ) + " : " + Const.NVL( descr, "" ) );
}
}
// stop right here...
//
exitJVM( 7 ); // same as the other list options
}
// allocate & run the required sub-threads
try {
trans.execute( args.toArray( new String[args.size()] ) );
} catch ( KettleException e ) {
System.out.println( BaseMessages.getString( PKG, "Pan.Error.UnablePrepareInitTrans" ) );
exitJVM( 3 );
}
trans.waitUntilFinished();
// Give the transformation up to 10 seconds to finish execution
for ( int i = 0; i < 100; i++ ) {
if ( !trans.isRunning() ) {
break;
}
try {
Thread.sleep( 100 );
} catch ( Exception e ) {
break;
}
}
if ( trans.isRunning() ) {
log.logError( BaseMessages.getString( PKG, "Pan.Log.NotStopping" ) );
}
log.logMinimal( BaseMessages.getString( PKG, "Pan.Log.Finished" ) );
cal = Calendar.getInstance();
stop = cal.getTime();
String begin = df.format( start ).toString();
String end = df.format( stop ).toString();
log.logMinimal( BaseMessages.getString( PKG, "Pan.Log.StartStop", begin, end ) );
long millis = stop.getTime() - start.getTime();
int seconds = (int) ( millis / 1000 );
if ( seconds <= 60 ) {
log.logMinimal( BaseMessages.getString( PKG, "Pan.Log.ProcessingEndAfter", String.valueOf( seconds ) ) );
} else if ( seconds <= 60 * 60 ) {
int min = ( seconds / 60 );
int rem = ( seconds % 60 );
log.logMinimal( BaseMessages.getString(
PKG, "Pan.Log.ProcessingEndAfterLong", String.valueOf( min ), String.valueOf( rem ), String
.valueOf( seconds ) ) );
} else if ( seconds <= 60 * 60 * 24 ) {
int rem;
int hour = ( seconds / ( 60 * 60 ) );
rem = ( seconds % ( 60 * 60 ) );
int min = rem / 60;
rem = rem % 60;
log.logMinimal( BaseMessages.getString(
PKG, "Pan.Log.ProcessingEndAfterLonger", String.valueOf( hour ), String.valueOf( min ), String
.valueOf( rem ), String.valueOf( seconds ) ) );
} else {
int rem;
int days = ( seconds / ( 60 * 60 * 24 ) );
rem = ( seconds % ( 60 * 60 * 24 ) );
int hour = rem / ( 60 * 60 );
rem = rem % ( 60 * 60 );
int min = rem / 60;
rem = rem % 60;
log.logMinimal( BaseMessages.getString(
PKG, "Pan.Log.ProcessingEndAfterLongest", String.valueOf( days ), String.valueOf( hour ), String
.valueOf( min ), String.valueOf( rem ), String.valueOf( seconds ) ) );
}
if ( trans.getResult().getNrErrors() == 0 ) {
trans.printStats( seconds );
exitJVM( 0 );
} else {
String transJVMExitCode = trans.getVariable( Const.KETTLE_TRANS_PAN_JVM_EXIT_CODE );
// If the trans has a return code to return to the OS, then we exit with that
if ( !Const.isEmpty( transJVMExitCode ) ) {
try {
exitJVM( Integer.valueOf( transJVMExitCode ) );
} catch ( NumberFormatException nfe ) {
log
.logError( BaseMessages.getString(
PKG, "Pan.Error.TransJVMExitCodeInvalid", Const.KETTLE_TRANS_PAN_JVM_EXIT_CODE,
transJVMExitCode ) );
log.logError( BaseMessages.getString( PKG, "Pan.Log.JVMExitCode", "1" ) );
exitJVM( 1 );
}
} else { // the trans does not have a return code.
exitJVM( 1 );
}
}
} catch ( KettleException ke ) {
System.out.println( BaseMessages.getString( PKG, "Pan.Log.ErrorOccurred", "" + ke.getMessage() ) );
log.logError( BaseMessages.getString( PKG, "Pan.Log.UnexpectedErrorOccurred", "" + ke.getMessage() ) );
exitJVM( 2 );
} finally {
if ( rep != null ) {
rep.disconnect();
}
}
}
private static final void exitJVM( int status ) {
// Let's not forget to close the log file we're writing to...
//
if ( fileLoggingEventListener != null ) {
try {
fileLoggingEventListener.close();
} catch ( Exception e ) {
e.printStackTrace( System.err );
status = 1;
}
KettleLogStore.getAppender().removeLoggingEventListener( fileLoggingEventListener );
}
System.exit( status );
}
}
| apache-2.0 |
howepeng/isis | core/runtime/src/main/java/org/apache/isis/core/runtime/persistence/objectstore/transaction/PersistenceCommandAbstract.java | 1761 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.isis.core.runtime.persistence.objectstore.transaction;
import org.apache.isis.core.commons.authentication.AuthenticationSession;
import org.apache.isis.core.metamodel.adapter.ObjectAdapter;
import org.apache.isis.core.runtime.system.context.IsisContext;
/**
 * Convenience base class for {@link PersistenceCommand} implementations that
 * operate on a single {@link ObjectAdapter}.
 */
public abstract class PersistenceCommandAbstract implements PersistenceCommand {

    private final ObjectAdapter adapter;

    /**
     * @param adapter the adapter this command will act upon
     *                (nullability not enforced here — TODO confirm whether callers may pass null)
     */
    public PersistenceCommandAbstract(final ObjectAdapter adapter) {
        // Redundant explicit super() call removed; the implicit no-arg super
        // constructor invocation is identical.
        this.adapter = adapter;
    }

    /**
     * The adapter supplied at construction time.
     */
    @Override
    public ObjectAdapter onAdapter() {
        return adapter;
    }

    // /////////////////////////////////////////////////////////////////////
    // Dependencies (from context)
    // /////////////////////////////////////////////////////////////////////

    /**
     * Looks up the current authentication session from the thread-scoped
     * {@link IsisContext}.
     */
    protected static AuthenticationSession getAuthenticationSession() {
        return IsisContext.getAuthenticationSession();
    }
}
| apache-2.0 |
WeaxMe/Orienteer | orienteer-architect/src/main/java/org/orienteer/architect/component/behavior/GenerateJavaSourcesBehavior.java | 2078 | package org.orienteer.architect.component.behavior;
import org.apache.wicket.Component;
import org.apache.wicket.ajax.AbstractDefaultAjaxBehavior;
import org.apache.wicket.ajax.AjaxRequestTarget;
import org.apache.wicket.markup.head.IHeaderResponse;
import org.apache.wicket.markup.head.OnLoadHeaderItem;
import org.apache.wicket.model.ResourceModel;
import org.apache.wicket.model.util.ListModel;
import org.apache.wicket.request.IRequestParameters;
import org.apache.wicket.request.cycle.RequestCycle;
import org.orienteer.architect.component.panel.JavaSourcesPanel;
import org.orienteer.architect.component.widget.OArchitectEditorWidget;
import org.orienteer.architect.event.OpenModalWindowEvent;
import org.orienteer.architect.util.JsonUtil;
import org.orienteer.architect.util.OArchitectJsUtils;
/**
* Behavior for generate Java sources from given classes
*/
/**
 * Ajax behavior which receives a JSON description of classes from the client
 * and opens a modal window showing the generated Java sources.
 */
public class GenerateJavaSourcesBehavior extends AbstractDefaultAjaxBehavior {

    private static final String JSON_VAR = "json";

    private final OArchitectEditorWidget widget;

    public GenerateJavaSourcesBehavior(OArchitectEditorWidget widget) {
        this.widget = widget;
    }

    @Override
    protected void respond(AjaxRequestTarget target) {
        // Pull the "json" request parameter, defaulting to an empty JSON array.
        IRequestParameters requestParameters = RequestCycle.get().getRequest().getRequestParameters();
        final String classesJson = requestParameters.getParameterValue(JSON_VAR).toString("[]");

        target.prependJavaScript(OArchitectJsUtils.switchPageScroll(true));

        // Ask the owning widget to open the sources panel in a modal window.
        widget.onModalWindowEvent(
                new OpenModalWindowEvent(
                        target,
                        new ResourceModel("widget.architect.editor.java.sources"),
                        id -> new JavaSourcesPanel(id, new ListModel<>(JsonUtil.fromJSON(classesJson)))
                )
        );
    }

    @Override
    public void renderHead(Component component, IHeaderResponse response) {
        super.renderHead(component, response);
        // Register the callback URL with the client-side editor on page load.
        String registrationScript = String.format("app.setGenerateJavaSources('%s');", getCallbackUrl());
        response.render(OnLoadHeaderItem.forScript(registrationScript));
    }
}
| apache-2.0 |
jmartisk/hibernate-validator | engine/src/test/java/org/hibernate/validator/test/internal/engine/methodvalidation/service/CustomerRepositoryWithRedefinedDefaultGroupImpl.java | 2083 | /*
* JBoss, Home of Professional Open Source
* Copyright 2011, Red Hat Middleware LLC, and individual contributors
* by the @authors tag. See the copyright.txt in the distribution for a
* full listing of individual contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.hibernate.validator.test.internal.engine.methodvalidation.service;
import javax.validation.GroupSequence;
import org.hibernate.validator.test.internal.engine.methodvalidation.service.CustomerRepositoryWithRedefinedDefaultGroup.ValidationGroup1;
/**
* @author Gunnar Morling
*/
// Test fixture: redefines the Default group of this class as the sequence
// ValidationGroup1 -> CustomerRepositoryWithRedefinedDefaultGroupImpl. The
// method bodies are intentionally empty — the constraints under test are
// declared on the CustomerRepositoryWithRedefinedDefaultGroup interface and
// validated via method validation, not by the bodies themselves.
@GroupSequence({ ValidationGroup1.class, CustomerRepositoryWithRedefinedDefaultGroupImpl.class })
public class CustomerRepositoryWithRedefinedDefaultGroupImpl implements CustomerRepositoryWithRedefinedDefaultGroup {

    // methods used for parameter validation tests

    public void noConstraintInDefaultGroup(String name) {
    }

    public void constraintInDefaultGroup(String name) {
    }

    public void constraintInLaterPartOfDefaultSequence(int param) {
    }

    public void constraintInLaterPartOfDefaultSequenceAtDifferentParameters(int param1, int param2) {
    }

    public void constraintInLaterPartOfGroupSequence(int param) {
    }

    public void constraintInLaterPartOfGroupSequenceAtDifferentParameters(int param1, int param2) {
    }

    // methods used for return value validation tests

    public String noConstraintInDefaultGroupAtReturnValue() {
        return null;
    }

    public String constraintInDefaultGroupAtReturnValue() {
        return null;
    }

    public int constraintsInAllPartOfDefaultSequence() {
        return 1;
    }

    public int constraintsInAllPartsOfGroupSequence() {
        return 1;
    }
}
| apache-2.0 |
aledsage/legacy-brooklyn | core/src/main/java/brooklyn/event/basic/AttributeMap.java | 5527 | package brooklyn.event.basic;
import static com.google.common.base.Preconditions.checkNotNull;
import java.io.Serializable;
import java.util.Collection;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import brooklyn.entity.Entity;
import brooklyn.entity.basic.AbstractEntity;
import brooklyn.event.AttributeSensor;
import brooklyn.util.flags.TypeCoercions;
import com.google.common.base.Joiner;
import com.google.common.base.Objects;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;
/**
* A {@link Map} of {@link Entity} attribute values.
*/
/**
 * A {@link Map} of {@link Entity} attribute values, keyed by the sensor's name
 * parts (e.g. {@code ["http", "port"]}). Explicit null values are stored using
 * an internal sentinel so they can be distinguished from "absent".
 */
public final class AttributeMap implements Serializable {

    private static final long serialVersionUID = -6834883734250888344L;

    static final Logger log = LoggerFactory.getLogger(AttributeMap.class);

    /** Sentinel stored in place of null so the backing map can hold explicit nulls. */
    private static enum Marker {
        NULL;
    }

    private final AbstractEntity entity;

    // Note that we synchronize on the top-level map, to handle concurrent updates and gets (ENGR-2111)
    private final Map<Collection<String>, Object> values;

    /**
     * Creates a new AttributeMap.
     *
     * @param entity the EntityLocal this AttributeMap belongs to.
     * @param storage the backing map; mutated in place by this class
     * @throws NullPointerException if entity or storage is null
     */
    public AttributeMap(AbstractEntity entity, Map<Collection<String>, Object> storage) {
        this.entity = checkNotNull(entity, "entity must be specified");
        this.values = checkNotNull(storage, "storage map must not be null");
    }

    /** Returns an immutable snapshot of the raw backing map (null sentinels included). */
    public Map<Collection<String>, Object> asRawMap() {
        return ImmutableMap.copyOf(values);
    }

    /**
     * Returns a snapshot keyed by the dotted sensor name, with null sentinels
     * translated back to {@code null}.
     */
    public Map<String, Object> asMap() {
        Map<String, Object> result = Maps.newLinkedHashMap();
        for (Map.Entry<Collection<String>, Object> entry : values.entrySet()) {
            String sensorName = Joiner.on('.').join(entry.getKey());
            Object val = (isNull(entry.getValue())) ? null : entry.getValue();
            result.put(sensorName, val);
        }
        return result;
    }

    /**
     * Updates the value.
     *
     * @param path the path to the value.
     * @param newValue the new value
     * @return the old value.
     * @throws IllegalArgumentException if path is null or empty
     */
    // TODO path must be ordered (and legal to contain duplicates like "a.b.a"); list would be better
    public <T> T update(Collection<String> path, T newValue) {
        checkPath(path);

        // Store the sentinel rather than null so the map can represent explicit nulls.
        if (newValue == null) {
            newValue = typedNull();
        }

        if (log.isTraceEnabled()) {
            log.trace("setting sensor {}={} for {}", new Object[] {path, newValue, entity});
        }

        @SuppressWarnings("unchecked")
        T oldValue = (T) values.put(path, newValue);
        return (isNull(oldValue)) ? null : oldValue;
    }

    private void checkPath(Collection<String> path) {
        Preconditions.checkNotNull(path, "path can't be null");
        Preconditions.checkArgument(!path.isEmpty(), "path can't be empty");
    }

    /** Updates the attribute value and publishes the change event on the owning entity. */
    public <T> T update(AttributeSensor<T> attribute, T newValue) {
        T oldValue = updateWithoutPublishing(attribute, newValue);
        entity.emitInternal(attribute, newValue);
        return oldValue;
    }

    /** Updates the attribute value without emitting a change event. */
    public <T> T updateWithoutPublishing(AttributeSensor<T> attribute, T newValue) {
        if (log.isTraceEnabled()) {
            Object oldValue = getValue(attribute);
            // BUG FIX: previously this compared oldValue against the *boolean* expression
            // (newValue != null) -- i.e. Objects.equal(oldValue, newValue != null) -- so the
            // "changed" branch was taken for almost every update. Compare the values directly.
            if (!Objects.equal(oldValue, newValue)) {
                log.trace("setting attribute {} to {} (was {}) on {}", new Object[] {attribute.getName(), newValue, oldValue, entity});
            } else {
                // FIX: log the entity here for consistency with the branch above (was "this").
                log.trace("setting attribute {} to {} (unchanged) on {}", new Object[] {attribute.getName(), newValue, entity});
            }
        }

        // update() already translates the null sentinel back to null; the extra
        // isNull check below is kept as a harmless belt-and-braces guard.
        T oldValue = update(attribute.getNameParts(), newValue);
        return (isNull(oldValue)) ? null : oldValue;
    }

    /** Removes the stored value for the given sensor, if any. */
    public void remove(AttributeSensor<?> attribute) {
        if (log.isDebugEnabled()) {
            log.debug("removing attribute {} on {}", attribute.getName(), entity);
        }

        remove(attribute.getNameParts());
    }

    // TODO path must be ordered (and legal to contain duplicates like "a.b.a"); list would be better
    public void remove(Collection<String> path) {
        checkPath(path);

        if (log.isTraceEnabled()) {
            log.trace("removing sensor {} for {}", new Object[] {path, entity});
        }

        values.remove(path);
    }

    /**
     * Gets the value
     *
     * @param path the path of the value to get
     * @return the value, or null if absent or explicitly null
     * @throws IllegalArgumentException path is null or empty.
     */
    public Object getValue(Collection<String> path) {
        // TODO previously this would return a map of the sub-tree if the path matched a prefix of a group of sensors,
        // or the leaf value if only one value. Arguably that is not required - what is/was the use-case?
        //
        checkPath(path);
        Object result = values.get(path);
        return (isNull(result)) ? null : result;
    }

    /** Gets the value for the given sensor, coerced to the sensor's declared type. */
    @SuppressWarnings("unchecked")
    public <T> T getValue(AttributeSensor<T> sensor) {
        return (T) TypeCoercions.coerce(getValue(sensor.getNameParts()), sensor.getType());
    }

    @SuppressWarnings("unchecked")
    private <T> T typedNull() {
        return (T) Marker.NULL;
    }

    private boolean isNull(Object t) {
        return t == Marker.NULL;
    }
}
| apache-2.0 |
boonproject/boon | boon/src/test/java/org/boon/ClasspathsTest.java | 6689 | /*
* Copyright 2013-2014 Richard M. Hightower
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* __________ _____ __ .__
* \______ \ ____ ____ ____ /\ / \ _____ | | _|__| ____ ____
* | | _// _ \ / _ \ / \ \/ / \ / \\__ \ | |/ / |/ \ / ___\
* | | ( <_> | <_> ) | \ /\ / Y \/ __ \| <| | | \/ /_/ >
* |______ /\____/ \____/|___| / \/ \____|__ (____ /__|_ \__|___| /\___ /
* \/ \/ \/ \/ \/ \//_____/
* ____. ___________ _____ ______________.___.
* | |____ ___ _______ \_ _____/ / _ \ / _____/\__ | |
* | \__ \\ \/ /\__ \ | __)_ / /_\ \ \_____ \ / | |
* /\__| |/ __ \\ / / __ \_ | \/ | \/ \ \____ |
* \________(____ /\_/ (____ / /_______ /\____|__ /_______ / / ______|
* \/ \/ \/ \/ \/ \/
*/
package org.boon;
import org.junit.Test;
import java.io.File;
import java.net.URL;
import java.net.URLClassLoader;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;
import static org.boon.Boon.puts;
import static org.boon.Exceptions.die;
import static org.boon.primitive.Chr.multiply;
/**
 * Tests for {@link Classpaths} resource lookup, both from the regular test
 * classpath and from jar files loaded through a dedicated URLClassLoader.
 *
 * <p>Fix: the URLClassLoader instances created by several tests were never
 * closed, leaking the opened jar file handles. URLClassLoader is AutoCloseable
 * (since Java 7), so they are now managed with try-with-resources.</p>
 */
public class ClasspathsTest {

    @Test
    public void test() throws Exception {
        // Resource relative to the classpath root.
        final List<URL> urls = Classpaths.classpathResources( this.getClass(), "testfile.txt" );
        URL url = urls.get( 0 );

        boolean ok = true;
        ok |= Str.in( "apple", IO.read( url.openStream() ) ) || die();
    }

    @Test
    public void test1() throws Exception {
        // Nested resource, no leading slash.
        final List<URL> urls = Classpaths.classpathResources( this.getClass(), "org/node/file1.txt" );
        URL url = urls.get( 0 );

        boolean ok = true;
        ok |= Str.in( "abc", IO.read( url.openStream() ) ) || die();
    }

    @Test
    public void test2() throws Exception {
        // Nested resource, with leading slash.
        final List<URL> urls = Classpaths.classpathResources( this.getClass(), "/org/node/file1.txt" );
        URL url = urls.get( 0 );

        boolean ok = true;
        ok |= Str.in( "abc", IO.read( url.openStream() ) ) || die();
    }

    @Test
    public void test2NoRoot() throws Exception {
        final List<URL> urls = Classpaths.classpathResources( this.getClass(), "org/node/file1.txt" );
        URL url = urls.get( 0 );

        boolean ok = true;
        ok |= Str.in( "abc", IO.read( url.openStream() ) ) || die();
    }

    @Test
    public void testResourcesFromPath() throws Exception {
        // Same lookups but returning filesystem paths rather than URLs.
        final List<String> paths = Classpaths.resources( this.getClass(), "/org/node/file1.txt" );
        String path = paths.get( 0 );

        boolean ok = true;
        ok |= Str.in( "abc", IO.read( path ) ) || die();
    }

    @Test //not root
    public void testResourcesFromPathNoRoot() throws Exception {
        final List<String> paths = Classpaths.resources( this.getClass(), "org/node/file1.txt" );
        String path = paths.get( 0 );

        boolean ok = true;
        ok |= Str.in( "abc", IO.read( path ) ) || die();
    }

    @Test
    public void testDirectory() throws Exception {
        String someResource = "/org/node/";
        File file = new File( "files/node-1.0-SNAPSHOT.jar" );
        URL url1 = file.getAbsoluteFile().toURI().toURL();
        URL url2 = new File( "files/invoke-1.0-SNAPSHOT.jar" ).getAbsoluteFile().toURI().toURL();

        // FIX: close the loader so the jar file handles are released.
        try ( URLClassLoader loader = new URLClassLoader( new URL[]{ url1, url2 } ) ) {
            final List<String> resourcePaths = Classpaths.listFromClassLoader( loader, someResource );

            int directoryCount = 0;
            for ( String path : resourcePaths ) {
                if ( !Files.isDirectory( IO.path( path ) ) ) {
                    die();
                } else {
                    directoryCount++;
                }
            }

            boolean ok = true;
            // ok |= directoryCount == 3 || die(directoryCount);
        }
    }

    @Test
    public void testFileResources() throws Exception {
        String someResource = "/org/node/resource.txt";
        File file = new File( "files/node-1.0-SNAPSHOT.jar" );
        URL url1 = file.getAbsoluteFile().toURI().toURL();
        URL url2 = new File( "files/invoke-1.0-SNAPSHOT.jar" ).getAbsoluteFile().toURI().toURL();

        // FIX: close the loader so the jar file handles are released.
        try ( URLClassLoader loader = new URLClassLoader( new URL[]{ url1, url2 } ) ) {
            final List<String> resourcePaths = Classpaths.listFromClassLoader( loader, someResource );

            int fileCount = 0;
            int dirCount = 0;
            for ( String path : resourcePaths ) {
                if ( !Files.isDirectory( IO.path( path ) ) ) {
                    fileCount++;
                } else {
                    dirCount++;
                }
            }

            boolean ok = true;
            ok |= dirCount == 0 || die();
            // ok |= fileCount == 2 || die();
        }
    }

    @Test
    public void testFileResources2() throws Exception {
        String someResource = "/org/node/resource.txt";
        File file = new File( "files/node-1.0-SNAPSHOT.jar" );
        URL url1 = file.getAbsoluteFile().toURI().toURL();
        URL url2 = new File( "files/invoke-1.0-SNAPSHOT.jar" ).getAbsoluteFile().toURI().toURL();

        // FIX: close the loader so the jar file handles are released.
        try ( URLClassLoader loader = new URLClassLoader( new URL[]{ url1, url2 } ) ) {
            final List<String> resourcePaths = Classpaths.listFromClassLoader( loader, someResource );

            List<Path> list = IO.paths( "classpath://org/node/" );
            //List<Path> list = Classpaths.pathsFromClassLoader(loader, someResource);

            puts( multiply( '-', 10 ), "Path " );
            for ( Path path : list ) {
                puts( path, path.getFileSystem(), path.getClass().getName() );
                if ( path.toString().endsWith( ".txt" ) ) {
                    puts( IO.readPath( path ) );
                }
            }

            puts( multiply( '-', 10 ), "String Path " );
            List<String> slist = IO.list( "classpath://org/node/" );
            for ( String spath : slist ) {
                puts( spath );
                if ( spath.toString().endsWith( ".txt" ) ) {
                    puts( IO.readResource( spath ) );
                }
            }
        }
    }
}
| apache-2.0 |
treasure-data/presto | presto-main/src/main/java/io/prestosql/sql/planner/sanity/TypeValidator.java | 7789 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prestosql.sql.planner.sanity;
import com.google.common.collect.ListMultimap;
import io.prestosql.Session;
import io.prestosql.execution.warnings.WarningCollector;
import io.prestosql.metadata.BoundSignature;
import io.prestosql.metadata.Metadata;
import io.prestosql.spi.type.Type;
import io.prestosql.sql.planner.SimplePlanVisitor;
import io.prestosql.sql.planner.Symbol;
import io.prestosql.sql.planner.TypeAnalyzer;
import io.prestosql.sql.planner.TypeProvider;
import io.prestosql.sql.planner.plan.AggregationNode;
import io.prestosql.sql.planner.plan.AggregationNode.Aggregation;
import io.prestosql.sql.planner.plan.PlanNode;
import io.prestosql.sql.planner.plan.ProjectNode;
import io.prestosql.sql.planner.plan.UnionNode;
import io.prestosql.sql.planner.plan.WindowNode;
import io.prestosql.sql.tree.Expression;
import io.prestosql.sql.tree.SymbolReference;
import io.prestosql.type.FunctionType;
import io.prestosql.type.TypeCoercion;
import io.prestosql.type.UnknownType;
import java.util.List;
import java.util.Map;
import static com.google.common.base.Preconditions.checkArgument;
import static java.util.Objects.requireNonNull;
/**
* Ensures that all the expressions and FunctionCalls matches their output symbols
*/
public final class TypeValidator
implements PlanSanityChecker.Checker
{
    /**
     * Walks the plan with a {@link Visitor} that checks each node's expressions
     * and function calls against the declared output symbol types.
     */
    @Override
    public void validate(PlanNode plan, Session session, Metadata metadata, TypeAnalyzer typeAnalyzer, TypeProvider types, WarningCollector warningCollector)
    {
        plan.accept(new Visitor(session, metadata, typeAnalyzer, types, warningCollector), null);
    }
private static class Visitor
extends SimplePlanVisitor<Void>
{
private final Session session;
private final TypeCoercion typeCoercion;
private final TypeAnalyzer typeAnalyzer;
private final TypeProvider types;
private final WarningCollector warningCollector;
        /**
         * @param metadata used only for type lookup, wrapped in a {@link TypeCoercion};
         *                 the Metadata reference itself is not retained
         */
        public Visitor(Session session, Metadata metadata, TypeAnalyzer typeAnalyzer, TypeProvider types, WarningCollector warningCollector)
        {
            this.session = requireNonNull(session, "session is null");
            this.typeCoercion = new TypeCoercion(metadata::getType);
            this.typeAnalyzer = requireNonNull(typeAnalyzer, "typeAnalyzer is null");
            this.types = requireNonNull(types, "types is null");
            this.warningCollector = requireNonNull(warningCollector, "warningCollector is null");
        }
        /**
         * Checks each aggregation's output symbol against its resolved function
         * signature. For SINGLE-step aggregations the argument types are checked
         * too; for FINAL only the return type is checked. PARTIAL/INTERMEDIATE
         * steps fall through unchecked -- presumably because their intermediate
         * output types differ from the declared signature (TODO confirm).
         */
        @Override
        public Void visitAggregation(AggregationNode node, Void context)
        {
            visitPlan(node, context);

            AggregationNode.Step step = node.getStep();

            for (Map.Entry<Symbol, Aggregation> entry : node.getAggregations().entrySet()) {
                Symbol symbol = entry.getKey();
                Aggregation aggregation = entry.getValue();
                switch (step) {
                    case SINGLE:
                        checkSignature(symbol, aggregation.getResolvedFunction().getSignature());
                        checkCall(symbol, aggregation.getResolvedFunction().getSignature(), aggregation.getArguments());
                        break;
                    case FINAL:
                        checkSignature(symbol, aggregation.getResolvedFunction().getSignature());
                        break;
                }
            }

            return null;
        }
        /**
         * Checks every window function's signature and call arguments against
         * the node's output symbols (after validating the children).
         */
        @Override
        public Void visitWindow(WindowNode node, Void context)
        {
            visitPlan(node, context);

            checkWindowFunctions(node.getWindowFunctions());

            return null;
        }
        /**
         * Checks each projection assignment: the type of the assigned expression
         * must match the declared type of the output symbol.
         */
        @Override
        public Void visitProject(ProjectNode node, Void context)
        {
            visitPlan(node, context);

            for (Map.Entry<Symbol, Expression> entry : node.getAssignments().entrySet()) {
                Type expectedType = types.get(entry.getKey());
                if (entry.getValue() instanceof SymbolReference) {
                    // Plain symbol-to-symbol assignment: compare declared types
                    // directly, skipping expression analysis.
                    SymbolReference symbolReference = (SymbolReference) entry.getValue();
                    verifyTypeSignature(entry.getKey(), expectedType, types.get(Symbol.from(symbolReference)));
                    continue;
                }
                Type actualType = typeAnalyzer.getType(session, types, entry.getValue());
                verifyTypeSignature(entry.getKey(), expectedType, actualType);
            }

            return null;
        }
/**
 * Checks that every input symbol mapped to a union output symbol has the same declared
 * type as that output symbol.
 */
@Override
public Void visitUnion(UnionNode node, Void context)
{
    visitPlan(node, context);

    ListMultimap<Symbol, Symbol> symbolMapping = node.getSymbolMapping();
    symbolMapping.keySet().forEach(outputSymbol -> {
        Type expectedType = types.get(outputSymbol);
        for (Symbol inputSymbol : symbolMapping.get(outputSymbol)) {
            verifyTypeSignature(outputSymbol, expectedType, types.get(inputSymbol));
        }
    });
    return null;
}
/** Validates return and argument types for each (symbol, window function) pair. */
private void checkWindowFunctions(Map<Symbol, WindowNode.Function> functions)
{
    for (Map.Entry<Symbol, WindowNode.Function> entry : functions.entrySet()) {
        Symbol symbol = entry.getKey();
        WindowNode.Function function = entry.getValue();
        BoundSignature signature = function.getResolvedFunction().getSignature();
        checkSignature(symbol, signature);
        checkCall(symbol, signature, function.getArguments());
    }
}
/** Verifies that the function's declared return type matches the output symbol's declared type. */
private void checkSignature(Symbol symbol, BoundSignature signature)
{
    verifyTypeSignature(symbol, types.get(symbol), signature.getReturnType());
}
/**
 * Verifies a full call site: the return type against the output symbol, the argument count
 * against the signature's arity, and each argument expression's type against the declared
 * parameter type. Lambda-typed parameters are skipped because their arguments are not
 * plain expressions.
 */
private void checkCall(Symbol symbol, BoundSignature signature, List<Expression> arguments)
{
    verifyTypeSignature(symbol, types.get(symbol), signature.getReturnType());

    List<Type> declaredArgumentTypes = signature.getArgumentTypes();
    checkArgument(declaredArgumentTypes.size() == arguments.size(),
            "expected %s arguments, but found %s arguments",
            declaredArgumentTypes.size(),
            arguments.size());

    for (int i = 0; i < arguments.size(); i++) {
        Type declaredType = declaredArgumentTypes.get(i);
        if (declaredType instanceof FunctionType) {
            // lambda argument: no expression type to compare
            continue;
        }
        Type argumentType = typeAnalyzer.getType(session, types, arguments.get(i));
        verifyTypeSignature(symbol, declaredType, argumentType);
    }
}
/**
 * Asserts that {@code actual} matches {@code expected} for {@code symbol}. Two relaxations
 * apply: UNKNOWN acts as a wildcard matching any expected type, and type-only coercions
 * (representation-compatible types) are accepted without an exact match.
 */
private void verifyTypeSignature(Symbol symbol, Type expected, Type actual)
{
    if (actual instanceof UnknownType) {
        return;
    }
    if (typeCoercion.isTypeOnlyCoercion(actual, expected)) {
        return;
    }
    checkArgument(expected.equals(actual), "type of symbol '%s' is expected to be %s, but the actual type is %s", symbol, expected, actual);
}
}
}
| apache-2.0 |
vvv1559/intellij-community | plugins/svn4idea/testSource/org/jetbrains/idea/SvnTestCase.java | 23686 | /*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jetbrains.idea;
import com.intellij.execution.process.ProcessOutput;
import com.intellij.ide.startup.impl.StartupManagerImpl;
import com.intellij.openapi.actionSystem.ActionManager;
import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.actionSystem.CommonDataKeys;
import com.intellij.openapi.actionSystem.Presentation;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.PathManager;
import com.intellij.openapi.application.PluginPathManager;
import com.intellij.openapi.command.undo.UndoManager;
import com.intellij.openapi.progress.EmptyProgressIndicator;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.startup.StartupManager;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.ui.TestDialog;
import com.intellij.openapi.util.SystemInfo;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vcs.VcsConfiguration;
import com.intellij.openapi.vcs.VcsException;
import com.intellij.openapi.vcs.VcsShowConfirmationOption;
import com.intellij.openapi.vcs.changes.*;
import com.intellij.openapi.vcs.ex.ProjectLevelVcsManagerEx;
import com.intellij.openapi.vcs.update.CommonUpdateProjectAction;
import com.intellij.openapi.vfs.LocalFileSystem;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.vfs.VirtualFileManager;
import com.intellij.openapi.vfs.newvfs.BulkFileListener;
import com.intellij.openapi.vfs.newvfs.events.VFileContentChangeEvent;
import com.intellij.testFramework.fixtures.IdeaTestFixtureFactory;
import com.intellij.testFramework.fixtures.TempDirTestFixture;
import com.intellij.testFramework.vcs.AbstractJunitVcsTestCase;
import com.intellij.testFramework.vcs.MockChangeListManagerGate;
import com.intellij.testFramework.vcs.MockChangelistBuilder;
import com.intellij.testFramework.vcs.TestClientRunner;
import com.intellij.util.Processor;
import com.intellij.util.TimeoutUtil;
import com.intellij.util.concurrency.Semaphore;
import com.intellij.util.io.ZipUtil;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.idea.svn.SvnApplicationSettings;
import org.jetbrains.idea.svn.SvnConfiguration;
import org.jetbrains.idea.svn.SvnFileUrlMappingImpl;
import org.jetbrains.idea.svn.SvnVcs;
import org.jetbrains.idea.svn.actions.CreateExternalAction;
import org.junit.After;
import org.junit.Before;
import java.io.File;
import java.io.IOException;
import java.util.*;
import static com.intellij.openapi.vfs.VfsUtilCore.virtualToIoFile;
import static org.junit.Assert.*;
/**
* @author yole
*/
public abstract class SvnTestCase extends AbstractJunitVcsTestCase {
  // Global overrides for all test instances; when non-null they win over the per-instance values.
  public static String ourGlobalTestDataDir;
  public static Boolean ourGlobalUseNativeAcceleration;
  protected TempDirTestFixture myTempDirFixture;
  // file:// URL of the primary test repository created in setUp().
  protected String myRepoUrl;
  protected TestClientRunner myRunner;
  protected String myWcRootName;
  // TODO: Change this to explicitly run either with native acceleration or not.
  // properties set through run configurations or different runners (like Suite) could be used
  // NOTE(review): this makes acceleration depend on the wall-clock hour, so the same test can
  // exercise different code paths on different runs — confirm this flakiness is intentional.
  private boolean myUseNativeAcceleration = new GregorianCalendar().get(Calendar.HOUR_OF_DAY) % 2 == 0;
  private String myTestDataDir;
  // Root directory of the primary repository (extracted from newrepo.zip).
  private File myRepoRoot;
  // Root of the checked-out working copy used by the tests.
  private File myWcRoot;
  private ChangeListManagerGate myGate;
  // file:// URL of the secondary repository, set lazily by createAnotherRepo().
  protected String myAnotherRepoUrl;
  protected File myPluginRoot;

  protected SvnTestCase(@NotNull String testDataDir) {
    myTestDataDir = testDataDir;
    myWcRootName = "wcroot";
  }

  // Fires a synthetic VFS content-change event for <dir>/.svn/wc.db so listeners react as if
  // the working-copy database had been modified externally.
  public static void imitateEvent(VirtualFile dir) {
    final VirtualFile child = dir.findChild(".svn");
    assertNotNull(child);
    final VirtualFile wcdb = child.findChild("wc.db");
    assertNotNull(wcdb);

    final BulkFileListener listener = ApplicationManager.getApplication().getMessageBus().syncPublisher(VirtualFileManager.VFS_CHANGES);
    final VFileContentChangeEvent event =
      new VFileContentChangeEvent(null, wcdb, wcdb.getModificationStamp() - 1, wcdb.getModificationStamp(), true);
    final List<VFileContentChangeEvent> events = Collections.singletonList(event);
    listener.before(events);
    listener.after(events);
  }

  // Creates a temp repository from testData/svn/newrepo.zip, locates a bundled svn binary for the
  // current OS, checks out a working copy, initializes the project and activates the SVN VCS.
  @Before
  public void setUp() throws Exception {
    System.out.println("Native client for status: " + isUseNativeAcceleration());
    // The test data directory may be overridden via a system property.
    String property = System.getProperty("svn.test.data.directory");
    if (!StringUtil.isEmpty(property)) {
      myTestDataDir = property;
    }

    UIUtil.invokeAndWaitIfNeeded((Runnable)() -> {
      try {
        final IdeaTestFixtureFactory fixtureFactory = IdeaTestFixtureFactory.getFixtureFactory();
        myTempDirFixture = fixtureFactory.createTempDirTestFixture();
        myTempDirFixture.setUp();

        myRepoRoot = new File(myTempDirFixture.getTempDirPath(), "svnroot");
        boolean isRepoRootCreated = myRepoRoot.mkdir() || myRepoRoot.isDirectory();
        assert isRepoRootCreated : myRepoRoot;

        myPluginRoot = new File(PluginPathManager.getPluginHomePath("svn4idea"));
        if (!myPluginRoot.isDirectory()) {
          // try standalone mode
          Class aClass = SvnTestCase.class;
          String rootPath = PathManager.getResourceRoot(aClass, "/" + aClass.getName().replace('.', '/') + ".class");
          myPluginRoot = new File(rootPath).getParentFile().getParentFile().getParentFile();
        }

        // Pick the bundled svn executable that matches the current platform.
        File svnBinDir = new File(myPluginRoot, getTestDataDir() + "/svn/bin");
        File svnExecutable = null;
        if (SystemInfo.isWindows) {
          svnExecutable = new File(svnBinDir, "windows/svn.exe");
        }
        else if (SystemInfo.isLinux) {
          svnExecutable = new File(svnBinDir, "linux/svn");
        }
        else if (SystemInfo.isMac) {
          svnExecutable = new File(svnBinDir, "mac/svn");
        }
        assertTrue("No Subversion executable was found: " + svnExecutable + ", " + SystemInfo.OS_NAME,
                   svnExecutable != null && svnExecutable.canExecute());
        myClientBinaryPath = svnExecutable.getParentFile();
        // On macOS the bundled client needs its library directory on DYLD_LIBRARY_PATH.
        myRunner = SystemInfo.isMac
                   ? createClientRunner(Collections.singletonMap("DYLD_LIBRARY_PATH", myClientBinaryPath.getPath()))
                   : createClientRunner();

        ZipUtil.extract(new File(myPluginRoot, getTestDataDir() + "/svn/newrepo.zip"), myRepoRoot, null);

        myWcRoot = new File(myTempDirFixture.getTempDirPath(), myWcRootName);
        boolean isWcRootCreated = myWcRoot.mkdir() || myWcRoot.isDirectory();
        assert isWcRootCreated : myWcRoot;

        myRepoUrl = (SystemInfo.isWindows ? "file:///" : "file://") + FileUtil.toSystemIndependentName(myRepoRoot.getPath());

        verify(runSvn("co", myRepoUrl, myWcRoot.getPath()));

        initProject(myWcRoot, this.getTestName());
        activateVCS(SvnVcs.VCS_NAME);
        myGate = new MockChangeListManagerGate(ChangeListManager.getInstance(myProject));

        ((StartupManagerImpl) StartupManager.getInstance(myProject)).runPostStartupActivities();
        refreshSvnMappingsSynchronously();
      }
      catch (Exception e) {
        throw new RuntimeException(e);
      }
    });

    // there should be kind-a waiting for after change list manager finds all changes and runs inner refresh of copies in the above method
    if (myInitChangeListManager) {
      ChangeListManager changeListManager = ChangeListManager.getInstance(myProject);
      VcsDirtyScopeManager.getInstance(myProject).markEverythingDirty();
      changeListManager.ensureUpToDate(false);
    }
  }

  // Blocks until the SVN file/URL mapping has been fully refreshed (no-op when the
  // change-list manager is not initialized for this test).
  protected void refreshSvnMappingsSynchronously() {
    final SvnVcs vcs = SvnVcs.getInstance(myProject);
    if (! myInitChangeListManager) {
      return;
    }
    final Semaphore semaphore = new Semaphore();
    semaphore.down();
    ((SvnFileUrlMappingImpl) vcs.getSvnFileUrlMapping()).realRefresh(() -> semaphore.up());
    semaphore.waitFor();
  }

  @Override
  protected void projectCreated() {
    // Propagate the acceleration choice and command-line client location into project settings.
    SvnConfiguration.getInstance(myProject).setUseAcceleration(
      isUseNativeAcceleration() ? SvnConfiguration.UseAcceleration.commandLine : SvnConfiguration.UseAcceleration.nothing);
    SvnApplicationSettings.getInstance().setCommandLinePath(myClientBinaryPath + File.separator + "svn");
  }

  // Stops background change-list processing, then tears down the project, working copy,
  // repository and temp-dir fixture on the EDT.
  @After
  public void tearDown() throws Exception {
    ((ChangeListManagerImpl) ChangeListManager.getInstance(myProject)).stopEveryThingIfInTestMode();
    sleep(100);
    UIUtil.invokeAndWaitIfNeeded((Runnable)() -> {
      try {
        tearDownProject();
        if (myWcRoot != null && myWcRoot.exists()) {
          FileUtil.delete(myWcRoot);
        }
        if (myRepoRoot != null && myRepoRoot.exists()) {
          FileUtil.delete(myRepoRoot);
        }
        if (myTempDirFixture != null) {
          myTempDirFixture.tearDown();
          myTempDirFixture = null;
        }
      }
      catch (Exception e) {
        throw new RuntimeException(e);
      }
    });
  }

  // Runs the svn command-line client with the working-copy root as the current directory.
  protected ProcessOutput runSvn(String... commandLine) throws IOException {
    return myRunner.runClient("svn", null, myWcRoot, commandLine);
  }

  protected void enableSilentOperation(final VcsConfiguration.StandardConfirmation op) {
    setStandardConfirmation(SvnVcs.VCS_NAME, op, VcsShowConfirmationOption.Value.DO_ACTION_SILENTLY);
  }

  protected void disableSilentOperation(final VcsConfiguration.StandardConfirmation op) {
    setStandardConfirmation(SvnVcs.VCS_NAME, op, VcsShowConfirmationOption.Value.DO_NOTHING_SILENTLY);
  }

  // Commits everything in the working copy with a fixed message.
  protected void checkin() throws IOException {
    runInAndVerifyIgnoreOutput("ci", "-m", "test");
  }

  protected void update() throws IOException {
    runInAndVerifyIgnoreOutput("up");
  }

  // Collects the changes the SVN change provider reports for the given dirty scope.
  protected List<Change> getChangesInScope(final VcsDirtyScope dirtyScope) throws VcsException {
    ChangeProvider changeProvider = SvnVcs.getInstance(myProject).getChangeProvider();
    MockChangelistBuilder builder = new MockChangelistBuilder();
    changeProvider.getChanges(dirtyScope, builder, new EmptyProgressIndicator(), myGate);
    return builder.getChanges();
  }

  // Performs a single project-level undo, auto-confirming any dialog that pops up.
  protected void undo() {
    UIUtil.invokeAndWaitIfNeeded((Runnable)() -> {
      final TestDialog oldTestDialog = Messages.setTestDialog(TestDialog.OK);
      try {
        UndoManager.getInstance(myProject).undo(null);
      }
      finally {
        Messages.setTestDialog(oldTestDialog);
      }
    });
  }

  // Rebuilds the working copy as a checkout of root/source with root/target checked out
  // inside it (an "inner" nested copy), optionally from a second repository.
  protected void prepareInnerCopy(final boolean anotherRepository) throws Exception {
    final String mainUrl = myRepoUrl + "/root/source";
    final String externalURL;
    if (anotherRepository) {
      createAnotherRepo();
      externalURL = myAnotherRepoUrl + "/root/target";
    } else {
      externalURL = myRepoUrl + "/root/target";
    }
    final ChangeListManagerImpl clManager = (ChangeListManagerImpl)ChangeListManager.getInstance(myProject);
    final SubTree subTree = new SubTree(myWorkingCopyDir);
    checkin();

    // Suspend change-list processing while the working copy is rebuilt on disk.
    clManager.stopEveryThingIfInTestMode();
    sleep(100);
    final File rootFile = virtualToIoFile(subTree.myRootDir);
    FileUtil.delete(rootFile);
    FileUtil.delete(new File(myWorkingCopyDir.getPath() + File.separator + ".svn"));
    assertTrue(!rootFile.exists());
    sleep(200);
    myWorkingCopyDir.refresh(false, true);

    runInAndVerifyIgnoreOutput("co", mainUrl);
    final File sourceDir = new File(myWorkingCopyDir.getPath(), "source");
    final File innerDir = new File(sourceDir, "inner1/inner2/inner");
    runInAndVerifyIgnoreOutput("co", externalURL, innerDir.getPath());
    sleep(100);
    myWorkingCopyDir.refresh(false, true);
    // above is preparation

    // start change list manager again
    clManager.forceGoInTestMode();
    refreshSvnMappingsSynchronously();
    //clManager.ensureUpToDate(false);
    //clManager.ensureUpToDate(false);
  }

  public String getTestDataDir() {
    // The static override, when set, takes precedence for all instances.
    return StringUtil.isEmpty(ourGlobalTestDataDir) ? myTestDataDir : ourGlobalTestDataDir;
  }

  public void setTestDataDir(String testDataDir) {
    myTestDataDir = testDataDir;
  }

  public boolean isUseNativeAcceleration() {
    return ourGlobalUseNativeAcceleration != null ? ourGlobalUseNativeAcceleration : myUseNativeAcceleration;
  }

  public void setUseNativeAcceleration(boolean useNativeAcceleration) {
    myUseNativeAcceleration = useNativeAcceleration;
  }

  // Fixture tree: root/{source/{s1.txt,s2.txt}, target/{t10..t19.txt}} under a given base dir.
  // Existing children are reused, so constructing a SubTree twice is idempotent.
  protected class SubTree {
    public VirtualFile myRootDir;
    public VirtualFile mySourceDir;
    public VirtualFile myTargetDir;
    public VirtualFile myS1File;
    public VirtualFile myS2File;
    public final List<VirtualFile> myTargetFiles;
    public static final String ourS1Contents = "123";
    public static final String ourS2Contents = "abc";

    // content == null means "directory"; otherwise a file with that content is created.
    private VirtualFile findOrCreateChild(final VirtualFile parent, final String name, final String content) {
      final VirtualFile result = parent.findChild(name);
      if (result != null) return result;
      if (content == null) {
        return createDirInCommand(parent, name);
      } else {
        return createFileInCommand(parent, name, content);
      }
    }

    public SubTree(final VirtualFile base) {
      myRootDir = findOrCreateChild(base, "root", null);

      mySourceDir = findOrCreateChild(myRootDir, "source", null);
      myS1File = findOrCreateChild(mySourceDir, "s1.txt", ourS1Contents);
      myS2File = findOrCreateChild(mySourceDir, "s2.txt", ourS2Contents);

      myTargetDir = findOrCreateChild(myRootDir, "target", null);
      myTargetFiles = new ArrayList<>();
      for (int i = 0; i < 10; i++) {
        myTargetFiles.add(findOrCreateChild(myTargetDir, "t" + (i + 10) + ".txt", ourS1Contents));
      }
    }
  }

  protected static void sleep(final int millis) {
    TimeoutUtil.sleep(millis);
  }

  // Creates trunk/branches/tags in the repository, re-checks out the working copy from trunk,
  // commits the fixture tree and copies trunk to branches/b1. Returns the branch URL.
  public String prepareBranchesStructure() throws Exception {
    final SvnVcs vcs = SvnVcs.getInstance(myProject);
    final String mainUrl = myRepoUrl + "/trunk";
    runInAndVerifyIgnoreOutput("mkdir", "-m", "mkdir", mainUrl);
    runInAndVerifyIgnoreOutput("mkdir", "-m", "mkdir", myRepoUrl + "/branches");
    runInAndVerifyIgnoreOutput("mkdir", "-m", "mkdir", myRepoUrl + "/tags");

    final ChangeListManagerImpl clManager = (ChangeListManagerImpl)ChangeListManager.getInstance(myProject);
    clManager.stopEveryThingIfInTestMode();
    sleep(100);
    // Deleting .svn can race with background processes holding files; retry a few times.
    boolean deleted = false;
    for (int i = 0; i < 5; i++) {
      deleted = FileUtil.delete(new File(myWorkingCopyDir.getPath() + File.separator + ".svn"));
      if (deleted) break;
      sleep(200);
    }
    assertTrue(deleted);
    sleep(200);
    myWorkingCopyDir.refresh(false, true);

    runInAndVerifyIgnoreOutput("co", mainUrl, myWorkingCopyDir.getPath());
    enableSilentOperation(VcsConfiguration.StandardConfirmation.ADD);
    final SubTree tree = new SubTree(myWorkingCopyDir);
    checkin();
    final String branchUrl = myRepoUrl + "/branches/b1";
    runInAndVerifyIgnoreOutput("copy", "-q", "-m", "coppy", mainUrl, branchUrl);

    clManager.forceGoInTestMode();
    refreshSvnMappingsSynchronously();
    //clManager.ensureUpToDate(false);
    //clManager.ensureUpToDate(false);
    return branchUrl;
  }

  public void prepareExternal() throws Exception {
    prepareExternal(true, true, false);
  }

  // Rebuilds the working copy as a checkout of root/source with root/target attached via
  // an svn:externals property named "external", optionally from a second repository.
  public void prepareExternal(final boolean commitExternalDefinition, final boolean updateExternal,
                              final boolean anotherRepository) throws Exception {
    final ChangeListManagerImpl clManager = (ChangeListManagerImpl)ChangeListManager.getInstance(myProject);
    final SvnVcs vcs = SvnVcs.getInstance(myProject);
    final String mainUrl = myRepoUrl + "/root/source";
    final String externalURL;
    if (anotherRepository) {
      createAnotherRepo();
      externalURL = myAnotherRepoUrl + "/root/target";
    } else {
      externalURL = myRepoUrl + "/root/target";
    }
    final SubTree subTree = new SubTree(myWorkingCopyDir);
    checkin();

    clManager.stopEveryThingIfInTestMode();
    sleep(100);
    final File rootFile = virtualToIoFile(subTree.myRootDir);
    FileUtil.delete(rootFile);
    FileUtil.delete(new File(myWorkingCopyDir.getPath() + File.separator + ".svn"));
    assertTrue(!rootFile.exists());
    sleep(200);
    myWorkingCopyDir.refresh(false, true);

    final File sourceDir = new File(myWorkingCopyDir.getPath(), "source");
    runInAndVerifyIgnoreOutput("co", mainUrl, sourceDir.getPath());
    CreateExternalAction.addToExternalProperty(vcs, sourceDir, "external", externalURL);
    sleep(100);
    if (updateExternal) {
      runInAndVerifyIgnoreOutput("up", sourceDir.getPath());
    }
    if (commitExternalDefinition) {
      runInAndVerifyIgnoreOutput("ci", "-m", "test", sourceDir.getPath());
    }
    sleep(100);
    if (updateExternal) {
      myWorkingCopyDir.refresh(false, true);
      assertTrue(new File(sourceDir, "external").exists());
    }
    // above is preparation

    // start change list manager again
    clManager.forceGoInTestMode();
    refreshSvnMappingsSynchronously();
    //clManager.ensureUpToDate(false);
    //clManager.ensureUpToDate(false);
  }

  // Clones the primary repository into a second one (myAnotherRepoUrl) and commits the
  // fixture tree to it through a throwaway working copy.
  protected void createAnotherRepo() throws Exception {
    final File repo = FileUtil.createTempDirectory("anotherRepo", "");
    FileUtil.delete(repo);
    FileUtil.copyDir(myRepoRoot, repo);
    myAnotherRepoUrl = (SystemInfo.isWindows ? "file:///" : "file://") + FileUtil.toSystemIndependentName(repo.getPath());

    final File tmpWc = FileUtil.createTempDirectory("hhh", "");
    runInAndVerifyIgnoreOutput("co", myAnotherRepoUrl, tmpWc.getPath());
    final VirtualFile tmpWcVf = LocalFileSystem.getInstance().refreshAndFindFileByIoFile(tmpWc);
    assertNotNull(tmpWcVf);
    final SubTree tree = new SubTree(tmpWcVf);
    runInAndVerifyIgnoreOutput(tmpWc, "add", "root");
    runInAndVerifyIgnoreOutput(tmpWc, "ci", "-m", "fff");
    FileUtil.delete(tmpWc);
  }

  // Drives the standard "Update Project" action and waits for the change-list manager to settle.
  protected static void imitUpdate(final Project project) {
    ProjectLevelVcsManagerEx.getInstanceEx(project).getOptions(VcsConfiguration.StandardOption.UPDATE).setValue(false);
    final CommonUpdateProjectAction action = new CommonUpdateProjectAction();
    action.getTemplatePresentation().setText("1");
    action.actionPerformed(new AnActionEvent(null,
                                             dataId -> {
                                               if (CommonDataKeys.PROJECT.is(dataId)) {
                                                 return project;
                                               }
                                               return null;
                                             }, "test", new Presentation(), ActionManager.getInstance(), 0));

    final ChangeListManager clManager = ChangeListManager.getInstance(project);

    clManager.ensureUpToDate(false);
    clManager.ensureUpToDate(false); // wait for after-events like annotations recalculation
    sleep(100); // zipper updater
  }

  protected void runAndVerifyStatusSorted(final String... stdoutLines) throws IOException {
    runStatusAcrossLocks(myWcRoot, true, stdoutLines);
  }

  protected void runAndVerifyStatus(final String... stdoutLines) throws IOException {
    runStatusAcrossLocks(myWcRoot, false, stdoutLines);
  }

  // Runs "svn status" and verifies its output, retrying while any line starts with "L"
  // (i.e. while something in the working copy is still locked).
  private void runStatusAcrossLocks(@Nullable File workingDir, final boolean sorted, final String... stdoutLines) throws IOException {
    final Processor<ProcessOutput> primitiveVerifier = output -> {
      if (sorted) {
        verifySorted(output, stdoutLines); // will assert if err not empty
      } else {
        verify(output, stdoutLines); // will assert if err not empty
      }
      return false;
    };
    runAndVerifyAcrossLocks(workingDir, new String[]{"status"}, output -> {
      final List<String> lines = output.getStdoutLines();
      for (String line : lines) {
        if (line.trim().startsWith("L")) {
          return true; // i.e. continue tries
        }
      }
      primitiveVerifier.process(output);
      return false;
    }, primitiveVerifier);
  }

  protected void runInAndVerifyIgnoreOutput(final String... inLines) throws IOException {
    final Processor<ProcessOutput> verifier = createPrimitiveExitCodeVerifier();
    runAndVerifyAcrossLocks(myWcRoot, myRunner, inLines, verifier, verifier);
  }

  // Verifier that only checks for a zero exit code, reporting stderr on failure.
  private static Processor<ProcessOutput> createPrimitiveExitCodeVerifier() {
    return output -> {
      assertEquals(output.getStderr(), 0, output.getExitCode());
      return false;
    };
  }

  public static void runInAndVerifyIgnoreOutput(File workingDir, final TestClientRunner runner, final String[] input, final String... stdoutLines) throws IOException {
    final Processor<ProcessOutput> verifier = createPrimitiveExitCodeVerifier();
    runAndVerifyAcrossLocks(workingDir, runner, input, verifier, verifier);
  }

  protected void runInAndVerifyIgnoreOutput(final File root, final String... inLines) throws IOException {
    final Processor<ProcessOutput> verifier = createPrimitiveExitCodeVerifier();
    runAndVerifyAcrossLocks(root, myRunner, inLines, verifier, verifier);
  }

  private void runAndVerifyAcrossLocks(@Nullable File workingDir, final String[] input, final Processor<ProcessOutput> verifier,
                                       final Processor<ProcessOutput> primitiveVerifier) throws IOException {
    // null working dir means "run in the working-copy root".
    workingDir = workingDir == null ? myWcRoot : workingDir;
    runAndVerifyAcrossLocks(workingDir, myRunner, input, verifier, primitiveVerifier);
  }

  /**
   * Runs an svn command up to 5 times, retrying on working-copy lock errors (E155004) or
   * while {@code verifier} asks for another try; a final attempt is checked with
   * {@code primitiveVerifier}.
   *
   * @param verifier - if returns true, try again
   */
  public static void runAndVerifyAcrossLocks(File workingDir, final TestClientRunner runner, final String[] input,
                                             final Processor<ProcessOutput> verifier, final Processor<ProcessOutput> primitiveVerifier) throws IOException {
    for (int i = 0; i < 5; i++) {
      final ProcessOutput output = runner.runClient("svn", null, workingDir, input);
      if (output.getExitCode() == 0) {
        if (verifier.process(output)) {
          continue;
        }
        return;
      }
      if (! StringUtil.isEmptyOrSpaces(output.getStderr())) {
        final String stderr = output.getStderr();
        /*svn: E155004: Working copy '' locked.
        svn: E155004: '' is already locked.
        svn: run 'svn cleanup' to remove locks (type 'svn help cleanup' for details)*/
        if (stderr.contains("E155004") && stderr.contains("is already locked")) {
          continue;
        }
      }
      // will throw assertion
      if (verifier.process(output)) {
        continue;
      }
      return;
    }
    final ProcessOutput output = runner.runClient("svn", null, workingDir, input);
    primitiveVerifier.process(output);
  }

  // Switches between the command-line (native) client and the pure-Java implementation at runtime.
  protected void setNativeAcceleration(final boolean value) {
    System.out.println("Set native acceleration to " + value);
    SvnConfiguration.getInstance(myProject).setUseAcceleration(
      value ? SvnConfiguration.UseAcceleration.commandLine : SvnConfiguration.UseAcceleration.nothing);
    SvnApplicationSettings.getInstance().setCommandLinePath(myClientBinaryPath + File.separator + "svn");
  }
}
| apache-2.0 |
ronghuaxiang/Timo | src/main/parser/fm/liu/timo/parser/ast/fragment/tableref/NaturalJoin.java | 2100 | /*
* Copyright 1999-2012 Alibaba Group.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/**
* (created at 2011-2-9)
*/
package fm.liu.timo.parser.ast.fragment.tableref;
import fm.liu.timo.parser.visitor.Visitor;
/**
* @author <a href="mailto:shuo.qius@alibaba-inc.com">QIU Shuo</a>
*/
public class NaturalJoin implements TableReference {
    /** Whether this is an outer natural join. */
    private final boolean isOuter;
    /**
     * make sense only if {@link #isOuter} is true. Eigher <code>LEFT</code> or <code>RIGHT</code>
     */
    private final boolean isLeft;
    private final TableReference leftTableRef;
    private final TableReference rightTableRef;

    /**
     * @param isOuter       whether the join is an outer join
     * @param isLeft        LEFT ({@code true}) vs RIGHT ({@code false}); meaningful only when outer
     * @param leftTableRef  left operand of the join
     * @param rightTableRef right operand of the join
     */
    public NaturalJoin(boolean isOuter, boolean isLeft, TableReference leftTableRef,
                       TableReference rightTableRef) {
        this.isOuter = isOuter;
        this.isLeft = isLeft;
        this.leftTableRef = leftTableRef;
        this.rightTableRef = rightTableRef;
    }

    public boolean isOuter() {
        return isOuter;
    }

    public boolean isLeft() {
        return isLeft;
    }

    public TableReference getLeftTableRef() {
        return leftTableRef;
    }

    public TableReference getRightTableRef() {
        return rightTableRef;
    }

    /** A natural join carries no explicit join condition, so there is nothing to remove. */
    @Override
    public Object removeLastConditionElement() {
        return null;
    }

    @Override
    public boolean isSingleTable() {
        return false;
    }

    @Override
    public int getPrecedence() {
        return TableReference.PRECEDENCE_JOIN;
    }

    @Override
    public void accept(Visitor visitor) {
        visitor.visit(this);
    }
}
| apache-2.0 |
sudheeshkatkam/incubator-calcite | core/src/main/java/org/apache/calcite/adapter/enumerable/EnumerableLimit.java | 4816 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.calcite.adapter.enumerable;
import org.apache.calcite.linq4j.tree.BlockBuilder;
import org.apache.calcite.linq4j.tree.Expression;
import org.apache.calcite.linq4j.tree.Expressions;
import org.apache.calcite.plan.RelOptCluster;
import org.apache.calcite.plan.RelTraitSet;
import org.apache.calcite.rel.RelCollation;
import org.apache.calcite.rel.RelCollationTraitDef;
import org.apache.calcite.rel.RelDistribution;
import org.apache.calcite.rel.RelDistributionTraitDef;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.rel.RelWriter;
import org.apache.calcite.rel.SingleRel;
import org.apache.calcite.rel.metadata.RelMdCollation;
import org.apache.calcite.rel.metadata.RelMdDistribution;
import org.apache.calcite.rex.RexLiteral;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.util.BuiltInMethod;
import com.google.common.base.Supplier;
import java.util.List;
/** Relational expression that applies a limit and/or offset to its input. */
public class EnumerableLimit extends SingleRel implements EnumerableRel {
  // Number of rows to skip; null means no offset.
  private final RexNode offset;
  // Maximum number of rows to return; null means unlimited.
  private final RexNode fetch;

  /** Creates an EnumerableLimit.
   *
   * <p>Use {@link #create} unless you know what you're doing. */
  public EnumerableLimit(
      RelOptCluster cluster,
      RelTraitSet traitSet,
      RelNode input,
      RexNode offset,
      RexNode fetch) {
    super(cluster, traitSet, input);
    this.offset = offset;
    this.fetch = fetch;
    assert getConvention() instanceof EnumerableConvention;
    assert getConvention() == input.getConvention();
  }

  /** Creates an EnumerableLimit.
   *
   * <p>Derives collation and distribution traits lazily from the input, since a
   * limit preserves both. */
  public static EnumerableLimit create(final RelNode input, RexNode offset,
      RexNode fetch) {
    final RelOptCluster cluster = input.getCluster();
    final RelTraitSet traitSet =
        cluster.traitSetOf(EnumerableConvention.INSTANCE)
            .replaceIfs(
                RelCollationTraitDef.INSTANCE,
                new Supplier<List<RelCollation>>() {
                  public List<RelCollation> get() {
                    return RelMdCollation.limit(input);
                  }
                })
            .replaceIf(RelDistributionTraitDef.INSTANCE,
                new Supplier<RelDistribution>() {
                  public RelDistribution get() {
                    return RelMdDistribution.limit(input);
                  }
                });
    return new EnumerableLimit(cluster, traitSet, input, offset, fetch);
  }

  /** Copies this node with new traits and inputs, preserving offset and fetch. */
  @Override public EnumerableLimit copy(
      RelTraitSet traitSet,
      List<RelNode> newInputs) {
    return new EnumerableLimit(
        getCluster(),
        traitSet,
        sole(newInputs),
        offset,
        fetch);
  }

  /** Adds "offset" and "fetch" to the plan explanation when present. */
  @Override public RelWriter explainTerms(RelWriter pw) {
    return super.explainTerms(pw)
        .itemIf("offset", offset, offset != null)
        .itemIf("fetch", fetch, fetch != null);
  }

  /** Generates linq4j code: child expression wrapped in skip() for the offset
   * and take() for the fetch, each only when the corresponding literal is set. */
  public Result implement(EnumerableRelImplementor implementor, Prefer pref) {
    final BlockBuilder builder = new BlockBuilder();
    final EnumerableRel child = (EnumerableRel) getInput();
    final Result result = implementor.visitChild(this, 0, child, pref);
    final PhysType physType =
        PhysTypeImpl.of(
            implementor.getTypeFactory(),
            getRowType(),
            result.format);

    Expression v = builder.append("child", result.block);
    if (offset != null) {
      v = builder.append(
          "offset",
          Expressions.call(
              v,
              BuiltInMethod.SKIP.method,
              Expressions.constant(RexLiteral.intValue(offset))));
    }
    if (fetch != null) {
      v = builder.append(
          "fetch",
          Expressions.call(
              v,
              BuiltInMethod.TAKE.method,
              Expressions.constant(RexLiteral.intValue(fetch))));
    }

    builder.add(
        Expressions.return_(
            null,
            v));
    return implementor.result(physType, builder.toBlock());
  }
}
// End EnumerableLimit.java
| apache-2.0 |
GoogleChromeLabs/chromeos_smart_card_connector | third_party/closure-compiler/src/test/com/google/javascript/jscomp/CombinedCompilerPassTest.java | 7945 | /*
* Copyright 2008 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
import static com.google.common.truth.Truth.assertThat;
import static com.google.common.truth.Truth.assertWithMessage;
import com.google.javascript.jscomp.NodeTraversal.ScopedCallback;
import com.google.javascript.rhino.Node;
import com.google.javascript.rhino.Token;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
@RunWith(JUnit4.class)
public final class CombinedCompilerPassTest {
private Compiler compiler;
/**
* Returns a Node tree with the post-order traversal a b c d e f g h i j k l m
* and the in-order traversal m d a b c h e f g l i j k:
*
* m
* ,---------|---------.
* d h l
* ,--|--. ,--|--. ,--|--.
* a b c e f g i j k
*
*/
private static Node createPostOrderAlphabet() {
    // One string node per letter 'a'..'m'; index 0 maps to 'a', index 12 to 'm'.
    Node[] nodes = new Node[13];
    for (int i = 0; i < nodes.length; i++) {
      nodes[i] = Node.newString(String.valueOf((char) ('a' + i)));
    }
    Node root = nodes[12]; // 'm'
    // The internal nodes 'd' (3), 'h' (7) and 'l' (11) each adopt the three
    // letters immediately before them, then hang off the root in order.
    for (int parent : new int[] {3, 7, 11}) {
      for (int child = parent - 3; child < parent; child++) {
        nodes[parent].addChildToBack(nodes[child]);
      }
      root.addChildToBack(nodes[parent]);
    }
    return root;
  }
@Before
  public void setUp() throws Exception {
    // Each test gets a fresh compiler configured with default options.
    CompilerOptions options = new CompilerOptions();
    compiler = new Compiler();
    compiler.initOptions(options);
  }
/**
* Concatenates contents of string nodes encountered in pre-order and post-order traversals.
* Abbreviates traversals by ignoring subtrees rooted with specified strings.
*/
private static class ConcatTraversal implements NodeTraversal.Callback {
private final StringBuilder visited = new StringBuilder();
private final StringBuilder shouldTraversed = new StringBuilder();
private final Set<String> ignoring = new HashSet<>();
ConcatTraversal ignore(String s) {
ignoring.add(s);
return this;
}
@Override
public void visit(NodeTraversal t, Node n, Node parent) {
assertThat(n.getToken()).isEqualTo(Token.STRINGLIT);
visited.append(n.getString());
}
@Override
public boolean shouldTraverse(NodeTraversal t, Node n, Node parent) {
assertThat(n.getToken()).isEqualTo(Token.STRINGLIT);
shouldTraversed.append(n.getString());
return !ignoring.contains(n.getString());
}
/** Returns strings concatenated during post-order traversal. */
String getVisited() {
return visited.toString();
}
/** Returns strings concatenated during pre-order traversal. */
String getShouldTraversed() {
return shouldTraversed.toString();
}
Collection<String> getIgnoring() {
return ignoring;
}
}
/**
* Collection of data for a traversal test. Contains the traversal callback
* and the expected pre- and post-order traversal results.
*/
private static class TestHelper {
private final ConcatTraversal traversal;
private final String expectedVisited;
private final String shouldTraverseExpected;
TestHelper(ConcatTraversal traversal, String expectedVisited,
String shouldTraverseExpected) {
this.traversal = traversal;
this.expectedVisited = expectedVisited;
this.shouldTraverseExpected = shouldTraverseExpected;
}
ConcatTraversal getTraversal() {
return traversal;
}
void checkResults() {
assertWithMessage(
"ConcatTraversal ignoring "
+ traversal.getIgnoring()
+ " has unexpected visiting order")
.that(traversal.getVisited())
.isEqualTo(expectedVisited);
assertWithMessage(
"ConcatTraversal ignoring "
+ traversal.getIgnoring()
+ " has unexpected traversal order")
.that(traversal.getShouldTraversed())
.isEqualTo(shouldTraverseExpected);
}
}
private static List<TestHelper> createStringTests() {
List<TestHelper> tests = new ArrayList<>();
tests.add(new TestHelper(
new ConcatTraversal(), "abcdefghijklm", "mdabchefglijk"));
tests.add(new TestHelper(
new ConcatTraversal().ignore("d"), "efghijklm", "mdhefglijk"));
tests.add(new TestHelper(
new ConcatTraversal().ignore("f"), "abcdeghijklm", "mdabchefglijk"));
tests.add(new TestHelper(new ConcatTraversal().ignore("m"), "", "m"));
return tests;
}
@Test
public void testIndividualPasses() {
for (TestHelper test : createStringTests()) {
CombinedCompilerPass pass =
new CombinedCompilerPass(compiler, test.getTraversal());
pass.process(null, createPostOrderAlphabet());
test.checkResults();
}
}
@Test
public void testCombinedPasses() {
List<TestHelper> tests = createStringTests();
NodeTraversal.Callback[] callbacks = new NodeTraversal.Callback[tests.size()];
int i = 0;
for (TestHelper test : tests) {
callbacks[i++] = test.getTraversal();
}
CombinedCompilerPass pass =
new CombinedCompilerPass(compiler, callbacks);
pass.process(null, createPostOrderAlphabet());
for (TestHelper test : tests) {
test.checkResults();
}
}
/**
* Records the scopes visited during an AST traversal. Abbreviates traversals
* by ignoring subtrees rooted with specified NAME nodes.
*/
private static class ScopeRecordingCallback implements ScopedCallback {
Set<Node> visitedScopes = new HashSet<>();
Set<String> ignoring = new HashSet<>();
void ignore(String name) {
ignoring.add(name);
}
@Override
public void enterScope(NodeTraversal t) {
visitedScopes.add(t.getScopeRoot());
}
@Override
public boolean shouldTraverse(NodeTraversal t, Node n, Node parent) {
return !n.isName() || !ignoring.contains(n.getString());
}
Set<Node> getVisitedScopes() {
return visitedScopes;
}
@Override
public void exitScope(NodeTraversal t) {
}
@Override
public void visit(NodeTraversal t, Node n, Node parent) {
}
}
@Test
public void testScopes() {
Node root =
compiler.parseTestCode("var y = function() { var x = function() { };}");
ScopeRecordingCallback c1 = new ScopeRecordingCallback();
c1.ignore("y");
ScopeRecordingCallback c2 = new ScopeRecordingCallback();
c2.ignore("x");
ScopeRecordingCallback c3 = new ScopeRecordingCallback();
CombinedCompilerPass pass = new CombinedCompilerPass(compiler, c1, c2, c3);
pass.process(null, root);
assertThat(c1.getVisitedScopes()).hasSize(1);
assertThat(c2.getVisitedScopes()).hasSize(3);
assertThat(c3.getVisitedScopes()).hasSize(5);
}
}
| apache-2.0 |
paulnguyen/cmpe279 | modules/module10/case-studies/dukes-bookstore/src/main/java/javaeetutorial/dukesbookstore/ejb/ConfigBean.java | 1706 | /**
* Copyright (c) 2014 Oracle and/or its affiliates. All rights reserved.
*
* You may not modify, use, reproduce, or distribute this software except in
* compliance with the terms of the License at:
* http://java.net/projects/javaeetutorial/pages/BerkeleyLicense
*/
package javaeetutorial.dukesbookstore.ejb;
import javax.annotation.PostConstruct;
import javax.ejb.EJB;
import javax.ejb.Singleton;
import javax.ejb.Startup;
/**
* <p>Singleton bean that initializes the book database for the bookstore
* example.</p>
*/
@Singleton
@Startup
public class ConfigBean {

    @EJB
    private BookRequestBean request;

    /**
     * Populates the book database with the sample catalog. Runs once at
     * application startup ({@code @Startup} singleton) after injection
     * completes ({@code @PostConstruct}).
     */
    @PostConstruct
    public void createData() {
        // Every sample book shares the same description and stock quantity.
        final String description = "What a cool book.";
        final int quantity = 20;
        request.createBook("201", "Duke", "",
                "My Early Years: Growing Up on *7",
                30.75, false, 2005, description, quantity);
        request.createBook("202", "Jeeves", "",
                "Web Servers for Fun and Profit",
                40.75, true, 2010, description, quantity);
        request.createBook("203", "Masterson", "Webster",
                "Web Components for Web Developers",
                27.75, false, 2010, description, quantity);
        request.createBook("205", "Novation", "Kevin",
                "From Oak to Java: The Revolution of a Language",
                10.75, true, 2008, description, quantity);
        request.createBook("206", "Thrilled", "Ben",
                "The Green Project: Programming for Consumer Devices",
                30.00, true, 2008, description, quantity);
        request.createBook("207", "Coding", "Happy",
                "Java Intermediate Bytecodes",
                30.95, true, 2010, description, quantity);
    }
}
| apache-2.0 |
roberthafner/flowable-engine | modules/flowable5-test/src/main/java/org/activiti5/engine/test/mock/Mocks.java | 2413 | /* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.activiti5.engine.test.mock;
import java.util.HashMap;
import java.util.Map;
/**
* Registry for mock objects.
*
* <p>
* Usage: <code>Mocks.register("myMock", myMock);</code>
* </p>
*
* <p>
* This class lets you register mock objects that will then be used by the {@link MockElResolver}. It binds a map of mock objects to ThreadLocal. This way, the mocks can be set up independent of how
* the process engine configuration is built.
* </p>
*
* @author Nils Preusker - n.preusker@gmail.com
*/
public class Mocks {

    /**
     * Per-thread registry of mock objects. Each thread sees only its own map,
     * so concurrently running tests cannot interfere with each other's mocks.
     */
    private static final ThreadLocal<Map<String, Object>> mockContainer = new ThreadLocal<Map<String, Object>>();

    /** Returns this thread's mock map, lazily creating and binding it on first use. */
    private static Map<String, Object> getMocks() {
        Map<String, Object> mocks = mockContainer.get();
        if (mocks == null) {
            mocks = new HashMap<String, Object>();
            mockContainer.set(mocks);
        }
        return mocks;
    }

    /**
     * This method lets you register a mock object. Make sure to register the {@link MockExpressionManager} with your process engine configuration.
     *
     * @param key
     *            the key under which the mock object will be registered
     * @param value
     *            the mock object
     */
    public static void register(String key, Object value) {
        getMocks().put(key, value);
    }

    /**
     * This method returns the mock object registered under the provided key or null if there is no object for the provided key.
     *
     * @param key
     *            the key of the requested object
     * @return the mock object registered under the provided key or null if there is no object for the provided key
     */
    public static Object get(Object key) {
        return getMocks().get(key);
    }

    /**
     * This method resets the internal map of mock objects for the current thread.
     */
    public static void reset() {
        // Remove the ThreadLocal binding instead of clearing the map. The old
        // implementation's "if (getMocks() != null)" guard was dead code
        // (getMocks() never returns null) and would even allocate a map just to
        // clear it; worse, keeping an empty map bound to pooled threads leaks
        // the map (and its classloader) across test runs. remove() releases the
        // entry; a fresh map is created on the next register()/get().
        mockContainer.remove();
    }
}
| apache-2.0 |
kiereleaseuser/optaplanner | optaplanner-core/src/main/java/org/optaplanner/core/api/score/constraint/primint/IntConstraintMatchTotal.java | 2889 | /*
* Copyright 2013 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.optaplanner.core.api.score.constraint.primint;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.kie.api.runtime.rule.RuleContext;
import org.optaplanner.core.api.score.constraint.ConstraintMatchTotal;
public class IntConstraintMatchTotal extends ConstraintMatchTotal {

    /** All distinct matches currently contributing to this total. */
    protected final Set<IntConstraintMatch> constraintMatchSet;
    /** Running sum of the weights of every match in {@link #constraintMatchSet}. */
    protected int weightTotal;

    public IntConstraintMatchTotal(String constraintPackage, String constraintName, int scoreLevel) {
        super(constraintPackage, constraintName, scoreLevel);
        constraintMatchSet = new HashSet<IntConstraintMatch>();
        weightTotal = 0;
    }

    @Override
    public Set<IntConstraintMatch> getConstraintMatchSet() {
        return constraintMatchSet;
    }

    public int getWeightTotal() {
        return weightTotal;
    }

    @Override
    public Number getWeightTotalAsNumber() {
        return weightTotal;
    }

    // ************************************************************************
    // Worker methods
    // ************************************************************************

    /**
     * Creates a match for the firing rule, adds its weight to the total and
     * registers it. Fails if an equal match is already registered.
     */
    public IntConstraintMatch addConstraintMatch(RuleContext kcontext, int weight) {
        weightTotal += weight;
        IntConstraintMatch constraintMatch =
                new IntConstraintMatch(this, extractJustificationList(kcontext), weight);
        if (!constraintMatchSet.add(constraintMatch)) {
            throw new IllegalStateException("The constraintMatchTotal (" + this
                    + ") could not add constraintMatch (" + constraintMatch
                    + ") to its constraintMatchSet (" + constraintMatchSet + ").");
        }
        return constraintMatch;
    }

    /**
     * Subtracts the match's weight from the total and unregisters it. Fails if
     * the match was never registered.
     */
    public void removeConstraintMatch(IntConstraintMatch constraintMatch) {
        weightTotal -= constraintMatch.getWeight();
        if (!constraintMatchSet.remove(constraintMatch)) {
            throw new IllegalStateException("The constraintMatchTotal (" + this
                    + ") could not remove constraintMatch (" + constraintMatch
                    + ") from its constraintMatchSet (" + constraintMatchSet + ").");
        }
    }
}
| apache-2.0 |
xtuml/bptest | src/org.xtuml.bp.core.test/src/org/xtuml/bp/core/test/deployments/TerminatorUpdateTests.java | 38140 | package org.xtuml.bp.core.test.deployments;
import java.io.File;
import java.io.FileOutputStream;
import java.net.URL;
import java.nio.channels.Channels;
import java.nio.channels.ReadableByteChannel;
import java.nio.file.Files;
import java.util.Arrays;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.xtuml.bp.core.DataType_c;
import org.xtuml.bp.core.Deployment_c;
import org.xtuml.bp.core.Ooaofooa;
import org.xtuml.bp.core.TerminatorServiceParameter_c;
import org.xtuml.bp.core.TerminatorService_c;
import org.xtuml.bp.core.Terminator_c;
import org.xtuml.bp.core.ui.DeleteStaleServicesOnD_TERMAction;
import org.xtuml.bp.core.ui.Selection;
import org.xtuml.bp.test.common.BaseTest;
import org.xtuml.bp.test.common.OrderedRunner;
@RunWith(OrderedRunner.class)
public class TerminatorUpdateTests extends BaseTest {
	// Names of the workspace projects loaded by setUp() before each test.
	private static final String[] PROJECT_NAMES = { "DeploymentsTests" };
	// Format string locating the baseline MASL interface (.int) file for a
	// numbered DeploymentsDomain; presumably resolved by basicTest(), which is
	// defined elsewhere in this class — TODO confirm.
	private static final String BASE_INT_FILE = "/DeploymentsDomains/masl/DeploymentsDomain%d/DeploymentsDomain%d.int";
	// Format string locating an updated .int file; the trailing %d selects the
	// update scenario exercised by the matching testUpdateN case below.
	private static final String INT_FILE_TEMPLATE = "/DeploymentsDomains/masl/DeploymentsDomain%d/DeploymentsDomain%d_update%d.int";
@Before
public void setUp() throws Exception {
super.setUp();
for (String project : PROJECT_NAMES) {
loadProject(project);
}
modelRoot = Ooaofooa
.getInstance("/DeploymentsTests/models/DeploymentsTests/DeploymentsTests/DeploymentsTests.xtuml");
m_sys = modelRoot.getRoot();
setupExecutables();
}
/*
* Change return type
*/
@Test
public void testUpdate1() throws Exception {
// import the terminator and do the basic test
Deployment_c deployment = basicTest(1);
// check that the file exists
File testFile = new File(getTestModelRespositoryLocation() + String.format(INT_FILE_TEMPLATE, 1, 1, 1));
assertTrue("Cannot access test file.", testFile.exists());
// trigger the import action on updated file
Selection.getInstance().clear();
Selection.getInstance().addToSelection(deployment);
ImportTerminatorsFromFileOnD_DEPLAction action = new ImportTerminatorsFromFileOnD_DEPLAction(
Arrays.asList(new String[] { getTestModelRespositoryLocation() + String.format(INT_FILE_TEMPLATE, 1, 1, 1) })
.iterator());
action.run(null);
// check the resulting instances
Terminator_c providedTerm = Terminator_c.getOneD_TERMOnR1650(deployment,
(selected) -> "DeploymentsDomain1".equals(((Terminator_c) selected).getName()));
assertTrue("Provided terminator missing.", null != providedTerm && providedTerm.getProvider());
TerminatorService_c provSvc = TerminatorService_c.getOneD_TSVCOnR1651(providedTerm,
(selected) -> "public_service1".equals(((TerminatorService_c) selected).getName()));
assertNotNull("Provided terminator service missing.", provSvc);
TerminatorServiceParameter_c provSvcParam = TerminatorServiceParameter_c.getOneD_TSPARMOnR1652(provSvc,
(selected) -> "real_param".equals(((TerminatorServiceParameter_c) selected).getName()));
assertNotNull("Provided terminator service parameter missing.", provSvcParam);
DataType_c s_dt = DataType_c.getOneS_DTOnR1653(provSvcParam);
assertTrue("Incorrect parameter type.", null != s_dt && "real".equals(s_dt.getName()));
Terminator_c requiredTerm = Terminator_c.getOneD_TERMOnR1650(deployment,
(selected) -> "DeploymentsDomain1::term1".equals(((Terminator_c) selected).getName()));
assertTrue("Required terminator missing.", null != requiredTerm && !requiredTerm.getProvider());
TerminatorService_c reqSvc = TerminatorService_c.getOneD_TSVCOnR1651(requiredTerm,
(selected) -> "term_service1".equals(((TerminatorService_c) selected).getName()));
assertNotNull("Required terminator service missing.", reqSvc);
s_dt = DataType_c.getOneS_DTOnR1656(reqSvc);
assertTrue("Incorrect return type.", null != s_dt && "integer".equals(s_dt.getName()));
TerminatorServiceParameter_c reqSvcParam = TerminatorServiceParameter_c.getOneD_TSPARMOnR1652(reqSvc,
(selected) -> "enum_param".equals(((TerminatorServiceParameter_c) selected).getName()));
assertNotNull("Required terminator service parameter missing.", reqSvcParam);
s_dt = DataType_c.getOneS_DTOnR1653(reqSvcParam);
assertTrue("Incorrect parameter type.", null != s_dt && "DeploymentsDomain1::MyEnum".equals(s_dt.getName()));
}
/*
* Change parameter type
*/
@Test
public void testUpdate2() throws Exception {
// import the terminator and do the basic test
Deployment_c deployment = basicTest(1);
// check that the file exists
File testFile = new File(getTestModelRespositoryLocation() + String.format(INT_FILE_TEMPLATE, 1, 1, 2));
assertTrue("Cannot access test file.", testFile.exists());
// trigger the import action on updated file
Selection.getInstance().clear();
Selection.getInstance().addToSelection(deployment);
ImportTerminatorsFromFileOnD_DEPLAction action = new ImportTerminatorsFromFileOnD_DEPLAction(
Arrays.asList(new String[] { getTestModelRespositoryLocation() + String.format(INT_FILE_TEMPLATE, 1, 1, 2) })
.iterator());
action.run(null);
// check the resulting instances
Terminator_c providedTerm = Terminator_c.getOneD_TERMOnR1650(deployment,
(selected) -> "DeploymentsDomain1".equals(((Terminator_c) selected).getName()));
assertTrue("Provided terminator missing.", null != providedTerm && providedTerm.getProvider());
TerminatorService_c provSvc = TerminatorService_c.getOneD_TSVCOnR1651(providedTerm,
(selected) -> "public_service1".equals(((TerminatorService_c) selected).getName()));
assertNotNull("Provided terminator service missing.", provSvc);
TerminatorServiceParameter_c provSvcParam = TerminatorServiceParameter_c.getOneD_TSPARMOnR1652(provSvc,
(selected) -> "real_param".equals(((TerminatorServiceParameter_c) selected).getName()));
assertNotNull("Provided terminator service parameter missing.", provSvcParam);
DataType_c s_dt = DataType_c.getOneS_DTOnR1653(provSvcParam);
assertTrue("Incorrect parameter type.", null != s_dt && "real".equals(s_dt.getName()));
Terminator_c requiredTerm = Terminator_c.getOneD_TERMOnR1650(deployment,
(selected) -> "DeploymentsDomain1::term1".equals(((Terminator_c) selected).getName()));
assertTrue("Required terminator missing.", null != requiredTerm && !requiredTerm.getProvider());
TerminatorService_c reqSvc = TerminatorService_c.getOneD_TSVCOnR1651(requiredTerm,
(selected) -> "term_service1".equals(((TerminatorService_c) selected).getName()));
assertNotNull("Required terminator service missing.", reqSvc);
s_dt = DataType_c.getOneS_DTOnR1656(reqSvc);
assertTrue("Incorrect return type.", null != s_dt && "void".equals(s_dt.getName()));
TerminatorServiceParameter_c reqSvcParam = TerminatorServiceParameter_c.getOneD_TSPARMOnR1652(reqSvc,
(selected) -> "enum_param".equals(((TerminatorServiceParameter_c) selected).getName()));
assertNotNull("Required terminator service parameter missing.", reqSvcParam);
s_dt = DataType_c.getOneS_DTOnR1653(reqSvcParam);
assertTrue("Incorrect parameter type.", null != s_dt && "string".equals(s_dt.getName()));
}
/*
* Change parameter name
*/
@Test
public void testUpdate3() throws Exception {
// import the terminator and do the basic test
Deployment_c deployment = basicTest(1);
// check that the file exists
File testFile = new File(getTestModelRespositoryLocation() + String.format(INT_FILE_TEMPLATE, 1, 1, 3));
assertTrue("Cannot access test file.", testFile.exists());
// trigger the import action on updated file
Selection.getInstance().clear();
Selection.getInstance().addToSelection(deployment);
ImportTerminatorsFromFileOnD_DEPLAction action = new ImportTerminatorsFromFileOnD_DEPLAction(
Arrays.asList(new String[] { getTestModelRespositoryLocation() + String.format(INT_FILE_TEMPLATE, 1, 1, 3) })
.iterator());
action.run(null);
// check the resulting instances
Terminator_c providedTerm = Terminator_c.getOneD_TERMOnR1650(deployment,
(selected) -> "DeploymentsDomain1".equals(((Terminator_c) selected).getName()));
assertTrue("Provided terminator missing.", null != providedTerm && providedTerm.getProvider());
TerminatorService_c provSvc = TerminatorService_c.getOneD_TSVCOnR1651(providedTerm,
(selected) -> "public_service1".equals(((TerminatorService_c) selected).getName()));
assertNotNull("Provided terminator service missing.", provSvc);
TerminatorServiceParameter_c provSvcParam = TerminatorServiceParameter_c.getOneD_TSPARMOnR1652(provSvc,
(selected) -> "real_param".equals(((TerminatorServiceParameter_c) selected).getName()));
assertNotNull("Provided terminator service parameter missing.", provSvcParam);
DataType_c s_dt = DataType_c.getOneS_DTOnR1653(provSvcParam);
assertTrue("Incorrect parameter type.", null != s_dt && "real".equals(s_dt.getName()));
Terminator_c requiredTerm = Terminator_c.getOneD_TERMOnR1650(deployment,
(selected) -> "DeploymentsDomain1::term1".equals(((Terminator_c) selected).getName()));
assertTrue("Required terminator missing.", null != requiredTerm && !requiredTerm.getProvider());
TerminatorService_c reqSvc = TerminatorService_c.getOneD_TSVCOnR1651(requiredTerm,
(selected) -> "term_service1".equals(((TerminatorService_c) selected).getName()));
assertNotNull("Required terminator service missing.", reqSvc);
s_dt = DataType_c.getOneS_DTOnR1656(reqSvc);
assertTrue("Incorrect return type.", null != s_dt && "void".equals(s_dt.getName()));
TerminatorServiceParameter_c reqSvcParam = TerminatorServiceParameter_c.getOneD_TSPARMOnR1652(reqSvc,
(selected) -> "enum_param2".equals(((TerminatorServiceParameter_c) selected).getName()));
assertNotNull("Required terminator service parameter missing.", reqSvcParam);
s_dt = DataType_c.getOneS_DTOnR1653(reqSvcParam);
assertTrue("Incorrect parameter type.", null != s_dt && "DeploymentsDomain1::MyEnum".equals(s_dt.getName()));
}
/*
* Add parameter
*/
@Test
public void testUpdate4() throws Exception {
// import the terminator and do the basic test
Deployment_c deployment = basicTest(1);
// check that the file exists
File testFile = new File(getTestModelRespositoryLocation() + String.format(INT_FILE_TEMPLATE, 1, 1, 4));
assertTrue("Cannot access test file.", testFile.exists());
// trigger the import action on updated file
Selection.getInstance().clear();
Selection.getInstance().addToSelection(deployment);
ImportTerminatorsFromFileOnD_DEPLAction action = new ImportTerminatorsFromFileOnD_DEPLAction(
Arrays.asList(new String[] { getTestModelRespositoryLocation() + String.format(INT_FILE_TEMPLATE, 1, 1, 4) })
.iterator());
action.run(null);
// check the resulting instances
Terminator_c providedTerm = Terminator_c.getOneD_TERMOnR1650(deployment,
(selected) -> "DeploymentsDomain1".equals(((Terminator_c) selected).getName()));
assertTrue("Provided terminator missing.", null != providedTerm && providedTerm.getProvider());
TerminatorService_c provSvc = TerminatorService_c.getOneD_TSVCOnR1651(providedTerm,
(selected) -> "public_service1".equals(((TerminatorService_c) selected).getName()));
assertNotNull("Provided terminator service missing.", provSvc);
TerminatorServiceParameter_c provSvcParam = TerminatorServiceParameter_c.getOneD_TSPARMOnR1652(provSvc,
(selected) -> "real_param".equals(((TerminatorServiceParameter_c) selected).getName()));
assertNotNull("Provided terminator service parameter missing.", provSvcParam);
DataType_c s_dt = DataType_c.getOneS_DTOnR1653(provSvcParam);
assertTrue("Incorrect parameter type.", null != s_dt && "real".equals(s_dt.getName()));
Terminator_c requiredTerm = Terminator_c.getOneD_TERMOnR1650(deployment,
(selected) -> "DeploymentsDomain1::term1".equals(((Terminator_c) selected).getName()));
assertTrue("Required terminator missing.", null != requiredTerm && !requiredTerm.getProvider());
TerminatorService_c reqSvc = TerminatorService_c.getOneD_TSVCOnR1651(requiredTerm,
(selected) -> "term_service1".equals(((TerminatorService_c) selected).getName()));
assertNotNull("Required terminator service missing.", reqSvc);
s_dt = DataType_c.getOneS_DTOnR1656(reqSvc);
assertTrue("Incorrect return type.", null != s_dt && "void".equals(s_dt.getName()));
TerminatorServiceParameter_c reqSvcParam = TerminatorServiceParameter_c.getOneD_TSPARMOnR1652(reqSvc,
(selected) -> "enum_param".equals(((TerminatorServiceParameter_c) selected).getName()));
assertNotNull("Required terminator service parameter missing.", reqSvcParam);
s_dt = DataType_c.getOneS_DTOnR1653(reqSvcParam);
assertTrue("Incorrect parameter type.", null != s_dt && "DeploymentsDomain1::MyEnum".equals(s_dt.getName()));
TerminatorServiceParameter_c reqSvcParam2 = TerminatorServiceParameter_c.getOneD_TSPARMOnR1652(reqSvc,
(selected) -> "other_param".equals(((TerminatorServiceParameter_c) selected).getName()));
assertNotNull("Required terminator service parameter missing.", reqSvcParam2);
s_dt = DataType_c.getOneS_DTOnR1653(reqSvcParam2);
assertTrue("Incorrect parameter type.", null != s_dt && "string".equals(s_dt.getName()));
}
/*
* Remove parameter
*/
@Test
public void testUpdate5() throws Exception {
// import the terminator and do the basic test
Deployment_c deployment = basicTest(1);
// check that the file exists
File testFile = new File(getTestModelRespositoryLocation() + String.format(INT_FILE_TEMPLATE, 1, 1, 5));
assertTrue("Cannot access test file.", testFile.exists());
// trigger the import action on updated file
Selection.getInstance().clear();
Selection.getInstance().addToSelection(deployment);
ImportTerminatorsFromFileOnD_DEPLAction action = new ImportTerminatorsFromFileOnD_DEPLAction(
Arrays.asList(new String[] { getTestModelRespositoryLocation() + String.format(INT_FILE_TEMPLATE, 1, 1, 5) })
.iterator());
action.run(null);
// check the resulting instances
Terminator_c providedTerm = Terminator_c.getOneD_TERMOnR1650(deployment,
(selected) -> "DeploymentsDomain1".equals(((Terminator_c) selected).getName()));
assertTrue("Provided terminator missing.", null != providedTerm && providedTerm.getProvider());
TerminatorService_c provSvc = TerminatorService_c.getOneD_TSVCOnR1651(providedTerm,
(selected) -> "public_service1".equals(((TerminatorService_c) selected).getName()));
assertNotNull("Provided terminator service missing.", provSvc);
TerminatorServiceParameter_c provSvcParam = TerminatorServiceParameter_c.getOneD_TSPARMOnR1652(provSvc,
(selected) -> "real_param".equals(((TerminatorServiceParameter_c) selected).getName()));
assertNotNull("Provided terminator service parameter missing.", provSvcParam);
DataType_c s_dt = DataType_c.getOneS_DTOnR1653(provSvcParam);
assertTrue("Incorrect parameter type.", null != s_dt && "real".equals(s_dt.getName()));
Terminator_c requiredTerm = Terminator_c.getOneD_TERMOnR1650(deployment,
(selected) -> "DeploymentsDomain1::term1".equals(((Terminator_c) selected).getName()));
assertTrue("Required terminator missing.", null != requiredTerm && !requiredTerm.getProvider());
TerminatorService_c reqSvc = TerminatorService_c.getOneD_TSVCOnR1651(requiredTerm,
(selected) -> "term_service1".equals(((TerminatorService_c) selected).getName()));
assertNotNull("Required terminator service missing.", reqSvc);
s_dt = DataType_c.getOneS_DTOnR1656(reqSvc);
assertTrue("Incorrect return type.", null != s_dt && "void".equals(s_dt.getName()));
TerminatorServiceParameter_c reqSvcParam = TerminatorServiceParameter_c.getOneD_TSPARMOnR1652(reqSvc,
(selected) -> "enum_param".equals(((TerminatorServiceParameter_c) selected).getName()));
assertNull("Required terminator service parameter not removed.", reqSvcParam);
}
/*
* Change service name
*/
@Test
public void testUpdate6() throws Exception {
// import the terminator and do the basic test
Deployment_c deployment = basicTest(1);
// check that the file exists
File testFile = new File(getTestModelRespositoryLocation() + String.format(INT_FILE_TEMPLATE, 1, 1, 6));
assertTrue("Cannot access test file.", testFile.exists());
// trigger the import action on updated file
Selection.getInstance().clear();
Selection.getInstance().addToSelection(deployment);
ImportTerminatorsFromFileOnD_DEPLAction action = new ImportTerminatorsFromFileOnD_DEPLAction(
Arrays.asList(new String[] { getTestModelRespositoryLocation() + String.format(INT_FILE_TEMPLATE, 1, 1, 6) })
.iterator());
action.run(null);
// check the resulting instances
Terminator_c providedTerm = Terminator_c.getOneD_TERMOnR1650(deployment,
(selected) -> "DeploymentsDomain1".equals(((Terminator_c) selected).getName()));
assertTrue("Provided terminator missing.", null != providedTerm && providedTerm.getProvider());
TerminatorService_c provSvc = TerminatorService_c.getOneD_TSVCOnR1651(providedTerm,
(selected) -> "public_service1".equals(((TerminatorService_c) selected).getName()));
assertNotNull("Provided terminator service missing.", provSvc);
TerminatorServiceParameter_c provSvcParam = TerminatorServiceParameter_c.getOneD_TSPARMOnR1652(provSvc,
(selected) -> "real_param".equals(((TerminatorServiceParameter_c) selected).getName()));
assertNotNull("Provided terminator service parameter missing.", provSvcParam);
DataType_c s_dt = DataType_c.getOneS_DTOnR1653(provSvcParam);
assertTrue("Incorrect parameter type.", null != s_dt && "real".equals(s_dt.getName()));
Terminator_c requiredTerm = Terminator_c.getOneD_TERMOnR1650(deployment,
(selected) -> "DeploymentsDomain1::term1".equals(((Terminator_c) selected).getName()));
assertTrue("Required terminator missing.", null != requiredTerm && !requiredTerm.getProvider());
TerminatorService_c reqSvc = TerminatorService_c.getOneD_TSVCOnR1651(requiredTerm,
(selected) -> "term_service1_new".equals(((TerminatorService_c) selected).getName()));
assertNotNull("Required terminator service missing.", reqSvc);
s_dt = DataType_c.getOneS_DTOnR1656(reqSvc);
assertTrue("Incorrect return type.", null != s_dt && "void".equals(s_dt.getName()));
TerminatorServiceParameter_c reqSvcParam = TerminatorServiceParameter_c.getOneD_TSPARMOnR1652(reqSvc,
(selected) -> "enum_param".equals(((TerminatorServiceParameter_c) selected).getName()));
assertNotNull("Required terminator service parameter missing.", reqSvcParam);
s_dt = DataType_c.getOneS_DTOnR1653(reqSvcParam);
assertTrue("Incorrect parameter type.", null != s_dt && "DeploymentsDomain1::MyEnum".equals(s_dt.getName()));
TerminatorService_c reqSvcStale = TerminatorService_c.getOneD_TSVCOnR1651(requiredTerm,
(selected) -> "term_service1".equals(((TerminatorService_c) selected).getName()));
assertTrue("Required terminator service missing.", null != reqSvcStale && reqSvcStale.getIs_stale());
// trigger the removal of stale services
Selection.getInstance().clear();
Selection.getInstance().addToSelection(requiredTerm);
DeleteStaleServicesOnD_TERMAction deleteStaleAction = new DeleteStaleServicesOnD_TERMAction();
deleteStaleAction.run(null);
requiredTerm = Terminator_c.getOneD_TERMOnR1650(deployment,
(selected) -> "DeploymentsDomain1::term1".equals(((Terminator_c) selected).getName()));
assertTrue("Required terminator missing.", null != requiredTerm && !requiredTerm.getProvider());
reqSvcStale = TerminatorService_c.getOneD_TSVCOnR1651(requiredTerm,
(selected) -> "term_service1".equals(((TerminatorService_c) selected).getName()));
assertNull("Stale service not removed.", reqSvcStale);
}
/*
* Add service
*/
@Test
public void testUpdate7() throws Exception {
// Verifies that re-importing an updated interface file adds a brand-new
// service (term_service2, with an integer parameter) to an existing
// required terminator while leaving the previously imported provided and
// required services intact.
// import the terminator and do the basic test
Deployment_c deployment = basicTest(1);
// check that the file exists
File testFile = new File(getTestModelRespositoryLocation() + String.format(INT_FILE_TEMPLATE, 1, 1, 7));
assertTrue("Cannot access test file.", testFile.exists());
// trigger the import action on updated file
Selection.getInstance().clear();
Selection.getInstance().addToSelection(deployment);
ImportTerminatorsFromFileOnD_DEPLAction action = new ImportTerminatorsFromFileOnD_DEPLAction(
Arrays.asList(new String[] { getTestModelRespositoryLocation() + String.format(INT_FILE_TEMPLATE, 1, 1, 7) })
.iterator());
action.run(null);
// check the resulting instances
// The provided terminator, its service and its real-typed parameter must
// all survive the update unchanged.
Terminator_c providedTerm = Terminator_c.getOneD_TERMOnR1650(deployment,
(selected) -> "DeploymentsDomain1".equals(((Terminator_c) selected).getName()));
assertTrue("Provided terminator missing.", null != providedTerm && providedTerm.getProvider());
TerminatorService_c provSvc = TerminatorService_c.getOneD_TSVCOnR1651(providedTerm,
(selected) -> "public_service1".equals(((TerminatorService_c) selected).getName()));
assertNotNull("Provided terminator service missing.", provSvc);
TerminatorServiceParameter_c provSvcParam = TerminatorServiceParameter_c.getOneD_TSPARMOnR1652(provSvc,
(selected) -> "real_param".equals(((TerminatorServiceParameter_c) selected).getName()));
assertNotNull("Provided terminator service parameter missing.", provSvcParam);
DataType_c s_dt = DataType_c.getOneS_DTOnR1653(provSvcParam);
assertTrue("Incorrect parameter type.", null != s_dt && "real".equals(s_dt.getName()));
// The pre-existing required service (term_service1) must also survive,
// keeping its void return type and enum-typed parameter.
Terminator_c requiredTerm = Terminator_c.getOneD_TERMOnR1650(deployment,
(selected) -> "DeploymentsDomain1::term1".equals(((Terminator_c) selected).getName()));
assertTrue("Required terminator missing.", null != requiredTerm && !requiredTerm.getProvider());
TerminatorService_c reqSvc = TerminatorService_c.getOneD_TSVCOnR1651(requiredTerm,
(selected) -> "term_service1".equals(((TerminatorService_c) selected).getName()));
assertNotNull("Required terminator service missing.", reqSvc);
s_dt = DataType_c.getOneS_DTOnR1656(reqSvc);
assertTrue("Incorrect return type.", null != s_dt && "void".equals(s_dt.getName()));
TerminatorServiceParameter_c reqSvcParam = TerminatorServiceParameter_c.getOneD_TSPARMOnR1652(reqSvc,
(selected) -> "enum_param".equals(((TerminatorServiceParameter_c) selected).getName()));
assertNotNull("Required terminator service parameter missing.", reqSvcParam);
s_dt = DataType_c.getOneS_DTOnR1653(reqSvcParam);
assertTrue("Incorrect parameter type.", null != s_dt && "DeploymentsDomain1::MyEnum".equals(s_dt.getName()));
// The updated file introduces term_service2; verify it was created with a
// void return type and an integer parameter.
TerminatorService_c reqSvc2 = TerminatorService_c.getOneD_TSVCOnR1651(requiredTerm,
(selected) -> "term_service2".equals(((TerminatorService_c) selected).getName()));
assertNotNull("Required terminator service missing.", reqSvc2);
s_dt = DataType_c.getOneS_DTOnR1656(reqSvc2);
assertTrue("Incorrect return type.", null != s_dt && "void".equals(s_dt.getName()));
TerminatorServiceParameter_c reqSvcParam2 = TerminatorServiceParameter_c.getOneD_TSPARMOnR1652(reqSvc2,
(selected) -> "int_param".equals(((TerminatorServiceParameter_c) selected).getName()));
assertNotNull("Required terminator service parameter missing.", reqSvcParam2);
s_dt = DataType_c.getOneS_DTOnR1653(reqSvcParam2);
assertTrue("Incorrect parameter type.", null != s_dt && "integer".equals(s_dt.getName()));
}
/*
* Remove service
*/
@Test
public void testUpdate8() throws Exception {
// Verifies that importing an interface file from which a service was
// removed marks the corresponding instance as stale, and that the
// "delete stale services" action subsequently removes it.
// import the terminator and do the basic test
Deployment_c deployment = basicTest(1);
// check that the file exists
File testFile = new File(getTestModelRespositoryLocation() + String.format(INT_FILE_TEMPLATE, 1, 1, 8));
assertTrue("Cannot access test file.", testFile.exists());
// trigger the import action on updated file
Selection.getInstance().clear();
Selection.getInstance().addToSelection(deployment);
ImportTerminatorsFromFileOnD_DEPLAction action = new ImportTerminatorsFromFileOnD_DEPLAction(
Arrays.asList(new String[] { getTestModelRespositoryLocation() + String.format(INT_FILE_TEMPLATE, 1, 1, 8) })
.iterator());
action.run(null);
// check the resulting instances
// The provided side must be untouched by the removal.
Terminator_c providedTerm = Terminator_c.getOneD_TERMOnR1650(deployment,
(selected) -> "DeploymentsDomain1".equals(((Terminator_c) selected).getName()));
assertTrue("Provided terminator missing.", null != providedTerm && providedTerm.getProvider());
TerminatorService_c provSvc = TerminatorService_c.getOneD_TSVCOnR1651(providedTerm,
(selected) -> "public_service1".equals(((TerminatorService_c) selected).getName()));
assertNotNull("Provided terminator service missing.", provSvc);
TerminatorServiceParameter_c provSvcParam = TerminatorServiceParameter_c.getOneD_TSPARMOnR1652(provSvc,
(selected) -> "real_param".equals(((TerminatorServiceParameter_c) selected).getName()));
assertNotNull("Provided terminator service parameter missing.", provSvcParam);
DataType_c s_dt = DataType_c.getOneS_DTOnR1653(provSvcParam);
assertTrue("Incorrect parameter type.", null != s_dt && "real".equals(s_dt.getName()));
// The removed service must still exist after the import, but flagged stale.
Terminator_c requiredTerm = Terminator_c.getOneD_TERMOnR1650(deployment,
(selected) -> "DeploymentsDomain1::term1".equals(((Terminator_c) selected).getName()));
assertTrue("Required terminator missing.", null != requiredTerm && !requiredTerm.getProvider());
TerminatorService_c reqSvc = TerminatorService_c.getOneD_TSVCOnR1651(requiredTerm,
(selected) -> "term_service1".equals(((TerminatorService_c) selected).getName()));
assertTrue("Required terminator service missing.", null != reqSvc && reqSvc.getIs_stale());
// trigger the removal of stale services
Selection.getInstance().clear();
Selection.getInstance().addToSelection(requiredTerm);
DeleteStaleServicesOnD_TERMAction deleteStaleAction = new DeleteStaleServicesOnD_TERMAction();
deleteStaleAction.run(null);
// The terminator itself survives; only the stale service is deleted.
requiredTerm = Terminator_c.getOneD_TERMOnR1650(deployment,
(selected) -> "DeploymentsDomain1::term1".equals(((Terminator_c) selected).getName()));
assertTrue("Required terminator missing.", null != requiredTerm && !requiredTerm.getProvider());
reqSvc = TerminatorService_c.getOneD_TSVCOnR1651(requiredTerm,
(selected) -> "term_service1".equals(((TerminatorService_c) selected).getName()));
assertNull("Stale service not removed.", reqSvc);
}
/*
* Remove middle parameter from group of 3 parameters
*/
@Test
public void testUpdate9() throws Exception {
// Verifies that removing the middle parameter of a three-parameter service
// relinks the parameter ordering chain (R1654) so that param1 is directly
// followed by param3.
// import the terminator and do the basic test
Deployment_c deployment = basicTest(2);
// check that the file exists
File testFile = new File(getTestModelRespositoryLocation() + String.format(INT_FILE_TEMPLATE, 2, 2, 9));
assertTrue("Cannot access test file.", testFile.exists());
// trigger the import action on updated file
Selection.getInstance().clear();
Selection.getInstance().addToSelection(deployment);
ImportTerminatorsFromFileOnD_DEPLAction action = new ImportTerminatorsFromFileOnD_DEPLAction(
Arrays.asList(new String[] { getTestModelRespositoryLocation() + String.format(INT_FILE_TEMPLATE, 2, 2, 9) })
.iterator());
action.run(null);
// check the resulting instances
Terminator_c requiredTerm = Terminator_c.getOneD_TERMOnR1650(deployment,
(selected) -> "DeploymentsDomain2::term1".equals(((Terminator_c) selected).getName()));
assertTrue("Required terminator missing.", null != requiredTerm && !requiredTerm.getProvider());
TerminatorService_c reqSvc = TerminatorService_c.getOneD_TSVCOnR1651(requiredTerm,
(selected) -> "term_service1".equals(((TerminatorService_c) selected).getName()));
assertNotNull("Required terminator service missing.", reqSvc);
TerminatorServiceParameter_c reqSvcParam = TerminatorServiceParameter_c.getOneD_TSPARMOnR1652(reqSvc,
(selected) -> "param1".equals(((TerminatorServiceParameter_c) selected).getName()));
assertNotNull("Required terminator service parameter missing.", reqSvcParam);
DataType_c s_dt = DataType_c.getOneS_DTOnR1653(reqSvcParam);
assertTrue("Incorrect parameter type.", null != s_dt && "integer".equals(s_dt.getName()));
// Navigate the ordering relationship: with param2 removed, the parameter
// that "precedes" relative to param1 must now be param3.
reqSvcParam = TerminatorServiceParameter_c.getOneD_TSPARMOnR1654Precedes(reqSvcParam);
assertTrue("Required terminator service parameter missing.", null != reqSvcParam && "param3".equals(reqSvcParam.getName()));
s_dt = DataType_c.getOneS_DTOnR1653(reqSvcParam);
assertTrue("Incorrect parameter type.", null != s_dt && "integer".equals(s_dt.getName()));
}
/**
 * Imports the base interface file for the given test domain into the
 * deployment and verifies the expected baseline terminator, service and
 * parameter instances before an update scenario runs.
 *
 * Domain 1 creates a provided terminator (one service, one real parameter)
 * plus a required terminator (one service, one enum parameter); domain 2
 * creates a required terminator whose service has three ordered integer
 * parameters (param1 -> param2 -> param3 via R1654).
 *
 * @param domainNum which test domain's base file to import (1 or 2)
 * @return the deployment the terminators were imported into
 */
private Deployment_c basicTest(int domainNum) throws Exception {
// get the deployment
Deployment_c deployment = Deployment_c.DeploymentInstance(modelRoot);
assertNotNull(deployment);
// check that the file exists
File testFile = new File(getTestModelRespositoryLocation() + String.format(BASE_INT_FILE, domainNum, domainNum));
assertTrue("Cannot access test file.", testFile.exists());
// trigger the import action
Selection.getInstance().clear();
Selection.getInstance().addToSelection(deployment);
ImportTerminatorsFromFileOnD_DEPLAction action = new ImportTerminatorsFromFileOnD_DEPLAction(
Arrays.asList(new String[] { getTestModelRespositoryLocation() + String.format(BASE_INT_FILE, domainNum, domainNum) }).iterator());
action.run(null);
// check the resulting instances
if (1 == domainNum) {
// Domain 1 baseline: provided terminator with public_service1(real_param)
// and required terminator term1 with term_service1(enum_param).
Terminator_c providedTerm = Terminator_c.getOneD_TERMOnR1650(deployment,
(selected) -> "DeploymentsDomain1".equals(((Terminator_c) selected).getName()));
assertTrue("Provided terminator missing.", null != providedTerm && providedTerm.getProvider());
TerminatorService_c provSvc = TerminatorService_c.getOneD_TSVCOnR1651(providedTerm,
(selected) -> "public_service1".equals(((TerminatorService_c) selected).getName()));
assertNotNull("Provided terminator service missing.", provSvc);
TerminatorServiceParameter_c provSvcParam = TerminatorServiceParameter_c.getOneD_TSPARMOnR1652(provSvc,
(selected) -> "real_param".equals(((TerminatorServiceParameter_c) selected).getName()));
assertNotNull("Provided terminator service parameter missing.", provSvcParam);
DataType_c s_dt = DataType_c.getOneS_DTOnR1653(provSvcParam);
assertTrue("Incorrect parameter type.", null != s_dt && "real".equals(s_dt.getName()));
Terminator_c requiredTerm = Terminator_c.getOneD_TERMOnR1650(deployment,
(selected) -> "DeploymentsDomain1::term1".equals(((Terminator_c) selected).getName()));
assertTrue("Required terminator missing.", null != requiredTerm && !requiredTerm.getProvider());
TerminatorService_c reqSvc = TerminatorService_c.getOneD_TSVCOnR1651(requiredTerm,
(selected) -> "term_service1".equals(((TerminatorService_c) selected).getName()));
assertNotNull("Required terminator service missing.", reqSvc);
TerminatorServiceParameter_c reqSvcParam = TerminatorServiceParameter_c.getOneD_TSPARMOnR1652(reqSvc,
(selected) -> "enum_param".equals(((TerminatorServiceParameter_c) selected).getName()));
assertNotNull("Required terminator service parameter missing.", reqSvcParam);
s_dt = DataType_c.getOneS_DTOnR1653(reqSvcParam);
assertTrue("Incorrect parameter type.", null != s_dt && "DeploymentsDomain1::MyEnum".equals(s_dt.getName()));
}
else if (2 == domainNum) {
// Domain 2 baseline: required terminator term1 with term_service1 and the
// ordered integer parameters param1, param2, param3 (linked via R1654).
Terminator_c requiredTerm = Terminator_c.getOneD_TERMOnR1650(deployment,
(selected) -> "DeploymentsDomain2::term1".equals(((Terminator_c) selected).getName()));
assertTrue("Required terminator missing.", null != requiredTerm && !requiredTerm.getProvider());
TerminatorService_c reqSvc = TerminatorService_c.getOneD_TSVCOnR1651(requiredTerm,
(selected) -> "term_service1".equals(((TerminatorService_c) selected).getName()));
assertNotNull("Required terminator service missing.", reqSvc);
TerminatorServiceParameter_c reqSvcParam = TerminatorServiceParameter_c.getOneD_TSPARMOnR1652(reqSvc,
(selected) -> "param1".equals(((TerminatorServiceParameter_c) selected).getName()));
assertNotNull("Required terminator service parameter missing.", reqSvcParam);
DataType_c s_dt = DataType_c.getOneS_DTOnR1653(reqSvcParam);
assertTrue("Incorrect parameter type.", null != s_dt && "integer".equals(s_dt.getName()));
reqSvcParam = TerminatorServiceParameter_c.getOneD_TSPARMOnR1654Precedes(reqSvcParam);
assertTrue("Required terminator service parameter missing.", null != reqSvcParam && "param2".equals(reqSvcParam.getName()));
s_dt = DataType_c.getOneS_DTOnR1653(reqSvcParam);
assertTrue("Incorrect parameter type.", null != s_dt && "integer".equals(s_dt.getName()));
reqSvcParam = TerminatorServiceParameter_c.getOneD_TSPARMOnR1654Precedes(reqSvcParam);
assertTrue("Required terminator service parameter missing.", null != reqSvcParam && "param3".equals(reqSvcParam.getName()));
s_dt = DataType_c.getOneS_DTOnR1653(reqSvcParam);
assertTrue("Incorrect parameter type.", null != s_dt && "integer".equals(s_dt.getName()));
}
return deployment;
}
/**
 * Ensures {@code <eclipse.home>/tools/mc/bin} exists, creating it as a
 * symbolic link to the bin directory of the local mc repository (located via
 * the {@code XTUML_DEVELOPMENT_REPOSITORY} environment variable) and
 * downloading the ANTLR jar into it when absent.
 *
 * Fixes over the previous version: the download streams are now closed via
 * try-with-resources (both the channel and the FileOutputStream leaked), the
 * environment variable is validated directly (the old check
 * {@code !"".equals(env + "/../mc")} could never fail), the jar is only
 * fetched when missing, and the download uses HTTPS (Maven Central no longer
 * serves plain HTTP).
 */
private void setupExecutables() throws Exception {
    final File mcDir = new File(System.getProperty("eclipse.home.location").replaceFirst("file:", "") + "/tools/mc");
    if (!mcDir.exists()) {
        assertTrue("Could not create necessary directories.", mcDir.mkdirs());
    }
    final File binDir = new File(mcDir, "bin");
    if (!binDir.exists()) {
        final String mcRepoRoot = System.getenv("XTUML_DEVELOPMENT_REPOSITORY");
        assertTrue("Cannot find mc repository.", null != mcRepoRoot && !mcRepoRoot.isEmpty());
        final String mcRepoPath = mcRepoRoot + File.separator + "../mc";
        File mcBinDir = new File(mcRepoPath + File.separator + "bin");
        assertTrue("Cannot find mc bin directory.", mcBinDir.exists());
        File antlrJar = new File(mcBinDir, "antlr-3.5.2-complete.jar");
        if (!antlrJar.exists()) {
            // Maven Central requires HTTPS; repo1.maven.org is the canonical host.
            URL antlrURL = new URL("https://repo1.maven.org/maven2/org/antlr/antlr-complete/3.5.2/antlr-complete-3.5.2.jar");
            try (ReadableByteChannel rbc = Channels.newChannel(antlrURL.openStream());
                    FileOutputStream fos = new FileOutputStream(antlrJar)) {
                fos.getChannel().transferFrom(rbc, 0, Long.MAX_VALUE);
            }
        }
        Files.createSymbolicLink(binDir.toPath(), mcBinDir.toPath());
    }
}
}
| apache-2.0 |
trejkaz/derby | java/testing/org/apache/derbyTesting/functionTests/tests/store/Derby3625Test.java | 10723 | package org.apache.derbyTesting.functionTests.tests.store;
import java.sql.CallableStatement;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Statement;
import junit.framework.Assert;
import junit.framework.Test;
import org.apache.derbyTesting.junit.BaseTestSuite;
import org.apache.derbyTesting.junit.CleanDatabaseTestSetup;
import org.apache.derbyTesting.junit.DatabasePropertyTestSetup;
/*
Class org.apache.derbyTesting.functionTests.tests.store.Derby3625Test
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to you under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
/**
Test to reproduce DERBY-3625, failure in inline compress, in some
circumstances depending on exact size of data and state of pages during
the defragment phase.
Would throw following error:
ERROR XSDA3: Limitation: Record cannot be updated or inserted due to lack of
space on the page. Use the parameters derby.storage.pageSize and/or
derby.storage.pageReservedSpace to work around this limitation.^M
**/
public class Derby3625Test extends StoreBaseTest
{
    /**
     * Creates a test case with the given name.
     *
     * @param name the name of the test case
     */
    public Derby3625Test(String name)
    {
        super(name);
    }

    /**
     * DERBY-3625 test case.
     * <p>
     * Derby 3625 is caused by a bug where compress calculates the space
     * needed to fit a moved row from page A to B, and assumes that the space
     * required on page B is the same as on page A.  The problem is that in
     * some cases, due to the stored format of the changing record id, the
     * space required on B may be more than on A.  In the case where there is
     * exactly enough space by the initial calculation, the move fails because
     * one or 3 more bytes may be necessary to make the move, and the
     * compress fails.
     * <p>
     * To test:
     * fill page 1 with dummy rows; page 1 has a special control row on it
     * so it can't ever be empty, so use page 2 instead.
     * fill page 2 with dummy rows and empty it such that the
     * next row id on it is greater than 64, which takes 2 bytes to store
     * vs. 1 for rowid's less than 64.
     * fill pages 3 and 4 with some dummy rows which will be deleted to give
     * compress table room to work during defragment.
     * fill page 4 with 2 rows which fit on page 2 with 1-byte stored record
     * id's but will not fit with 2-byte stored record id's.
     * These will not be deleted, and the bug is exercised as
     * defragment tries to move these rows to page 2 after it has
     * been reclaimed as a free page.
     *
     * @throws SQLException if a database error occurs
     **/
    public void testTwoToOneByteCase()
        throws SQLException
    {
        PreparedStatement insert_stmt =
            prepareStatement("INSERT INTO testCompress VALUES(?, ?, ?)");

        // page 0 - container info/bit map, does not affect test

        // page 1 - fill it up and leave rows on it. page 1 has a special
        // row on it that can never be deleted so this page never can be
        // made free.

        // insert one blob-padded row that will fill page 1
        byte[] pad_blob = new byte[32630];
        insert_stmt.setInt(1, 1);
        insert_stmt.setBytes(2, pad_blob);
        insert_stmt.setString(3, "page 1");
        insert_stmt.executeUpdate();

        // page 2 - fill it completely with enough rows such that future
        // rows will force a 2 byte row id, ie. more than 64 rows. Later
        // in this test all the rows will be deleted from this page so that
        // the page is on the free list for compress defragment to use it.
        pad_blob = new byte[302];
        insert_stmt.setInt(1, 2);
        insert_stmt.setBytes(2, pad_blob);
        insert_stmt.setString(3, "page 2");
        for (int i = 0; i < 98; i++)
        {
            insert_stmt.executeUpdate();
        }

        // page 3 - fill it for another free page.
        insert_stmt.setInt(1, 3);
        insert_stmt.setBytes(2, pad_blob);
        insert_stmt.setString(3, "page 3");
        for (int i = 0; i < 98; i++)
        {
            insert_stmt.executeUpdate();
        }

        // page 4 - 2 rows, with one byte free. When these are moved to
        // a free page with bigger rowid's they will take 2 more bytes and
        // will not both fit on the page.
        //
        // I didn't track it down, but for some reason I could not fill a page
        // completely if there was only one row on the page, it kept turning
        // the blob column into a long row. I was just picking magic numbers
        // for the blob column to make it fit.
        //
        // With 2 rows I was able to fill the page up to one empty byte.
        // Then with the bug the first row would move to page 2 which is
        // now free but take one more byte than it did on this page. And
        // finally when the second row was moved it would think it would fit
        // but throw an exception when the rowid compressed version would
        // cause it to be one byte bigger than the original row.
        pad_blob = new byte[100];
        insert_stmt.setInt(1, 4);
        insert_stmt.setBytes(2, pad_blob);
        insert_stmt.setString(3, "page 4");
        insert_stmt.executeUpdate();
        pad_blob = new byte[32534];
        insert_stmt.setInt(1, 4);
        insert_stmt.setBytes(2, pad_blob);
        insert_stmt.setString(3, "page 4");
        insert_stmt.executeUpdate();
        commit();

        int space_info[] = getSpaceInfo("APP", "TESTCOMPRESS", true);

        // space after initial insert setup should be 4 pages
        // 0 - container info - not reflected in allocated page count,
        // 1 - dummy data left on the page,
        // 2 - bunch of short records to be deleted to make free page
        // 3 - bunch of short records to be deleted to make free page
        // 4 - short and long record to exercise bug.
        Assert.assertEquals(
            "wrong allocated page count in test setup",
            4, space_info[SPACE_INFO_NUM_ALLOC]);

        Statement stmt = createStatement();

        // Delete rows on page 2 and 3 to allow defragment to try and move
        // the page 4 row up.
        stmt.executeUpdate("DELETE FROM testCompress where id = 2 or id = 3");
        commit();

        // Before fixing the bug, this compress call would throw the
        // following exception:
        //
        // ERROR XSDA3: Limitation: Record cannot be updated or inserted due
        // to lack of space on the page. Use the parameters
        // derby.storage.pageSize and/or derby.storage.pageReservedSpace to
        // work around this limitation.
        CallableStatement call_compress =
            prepareCall(
                "CALL SYSCS_UTIL.SYSCS_INPLACE_COMPRESS_TABLE(?, ?, 1, 1, 1)");
        call_compress.setString(1, "APP");
        call_compress.setString(2, "TESTCOMPRESS");
        call_compress.executeUpdate();
        commit();

        space_info = getSpaceInfo("APP", "TESTCOMPRESS", true);

        // space after the test should be 3 pages:
        // 0 - container info - not reflected in allocated page count,
        // 1 - dummy data left on the page,
        // 2 - one short record, but long record did not fit
        // 3 - long record on an empty page.
        Assert.assertEquals(
            "wrong allocated page count", 3, space_info[SPACE_INFO_NUM_ALLOC]);

        insert_stmt.close();
    }

    /**
     * Builds the base suite, wrapped in a clean-database decorator that
     * creates the test table with zero reserved page space.
     *
     * @param name the suite name
     */
    protected static Test baseSuite(String name)
    {
        BaseTestSuite suite = new BaseTestSuite(name);
        suite.addTestSuite(Derby3625Test.class);
        return new CleanDatabaseTestSetup(
            DatabasePropertyTestSetup.setLockTimeouts(suite, 2, 4))
        {
            /**
             * Creates the tables used in the test cases.
             * @exception SQLException if a database error occurs
             */
            protected void decorateSQL(Statement stmt) throws SQLException
            {
                Connection conn = stmt.getConnection();
                CallableStatement set_dbprop = conn.prepareCall(
                    "CALL SYSCS_UTIL.SYSCS_SET_DATABASE_PROPERTY(?, ?)");
                set_dbprop.setString(1, "derby.storage.pageReservedSpace");
                set_dbprop.setString(2, "0");
                set_dbprop.executeUpdate();

                // create a table; with a blob column it will use a 32k page size
                stmt.executeUpdate(
                    "CREATE TABLE testCompress " +
                    "(id int, padcol blob(1M), c varchar(200))");

                // restore the property to its default
                set_dbprop.setString(2, null);
                set_dbprop.executeUpdate();
                set_dbprop.close();

                conn.setAutoCommit(false);
            }
        };
    }

    /**
     * Returns the full suite (embedded framework only).
     */
    public static Test suite()
    {
        BaseTestSuite suite = new BaseTestSuite("Derby3625Test");
        // Fixed typo: the suite name used to read "Derby36625Test:embedded".
        suite.addTest(baseSuite("Derby3625Test:embedded"));
        return suite;
    }
}
| apache-2.0 |
xingguang2013/jbpm-designer | jbpm-designer-backend/src/test/java/org/jbpm/designer/web/preprocessing/impl/JbpmPreprocessingUnitVFSGitTest.java | 7199 | /*
* Copyright 2015 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jbpm.designer.web.preprocessing.impl;
import org.jbpm.designer.helper.TestHttpServletRequest;
import org.jbpm.designer.helper.TestIDiagramProfile;
import org.jbpm.designer.helper.TestServletContext;
import org.jbpm.designer.repository.Asset;
import org.jbpm.designer.repository.AssetBuilderFactory;
import org.jbpm.designer.repository.Repository;
import org.jbpm.designer.repository.impl.AssetBuilder;
import org.jbpm.designer.repository.VFSFileSystemProducer;
import org.jbpm.designer.repository.vfs.RepositoryDescriptor;
import org.jbpm.designer.repository.vfs.VFSRepository;
import org.jbpm.designer.web.profile.impl.JbpmProfileImpl;
import org.junit.*;
import java.io.File;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
/**
 * Integration test for JbpmPreprocessingUnit running against a git-backed
 * VFS repository: imports a process asset, runs the preprocessing step and
 * verifies the expected global form-widget assets and default files exist.
 */
public class JbpmPreprocessingUnitVFSGitTest {
// TODO change it to generic independent path
private static final String REPOSITORY_ROOT = "designer-playground";
private static final String VFS_REPOSITORY_ROOT = "git://" + REPOSITORY_ROOT;
private static final String USERNAME = "guvnorngtestuser1";
private static final String PASSWORD = "test1234";
private static final String ORIGIN_URL = "https://github.com/mswiderski/designer-playground.git";
private static final String FETCH_COMMAND = "?fetch";
private JbpmProfileImpl profile;
// Local clone location for the git repo backing the VFS filesystem.
private static String gitLocalClone = System.getProperty("java.io.tmpdir") + "git-repo";
// Environment passed to the VFS file-system producer (credentials, origin, root).
private static Map<String, String> env = new HashMap<String, String>();
// NOTE(review): 'counter' is incremented in teardown() but never read —
// presumably leftover from an earlier version; verify before removing.
private static int counter = -100;
private RepositoryDescriptor descriptor;
private VFSFileSystemProducer producer = new VFSFileSystemProducer();
@BeforeClass
public static void prepare() {
env.put( "username", USERNAME );
env.put( "password", PASSWORD );
env.put( "origin", ORIGIN_URL );
env.put( "fetch.cmd", FETCH_COMMAND );
// Point the kie NIO git implementation at the temp clone directory.
System.setProperty("org.kie.nio.git.dir", gitLocalClone);
}
@AfterClass
public static void cleanup() {
System.clearProperty("org.kie.nio.git.dir");
}
@Before
public void setup() {
profile = new JbpmProfileImpl();
producer = new VFSFileSystemProducer();
env.put("repository.root", VFS_REPOSITORY_ROOT);
env.put("repository.globaldir", "/global");
descriptor = producer.produceFileSystem(env);
}
// Recursively deletes the contents of a directory, then the entries themselves.
// NOTE(review): File.listFiles() returns null for non-directories or on I/O
// error, which would throw NPE here — assumes callers only pass existing
// directories; TODO confirm.
private void deleteFiles(File directory) {
for (File file : directory.listFiles()) {
if (file.isDirectory()) {
deleteFiles(file);
}
file.delete();
}
}
@After
public void teardown() {
// Remove the temporary local clone and the .niogit working directory so
// each test starts from a clean slate.
File repo = new File(gitLocalClone);
if(repo.exists()) {
deleteFiles(repo);
}
repo.delete();
repo = new File(".niogit");
if(repo.exists()) {
deleteFiles(repo);
}
repo.delete();
counter++;
}
@Test
public void testProprocess() {
Repository repository = new VFSRepository(producer.getIoService());
((VFSRepository)repository).setDescriptor(descriptor);
profile.setRepository(repository);
//prepare folders that will be used
repository.createDirectory("/myprocesses");
repository.createDirectory("/global");
// prepare process asset that will be used to preprocess
AssetBuilder builder = AssetBuilderFactory.getAssetBuilder(Asset.AssetType.Text);
builder.content("bpmn2 content")
.type("bpmn2")
.name("process")
.location("/myprocesses");
String uniqueId = repository.createAsset(builder.getAsset());
// create instance of preprocessing unit
JbpmPreprocessingUnit preprocessingUnitVFS = new JbpmPreprocessingUnit(new TestServletContext(), "/", null);
// setup parameters
Map<String, String> params = new HashMap<String, String>();
params.put("uuid", uniqueId);
// run preprocess
preprocessingUnitVFS.preprocess(new TestHttpServletRequest(params), null, new TestIDiagramProfile(repository), null, false, false, null, null);
// validate results
// Preprocessing must have populated /global with the 30 stock form-widget
// and icon assets.
Collection<Asset> globalAssets = repository.listAssets("/global");
assertNotNull(globalAssets);
assertEquals(30, globalAssets.size());
repository.assetExists("/global/backboneformsinclude.fw");
repository.assetExists("/global/backbonejsinclude.fw");
repository.assetExists("/global/cancelbutton.fw");
repository.assetExists("/global/checkbox.fw");
repository.assetExists("/global/customeditors.json");
repository.assetExists("/global/div.fw");
repository.assetExists("/global/dropdownmenu.fw");
repository.assetExists("/global/fieldset.fw");
repository.assetExists("/global/form.fw");
repository.assetExists("/global/handlebarsinclude.fw");
repository.assetExists("/global/htmlbasepage.fw");
repository.assetExists("/global/image.fw");
repository.assetExists("/global/jqueryinclude.fw");
repository.assetExists("/global/jquerymobileinclude.fw");
repository.assetExists("/global/link.fw");
repository.assetExists("/global/mobilebasepage.fw");
repository.assetExists("/global/orderedlist.fw");
repository.assetExists("/global/passwordfield.fw");
repository.assetExists("/global/radiobutton.fw");
repository.assetExists("/global/script.fw");
repository.assetExists("/global/submitbutton.fw");
repository.assetExists("/global/table.fw");
repository.assetExists("/global/textarea.fw");
repository.assetExists("/global/textfield.fw");
repository.assetExists("/global/themes.json");
repository.assetExists("/global/unorderedlist.fw");
repository.assetExists("/global/defaultemailicon.gif");
repository.assetExists("/global/defaultlogicon.gif");
repository.assetExists("/global/defaultservicenodeicon.png");
repository.assetExists("/global/.gitignore");
// The working folder must contain the work definitions, the imported
// process asset and the .gitignore marker.
Collection<Asset> defaultStuff = repository.listAssets("/myprocesses");
assertNotNull(defaultStuff);
assertEquals(3, defaultStuff.size());
repository.assetExists("/myprocesses/WorkDefinitions.wid");
// this is the process asset that was created for the test but let's check it anyway
repository.assetExists("/myprocesses/process.bpmn2");
repository.assetExists("/myprocesses/.gitignore");
}
}
| apache-2.0 |
AmesianX/binnavi | src/main/java/com/google/security/zynamics/binnavi/Gui/GraphWindows/Actions/CStepIntoHotkeyAction.java | 2410 | /*
Copyright 2011-2016 Google Inc. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.google.security.zynamics.binnavi.Gui.GraphWindows.Actions;
import java.awt.event.ActionEvent;
import javax.swing.AbstractAction;
import javax.swing.JFrame;
import com.google.common.base.Preconditions;
import com.google.security.zynamics.binnavi.Gui.Debug.ToolbarPanel.Implementations.CDebuggerFunctions;
import com.google.security.zynamics.binnavi.Gui.GraphWindows.Panels.IFrontEndDebuggerProvider;
import com.google.security.zynamics.binnavi.debug.debugger.interfaces.IDebugger;
import com.google.security.zynamics.binnavi.debug.models.processmanager.TargetProcessThread;
/**
 * Action class for handling the debugger Step Into hotkey.
 *
 * <p>When triggered, the action forwards a Step Into request to the currently
 * selected debugger, provided one exists and has an active thread.
 */
public final class CStepIntoHotkeyAction extends AbstractAction {
  /**
   * Used for serialization.
   */
  private static final long serialVersionUID = 8120160330540294860L;

  /**
   * Parent window used for dialogs.
   */
  private final JFrame m_parent;

  /**
   * Provides the active debugger.
   */
  private final IFrontEndDebuggerProvider m_debugPerspectiveModel;

  /**
   * Creates a new action object.
   *
   * @param parent Parent window used for dialogs.
   * @param panel Provides the active debugger.
   */
  public CStepIntoHotkeyAction(final JFrame parent, final IFrontEndDebuggerProvider panel) {
    // Fail fast on a missing debugger provider; 'parent' may legitimately be null.
    m_debugPerspectiveModel =
        Preconditions.checkNotNull(panel, "IE01655: Panel argument can not be null");
    m_parent = parent;
  }

  @Override
  public void actionPerformed(final ActionEvent event) {
    final IDebugger debugger = m_debugPerspectiveModel.getCurrentSelectedDebugger();
    if (debugger == null) {
      return;
    }
    // Only step when the debugger actually has an active thread to step.
    final TargetProcessThread activeThread = debugger.getProcessManager().getActiveThread();
    if (activeThread != null) {
      CDebuggerFunctions.stepInto(m_parent, debugger);
    }
  }
}
| apache-2.0 |
racker/omnibus | source/otp_src_R14B02/lib/ic/java_src/com/ericsson/otp/ic/Term.java | 22176 | /*
* %CopyrightBegin%
*
* Copyright Ericsson AB 1999-2009. All Rights Reserved.
*
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
* compliance with the License. You should have received a copy of the
* Erlang Public License along with this software. If not, it can be
* retrieved online at http://www.erlang.org/.
*
* Software distributed under the License is distributed on an "AS IS"
* basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
* the License for the specific language governing rights and limitations
* under the License.
*
* %CopyrightEnd%
*
*/
package com.ericsson.otp.ic;
/**
The Term class is intended to represent the erlang term generic type.
It extends the Any class and is basically used the same way as the Any class.
<p>The main difference between Term and Any is the use of guard methods
instead for TypeCode to determine the data included in the Term.
This actual when cannot determine a Term's value class returned at compile time.
**/
final public class Term extends Any {
// Primitive value holders; only the field matching the current tag/tcV is meaningful.
protected java.lang.String atomV; // atom text, set when the term holds an atom
protected long longV; // shared holder for all integer-family values (octet, char, short, long, ...)
protected Pid PidV; // Erlang process identifier value
protected Ref RefV; // Erlang reference value
protected Port PortV; // Erlang port value
protected com.ericsson.otp.erlang.OtpErlangObject ObjV; // generic decoded object, set via insert_Object
protected int tag; // erlang external-format tag (see com.ericsson.otp.erlang.OtpExternal)
/**
 Tag accessor method.
 @return int, the erlang external-format tag of this Term
 **/
public int tag() {
    return tag;
}
/* Guards */
/**
 Guard method.
 @return true if the Term is an OtpErlangAtom, false otherwise
 **/
public boolean isAtom() {
    // With no decoded object attached, fall back to the external-format tag.
    if (ObjV == null) {
        if (tag == com.ericsson.otp.erlang.OtpExternal.atomTag)
            return true;
        return false;
    }
    return (ObjV instanceof com.ericsson.otp.erlang.OtpErlangAtom);
}
/**
 Guard method.
 @return true if the Term is not an OtpErlangList nor an OtpErlangTuple, false otherwise
 **/
public boolean isConstant() {
    // A "constant" is anything that is neither a list nor a tuple.
    if (isList())
        return false;
    if (isTuple())
        return false;
    return true;
}
/**
 Guard method.
 @return true if the Term is an OtpErlangFloat, false otherwise
 **/
public boolean isFloat() {
    if (tag == com.ericsson.otp.erlang.OtpExternal.floatTag)
        return true;
    return false;
}
/**
 Guard method.
 @return true if the Term is an OtpErlangInt, false otherwise
 **/
public boolean isInteger() {
    // All three external-format integer encodings count as integers.
    switch (tag) {
    case com.ericsson.otp.erlang.OtpExternal.smallIntTag:
    case com.ericsson.otp.erlang.OtpExternal.intTag:
    case com.ericsson.otp.erlang.OtpExternal.smallBigTag:
        return true;
    default:
        return false;
    }
}
/**
 Guard method.
 @return true if the Term is an OtpErlangList, false otherwise
 **/
public boolean isList() {
    if (ObjV == null) {
        // Strings and the empty list (nil) are lists in the external format.
        switch (tag) {
        case com.ericsson.otp.erlang.OtpExternal.listTag:
        case com.ericsson.otp.erlang.OtpExternal.stringTag:
        case com.ericsson.otp.erlang.OtpExternal.nilTag:
            return true;
        default:
            return false;
        }
    }
    if (ObjV instanceof com.ericsson.otp.erlang.OtpErlangList)
        return true;
    if (ObjV instanceof com.ericsson.otp.erlang.OtpErlangString)
        return true;
    return false;
}
/**
 Guard method.
 <p>NOTE(review): unlike the other guards this one has a side effect — on
 success it caches the decoded text in {@code stringV} — and it uses a
 failing {@code extract_string()} call as its negative case (exception as
 control flow).
 @return true if the Term is an OtpErlangString, false otherwise
 **/
public boolean isString() {
    if (ObjV == null) {
        switch (tag) {
        case com.ericsson.otp.erlang.OtpExternal.stringTag:
        case com.ericsson.otp.erlang.OtpExternal.nilTag:
            return true;
        default:
            // A term that is not tagged as a string may still decode as one
            // (e.g. a list of small integers); try the extraction.
            try {
                stringV = extract_string();
                return true;
            } catch (Exception e) {
                return false;
            }
        }
    }
    if (ObjV instanceof com.ericsson.otp.erlang.OtpErlangString)
        return true;
    try {
        stringV = extract_string();
        return true;
    } catch (Exception e) {
        return false;
    }
}
/**
 Guard method.
 @return true if the Term is an OtpErlangInteger or an OtpErlangFloat, false otherwise
 **/
public boolean isNumber() {
    switch (tag) {
    case com.ericsson.otp.erlang.OtpExternal.smallIntTag:
    case com.ericsson.otp.erlang.OtpExternal.intTag:
    case com.ericsson.otp.erlang.OtpExternal.smallBigTag:
    case com.ericsson.otp.erlang.OtpExternal.floatTag:
        return true;
    default:
        return false;
    }
}
/**
 Guard method.
 @return true if the Term is an OtpErlangPid or Pid, false otherwise
 **/
public boolean isPid() {
    if (ObjV == null) {
        if (tag == com.ericsson.otp.erlang.OtpExternal.pidTag)
            return true;
        return false;
    }
    return (ObjV instanceof com.ericsson.otp.erlang.OtpErlangPid);
}
/**
 Guard method.
 @return true if the Term is an OtpErlangPort or Port, false otherwise
 **/
public boolean isPort() {
    if (ObjV == null) {
        if (tag == com.ericsson.otp.erlang.OtpExternal.portTag)
            return true;
        return false;
    }
    return (ObjV instanceof com.ericsson.otp.erlang.OtpErlangPort);
}
/**
 Guard method.
 @return true if the Term is an OtpErlangRef, false otherwise
 **/
public boolean isReference() {
    if (ObjV == null) {
        // Both the old and the new external-format reference tags qualify.
        switch (tag) {
        case com.ericsson.otp.erlang.OtpExternal.refTag:
        case com.ericsson.otp.erlang.OtpExternal.newRefTag:
            return true;
        default:
            return false;
        }
    }
    return (ObjV instanceof com.ericsson.otp.erlang.OtpErlangRef);
}
/**
 Guard method.
 @return true if the Term is an OtpErlangTuple, false otherwise
 **/
public boolean isTuple() {
    if (ObjV == null) {
        switch (tag) {
        case com.ericsson.otp.erlang.OtpExternal.smallTupleTag:
        case com.ericsson.otp.erlang.OtpExternal.largeTupleTag:
            return true;
        default:
            return false;
        }
    }
    return (ObjV instanceof com.ericsson.otp.erlang.OtpErlangTuple);
}
/**
 Guard method.
 @return true if the Term is an OtpErlangBinary, false otherwise
 **/
public boolean isBinary() {
    if (ObjV == null) {
        if (tag == com.ericsson.otp.erlang.OtpExternal.binTag)
            return true;
        return false;
    }
    return (ObjV instanceof com.ericsson.otp.erlang.OtpErlangBinary);
}
// Equal function
/**
 Term comparison method.
 <p>NOTE(review): Ports are compared with {@code equals} while Pids and Refs
 use {@code equal}; confirm {@code Port} overrides {@code equals}, otherwise
 the Port branch is an identity comparison.
 <p>NOTE(review): the numeric branch compares the sums {@code longV + doubleV}
 of both terms (presumably via an inherited {@code equal(double,double)}
 helper — confirm), so two terms whose sums are both 0 are only considered
 equal when their boolean values also match.
 @return true if the input Term is equal to the object, false otherwise
 **/
public boolean equal(Term _any) {
    try {
        /* Pids */
        if ((PidV != null) && (_any.PidV != null))
            if (PidV.equal(_any.PidV))
                return true;
        /* Refs */
        if ((RefV != null) && (_any.RefV != null))
            if (RefV.equal(_any.RefV))
                return true;
        /* Ports */
        if ((PortV != null) && (_any.PortV != null))
            if (PortV.equals(_any.PortV))
                return true;
        /* strings */
        if ((stringV != null) && (_any.stringV != null))
            if (stringV.equals(_any.stringV))
                return true;
        /* atoms and booleans */
        if ((atomV != null) && (_any.atomV != null))
            if (atomV.equals(_any.atomV))
                return true;
        /* booleans: an atom "true"/"false" on one side may match the boolean holder on the other */
        if (atomV != null)
            if (_any.booleanV == Boolean.valueOf(atomV).booleanValue())
                return true;
        if (_any.atomV != null)
            if (booleanV == Boolean.valueOf(_any.atomV).booleanValue())
                return true;
        /* integer types plus floating point types */
        double _ownNS =
            longV + doubleV;
        double _othersNS =
            _any.longV + _any.doubleV;
        if ((equal(_ownNS, _othersNS)) &&
            (!equal(_ownNS, 0)))
            return true;
        /* All together, 0 or false */
        if ((equal(_ownNS, _othersNS)) &&
            booleanV == _any.booleanV)
            return true;
        return false;
    } catch (Exception e) {
        e.printStackTrace();
        return false;
    }
}
/**
 Writes the value of Term to a stream.
 <p>If no TypeCode is set but a generic decoded object is attached, the
 object is written directly; otherwise the encoding is chosen from the
 TypeCode kind.
 @param _os the output stream that receives the external-format encoding
 @throws java.lang.Exception on an unsupported TypeCode kind ("BAD KIND")
 **/
public void write_value(com.ericsson.otp.erlang.OtpOutputStream _os)
    throws java.lang.Exception {
    if ((tcV == null) && (ObjV != null))
        _os.write_any(ObjV); // Type not generated by IC
    else {
        switch (tcV.kind().value()) {
        // All integer-family kinds share the long holder.
        case TCKind._tk_octet :
        case TCKind._tk_char :
        case TCKind._tk_wchar :
        case TCKind._tk_short :
        case TCKind._tk_ushort :
        case TCKind._tk_long :
        case TCKind._tk_longlong :
        case TCKind._tk_ulong :
        case TCKind._tk_ulonglong :
            _os.write_long(longV);
            break;
        case TCKind._tk_float :
            _os.write_double(doubleV);
            break;
        case TCKind._tk_double :
            _os.write_double(doubleV);
            break;
        case TCKind._tk_boolean :
            _os.write_boolean(booleanV);
            break;
        case TCKind._tk_string :
        case TCKind._tk_wstring :
            _os.write_string(stringV);
            break;
        case TCKind._tk_atom :
            _os.write_atom(stringV);
            break;
        case TCKind._tk_struct:
            // Structs are either one of the known IC types (Pid/Ref/Port)
            // or raw pre-encoded bytes buffered in the output stream "os".
            if (isPid())
                PidHelper.marshal(_os, PidV);
            else {
                if (isReference())
                    RefHelper.marshal(_os, RefV);
                else {
                    if (isPort())
                        PortHelper.marshal(_os, PortV);
                    else
                        _os.write(os.toByteArray());
                }
            }
            break;
        case TCKind._tk_union:
        case TCKind._tk_array:
        case TCKind._tk_sequence:
        case TCKind._tk_enum:
            // Composite values are written as their buffered encoding.
            _os.write(os.toByteArray());
            break;
        case TCKind._tk_void :
            _os.write_atom("ok");
            break;
        /*
         * Not supported types
         */
        default:
            throw new java.lang.Exception("BAD KIND");
        }
    }
}
/*
 * Insert and extract each primitive type
 */
/* short */
/**
 Short value extractor method.
 @return short, the value of Term
 @throws java.lang.Exception if the Term is typed and not a short
 **/
public short extract_short() throws java.lang.Exception {
    // Untyped terms and tk_short terms both expose the shared integer holder.
    if (tcV == null || tcV.kind() == TCKind.tk_short) {
        return (short) longV;
    }
    throw new java.lang.Exception("");
}
/**
 Short value insertion method.
 **/
public void insert_short(short s) {
    this.longV = s;
    this.tag = com.ericsson.otp.erlang.OtpExternal.intTag;
    this.tcV = new TypeCode(TCKind.tk_short);
}
/**
 Short value insertion method.
 **/
public void insert_short(long l) {
    this.longV = l;
    this.tag = com.ericsson.otp.erlang.OtpExternal.intTag;
    this.tcV = new TypeCode(TCKind.tk_short);
}
/* long */
/**
 Long value extractor method.
 @return int, the value of Term
 @throws java.lang.Exception if the Term is typed and not a long
 **/
public int extract_long() throws java.lang.Exception {
    if (tcV == null || tcV.kind() == TCKind.tk_long) {
        return (int) longV;
    }
    throw new java.lang.Exception("");
}
/**
 Long value insertion method.
 **/
public void insert_long(int i) {
    this.longV = i;
    this.tag = com.ericsson.otp.erlang.OtpExternal.intTag;
    this.tcV = new TypeCode(TCKind.tk_long);
}
/**
 Long value insertion method.
 **/
public void insert_long(long l) {
    this.longV = l;
    this.tag = com.ericsson.otp.erlang.OtpExternal.intTag;
    this.tcV = new TypeCode(TCKind.tk_long);
}
/* longlong */
/**
 Long Long value extractor method.
 @return long, the value of Term
 @throws java.lang.Exception if the Term is typed and not a longlong
 **/
public long extract_longlong() throws java.lang.Exception {
    if (tcV == null || tcV.kind() == TCKind.tk_longlong) {
        return longV;
    }
    throw new java.lang.Exception("");
}
/**
 Long Long value insertion method.
 **/
public void insert_longlong(long l) {
    this.longV = l;
    this.tag = com.ericsson.otp.erlang.OtpExternal.intTag;
    this.tcV = new TypeCode(TCKind.tk_longlong);
}
/* ushort */
/**
 Unsigned Short value extractor method.
 @return short, the value of Term
 @throws java.lang.Exception if the Term is typed and not an ushort
 **/
public short extract_ushort() throws java.lang.Exception {
    if (tcV == null || tcV.kind() == TCKind.tk_ushort) {
        return (short) longV;
    }
    throw new java.lang.Exception("");
}
/**
 Unsigned Short value insertion method.
 **/
public void insert_ushort(short s) {
    this.longV = s;
    this.tag = com.ericsson.otp.erlang.OtpExternal.intTag;
    this.tcV = new TypeCode(TCKind.tk_ushort);
}
/**
 Unsigned Short value insertion method.
 **/
public void insert_ushort(long l) {
    this.longV = l;
    this.tag = com.ericsson.otp.erlang.OtpExternal.intTag;
    this.tcV = new TypeCode(TCKind.tk_ushort);
}
/* ulong */
/**
 Unsigned Long value extractor method.
 @return int, the value of Term
 @throws java.lang.Exception if the Term is typed and not an ulong
 **/
public int extract_ulong() throws java.lang.Exception {
    if (tcV == null || tcV.kind() == TCKind.tk_ulong) {
        return (int) longV;
    }
    throw new java.lang.Exception("");
}
/**
 Unsigned Long value insertion method.
 **/
public void insert_ulong(int i) {
    this.longV = i;
    this.tag = com.ericsson.otp.erlang.OtpExternal.intTag;
    this.tcV = new TypeCode(TCKind.tk_ulong);
}
/**
 Unsigned Long value insertion method.
 **/
public void insert_ulong(long l) {
    this.longV = l;
    this.tag = com.ericsson.otp.erlang.OtpExternal.intTag;
    this.tcV = new TypeCode(TCKind.tk_ulong);
}
/* ulonglong */
/**
 Unsigned Long Long value extractor method.
 @return long, the value of Term
 @throws java.lang.Exception if the Term is typed and not an ulonglong
 **/
public long extract_ulonglong() throws java.lang.Exception {
    if (tcV == null || tcV.kind() == TCKind.tk_ulonglong) {
        return longV;
    }
    throw new java.lang.Exception("");
}
/**
 Unsigned Long Long value insertion method.
 **/
public void insert_ulonglong(long l) {
    this.longV = l;
    this.tag = com.ericsson.otp.erlang.OtpExternal.intTag;
    this.tcV = new TypeCode(TCKind.tk_ulonglong);
}
/* float */
/**
 Float value extractor method.
 @return float, the value of Term
 @throws java.lang.Exception if the Term is typed and not a float
 **/
public float extract_float() throws java.lang.Exception {
    // Untyped terms and tk_float terms both expose the shared double holder.
    if (tcV == null || tcV.kind() == TCKind.tk_float) {
        return (float) doubleV;
    }
    throw new java.lang.Exception("");
}
/**
 Float value insertion method.
 **/
public void insert_float(float f) {
    this.doubleV = f;
    this.tag = com.ericsson.otp.erlang.OtpExternal.floatTag;
    this.tcV = new TypeCode(TCKind.tk_float);
}
/**
 Float value insertion method.
 **/
public void insert_float(double f) {
    this.doubleV = f;
    this.tag = com.ericsson.otp.erlang.OtpExternal.floatTag;
    this.tcV = new TypeCode(TCKind.tk_float);
}
/* double */
/**
 Double value extractor method.
 @return double, the value of Term
 @throws java.lang.Exception if the Term is typed and not a double
 **/
public double extract_double() throws java.lang.Exception {
    if (tcV == null || tcV.kind() == TCKind.tk_double) {
        return doubleV;
    }
    throw new java.lang.Exception("");
}
/**
 Double value insertion method.
 **/
public void insert_double(double d) {
    this.doubleV = d;
    this.tag = com.ericsson.otp.erlang.OtpExternal.floatTag;
    this.tcV = new TypeCode(TCKind.tk_double);
}
/* boolean */
/**
 Boolean value extractor method.
 @return boolean, the value of Term
 @throws java.lang.Exception if the Term does not hold a boolean value
 **/
public boolean extract_boolean()
    throws java.lang.Exception {
    if (tcV == null) {
        // Untyped terms carry booleans as the atoms "true"/"false".
        if (atomV != null)
            return Boolean.valueOf(atomV).booleanValue();
        // Previously this fell through and dereferenced the null tcV below,
        // raising a NullPointerException; signal the type mismatch instead.
        throw new java.lang.Exception("");
    }
    if (tcV.kind() == TCKind.tk_boolean)
        return booleanV;
    throw new java.lang.Exception("");
}
/**
 Boolean value insertion method.
 **/
public void insert_boolean(boolean b) {
    booleanV = b;
    // Booleans travel as atoms in the erlang external format.
    tag = com.ericsson.otp.erlang.OtpExternal.atomTag;
    tcV = new TypeCode(TCKind.tk_boolean);
}
/* char */
/**
 Char value extractor method.
 @return char, the value of Term
 @throws java.lang.Exception if the Term is typed and not a char
 **/
public char extract_char() throws java.lang.Exception {
    if (tcV == null || tcV.kind() == TCKind.tk_char) {
        return (char) longV;
    }
    throw new java.lang.Exception("");
}
/**
 Char value insertion method.
 **/
public void insert_char(char c) {
    this.longV = c;
    this.tag = com.ericsson.otp.erlang.OtpExternal.smallIntTag;
    this.tcV = new TypeCode(TCKind.tk_char);
}
/**
 Char value insertion method.
 **/
public void insert_char(long l) {
    this.longV = l;
    this.tag = com.ericsson.otp.erlang.OtpExternal.smallIntTag;
    this.tcV = new TypeCode(TCKind.tk_char);
}
/* wchar */
/**
 Wchar value extractor method.
 @return char, the value of Term
 @throws java.lang.Exception if the Term is typed and not a wchar
 **/
public char extract_wchar() throws java.lang.Exception {
    if (tcV == null || tcV.kind() == TCKind.tk_wchar) {
        return (char) longV;
    }
    throw new java.lang.Exception("");
}
/**
 Wchar value insertion method.
 **/
public void insert_wchar(char c) {
    this.longV = c;
    this.tag = com.ericsson.otp.erlang.OtpExternal.smallIntTag;
    this.tcV = new TypeCode(TCKind.tk_wchar);
}
/**
 Wchar value insertion method.
 **/
public void insert_wchar(long l) {
    this.longV = l;
    this.tag = com.ericsson.otp.erlang.OtpExternal.smallIntTag;
    this.tcV = new TypeCode(TCKind.tk_wchar);
}
/* octet */
/**
 Octet value extractor method.
 @return byte, the value of Term
 @throws java.lang.Exception if the Term is typed and not an octet
 **/
public byte extract_octet() throws java.lang.Exception {
    if (tcV == null || tcV.kind() == TCKind.tk_octet) {
        return (byte) longV;
    }
    throw new java.lang.Exception("");
}
/**
 Octet value insertion method.
 **/
public void insert_octet(byte b) {
    this.longV = b;
    this.tag = com.ericsson.otp.erlang.OtpExternal.smallIntTag;
    this.tcV = new TypeCode(TCKind.tk_octet);
}
/**
 Octet value insertion method.
 **/
public void insert_octet(long l) {
    this.longV = l;
    this.tag = com.ericsson.otp.erlang.OtpExternal.smallIntTag;
    this.tcV = new TypeCode(TCKind.tk_octet);
}
/* string */
/**
 String value extractor method.
 <p>NOTE(review): when untyped and no cached string exists, the value is
 decoded from the buffered stream and cached in {@code stringV} as a side
 effect.
 @return String, the value of Term
 @throws java.lang.Exception if the Term does not hold a string
 **/
public java.lang.String extract_string()
    throws java.lang.Exception {
    if (tcV == null) {
        if (stringV != null)
            return stringV;
        else {
            // Decode from the buffered external-format bytes and cache.
            is = this.extract_Streamable();
            stringV = is.read_string();
            return stringV;
        }
    }
    else
        if (tcV.kind() == TCKind.tk_string)
            return stringV;
    throw new java.lang.Exception("");
}
/**
 String value insertion method.
 **/
public void insert_string(java.lang.String s) {
    stringV = s;
    tag = com.ericsson.otp.erlang.OtpExternal.stringTag;
    tcV = new TypeCode(TCKind.tk_string);
}
/* wstring */
/**
 Wstring value extractor method.
 <p>Same caching behaviour as {@link #extract_string()}.
 @return String, the value of Term
 @throws java.lang.Exception if the Term does not hold a wstring
 **/
public java.lang.String extract_wstring()
    throws java.lang.Exception {
    if (tcV == null) {
        if (stringV != null)
            return stringV;
        else {
            is = this.extract_Streamable();
            stringV = is.read_string();
            return stringV;
        }
    }
    else
        if (tcV.kind() == TCKind.tk_wstring)
            return stringV;
    throw new java.lang.Exception("");
}
/**
 Wstring value insertion method.
 **/
public void insert_wstring(java.lang.String s) {
    stringV = s;
    tag = com.ericsson.otp.erlang.OtpExternal.stringTag;
    tcV = new TypeCode(TCKind.tk_wstring);
}
/* atom */
/**
 Atom value extractor method.
 @return atom, the value of Term
 @throws java.lang.Exception if the Term does not hold an atom
 **/
public java.lang.String extract_atom()
    throws java.lang.Exception {
    if (tcV == null) {
        if (atomV != null)
            return atomV;
        // Previously this fell through and dereferenced the null tcV below,
        // raising a NullPointerException; signal the type mismatch instead.
        throw new java.lang.Exception("");
    }
    // Atoms inserted via insert_atom are held in stringV.
    if (tcV.kind() == TCKind.tk_atom)
        return stringV;
    throw new java.lang.Exception("");
}
/**
 Atom value insertion method.
 **/
public void insert_atom(java.lang.String s) {
    stringV = s;
    tag = com.ericsson.otp.erlang.OtpExternal.atomTag;
    tcV = new TypeCode(TCKind.tk_atom);
}
/* Pid */
/**
 Pid value extractor method.
 @return Pid, the value of Term
 @throws java.lang.Exception if the Term does not hold a Pid
 **/
public Pid extract_Pid()
    throws java.lang.Exception {
    if (tcV == null) {
        if (PidV != null)
            return PidV;
        // Previously fell through to tcV.equal(...) and raised a
        // NullPointerException; signal the type mismatch instead.
        throw new java.lang.Exception("");
    }
    if (tcV.equal(PidHelper.type()))
        return PidV;
    throw new java.lang.Exception("");
}
/**
 Pid value insertion method.
 **/
public void insert_Pid(Pid p) {
    PidV = p;
    tag = com.ericsson.otp.erlang.OtpExternal.pidTag;
    tcV = PidHelper.type();
}
/* Ref */
/**
 Ref value extractor method.
 @return Ref, the value of Term
 @throws java.lang.Exception if the Term does not hold a Ref
 **/
public Ref extract_Ref()
    throws java.lang.Exception {
    if (tcV == null) {
        if (RefV != null)
            return RefV;
        // Same null-tcV guard as extract_Pid (was a latent NPE).
        throw new java.lang.Exception("");
    }
    if (tcV.equal(RefHelper.type()))
        return RefV;
    throw new java.lang.Exception("");
}
/**
 Ref value insertion method.
 **/
public void insert_Ref(Ref r) {
    RefV = r;
    // Old- and new-style references use different external-format tags.
    if (r.isNewRef())
        tag = com.ericsson.otp.erlang.OtpExternal.newRefTag;
    else
        tag = com.ericsson.otp.erlang.OtpExternal.refTag;
    tcV = RefHelper.type();
}
/* Port */
/**
 Port value extractor method.
 @return Port, the value of Term
 @throws java.lang.Exception if the Term does not hold a Port
 **/
public Port extract_Port()
    throws java.lang.Exception {
    if (tcV == null) {
        if (PortV != null)
            return PortV;
        // Same null-tcV guard as extract_Pid (was a latent NPE).
        throw new java.lang.Exception("");
    }
    if (tcV.equal(PortHelper.type()))
        return PortV;
    throw new java.lang.Exception("");
}
/**
 Port value insertion method.
 **/
public void insert_Port(Port p) {
    PortV = p;
    tag = com.ericsson.otp.erlang.OtpExternal.portTag;
    tcV = PortHelper.type();
}
/**
 Object Stream extractor method.
 <p>Lazily materialises an input stream over the term's encoded bytes:
 reuses the cached input stream when present, otherwise rebuilds it from
 the buffered output stream, and as a last resort re-encodes the cached
 string value. The stream is reset before being returned.
 @return OtpInputStream, the stream value of Term, or null when nothing is buffered
 **/
public com.ericsson.otp.erlang.OtpInputStream extract_Streamable() {
    if (is == null) {
        if (os == null) {
            if (stringV == null)
                return null;
            else {
                // A sequence that became a string: re-encode it to bytes.
                os = new com.ericsson.otp.erlang.OtpOutputStream();
                os.write_string(stringV);
                is = new com.ericsson.otp.erlang.OtpInputStream(os.toByteArray());
            }
        }
        else {
            is = new com.ericsson.otp.erlang.OtpInputStream(os.toByteArray());
        }
    }
    is.reset();
    return is;
}
/**
 Inserts Objects to Term.
 <p>NOTE(review): unlike the other insert_* methods this does not update
 {@code tag} or {@code tcV}; the guards dispatch on the object's class instead.
 **/
public void insert_Object(com.ericsson.otp.erlang.OtpErlangObject o) {
    ObjV = o;
}
/**
 Extract Object value from Term.
 @return OtpErlangObject, the Object value of Term (may be null)
 **/
public com.ericsson.otp.erlang.OtpErlangObject extract_Object() {
    return ObjV;
}
}
| apache-2.0 |
skubit/skubit-comics | billing/src/main/java/com/skubit/iab/provider/base/BaseContentProvider.java | 7890 | package com.skubit.iab.provider.base;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.HashSet;
import android.content.ContentProvider;
import android.content.ContentProviderOperation;
import android.content.ContentProviderResult;
import android.content.ContentValues;
import android.content.OperationApplicationException;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteOpenHelper;
import android.net.Uri;
import android.provider.BaseColumns;
import android.util.Log;
public abstract class BaseContentProvider extends ContentProvider {
public static final String QUERY_NOTIFY = "QUERY_NOTIFY";
public static final String QUERY_GROUP_BY = "QUERY_GROUP_BY";
public static final String QUERY_HAVING = "QUERY_HAVING";
public static final String QUERY_LIMIT = "QUERY_LIMIT";
public static class QueryParams {
public String table;
public String tablesWithJoins;
public String idColumn;
public String selection;
public String orderBy;
}
protected abstract QueryParams getQueryParams(Uri uri, String selection, String[] projection);
protected abstract boolean hasDebug();
protected abstract SQLiteOpenHelper createSqLiteOpenHelper();
protected SQLiteOpenHelper mSqLiteOpenHelper;
@Override
public final boolean onCreate() {
if (hasDebug()) {
// Enable logging of SQL statements as they are executed.
try {
Class<?> sqliteDebugClass = Class.forName("android.database.sqlite.SQLiteDebug");
Field field = sqliteDebugClass.getDeclaredField("DEBUG_SQL_STATEMENTS");
field.setAccessible(true);
field.set(null, true);
// Uncomment the following block if you also want logging of execution time (more verbose)
// field = sqliteDebugClass.getDeclaredField("DEBUG_SQL_TIME");
// field.setAccessible(true);
// field.set(null, true);
} catch (Throwable t) {
if (hasDebug()) Log.w(getClass().getSimpleName(), "Could not enable SQLiteDebug logging", t);
}
}
mSqLiteOpenHelper = createSqLiteOpenHelper();
return false;
}
@Override
public Uri insert(Uri uri, ContentValues values) {
String table = uri.getLastPathSegment();
long rowId = mSqLiteOpenHelper.getWritableDatabase().insertOrThrow(table, null, values);
if (rowId == -1) return null;
String notify;
if (((notify = uri.getQueryParameter(QUERY_NOTIFY)) == null || "true".equals(notify))) {
getContext().getContentResolver().notifyChange(uri, null);
}
return uri.buildUpon().appendEncodedPath(String.valueOf(rowId)).build();
}
@Override
public int bulkInsert(Uri uri, ContentValues[] values) {
String table = uri.getLastPathSegment();
SQLiteDatabase db = mSqLiteOpenHelper.getWritableDatabase();
int res = 0;
db.beginTransaction();
try {
for (ContentValues v : values) {
long id = db.insert(table, null, v);
db.yieldIfContendedSafely();
if (id != -1) {
res++;
}
}
db.setTransactionSuccessful();
} finally {
db.endTransaction();
}
String notify;
if (res != 0 && ((notify = uri.getQueryParameter(QUERY_NOTIFY)) == null || "true".equals(notify))) {
getContext().getContentResolver().notifyChange(uri, null);
}
return res;
}
@Override
public int update(Uri uri, ContentValues values, String selection, String[] selectionArgs) {
QueryParams queryParams = getQueryParams(uri, selection, null);
int res = mSqLiteOpenHelper.getWritableDatabase().update(queryParams.table, values, queryParams.selection, selectionArgs);
String notify;
if (res != 0 && ((notify = uri.getQueryParameter(QUERY_NOTIFY)) == null || "true".equals(notify))) {
getContext().getContentResolver().notifyChange(uri, null);
}
return res;
}
@Override
public int delete(Uri uri, String selection, String[] selectionArgs) {
QueryParams queryParams = getQueryParams(uri, selection, null);
int res = mSqLiteOpenHelper.getWritableDatabase().delete(queryParams.table, queryParams.selection, selectionArgs);
String notify;
if (res != 0 && ((notify = uri.getQueryParameter(QUERY_NOTIFY)) == null || "true".equals(notify))) {
getContext().getContentResolver().notifyChange(uri, null);
}
return res;
}
@Override
public Cursor query(Uri uri, String[] projection, String selection, String[] selectionArgs, String sortOrder) {
String groupBy = uri.getQueryParameter(QUERY_GROUP_BY);
String having = uri.getQueryParameter(QUERY_HAVING);
String limit = uri.getQueryParameter(QUERY_LIMIT);
QueryParams queryParams = getQueryParams(uri, selection, projection);
projection = ensureIdIsFullyQualified(projection, queryParams.table, queryParams.idColumn);
Cursor res = mSqLiteOpenHelper.getReadableDatabase().query(queryParams.tablesWithJoins, projection, queryParams.selection, selectionArgs, groupBy,
having, sortOrder == null ? queryParams.orderBy : sortOrder, limit);
res.setNotificationUri(getContext().getContentResolver(), uri);
return res;
}
private String[] ensureIdIsFullyQualified(String[] projection, String tableName, String idColumn) {
if (projection == null) return null;
String[] res = new String[projection.length];
for (int i = 0; i < projection.length; i++) {
if (projection[i].equals(idColumn)) {
res[i] = tableName + "." + idColumn + " AS " + BaseColumns._ID;
} else {
res[i] = projection[i];
}
}
return res;
}
@Override
public ContentProviderResult[] applyBatch(ArrayList<ContentProviderOperation> operations) throws OperationApplicationException {
HashSet<Uri> urisToNotify = new HashSet<Uri>(operations.size());
for (ContentProviderOperation operation : operations) {
urisToNotify.add(operation.getUri());
}
SQLiteDatabase db = mSqLiteOpenHelper.getWritableDatabase();
db.beginTransaction();
try {
int numOperations = operations.size();
ContentProviderResult[] results = new ContentProviderResult[numOperations];
int i = 0;
for (ContentProviderOperation operation : operations) {
results[i] = operation.apply(this, results, i);
if (operation.isYieldAllowed()) {
db.yieldIfContendedSafely();
}
i++;
}
db.setTransactionSuccessful();
for (Uri uri : urisToNotify) {
getContext().getContentResolver().notifyChange(uri, null);
}
return results;
} finally {
db.endTransaction();
}
}
public static Uri notify(Uri uri, boolean notify) {
return uri.buildUpon().appendQueryParameter(QUERY_NOTIFY, String.valueOf(notify)).build();
}
public static Uri groupBy(Uri uri, String groupBy) {
return uri.buildUpon().appendQueryParameter(QUERY_GROUP_BY, groupBy).build();
}
public static Uri having(Uri uri, String having) {
return uri.buildUpon().appendQueryParameter(QUERY_HAVING, having).build();
}
public static Uri limit(Uri uri, String limit) {
return uri.buildUpon().appendQueryParameter(QUERY_LIMIT, limit).build();
}
}
| apache-2.0 |
apache/archiva | archiva-modules/archiva-base/archiva-event-central/src/main/java/org/apache/archiva/event/central/CentralEventManager.java | 1636 | package org.apache.archiva.event.central;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.archiva.event.AbstractEventManager;
import org.apache.archiva.event.Event;
import org.apache.archiva.event.EventHandler;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Service;
/**
* Event manager that collects all events from archiva subsystems.
*
* @author Martin Schreier <martin_s@apache.org>
*/
@Service("eventManager#archiva")
public class CentralEventManager extends AbstractEventManager implements EventHandler<Event>
{
    private static final Logger log = LoggerFactory.getLogger( CentralEventManager.class );

    /**
     * Logs every event routed through the central manager at INFO level.
     *
     * @param event the event received from a subsystem
     */
    @Override
    public void handle( Event event )
    {
        log.info( "Event: type={}, sourceClass={}, source={}", event.getType( ), event.getSource().getClass(), event.getSource() );
    }
}
| apache-2.0 |
jfrazee/incubator-streams | streams-config/src/test/java/org/apache/streams/config/test/StreamsConfigurationForTesting.java | 2249 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.streams.config.test;
import org.apache.streams.config.ComponentConfiguration;
import org.apache.streams.config.StreamsConfiguration;
import com.fasterxml.jackson.annotation.JsonProperty;
import org.apache.juneau.annotation.BeanProperty;
/**
 * {@link StreamsConfiguration} subtype used by configuration tests: adds two
 * named {@link ComponentConfiguration} properties so binding of nested
 * component configuration can be exercised.
 */
public class StreamsConfigurationForTesting extends StreamsConfiguration {

    // Jackson and Juneau annotations are both present so either binder maps the property.
    @JsonProperty("componentOne")
    @BeanProperty("componentOne")
    private org.apache.streams.config.ComponentConfiguration componentOne;

    @JsonProperty("componentTwo")
    @BeanProperty("componentTwo")
    private org.apache.streams.config.ComponentConfiguration componentTwo;

    /** No-arg constructor required for bean binding. */
    public StreamsConfigurationForTesting() {
    }

    /** Convenience constructor setting both component configurations. */
    public StreamsConfigurationForTesting(ComponentConfiguration componentOne, ComponentConfiguration componentTwo) {
        this.componentOne = componentOne;
        this.componentTwo = componentTwo;
    }

    @JsonProperty("componentOne")
    @BeanProperty("componentOne")
    public ComponentConfiguration getComponentOne() {
        return componentOne;
    }

    @JsonProperty("componentOne")
    @BeanProperty("componentOne")
    public void setComponentOne(ComponentConfiguration componentOne) {
        this.componentOne = componentOne;
    }

    @JsonProperty("componentTwo")
    @BeanProperty("componentTwo")
    public ComponentConfiguration getComponentTwo() {
        return componentTwo;
    }

    @JsonProperty("componentTwo")
    @BeanProperty("componentTwo")
    public void setComponentTwo(ComponentConfiguration componentTwo) {
        this.componentTwo = componentTwo;
    }
}
| apache-2.0 |
andrew-su/Android-CleanArchitecture | presentation/src/main/java/com/fernandocejas/android10/sample/presentation/view/activity/MainActivity.java | 792 | package com.fernandocejas.android10.sample.presentation.view.activity;
import android.os.Bundle;
import android.widget.Button;
import butterknife.ButterKnife;
import butterknife.InjectView;
import butterknife.OnClick;
import com.fernandocejas.android10.sample.presentation.R;
/**
* Main application screen. This is the app entry point.
*/
/**
 * Main application screen. This is the app entry point.
 */
public class MainActivity extends BaseActivity {

    // Button that triggers navigation to the user list.
    @InjectView(R.id.btn_LoadData) Button btn_LoadData;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        // Bind @InjectView/@OnClick members to the inflated layout.
        ButterKnife.inject(this);
    }

    /**
     * Goes to the user list screen.
     */
    @OnClick(R.id.btn_LoadData)
    void navigateToUserList() {
        // navigator is provided by BaseActivity.
        this.navigator.navigateToUserList(this);
    }
}
| apache-2.0 |
lizhanhui/rocketmq-console | src/main/java/com/alibaba/rocketmq/interceptor/Helper.java | 307 | package com.alibaba.rocketmq.interceptor;
/**
 * Request/session attribute and URL constants shared by the interceptor
 * package. Non-instantiable constants holder.
 */
final class Helper {

    /** Request parameter carrying the authentication token. */
    static final String TOKEN_KEY = "token";

    /** Request parameter naming the post-login redirect target. */
    static final String REDIRECT_KEY = "redirect";

    /** Session attribute flagging an authenticated session. */
    static final String LOGIN_KEY = "login_in_session";

    /** Path of the login callback endpoint. */
    static final String CALLBACK_URL = "callback.do";

    private Helper() {
        // constants only — never instantiated
    }
}
| apache-2.0 |
arvindn05/osc-core | osc-ui/src/main/java/org/osc/core/broker/view/maintenance/SummaryLayout.java | 8477 | /*******************************************************************************
* Copyright (c) Intel Corporation
* Copyright (c) 2017
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/
package org.osc.core.broker.view.maintenance;
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;
import java.net.InetAddress;
import java.util.Date;
import org.apache.commons.io.FileUtils;
import org.osc.core.broker.service.api.BackupServiceApi;
import org.osc.core.broker.service.api.server.ArchiveApi;
import org.osc.core.broker.service.api.server.ServerApi;
import org.osc.core.broker.service.request.BackupRequest;
import org.osc.core.broker.service.response.BackupResponse;
import org.osc.core.broker.view.common.VmidcMessages;
import org.osc.core.broker.view.common.VmidcMessages_;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.vaadin.data.Property.ValueChangeEvent;
import com.vaadin.data.Property.ValueChangeListener;
import com.vaadin.server.FileDownloader;
import com.vaadin.server.StreamResource;
import com.vaadin.ui.Button;
import com.vaadin.ui.CheckBox;
import com.vaadin.ui.FormLayout;
import com.vaadin.ui.HorizontalLayout;
import com.vaadin.ui.Table;
import com.vaadin.ui.VerticalLayout;
public class SummaryLayout extends FormLayout {
// Directory (relative to the server working dir) whose contents are archived for log download.
private static final String LOGDIR_PATH = "data/log";
/**
 * Serial version for Vaadin component serialization.
 */
private static final long serialVersionUID = 1L;
private Table summarytable = null; // key/value table holding the server summary rows
private CheckBox checkbox = null; // NOTE(review): presumably assigned inside createCheckBox() — confirm
private Button download = null; // triggers the log archive download
private ServerApi server; // source of host/uptime/version information
private BackupServiceApi backupService; // creates server backups
private ArchiveApi archiver; // zips the log directory for download
private static final Logger log = LoggerFactory.getLogger(SummaryLayout.class);
public SummaryLayout(ServerApi server, BackupServiceApi backupService,
ArchiveApi archiver) {
super();
this.server = server;
this.backupService = backupService;
this.archiver = archiver;
this.summarytable = createTable();
// creating Server table
this.summarytable.addItem(new Object[] { "DNS Name: ", getHostName() }, new Integer(1));
this.summarytable.addItem(new Object[] { "IP Address: ", getIpAddress() }, new Integer(2));
this.summarytable.addItem(new Object[] { "Version: ", getVersion() }, new Integer(3));
this.summarytable.addItem(new Object[] { "Uptime: ", server.uptimeToString() }, new Integer(4));
this.summarytable.addItem(new Object[] { "Current Server Time: ", new Date().toString() }, new Integer(5));
VerticalLayout tableContainer = new VerticalLayout();
tableContainer.addComponent(this.summarytable);
addComponent(tableContainer);
addComponent(createCheckBox());
HorizontalLayout actionContainer = new HorizontalLayout();
actionContainer.addComponent(createDownloadButton());
addComponent(actionContainer);
}
private Table createTable() {
Table table = new Table();
table.setSizeFull();
table.setPageLength(0);
table.setSelectable(false);
table.setColumnCollapsingAllowed(true);
table.setColumnReorderingAllowed(true);
table.setImmediate(true);
table.setNullSelectionAllowed(false);
table.addContainerProperty("Name", String.class, null);
table.addContainerProperty("Status", String.class, null);
table.setColumnHeaderMode(Table.ColumnHeaderMode.HIDDEN);
return table;
}
@SuppressWarnings("serial")
private Button createDownloadButton() {
this.download = new Button(VmidcMessages.getString(VmidcMessages_.SUMMARY_DOWNLOAD_LOG)) {
@Override
public void setEnabled(boolean enabled) {
if (enabled) {
// because setEnabled(false) calls are ignored and button is disabled
// on client because of setDisableOnClick(true), by doing this we
// make sure that the button is actually disabled so that setEnabled(true)
// has effect
getUI().getConnectorTracker().getDiffState(this).put("enabled", false);
super.setEnabled(enabled);
}
}
};
SummaryLayout.this.download.setDisableOnClick(true);
if (this.checkbox != null && this.checkbox.getValue()) {
this.download.setCaption(VmidcMessages.getString(VmidcMessages_.SUMMARY_DOWNLOAD_BUNDLE));
}
StreamResource zipStream = getZipStream();
FileDownloader fileDownloader = new FileDownloader(zipStream);
fileDownloader.extend(this.download);
return this.download;
}
@SuppressWarnings("serial")
private CheckBox createCheckBox() {
this.checkbox = new CheckBox(VmidcMessages.getString(VmidcMessages_.SUMMARY_DOWNLOAD_INCLUDE_DB));
this.checkbox.setImmediate(true);
this.checkbox.setValue(false);
this.checkbox.addValueChangeListener(new ValueChangeListener() {
@Override
public void valueChange(ValueChangeEvent event) {
if (SummaryLayout.this.checkbox.getValue()) {
SummaryLayout.this.download
.setCaption(VmidcMessages.getString(VmidcMessages_.SUMMARY_DOWNLOAD_BUNDLE));
} else {
SummaryLayout.this.download
.setCaption(VmidcMessages.getString(VmidcMessages_.SUMMARY_DOWNLOAD_LOG));
}
}
});
return this.checkbox;
}
@SuppressWarnings("serial")
private StreamResource getZipStream() {
StreamResource.StreamSource source = new StreamResource.StreamSource() {
@Override
public InputStream getStream() {
InputStream fin = null;
try {
if (SummaryLayout.this.checkbox.getValue()) {
getDBBackup();
}
// creating a zip file resource to download
fin = new FileInputStream(SummaryLayout.this.archiver.archive(LOGDIR_PATH, "ServerSupportBundle.zip"));
} catch (Exception exception) {
log.error("Failed! to receive zip file from Archieve Util", exception);
} finally {
SummaryLayout.this.backupService.deleteBackupFilesFrom(LOGDIR_PATH);
SummaryLayout.this.download.setEnabled(true);
}
return fin;
}
};
StreamResource resource = new StreamResource(source, "ServerSupportBundle.zip");
return resource;
}
private void getDBBackup() {
try {
BackupResponse res = this.backupService.dispatch(new BackupRequest());
if (res.isSuccess()) {
// move backup to log directory
FileUtils.copyFileToDirectory(this.backupService.getEncryptedBackupFile(), new File("log" + File.separator));
this.backupService.deleteBackupFiles();
log.info("Backup Successful! adding backup file to Support Bundle");
}
} catch (Exception e) {
log.error("Failed to add DB backup in support bundle", e);
}
}
private String getHostName() {
try {
return InetAddress.getLocalHost().getHostName();
} catch (Exception e) {
log.error("Error while reading Host Name ", e);
}
return "";
}
public String getIpAddress() {
try {
return this.server.getHostIpAddress();
} catch (Exception e) {
log.error("Error while Host IP address ", e);
}
return "";
}
private String getVersion() {
return this.server.getVersionStr();
}
}
| apache-2.0 |
tbroyer/error-prone | core/src/test/java/com/google/errorprone/util/ASTHelpersTest.java | 9463 | /*
* Copyright 2013 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.errorprone.util;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import com.google.common.base.Joiner;
import com.google.errorprone.VisitorState;
import com.google.errorprone.matchers.CompilerBasedAbstractTest;
import com.google.errorprone.matchers.Matcher;
import com.google.errorprone.scanner.Scanner;
import com.sun.source.tree.AnnotationTree;
import com.sun.source.tree.ClassTree;
import com.sun.source.tree.ExpressionStatementTree;
import com.sun.source.tree.ExpressionTree;
import com.sun.source.tree.LiteralTree;
import com.sun.source.tree.Tree;
import com.sun.source.tree.VariableTree;
import com.sun.tools.javac.tree.JCTree.JCLiteral;
import org.junit.After;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import java.util.ArrayList;
import java.util.List;
@RunWith(JUnit4.class)
public class ASTHelpersTest extends CompilerBasedAbstractTest {
  // For tests that expect a specific offset in the file, we test with both Windows and UNIX
  // line separators, but we hardcode the line separator in the tests to ensure the tests are
  // hermetic and do not depend on the platform on which they are run.
  private static final Joiner UNIX_LINE_JOINER = Joiner.on("\n");
  private static final Joiner WINDOWS_LINE_JOINER = Joiner.on("\r\n");

  // Scanners registered here are checked in tearDown() to verify their assertions actually ran.
  // Reduced from package-private to private: nothing outside this class uses it.
  private final List<TestScanner> tests = new ArrayList<>();

  @After
  public void tearDown() {
    for (TestScanner test : tests) {
      test.verifyAssertionsComplete();
    }
  }

  @Test
  public void testGetActualStartPositionUnix() {
    String fileContent = UNIX_LINE_JOINER.join(
        "public class A { ",
        "  public void foo() {",
        "    int i;",
        "    i = -1;",
        "  }",
        "}");
    writeFile("A.java", fileContent);
    assertCompiles(literalExpressionMatches(literalHasActualStartPosition(59)));
  }

  @Test
  public void testGetActualStartPositionWindows() {
    String fileContent = WINDOWS_LINE_JOINER.join(
        "public class A { ",
        "  public void foo() {",
        "    int i;",
        "    i = -1;",
        "  }",
        "}");
    writeFile("A.java", fileContent);
    assertCompiles(literalExpressionMatches(literalHasActualStartPosition(62)));
  }

  @Test
  public void testGetActualStartPositionWithWhitespaceUnix() {
    String fileContent = UNIX_LINE_JOINER.join(
        "public class A { ",
        "  public void foo() {",
        "    int i;",
        "    i = -  1;",
        "  }",
        "}");
    writeFile("A.java", fileContent);
    assertCompiles(literalExpressionMatches(literalHasActualStartPosition(59)));
  }

  @Test
  public void testGetActualStartPositionWithWhitespaceWindows() {
    String fileContent = WINDOWS_LINE_JOINER.join(
        "public class A { ",
        "  public void foo() {",
        "    int i;",
        "    i = -  1;",
        "  }",
        "}");
    writeFile("A.java", fileContent);
    assertCompiles(literalExpressionMatches(literalHasActualStartPosition(62)));
  }

  /** Matcher asserting that a literal's computed start position equals {@code startPosition}. */
  private Matcher<LiteralTree> literalHasActualStartPosition(final int startPosition) {
    return new Matcher<LiteralTree>() {
      @Override
      public boolean matches(LiteralTree tree, VisitorState state) {
        JCLiteral literal = (JCLiteral) tree;
        return ASTHelpers.getActualStartPosition(literal, state.getSourceCode()) == startPosition;
      }
    };
  }

  /** Scanner that applies {@code matcher} to every literal expression it visits. */
  private Scanner literalExpressionMatches(final Matcher<LiteralTree> matcher) {
    TestScanner scanner = new TestScanner() {
      @Override
      public Void visitLiteral(LiteralTree node, VisitorState state) {
        assertMatch(node, state, matcher);
        setAssertionsComplete();
        return super.visitLiteral(node, state);
      }
    };
    tests.add(scanner);
    return scanner;
  }

  @Test
  public void testGetReceiver() {
    writeFile("A.java",
        "public class A { ",
        "  public B b;",
        "  public void foo() {}",
        "  public B bar() {",
        "    return null;",
        "  }",
        "}");
    writeFile("B.java",
        "public class B { ",
        "  public void foo() {}",
        "}");
    writeFile("C.java",
        "public class C { ",
        "  public void test() {",
        "     A a = new A();",
        "     a.foo();",  // a
        "     a.b.foo();",  // a.b
        "     a.bar().foo();",  // a.bar()
        "  }",
        "}");
    assertCompiles(expressionStatementMatches("a.foo()", expressionHasReceiver("a")));
    assertCompiles(expressionStatementMatches("a.b.foo()", expressionHasReceiver("a.b")));
    assertCompiles(expressionStatementMatches("a.bar().foo()", expressionHasReceiver("a.bar()")));
  }

  /** Matcher asserting that an expression's receiver prints as {@code expectedReceiver}. */
  private Matcher<ExpressionTree> expressionHasReceiver(final String expectedReceiver) {
    return new Matcher<ExpressionTree>() {
      @Override
      public boolean matches(ExpressionTree t, VisitorState state) {
        return ASTHelpers.getReceiver(t).toString().equals(expectedReceiver);
      }
    };
  }

  /** Scanner applying {@code matcher} only to the expression statement matching the given text. */
  private Scanner expressionStatementMatches(final String expectedExpression,
      final Matcher<ExpressionTree> matcher) {
    return new TestScanner() {
      @Override
      public Void visitExpressionStatement(ExpressionStatementTree node, VisitorState state) {
        ExpressionTree expression = node.getExpression();
        if (expression.toString().equals(expectedExpression)) {
          assertMatch(node.getExpression(), state, matcher);
          setAssertionsComplete();
        }
        return super.visitExpressionStatement(node, state);
      }
    };
  }

  @Test
  public void testAnnotationHelpers() {
    writeFile("com/google/errorprone/util/InheritedAnnotation.java",
        "package com.google.errorprone.util;",
        "import java.lang.annotation.Inherited;",
        "@Inherited",
        "public @interface InheritedAnnotation {}");
    writeFile("B.java",
        "import com.google.errorprone.util.InheritedAnnotation;",
        "@InheritedAnnotation",
        "public class B {}");
    writeFile("C.java",
        "public class C extends B {}");
    // C carries InheritedAnnotation only through inheritance from B; hasAnnotation must see it.
    TestScanner scanner = new TestScanner() {
      @Override
      public Void visitClass(ClassTree tree, VisitorState state) {
        if (tree.getSimpleName().toString().equals("C")) {
          assertMatch(tree, state, new Matcher<ClassTree>() {
            @Override
            public boolean matches(ClassTree t, VisitorState state) {
              return ASTHelpers.hasAnnotation(t, InheritedAnnotation.class);
            }
          });
          setAssertionsComplete();
        }
        return super.visitClass(tree, state);
      }
    };
    tests.add(scanner);
    assertCompiles(scanner);
  }

  @Test
  public void testGetTypeOnNestedAnnotationType() {
    writeFile("A.java",
        "public class A { ",
        "  @B.MyAnnotation",
        "  public void bar() {}",
        "}");
    writeFile("B.java",
        "public class B { ",
        "  @interface MyAnnotation {}",
        "}");
    TestScanner scanner = new TestScanner() {
      @Override
      public Void visitAnnotation(AnnotationTree tree, VisitorState state) {
        setAssertionsComplete();
        assertEquals("B.MyAnnotation", ASTHelpers.getType(tree.getAnnotationType()).toString());
        return super.visitAnnotation(tree, state);
      }
    };
    tests.add(scanner);
    assertCompiles(scanner);
  }

  @Test
  public void testGetTypeOnNestedClassType() {
    writeFile("A.java",
        "public class A { ",
        "  public void bar() {",
        "    B.C foo;",
        "  }",
        "}");
    writeFile("B.java",
        "public class B { ",
        "  public static class C {}",
        "}");
    TestScanner scanner = new TestScanner() {
      @Override
      public Void visitVariable(VariableTree tree, VisitorState state) {
        setAssertionsComplete();
        assertEquals("B.C", ASTHelpers.getType(tree.getType()).toString());
        return super.visitVariable(tree, state);
      }
    };
    tests.add(scanner);
    assertCompiles(scanner);
  }

  /** Base scanner that records whether its assertion path actually executed. */
  private abstract static class TestScanner extends Scanner {
    private boolean assertionsComplete = false;

    /**
     * Subclasses of {@link TestScanner} are expected to call this method within their overridden
     * visitXYZ() method in order to verify that the method has run at least once.
     */
    protected void setAssertionsComplete() {
      this.assertionsComplete = true;
    }

    <T extends Tree> void assertMatch(T node, VisitorState visitorState,
        Matcher<T> matcher) {
      VisitorState state = visitorState.withPath(getCurrentPath());
      assertTrue(matcher.matches(node, state));
    }

    public void verifyAssertionsComplete() {
      assertTrue("Expected the visitor to call setAssertionsComplete().", assertionsComplete);
    }
  }
}
| apache-2.0 |
chubbymaggie/binnavi | src/main/java/com/google/security/zynamics/zylib/yfileswrap/gui/zygraph/editmode/actions/CDefaultNodeClickedLeftAction.java | 5816 | /*
Copyright 2011-2016 Google Inc. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.google.security.zynamics.zylib.yfileswrap.gui.zygraph.editmode.actions;
import com.google.security.zynamics.zylib.gui.zygraph.editmode.IStateAction;
import com.google.security.zynamics.zylib.gui.zygraph.editmode.helpers.CMouseCursorHelper;
import com.google.security.zynamics.zylib.gui.zygraph.nodes.IGroupNode;
import com.google.security.zynamics.zylib.yfileswrap.gui.zygraph.AbstractZyGraph;
import com.google.security.zynamics.zylib.yfileswrap.gui.zygraph.IZyEditModeListener;
import com.google.security.zynamics.zylib.yfileswrap.gui.zygraph.edges.ZyGraphEdge;
import com.google.security.zynamics.zylib.yfileswrap.gui.zygraph.editmode.CStateFactory;
import com.google.security.zynamics.zylib.yfileswrap.gui.zygraph.editmode.helpers.CEditNodeHelper;
import com.google.security.zynamics.zylib.yfileswrap.gui.zygraph.editmode.helpers.CGraphSelector;
import com.google.security.zynamics.zylib.yfileswrap.gui.zygraph.editmode.states.CNodeClickedLeftState;
import com.google.security.zynamics.zylib.yfileswrap.gui.zygraph.helpers.ProximityHelper;
import com.google.security.zynamics.zylib.yfileswrap.gui.zygraph.nodes.ZyGraphNode;
import com.google.security.zynamics.zylib.yfileswrap.gui.zygraph.proximity.ZyProximityNode;
import y.base.Node;
import y.view.NodeLabel;
import y.view.hierarchy.GroupNodeRealizer;
import java.awt.event.MouseEvent;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Set;
import javax.swing.SwingUtilities;
public class CDefaultNodeClickedLeftAction<NodeType extends ZyGraphNode<?>, EdgeType extends ZyGraphEdge<?, ?, ?>>
    implements IStateAction<CNodeClickedLeftState<NodeType, EdgeType>> {
  /**
   * Toggles the expansion state of a given group node.
   *
   * @param node The node to expand or collapse.
   */
  private void toggleGroup(final NodeType node) {
    final IGroupNode<?, ?> gnode = (IGroupNode<?, ?>) node.getRawNode();
    gnode.setCollapsed(!gnode.isCollapsed());
  }

  /**
   * Handles a left click on a graph node. Proximity-browser nodes notify the registered
   * listeners; group nodes are expanded/collapsed (via the state-label "X", or
   * CTRL+double-click) or selected; regular nodes receive caret handling, selection updates
   * (shift = path selection, ctrl = toggle, plain = exclusive select) and click notifications.
   */
  @Override
  public void execute(final CNodeClickedLeftState<NodeType, EdgeType> state, final MouseEvent event) {
    CMouseCursorHelper.setDefaultCursor(state.getGraph());

    final AbstractZyGraph<NodeType, EdgeType> graph = state.getGraph();
    final Node n = state.getNode();
    final NodeType node = graph.getNode(n);

    // Translate screen coordinates into graph coordinates.
    final double x = graph.getEditMode().translateX(event.getX());
    final double y = graph.getEditMode().translateY(event.getY());

    final CStateFactory<NodeType, EdgeType> factory = state.getStateFactory();

    if (ProximityHelper.isProximityNode(state.getGraph().getGraph(), n)) {
      CEditNodeHelper.removeCaret(graph);

      final ZyProximityNode<?> proximityNode =
          ProximityHelper.getProximityNode(graph.getGraph(), n);

      for (final IZyEditModeListener<NodeType, EdgeType> listener : factory.getListeners()) {
        // Listener exceptions must not abort notification of the remaining listeners.
        try {
          listener.proximityBrowserNodeClicked(proximityNode, event, x, y);
        } catch (final Exception exception) {
          // TODO: (timkornau): implement logging here.
        }
      }
    } else if ((node != null) && (node.getRawNode() instanceof IGroupNode)) {
      CEditNodeHelper.removeCaret(graph);

      final GroupNodeRealizer gnr =
          (GroupNodeRealizer) graph.getGraph().getRealizer(node.getNode());
      final NodeLabel handle = gnr.getStateLabel();

      if (handle.getBox().contains(x, y)) {
        // Clicks on the X in the group node corner
        toggleGroup(node);
      } else if (SwingUtilities.isLeftMouseButton(event)) {
        if ((event.getClickCount() == 2) && event.isControlDown()) {
          // CTRL-LEFT-DOUBLECLICK
          toggleGroup(node);
        } else if (event.getClickCount() == 1) {
          CGraphSelector.selectNode(graph, node, event.isShiftDown());
        }
      }
    } else {
      if (node != null) {
        if (graph.getEditMode().getLabelEventHandler()
            .isActiveLabel(node.getRealizer().getNodeContent())) {
          // Click landed inside the node's active (editable) label: move the caret there.
          CEditNodeHelper.setCaretEnd(graph, state.getNode(), event);
        } else {
          if (graph.getEditMode().getLabelEventHandler().isActive()) {
            CEditNodeHelper.removeCaret(graph);
          }

          final Set<NodeType> selectedNodes = graph.getSelectedNodes();

          if (event.isShiftDown() && !selectedNodes.isEmpty()) {
            // Shift-click extends the selection along a path from the current selection.
            CGraphSelector.selectPath(graph, new ArrayList<NodeType>(selectedNodes), node);
          } else if (event.isControlDown()) {
            graph.selectNode(node, !node.isSelected());
          } else {
            // Plain click: select only the clicked node, unselect everything else.
            final Collection<NodeType> toUnselect = new ArrayList<NodeType>(graph.getNodes());
            toUnselect.remove(node);

            final Collection<NodeType> toSelect = new ArrayList<NodeType>();
            toSelect.add(node);

            graph.selectNodes(toSelect, toUnselect);
          }
        }

        for (final IZyEditModeListener<NodeType, EdgeType> listener : factory.getListeners()) {
          // ESCA-JAVA0166: Catch Exception because we are calling a listener function
          try {
            listener.nodeClicked(node, event, x, y);
          } catch (final Exception exception) {
            // TODO: (timkornau): implement logging here.
          }
        }
      }
    }
  }
}
| apache-2.0 |
objectiser/camel | core/camel-api/src/main/java/org/apache/camel/spi/ReactiveExecutor.java | 2973 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.spi;
import org.apache.camel.AsyncCallback;
/**
 * SPI to plugin different reactive engines in the Camel routing engine.
 *
 * Implementations must accept tasks from multiple threads; how tasks are queued
 * and prioritized is implementation specific.
 */
public interface ReactiveExecutor {

    /**
     * Schedules the task to be run
     *
     * @param runnable the task
     */
    default void schedule(Runnable runnable) {
        // Convenience overload: no human-readable description attached.
        schedule(runnable, null);
    }

    /**
     * Schedules the task to be run
     *
     * @param runnable the task
     * @param description a human readable description for logging purpose
     */
    void schedule(Runnable runnable, String description);

    /**
     * Schedules the task to be prioritized and run asap
     *
     * @param runnable the task
     */
    default void scheduleMain(Runnable runnable) {
        scheduleMain(runnable, null);
    }

    /**
     * Schedules the task to be prioritized and run asap
     *
     * @param runnable the task
     * @param description a human readable description for logging purpose
     */
    void scheduleMain(Runnable runnable, String description);

    /**
     * Schedules the task to run synchronously
     *
     * @param runnable the task
     */
    default void scheduleSync(Runnable runnable) {
        scheduleSync(runnable, null);
    }

    /**
     * Schedules the task to run synchronously
     *
     * @param runnable the task
     * @param description a human readable description for logging purpose
     */
    void scheduleSync(Runnable runnable, String description);

    /**
     * Executes the next task (if supported by the reactive executor implementation)
     *
     * @return true if a task was executed or false if no more pending tasks
     */
    boolean executeFromQueue();

    /**
     * Schedules the callback to be run
     *
     * @param callback the callable
     */
    default void callback(AsyncCallback callback) {
        // An anonymous class (rather than a lambda) is used deliberately so that
        // toString() can be overridden, giving a readable task description if the
        // executor logs its pending tasks.
        schedule(new Runnable() {
            @Override
            public void run() {
                // Completes the callback; 'false' signals asynchronous completion.
                callback.done(false);
            }

            @Override
            public String toString() {
                return "Callback[" + callback + "]";
            }
        });
    }
}
| apache-2.0 |
shixuan-fan/presto | presto-main/src/main/java/com/facebook/presto/security/AccessControlManager.java | 39673 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.security;
import com.facebook.airlift.log.Logger;
import com.facebook.airlift.stats.CounterStat;
import com.facebook.presto.common.CatalogSchemaName;
import com.facebook.presto.common.QualifiedObjectName;
import com.facebook.presto.spi.CatalogSchemaTableName;
import com.facebook.presto.spi.ConnectorId;
import com.facebook.presto.spi.PrestoException;
import com.facebook.presto.spi.SchemaTableName;
import com.facebook.presto.spi.connector.ConnectorAccessControl;
import com.facebook.presto.spi.connector.ConnectorTransactionHandle;
import com.facebook.presto.spi.security.AccessControlContext;
import com.facebook.presto.spi.security.Identity;
import com.facebook.presto.spi.security.PrestoPrincipal;
import com.facebook.presto.spi.security.Privilege;
import com.facebook.presto.spi.security.SystemAccessControl;
import com.facebook.presto.spi.security.SystemAccessControlFactory;
import com.facebook.presto.transaction.TransactionId;
import com.facebook.presto.transaction.TransactionManager;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import org.weakref.jmx.Managed;
import org.weakref.jmx.Nested;
import javax.inject.Inject;
import java.io.File;
import java.security.Principal;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
import static com.facebook.presto.metadata.MetadataUtil.toSchemaTableName;
import static com.facebook.presto.spi.StandardErrorCode.SERVER_STARTING_UP;
import static com.facebook.presto.util.PropertiesUtil.loadProperties;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.base.Strings.isNullOrEmpty;
import static java.lang.String.format;
import static java.util.Objects.requireNonNull;
public class AccessControlManager
implements AccessControl
{
private static final Logger log = Logger.get(AccessControlManager.class);
private static final File ACCESS_CONTROL_CONFIGURATION = new File("etc/access-control.properties");
private static final String ACCESS_CONTROL_PROPERTY_NAME = "access-control.name";
private final TransactionManager transactionManager;
private final Map<String, SystemAccessControlFactory> systemAccessControlFactories = new ConcurrentHashMap<>();
private final Map<ConnectorId, CatalogAccessControlEntry> connectorAccessControl = new ConcurrentHashMap<>();
private final AtomicReference<SystemAccessControl> systemAccessControl = new AtomicReference<>(new InitializingSystemAccessControl());
private final AtomicBoolean systemAccessControlLoading = new AtomicBoolean();
private final CounterStat authenticationSuccess = new CounterStat();
private final CounterStat authenticationFail = new CounterStat();
private final CounterStat authorizationSuccess = new CounterStat();
private final CounterStat authorizationFail = new CounterStat();
    @Inject
    public AccessControlManager(TransactionManager transactionManager)
    {
        this.transactionManager = requireNonNull(transactionManager, "transactionManager is null");
        // Register the built-in system access control implementations so they can be selected
        // by name when loadSystemAccessControl() runs; plugins may register additional factories
        // through addSystemAccessControlFactory before loading.
        addSystemAccessControlFactory(new AllowAllSystemAccessControl.Factory());
        addSystemAccessControlFactory(new ReadOnlySystemAccessControl.Factory());
        addSystemAccessControlFactory(new FileBasedSystemAccessControl.Factory());
    }
public void addSystemAccessControlFactory(SystemAccessControlFactory accessControlFactory)
{
requireNonNull(accessControlFactory, "accessControlFactory is null");
if (systemAccessControlFactories.putIfAbsent(accessControlFactory.getName(), accessControlFactory) != null) {
throw new IllegalArgumentException(format("Access control '%s' is already registered", accessControlFactory.getName()));
}
}
public void addCatalogAccessControl(ConnectorId connectorId, ConnectorAccessControl accessControl)
{
requireNonNull(connectorId, "connectorId is null");
requireNonNull(accessControl, "accessControl is null");
checkState(connectorAccessControl.putIfAbsent(connectorId, new CatalogAccessControlEntry(connectorId, accessControl)) == null,
"Access control for connector '%s' is already registered", connectorId);
}
    /** Unregisters the connector-level access control for a catalog; a no-op if none is registered. */
    public void removeCatalogAccessControl(ConnectorId connectorId)
    {
        connectorAccessControl.remove(connectorId);
    }
    /**
     * Loads the system access control from etc/access-control.properties if that file exists;
     * otherwise installs the permissive allow-all access control.
     *
     * @throws Exception if the configuration file cannot be read
     */
    public void loadSystemAccessControl()
            throws Exception
    {
        if (ACCESS_CONTROL_CONFIGURATION.exists()) {
            Map<String, String> properties = loadProperties(ACCESS_CONTROL_CONFIGURATION);
            // The configuration must name the implementation via access-control.name.
            checkArgument(!isNullOrEmpty(properties.get(ACCESS_CONTROL_PROPERTY_NAME)),
                    "Access control configuration %s does not contain %s",
                    ACCESS_CONTROL_CONFIGURATION.getAbsoluteFile(),
                    ACCESS_CONTROL_PROPERTY_NAME);

            loadSystemAccessControl(properties);
        }
        else {
            setSystemAccessControl(AllowAllSystemAccessControl.NAME, ImmutableMap.of());
        }
    }
public void loadSystemAccessControl(Map<String, String> properties)
{
properties = new HashMap<>(properties);
String accessControlName = properties.remove(ACCESS_CONTROL_PROPERTY_NAME);
checkArgument(!isNullOrEmpty(accessControlName), "%s property must be present", ACCESS_CONTROL_PROPERTY_NAME);
setSystemAccessControl(accessControlName, properties);
}
    /**
     * Instantiates the named system access control and installs it. May be invoked only once
     * per manager instance; the compare-and-set below guards against double initialization.
     */
    @VisibleForTesting
    protected void setSystemAccessControl(String name, Map<String, String> properties)
    {
        requireNonNull(name, "name is null");
        requireNonNull(properties, "properties is null");

        checkState(systemAccessControlLoading.compareAndSet(false, true), "System access control already initialized");

        log.info("-- Loading system access control --");

        SystemAccessControlFactory systemAccessControlFactory = systemAccessControlFactories.get(name);
        checkState(systemAccessControlFactory != null, "Access control %s is not registered", name);

        // Replaces the initializing placeholder installed at construction time.
        SystemAccessControl systemAccessControl = systemAccessControlFactory.create(ImmutableMap.copyOf(properties));
        this.systemAccessControl.set(systemAccessControl);

        log.info("-- Loaded system access control %s --", name);
    }
    @Override
    public void checkCanSetUser(AccessControlContext context, Optional<Principal> principal, String userName)
    {
        requireNonNull(principal, "principal is null");
        requireNonNull(userName, "userName is null");
        // Authentication-level check: success/failure is counted in the authentication stats.
        authenticationCheck(() -> systemAccessControl.get().checkCanSetUser(context, principal, userName));
    }
    @Override
    public void checkQueryIntegrity(Identity identity, AccessControlContext context, String query)
    {
        requireNonNull(identity, "identity is null");
        requireNonNull(query, "query is null");
        // Delegated entirely to the system access control; connectors are not consulted.
        authenticationCheck(() -> systemAccessControl.get().checkQueryIntegrity(identity, context, query));
    }
    @Override
    public Set<String> filterCatalogs(Identity identity, AccessControlContext context, Set<String> catalogs)
    {
        requireNonNull(identity, "identity is null");
        requireNonNull(catalogs, "catalogs is null");
        // Catalog visibility is decided solely by the system access control.
        return systemAccessControl.get().filterCatalogs(identity, context, catalogs);
    }
    @Override
    public void checkCanAccessCatalog(Identity identity, AccessControlContext context, String catalogName)
    {
        requireNonNull(identity, "identity is null");
        requireNonNull(catalogName, "catalog is null");
        // Catalog access is an authentication-level decision made by the system access control.
        authenticationCheck(() -> systemAccessControl.get().checkCanAccessCatalog(identity, context, catalogName));
    }
    @Override
    public void checkCanCreateSchema(TransactionId transactionId, Identity identity, AccessControlContext context, CatalogSchemaName schemaName)
    {
        requireNonNull(identity, "identity is null");
        requireNonNull(schemaName, "schemaName is null");

        // Order matters: catalog visibility (authentication), then the system-level check,
        // then the connector-level check if the catalog registered one.
        authenticationCheck(() -> checkCanAccessCatalog(identity, context, schemaName.getCatalogName()));

        authorizationCheck(() -> systemAccessControl.get().checkCanCreateSchema(identity, context, schemaName));

        CatalogAccessControlEntry entry = getConnectorAccessControl(transactionId, schemaName.getCatalogName());
        if (entry != null) {
            authorizationCheck(() -> entry.getAccessControl().checkCanCreateSchema(entry.getTransactionHandle(transactionId), identity.toConnectorIdentity(schemaName.getCatalogName()), context, schemaName.getSchemaName()));
        }
    }
    @Override
    public void checkCanDropSchema(TransactionId transactionId, Identity identity, AccessControlContext context, CatalogSchemaName schemaName)
    {
        requireNonNull(identity, "identity is null");
        requireNonNull(schemaName, "schemaName is null");

        // Catalog visibility first, then system-level, then the optional connector-level check.
        authenticationCheck(() -> checkCanAccessCatalog(identity, context, schemaName.getCatalogName()));

        authorizationCheck(() -> systemAccessControl.get().checkCanDropSchema(identity, context, schemaName));

        CatalogAccessControlEntry entry = getConnectorAccessControl(transactionId, schemaName.getCatalogName());
        if (entry != null) {
            authorizationCheck(() -> entry.getAccessControl().checkCanDropSchema(entry.getTransactionHandle(transactionId), identity.toConnectorIdentity(schemaName.getCatalogName()), context, schemaName.getSchemaName()));
        }
    }
@Override
public void checkCanRenameSchema(TransactionId transactionId, Identity identity, AccessControlContext context, CatalogSchemaName schemaName, String newSchemaName)
{
requireNonNull(identity, "identity is null");
requireNonNull(schemaName, "schemaName is null");
authenticationCheck(() -> checkCanAccessCatalog(identity, context, schemaName.getCatalogName()));
authorizationCheck(() -> systemAccessControl.get().checkCanRenameSchema(identity, context, schemaName, newSchemaName));
CatalogAccessControlEntry entry = getConnectorAccessControl(transactionId, schemaName.getCatalogName());
if (entry != null) {
authorizationCheck(() -> entry.getAccessControl().checkCanRenameSchema(entry.getTransactionHandle(transactionId), identity.toConnectorIdentity(schemaName.getCatalogName()), context, schemaName.getSchemaName(), newSchemaName));
}
}
    @Override
    public void checkCanShowSchemas(TransactionId transactionId, Identity identity, AccessControlContext context, String catalogName)
    {
        requireNonNull(identity, "identity is null");
        requireNonNull(catalogName, "catalogName is null");

        // Catalog visibility first, then system-level, then the optional connector-level check.
        authenticationCheck(() -> checkCanAccessCatalog(identity, context, catalogName));

        authorizationCheck(() -> systemAccessControl.get().checkCanShowSchemas(identity, context, catalogName));

        CatalogAccessControlEntry entry = getConnectorAccessControl(transactionId, catalogName);
        if (entry != null) {
            authorizationCheck(() -> entry.getAccessControl().checkCanShowSchemas(entry.getTransactionHandle(transactionId), identity.toConnectorIdentity(catalogName), context));
        }
    }
@Override
public Set<String> filterSchemas(TransactionId transactionId, Identity identity, AccessControlContext context, String catalogName, Set<String> schemaNames)
{
    requireNonNull(identity, "identity is null");
    requireNonNull(catalogName, "catalogName is null");
    requireNonNull(schemaNames, "schemaNames is null");

    // An inaccessible catalog hides all of its schemas.
    if (filterCatalogs(identity, context, ImmutableSet.of(catalogName)).isEmpty()) {
        return ImmutableSet.of();
    }

    // Narrow through the system-level filter first, then the connector-level filter.
    Set<String> filtered = systemAccessControl.get().filterSchemas(identity, context, catalogName, schemaNames);
    CatalogAccessControlEntry entry = getConnectorAccessControl(transactionId, catalogName);
    if (entry == null) {
        return filtered;
    }
    return entry.getAccessControl().filterSchemas(entry.getTransactionHandle(transactionId), identity.toConnectorIdentity(catalogName), context, filtered);
}
@Override
public void checkCanCreateTable(TransactionId transactionId, Identity identity, AccessControlContext context, QualifiedObjectName tableName)
{
    requireNonNull(identity, "identity is null");
    requireNonNull(tableName, "tableName is null");

    String catalogName = tableName.getCatalogName();

    // Authenticate catalog access, then the system-level check, then the
    // catalog connector's own check (when one is registered).
    authenticationCheck(() -> checkCanAccessCatalog(identity, context, catalogName));
    authorizationCheck(() -> systemAccessControl.get().checkCanCreateTable(identity, context, toCatalogSchemaTableName(tableName)));

    CatalogAccessControlEntry entry = getConnectorAccessControl(transactionId, catalogName);
    if (entry == null) {
        return;
    }
    authorizationCheck(() -> entry.getAccessControl().checkCanCreateTable(entry.getTransactionHandle(transactionId), identity.toConnectorIdentity(catalogName), context, toSchemaTableName(tableName)));
}
@Override
public void checkCanDropTable(TransactionId transactionId, Identity identity, AccessControlContext context, QualifiedObjectName tableName)
{
    requireNonNull(identity, "identity is null");
    requireNonNull(tableName, "tableName is null");

    String catalogName = tableName.getCatalogName();

    // Authenticate catalog access, then the system-level check, then the
    // catalog connector's own check (when one is registered).
    authenticationCheck(() -> checkCanAccessCatalog(identity, context, catalogName));
    authorizationCheck(() -> systemAccessControl.get().checkCanDropTable(identity, context, toCatalogSchemaTableName(tableName)));

    CatalogAccessControlEntry entry = getConnectorAccessControl(transactionId, catalogName);
    if (entry == null) {
        return;
    }
    authorizationCheck(() -> entry.getAccessControl().checkCanDropTable(entry.getTransactionHandle(transactionId), identity.toConnectorIdentity(catalogName), context, toSchemaTableName(tableName)));
}
@Override
public void checkCanRenameTable(TransactionId transactionId, Identity identity, AccessControlContext context, QualifiedObjectName tableName, QualifiedObjectName newTableName)
{
    requireNonNull(identity, "identity is null");
    requireNonNull(tableName, "tableName is null");
    requireNonNull(newTableName, "newTableName is null");

    String catalogName = tableName.getCatalogName();

    // Authenticate access to the source table's catalog, then the system-level
    // check, then the catalog connector's own check (when one is registered).
    authenticationCheck(() -> checkCanAccessCatalog(identity, context, catalogName));
    authorizationCheck(() -> systemAccessControl.get().checkCanRenameTable(identity, context, toCatalogSchemaTableName(tableName), toCatalogSchemaTableName(newTableName)));

    CatalogAccessControlEntry entry = getConnectorAccessControl(transactionId, catalogName);
    if (entry == null) {
        return;
    }
    authorizationCheck(() -> entry.getAccessControl().checkCanRenameTable(entry.getTransactionHandle(transactionId), identity.toConnectorIdentity(catalogName), context, toSchemaTableName(tableName), toSchemaTableName(newTableName)));
}
/**
 * Checks that {@code identity} may list table metadata in the given schema:
 * catalog access is authenticated, then the system access control is consulted,
 * then the catalog's own connector access control (when registered).
 */
@Override
public void checkCanShowTablesMetadata(TransactionId transactionId, Identity identity, AccessControlContext context, CatalogSchemaName schema)
{
    requireNonNull(identity, "identity is null");
    requireNonNull(schema, "schema is null");
    authenticationCheck(() -> checkCanAccessCatalog(identity, context, schema.getCatalogName()));
    authorizationCheck(() -> systemAccessControl.get().checkCanShowTablesMetadata(identity, context, schema));
    CatalogAccessControlEntry entry = getConnectorAccessControl(transactionId, schema.getCatalogName());
    if (entry != null) {
        // Fix: pass the catalog name when deriving the connector identity, as every other
        // connector-level check in this class does. The previous no-arg call presumably
        // dropped catalog-scoped identity information — confirm against Identity.toConnectorIdentity.
        authorizationCheck(() -> entry.getAccessControl().checkCanShowTablesMetadata(entry.getTransactionHandle(transactionId), identity.toConnectorIdentity(schema.getCatalogName()), context, schema.getSchemaName()));
    }
}
@Override
public Set<SchemaTableName> filterTables(TransactionId transactionId, Identity identity, AccessControlContext context, String catalogName, Set<SchemaTableName> tableNames)
{
    requireNonNull(identity, "identity is null");
    requireNonNull(catalogName, "catalogName is null");
    requireNonNull(tableNames, "tableNames is null");

    // An inaccessible catalog hides all of its tables.
    if (filterCatalogs(identity, context, ImmutableSet.of(catalogName)).isEmpty()) {
        return ImmutableSet.of();
    }

    // Narrow through the system-level filter first, then the connector-level filter.
    Set<SchemaTableName> filtered = systemAccessControl.get().filterTables(identity, context, catalogName, tableNames);
    CatalogAccessControlEntry entry = getConnectorAccessControl(transactionId, catalogName);
    if (entry == null) {
        return filtered;
    }
    return entry.getAccessControl().filterTables(entry.getTransactionHandle(transactionId), identity.toConnectorIdentity(catalogName), context, filtered);
}
@Override
public void checkCanAddColumns(TransactionId transactionId, Identity identity, AccessControlContext context, QualifiedObjectName tableName)
{
    requireNonNull(identity, "identity is null");
    requireNonNull(tableName, "tableName is null");

    String catalogName = tableName.getCatalogName();

    // Authenticate catalog access, then the system-level check, then the
    // catalog connector's own check (when one is registered).
    authenticationCheck(() -> checkCanAccessCatalog(identity, context, catalogName));
    authorizationCheck(() -> systemAccessControl.get().checkCanAddColumn(identity, context, toCatalogSchemaTableName(tableName)));

    CatalogAccessControlEntry entry = getConnectorAccessControl(transactionId, catalogName);
    if (entry == null) {
        return;
    }
    authorizationCheck(() -> entry.getAccessControl().checkCanAddColumn(entry.getTransactionHandle(transactionId), identity.toConnectorIdentity(catalogName), context, toSchemaTableName(tableName)));
}
@Override
public void checkCanDropColumn(TransactionId transactionId, Identity identity, AccessControlContext context, QualifiedObjectName tableName)
{
    requireNonNull(identity, "identity is null");
    requireNonNull(tableName, "tableName is null");

    String catalogName = tableName.getCatalogName();

    // Authenticate catalog access, then the system-level check, then the
    // catalog connector's own check (when one is registered).
    authenticationCheck(() -> checkCanAccessCatalog(identity, context, catalogName));
    authorizationCheck(() -> systemAccessControl.get().checkCanDropColumn(identity, context, toCatalogSchemaTableName(tableName)));

    CatalogAccessControlEntry entry = getConnectorAccessControl(transactionId, catalogName);
    if (entry == null) {
        return;
    }
    authorizationCheck(() -> entry.getAccessControl().checkCanDropColumn(entry.getTransactionHandle(transactionId), identity.toConnectorIdentity(catalogName), context, toSchemaTableName(tableName)));
}
@Override
public void checkCanRenameColumn(TransactionId transactionId, Identity identity, AccessControlContext context, QualifiedObjectName tableName)
{
    requireNonNull(identity, "identity is null");
    requireNonNull(tableName, "tableName is null");

    String catalogName = tableName.getCatalogName();

    // Authenticate catalog access, then the system-level check, then the
    // catalog connector's own check (when one is registered).
    authenticationCheck(() -> checkCanAccessCatalog(identity, context, catalogName));
    authorizationCheck(() -> systemAccessControl.get().checkCanRenameColumn(identity, context, toCatalogSchemaTableName(tableName)));

    CatalogAccessControlEntry entry = getConnectorAccessControl(transactionId, catalogName);
    if (entry == null) {
        return;
    }
    authorizationCheck(() -> entry.getAccessControl().checkCanRenameColumn(entry.getTransactionHandle(transactionId), identity.toConnectorIdentity(catalogName), context, toSchemaTableName(tableName)));
}
@Override
public void checkCanInsertIntoTable(TransactionId transactionId, Identity identity, AccessControlContext context, QualifiedObjectName tableName)
{
    requireNonNull(identity, "identity is null");
    requireNonNull(tableName, "tableName is null");

    String catalogName = tableName.getCatalogName();

    // Authenticate catalog access, then the system-level check, then the
    // catalog connector's own check (when one is registered).
    authenticationCheck(() -> checkCanAccessCatalog(identity, context, catalogName));
    authorizationCheck(() -> systemAccessControl.get().checkCanInsertIntoTable(identity, context, toCatalogSchemaTableName(tableName)));

    CatalogAccessControlEntry entry = getConnectorAccessControl(transactionId, catalogName);
    if (entry == null) {
        return;
    }
    authorizationCheck(() -> entry.getAccessControl().checkCanInsertIntoTable(entry.getTransactionHandle(transactionId), identity.toConnectorIdentity(catalogName), context, toSchemaTableName(tableName)));
}
@Override
public void checkCanDeleteFromTable(TransactionId transactionId, Identity identity, AccessControlContext context, QualifiedObjectName tableName)
{
    requireNonNull(identity, "identity is null");
    requireNonNull(tableName, "tableName is null");

    String catalogName = tableName.getCatalogName();

    // Authenticate catalog access, then the system-level check, then the
    // catalog connector's own check (when one is registered).
    authenticationCheck(() -> checkCanAccessCatalog(identity, context, catalogName));
    authorizationCheck(() -> systemAccessControl.get().checkCanDeleteFromTable(identity, context, toCatalogSchemaTableName(tableName)));

    CatalogAccessControlEntry entry = getConnectorAccessControl(transactionId, catalogName);
    if (entry == null) {
        return;
    }
    authorizationCheck(() -> entry.getAccessControl().checkCanDeleteFromTable(entry.getTransactionHandle(transactionId), identity.toConnectorIdentity(catalogName), context, toSchemaTableName(tableName)));
}
@Override
public void checkCanCreateView(TransactionId transactionId, Identity identity, AccessControlContext context, QualifiedObjectName viewName)
{
    requireNonNull(identity, "identity is null");
    requireNonNull(viewName, "viewName is null");

    String catalogName = viewName.getCatalogName();

    // Authenticate catalog access, then the system-level check, then the
    // catalog connector's own check (when one is registered).
    authenticationCheck(() -> checkCanAccessCatalog(identity, context, catalogName));
    authorizationCheck(() -> systemAccessControl.get().checkCanCreateView(identity, context, toCatalogSchemaTableName(viewName)));

    CatalogAccessControlEntry entry = getConnectorAccessControl(transactionId, catalogName);
    if (entry == null) {
        return;
    }
    authorizationCheck(() -> entry.getAccessControl().checkCanCreateView(entry.getTransactionHandle(transactionId), identity.toConnectorIdentity(catalogName), context, toSchemaTableName(viewName)));
}
@Override
public void checkCanDropView(TransactionId transactionId, Identity identity, AccessControlContext context, QualifiedObjectName viewName)
{
    requireNonNull(identity, "identity is null");
    requireNonNull(viewName, "viewName is null");

    String catalogName = viewName.getCatalogName();

    // Authenticate catalog access, then the system-level check, then the
    // catalog connector's own check (when one is registered).
    authenticationCheck(() -> checkCanAccessCatalog(identity, context, catalogName));
    authorizationCheck(() -> systemAccessControl.get().checkCanDropView(identity, context, toCatalogSchemaTableName(viewName)));

    CatalogAccessControlEntry entry = getConnectorAccessControl(transactionId, catalogName);
    if (entry == null) {
        return;
    }
    authorizationCheck(() -> entry.getAccessControl().checkCanDropView(entry.getTransactionHandle(transactionId), identity.toConnectorIdentity(catalogName), context, toSchemaTableName(viewName)));
}
/**
 * Checks that {@code identity} may create a view selecting the given columns of
 * {@code tableName}: catalog access is authenticated, then the system access
 * control is consulted, then the catalog's connector access control (when registered).
 */
@Override
public void checkCanCreateViewWithSelectFromColumns(TransactionId transactionId, Identity identity, AccessControlContext context, QualifiedObjectName tableName, Set<String> columnNames)
{
    requireNonNull(identity, "identity is null");
    requireNonNull(tableName, "tableName is null");
    // Fix: validate columnNames too, matching checkCanSelectFromColumns; previously a
    // null set would surface as an unannotated NPE inside a delegate.
    requireNonNull(columnNames, "columnNames is null");
    authenticationCheck(() -> checkCanAccessCatalog(identity, context, tableName.getCatalogName()));
    authorizationCheck(() -> systemAccessControl.get().checkCanCreateViewWithSelectFromColumns(identity, context, toCatalogSchemaTableName(tableName), columnNames));
    CatalogAccessControlEntry entry = getConnectorAccessControl(transactionId, tableName.getCatalogName());
    if (entry != null) {
        authorizationCheck(() -> entry.getAccessControl().checkCanCreateViewWithSelectFromColumns(entry.getTransactionHandle(transactionId), identity.toConnectorIdentity(tableName.getCatalogName()), context, toSchemaTableName(tableName), columnNames));
    }
}
@Override
public void checkCanGrantTablePrivilege(TransactionId transactionId, Identity identity, AccessControlContext context, Privilege privilege, QualifiedObjectName tableName, PrestoPrincipal grantee, boolean withGrantOption)
{
    requireNonNull(identity, "identity is null");
    requireNonNull(tableName, "tableName is null");
    requireNonNull(privilege, "privilege is null");

    String catalogName = tableName.getCatalogName();

    // Authenticate catalog access, then the system-level check, then the
    // catalog connector's own check (when one is registered).
    authenticationCheck(() -> checkCanAccessCatalog(identity, context, catalogName));
    authorizationCheck(() -> systemAccessControl.get().checkCanGrantTablePrivilege(identity, context, privilege, toCatalogSchemaTableName(tableName), grantee, withGrantOption));

    CatalogAccessControlEntry entry = getConnectorAccessControl(transactionId, catalogName);
    if (entry == null) {
        return;
    }
    authorizationCheck(() -> entry.getAccessControl().checkCanGrantTablePrivilege(entry.getTransactionHandle(transactionId), identity.toConnectorIdentity(catalogName), context, privilege, toSchemaTableName(tableName), grantee, withGrantOption));
}
@Override
public void checkCanRevokeTablePrivilege(TransactionId transactionId, Identity identity, AccessControlContext context, Privilege privilege, QualifiedObjectName tableName, PrestoPrincipal revokee, boolean grantOptionFor)
{
    requireNonNull(identity, "identity is null");
    requireNonNull(tableName, "tableName is null");
    requireNonNull(privilege, "privilege is null");

    String catalogName = tableName.getCatalogName();

    // Authenticate catalog access, then the system-level check, then the
    // catalog connector's own check (when one is registered).
    authenticationCheck(() -> checkCanAccessCatalog(identity, context, catalogName));
    authorizationCheck(() -> systemAccessControl.get().checkCanRevokeTablePrivilege(identity, context, privilege, toCatalogSchemaTableName(tableName), revokee, grantOptionFor));

    CatalogAccessControlEntry entry = getConnectorAccessControl(transactionId, catalogName);
    if (entry == null) {
        return;
    }
    authorizationCheck(() -> entry.getAccessControl().checkCanRevokeTablePrivilege(entry.getTransactionHandle(transactionId), identity.toConnectorIdentity(catalogName), context, privilege, toSchemaTableName(tableName), revokee, grantOptionFor));
}
@Override
public void checkCanSetSystemSessionProperty(Identity identity, AccessControlContext context, String propertyName)
{
    requireNonNull(identity, "identity is null");
    requireNonNull(propertyName, "propertyName is null");

    // System session properties are governed solely by the system access control;
    // there is no catalog (and hence no connector-level check) involved.
    SystemAccessControl accessControl = systemAccessControl.get();
    authorizationCheck(() -> accessControl.checkCanSetSystemSessionProperty(identity, context, propertyName));
}
@Override
public void checkCanSetCatalogSessionProperty(TransactionId transactionId, Identity identity, AccessControlContext context, String catalogName, String propertyName)
{
    requireNonNull(identity, "identity is null");
    requireNonNull(catalogName, "catalogName is null");
    requireNonNull(propertyName, "propertyName is null");

    // Authenticate catalog access, then the system-level check, then the
    // catalog connector's own check (when one is registered).
    authenticationCheck(() -> checkCanAccessCatalog(identity, context, catalogName));
    authorizationCheck(() -> systemAccessControl.get().checkCanSetCatalogSessionProperty(identity, context, catalogName, propertyName));

    CatalogAccessControlEntry entry = getConnectorAccessControl(transactionId, catalogName);
    if (entry == null) {
        return;
    }
    authorizationCheck(() -> entry.getAccessControl().checkCanSetCatalogSessionProperty(entry.getTransactionHandle(transactionId), identity.toConnectorIdentity(catalogName), context, propertyName));
}
@Override
public void checkCanSelectFromColumns(TransactionId transactionId, Identity identity, AccessControlContext context, QualifiedObjectName tableName, Set<String> columnNames)
{
    requireNonNull(identity, "identity is null");
    requireNonNull(tableName, "tableName is null");
    requireNonNull(columnNames, "columnNames is null");

    String catalogName = tableName.getCatalogName();

    // Authenticate catalog access, then the system-level check, then the
    // catalog connector's own check (when one is registered).
    authenticationCheck(() -> checkCanAccessCatalog(identity, context, catalogName));
    authorizationCheck(() -> systemAccessControl.get().checkCanSelectFromColumns(identity, context, toCatalogSchemaTableName(tableName), columnNames));

    CatalogAccessControlEntry entry = getConnectorAccessControl(transactionId, catalogName);
    if (entry == null) {
        return;
    }
    authorizationCheck(() -> entry.getAccessControl().checkCanSelectFromColumns(entry.getTransactionHandle(transactionId), identity.toConnectorIdentity(catalogName), context, toSchemaTableName(tableName), columnNames));
}
@Override
public void checkCanCreateRole(TransactionId transactionId, Identity identity, AccessControlContext context, String role, Optional<PrestoPrincipal> grantor, String catalogName)
{
    requireNonNull(identity, "identity is null");
    requireNonNull(role, "role is null");
    requireNonNull(grantor, "grantor is null");
    requireNonNull(catalogName, "catalogName is null");

    // Role management has no system-level check; it is delegated entirely to the
    // catalog's connector access control (when one is registered).
    authenticationCheck(() -> checkCanAccessCatalog(identity, context, catalogName));
    CatalogAccessControlEntry entry = getConnectorAccessControl(transactionId, catalogName);
    if (entry == null) {
        return;
    }
    authorizationCheck(() -> entry.getAccessControl().checkCanCreateRole(entry.getTransactionHandle(transactionId), identity.toConnectorIdentity(catalogName), context, role, grantor));
}
@Override
public void checkCanDropRole(TransactionId transactionId, Identity identity, AccessControlContext context, String role, String catalogName)
{
    requireNonNull(identity, "identity is null");
    requireNonNull(role, "role is null");
    requireNonNull(catalogName, "catalogName is null");

    // Role management has no system-level check; it is delegated entirely to the
    // catalog's connector access control (when one is registered).
    authenticationCheck(() -> checkCanAccessCatalog(identity, context, catalogName));
    CatalogAccessControlEntry entry = getConnectorAccessControl(transactionId, catalogName);
    if (entry == null) {
        return;
    }
    authorizationCheck(() -> entry.getAccessControl().checkCanDropRole(entry.getTransactionHandle(transactionId), identity.toConnectorIdentity(catalogName), context, role));
}
@Override
public void checkCanGrantRoles(TransactionId transactionId, Identity identity, AccessControlContext context, Set<String> roles, Set<PrestoPrincipal> grantees, boolean withAdminOption, Optional<PrestoPrincipal> grantor, String catalogName)
{
    requireNonNull(identity, "identity is null");
    requireNonNull(roles, "roles is null");
    requireNonNull(grantees, "grantees is null");
    requireNonNull(grantor, "grantor is null");
    requireNonNull(catalogName, "catalogName is null");

    // Role management has no system-level check; it is delegated entirely to the
    // catalog's connector access control (when one is registered).
    authenticationCheck(() -> checkCanAccessCatalog(identity, context, catalogName));
    CatalogAccessControlEntry entry = getConnectorAccessControl(transactionId, catalogName);
    if (entry == null) {
        return;
    }
    authorizationCheck(() -> entry.getAccessControl().checkCanGrantRoles(entry.getTransactionHandle(transactionId), identity.toConnectorIdentity(catalogName), context, roles, grantees, withAdminOption, grantor, catalogName));
}
@Override
public void checkCanRevokeRoles(TransactionId transactionId, Identity identity, AccessControlContext context, Set<String> roles, Set<PrestoPrincipal> grantees, boolean adminOptionFor, Optional<PrestoPrincipal> grantor, String catalogName)
{
    requireNonNull(identity, "identity is null");
    requireNonNull(roles, "roles is null");
    requireNonNull(grantees, "grantees is null");
    requireNonNull(grantor, "grantor is null");
    requireNonNull(catalogName, "catalogName is null");

    // Role management has no system-level check; it is delegated entirely to the
    // catalog's connector access control (when one is registered).
    authenticationCheck(() -> checkCanAccessCatalog(identity, context, catalogName));
    CatalogAccessControlEntry entry = getConnectorAccessControl(transactionId, catalogName);
    if (entry == null) {
        return;
    }
    authorizationCheck(() -> entry.getAccessControl().checkCanRevokeRoles(entry.getTransactionHandle(transactionId), identity.toConnectorIdentity(catalogName), context, roles, grantees, adminOptionFor, grantor, catalogName));
}
/**
 * Checks that {@code identity} may activate the given role in {@code catalogName}.
 * Role management has no system-level check; after authenticating catalog access
 * the decision is delegated to the catalog's connector access control (when registered).
 */
@Override
public void checkCanSetRole(TransactionId transactionId, Identity identity, AccessControlContext context, String role, String catalogName)
{
    requireNonNull(identity, "identity is null");
    requireNonNull(role, "role is null");
    // Fix: message previously said "catalog is null"; every sibling method reports
    // the actual parameter name "catalogName".
    requireNonNull(catalogName, "catalogName is null");
    authenticationCheck(() -> checkCanAccessCatalog(identity, context, catalogName));
    CatalogAccessControlEntry entry = getConnectorAccessControl(transactionId, catalogName);
    if (entry != null) {
        authorizationCheck(() -> entry.getAccessControl().checkCanSetRole(entry.getTransactionHandle(transactionId), identity.toConnectorIdentity(catalogName), context, role, catalogName));
    }
}
/**
 * Checks that {@code identity} may list the roles of {@code catalogName}.
 * After authenticating catalog access, the decision is delegated to the catalog's
 * connector access control (when registered).
 */
@Override
public void checkCanShowRoles(TransactionId transactionId, Identity identity, AccessControlContext context, String catalogName)
{
    requireNonNull(identity, "identity is null");
    requireNonNull(catalogName, "catalogName is null");
    authenticationCheck(() -> checkCanAccessCatalog(identity, context, catalogName));
    CatalogAccessControlEntry entry = getConnectorAccessControl(transactionId, catalogName);
    if (entry != null) {
        // Fix: this is an authorization decision, so wrap it in authorizationCheck
        // (the previous authenticationCheck wrapper mis-counted denials as
        // authentication failures in the JMX stats), consistent with every other
        // connector-level check in this class.
        authorizationCheck(() -> entry.getAccessControl().checkCanShowRoles(entry.getTransactionHandle(transactionId), identity.toConnectorIdentity(catalogName), context, catalogName));
    }
}
/**
 * Checks that {@code identity} may list its currently enabled roles in {@code catalogName}.
 * After authenticating catalog access, the decision is delegated to the catalog's
 * connector access control (when registered).
 */
@Override
public void checkCanShowCurrentRoles(TransactionId transactionId, Identity identity, AccessControlContext context, String catalogName)
{
    requireNonNull(identity, "identity is null");
    requireNonNull(catalogName, "catalogName is null");
    authenticationCheck(() -> checkCanAccessCatalog(identity, context, catalogName));
    CatalogAccessControlEntry entry = getConnectorAccessControl(transactionId, catalogName);
    if (entry != null) {
        // Fix: authorization decision — use authorizationCheck, not authenticationCheck,
        // so denials are counted in the authorization stats like every sibling method.
        authorizationCheck(() -> entry.getAccessControl().checkCanShowCurrentRoles(entry.getTransactionHandle(transactionId), identity.toConnectorIdentity(catalogName), context, catalogName));
    }
}
/**
 * Checks that {@code identity} may list role grants in {@code catalogName}.
 * After authenticating catalog access, the decision is delegated to the catalog's
 * connector access control (when registered).
 */
@Override
public void checkCanShowRoleGrants(TransactionId transactionId, Identity identity, AccessControlContext context, String catalogName)
{
    requireNonNull(identity, "identity is null");
    requireNonNull(catalogName, "catalogName is null");
    authenticationCheck(() -> checkCanAccessCatalog(identity, context, catalogName));
    CatalogAccessControlEntry entry = getConnectorAccessControl(transactionId, catalogName);
    if (entry != null) {
        // Fix: authorization decision — use authorizationCheck, not authenticationCheck,
        // so denials are counted in the authorization stats like every sibling method.
        authorizationCheck(() -> entry.getAccessControl().checkCanShowRoleGrants(entry.getTransactionHandle(transactionId), identity.toConnectorIdentity(catalogName), context, catalogName));
    }
}
/**
 * Resolves the connector-level access control entry for {@code catalogName} within
 * the given transaction. Returns {@code null} when the catalog is not known to the
 * transaction, or when no access control is registered for its connector id
 * (a null map value makes the mapped Optional empty).
 */
private CatalogAccessControlEntry getConnectorAccessControl(TransactionId transactionId, String catalogName)
{
return transactionManager.getOptionalCatalogMetadata(transactionId, catalogName)
.map(metadata -> connectorAccessControl.get(metadata.getConnectorId()))
.orElse(null);
}
/**
 * Counter of successful authentication checks, exported for monitoring via the
 * {@code @Managed}/{@code @Nested} annotations.
 */
@Managed
@Nested
public CounterStat getAuthenticationSuccess()
{
return authenticationSuccess;
}
/**
 * Counter of failed authentication checks, exported for monitoring via the
 * {@code @Managed}/{@code @Nested} annotations.
 */
@Managed
@Nested
public CounterStat getAuthenticationFail()
{
return authenticationFail;
}
/**
 * Counter of successful authorization checks, exported for monitoring via the
 * {@code @Managed}/{@code @Nested} annotations.
 */
@Managed
@Nested
public CounterStat getAuthorizationSuccess()
{
return authorizationSuccess;
}
/**
 * Counter of failed authorization checks, exported for monitoring via the
 * {@code @Managed}/{@code @Nested} annotations.
 */
@Managed
@Nested
public CounterStat getAuthorizationFail()
{
return authorizationFail;
}
/**
 * Runs an authentication check, counting the outcome: success bumps
 * {@code authenticationSuccess}, a thrown {@link PrestoException} bumps
 * {@code authenticationFail} and is rethrown. Other exception types propagate uncounted.
 */
private void authenticationCheck(Runnable check)
{
    try {
        check.run();
        authenticationSuccess.update(1);
    }
    catch (PrestoException e) {
        authenticationFail.update(1);
        throw e;
    }
}
/**
 * Runs an authorization check, counting the outcome: success bumps
 * {@code authorizationSuccess}, a thrown {@link PrestoException} bumps
 * {@code authorizationFail} and is rethrown. Other exception types propagate uncounted.
 */
private void authorizationCheck(Runnable check)
{
    try {
        check.run();
        authorizationSuccess.update(1);
    }
    catch (PrestoException e) {
        authorizationFail.update(1);
        throw e;
    }
}
/**
 * Converts a fully qualified object name into the equivalent
 * {@link CatalogSchemaTableName} (catalog, schema, table).
 */
private CatalogSchemaTableName toCatalogSchemaTableName(QualifiedObjectName name)
{
    return new CatalogSchemaTableName(name.getCatalogName(), name.getSchemaName(), name.getObjectName());
}
/**
 * Pairs a connector id with its {@link ConnectorAccessControl}, and resolves the
 * connector's transaction handle on demand through the enclosing transaction manager.
 */
private class CatalogAccessControlEntry
{
private final ConnectorId connectorId;
private final ConnectorAccessControl accessControl;
public CatalogAccessControlEntry(ConnectorId connectorId, ConnectorAccessControl accessControl)
{
this.connectorId = requireNonNull(connectorId, "connectorId is null");
this.accessControl = requireNonNull(accessControl, "accessControl is null");
}
public ConnectorId getConnectorId()
{
return connectorId;
}
public ConnectorAccessControl getAccessControl()
{
return accessControl;
}
// Handles are per-transaction, so this is looked up each time rather than cached.
public ConnectorTransactionHandle getTransactionHandle(TransactionId transactionId)
{
return transactionManager.getConnectorTransaction(transactionId, connectorId);
}
}
/**
 * Placeholder access control used before the real system access control has been
 * loaded: every check is rejected with a SERVER_STARTING_UP error.
 */
private static class InitializingSystemAccessControl
        implements SystemAccessControl
{
    @Override
    public void checkQueryIntegrity(Identity identity, AccessControlContext context, String query)
    {
        throw serverStartingUp();
    }

    @Override
    public void checkCanSetUser(AccessControlContext context, Optional<Principal> principal, String userName)
    {
        throw serverStartingUp();
    }

    @Override
    public void checkCanSetSystemSessionProperty(Identity identity, AccessControlContext context, String propertyName)
    {
        throw serverStartingUp();
    }

    @Override
    public void checkCanAccessCatalog(Identity identity, AccessControlContext context, String catalogName)
    {
        throw serverStartingUp();
    }

    // Single place for the uniform rejection raised by every method above.
    private static PrestoException serverStartingUp()
    {
        return new PrestoException(SERVER_STARTING_UP, "Presto server is still initializing");
    }
}
}
| apache-2.0 |
GunoH/intellij-community | platform/analysis-api/src/com/intellij/lang/annotation/AnnotationBuilder.java | 8772 | // Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.lang.annotation;
import com.intellij.codeInsight.daemon.HighlightDisplayKey;
import com.intellij.codeInsight.intention.IntentionAction;
import com.intellij.codeInspection.CommonProblemDescriptor;
import com.intellij.codeInspection.LocalQuickFix;
import com.intellij.codeInspection.ProblemDescriptor;
import com.intellij.codeInspection.ProblemHighlightType;
import com.intellij.lang.ASTNode;
import com.intellij.openapi.editor.colors.TextAttributesKey;
import com.intellij.openapi.editor.markup.GutterIconRenderer;
import com.intellij.openapi.editor.markup.TextAttributes;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.NlsContexts;
import com.intellij.openapi.util.TextRange;
import com.intellij.psi.PsiElement;
import org.jetbrains.annotations.ApiStatus;
import org.jetbrains.annotations.Contract;
import org.jetbrains.annotations.NotNull;
@ApiStatus.NonExtendable
public
interface AnnotationBuilder {
/**
* Specify annotation range. When not called, the current element range is used,
* i.e. of the element your {@link Annotator#annotate(PsiElement, AnnotationHolder)} method is called with.
* The passed {@code range} must be inside the range of the current element being annotated. An empty range will be highlighted as
* {@code endOffset = startOffset + 1}.
* This is an intermediate method in the creating new annotation pipeline.
*/
@Contract(pure = true)
@NotNull
AnnotationBuilder range(@NotNull TextRange range);
/**
* Specify annotation range is equal to the {@code element.getTextRange()}.
* When not called, the current element range is used, i.e. of the element your {@link Annotator#annotate(PsiElement, AnnotationHolder)} method is called with.
* The range of the {@code element} must be inside the range of the current element being annotated.
* This is an intermediate method in the creating new annotation pipeline.
*/
@Contract(pure = true)
@NotNull
AnnotationBuilder range(@NotNull ASTNode element);
/**
* Specify annotation range is equal to the {@code element.getTextRange()}.
* When not called, the current element range is used, i.e. of the element your {@link Annotator#annotate(PsiElement, AnnotationHolder)} method is called with.
* The range of the {@code element} must be inside the range of the current element being annotated.
* This is an intermediate method in the creating new annotation pipeline.
*/
@Contract(pure = true)
@NotNull
AnnotationBuilder range(@NotNull PsiElement element);
/**
* Specify annotation should be shown after the end of line. Useful for creating warnings of the type "unterminated string literal".
* This is an intermediate method in the creating new annotation pipeline.
*/
@Contract(pure = true)
@NotNull
AnnotationBuilder afterEndOfLine();
/**
* Specify annotation should be shown differently - as a sticky popup at the top of the file.
* Useful for file-wide messages like "This file is in the wrong directory".
* This is an intermediate method in the creating new annotation pipeline.
*/
@Contract(pure = true)
@NotNull
AnnotationBuilder fileLevel();
/**
* Specify annotation should have an icon at the gutter.
* Useful for distinguish annotations linked to additional resources like "this is a test method. Click on the icon gutter to run".
* This is an intermediate method in the creating new annotation pipeline.
*/
@Contract(pure = true)
@NotNull
AnnotationBuilder gutterIconRenderer(@NotNull GutterIconRenderer gutterIconRenderer);
/**
* Specify problem group for the annotation to group corresponding inspections.
* This is an intermediate method in the creating new annotation pipeline.
*/
@Contract(pure = true)
@NotNull
AnnotationBuilder problemGroup(@NotNull ProblemGroup problemGroup);
/**
* Override text attributes for the annotation to change the defaults specified for the given severity.
* This is an intermediate method in the creating new annotation pipeline.
*/
@Contract(pure = true)
@NotNull
AnnotationBuilder enforcedTextAttributes(@NotNull TextAttributes enforcedAttributes);
/**
* Specify text attributes for the annotation to change the defaults specified for the given severity.
* This is an intermediate method in the creating new annotation pipeline.
*/
@Contract(pure = true)
@NotNull
AnnotationBuilder textAttributes(@NotNull TextAttributesKey enforcedAttributes);
/**
* Specify the problem highlight type for the annotation. If not specified, the default type for the severity is used..
* This is an intermediate method in the creating new annotation pipeline.
*/
@Contract(pure = true)
@NotNull
AnnotationBuilder highlightType(@NotNull ProblemHighlightType highlightType);
/**
* Specify tooltip for the annotation to popup on mouse hover.
* This is an intermediate method in the creating new annotation pipeline.
*/
@Contract(pure = true)
@NotNull
AnnotationBuilder tooltip(@NotNull @NlsContexts.Tooltip String tooltip);
/**
* Optimization method specifying whether the annotation should be re-calculated when the user types in it.
* This is an intermediate method in the creating new annotation pipeline.
*/
@Contract(pure = true)
@NotNull
AnnotationBuilder needsUpdateOnTyping();
/**
* Optimization method which explicitly specifies whether the annotation should be re-calculated when the user types in it.
* This is an intermediate method in the creating new annotation pipeline.
*/
@Contract(pure = true)
@NotNull
AnnotationBuilder needsUpdateOnTyping(boolean value);
/**
* Registers quick fix for this annotation.
* If you want to tweak the fix, e.g. modify its range, please use {@link #newFix(IntentionAction)} instead.
* This is an intermediate method in the creating new annotation pipeline.
*/
@Contract(pure = true)
@NotNull
AnnotationBuilder withFix(@NotNull IntentionAction fix);
/**
* Begin registration of the new quickfix associated with the annotation.
* A typical code looks like this: <p>{@code holder.newFix(action).range(fixRange).registerFix()}</p>
*
* @param fix an intention action to be shown for the annotation as a quick fix
*/
@Contract(pure = true)
@NotNull
FixBuilder newFix(@NotNull IntentionAction fix);
/**
* Begin registration of the new quickfix associated with the annotation.
* A typical code looks like this: <p>{@code holder.newLocalQuickFix(fix).range(fixRange).registerFix()}</p>
*
* @param fix to be shown for the annotation as a quick fix
* @param problemDescriptor to be passed to {@link LocalQuickFix#applyFix(Project, CommonProblemDescriptor)}
*/
@Contract(pure = true)
@NotNull
FixBuilder newLocalQuickFix(@NotNull LocalQuickFix fix, @NotNull ProblemDescriptor problemDescriptor);
/** Intermediate builder for configuring and registering a quick fix on an annotation. */
interface FixBuilder {
    /**
     * Specify the range for this quick fix. If not specified, the annotation range is used.
     * This is an intermediate method in the registering new quick fix pipeline.
     *
     * @param range the text range the quick fix should be available in
     * @return this builder, for call chaining
     */
    @Contract(pure = true)
    @NotNull
    FixBuilder range(@NotNull TextRange range);

    /**
     * NOTE(review): undocumented in the original. Presumably associates the fix with an
     * inspection's {@link HighlightDisplayKey} — confirm against the platform documentation.
     *
     * @return this builder, for call chaining
     */
    @Contract(pure = true)
    @NotNull
    FixBuilder key(@NotNull HighlightDisplayKey key);

    /**
     * Specify that the quickfix will be available during batch mode only.
     * This is an intermediate method in the registering new quick fix pipeline.
     *
     * @return this builder, for call chaining
     */
    @Contract(pure = true)
    @NotNull
    FixBuilder batch();

    /**
     * Specify that the quickfix will be available both during batch mode and on-the-fly.
     * This is an intermediate method in the registering new quick fix pipeline.
     *
     * @return this builder, for call chaining
     */
    @Contract(pure = true)
    @NotNull
    FixBuilder universal();

    /**
     * Finish registration of the new quickfix associated with the annotation.
     * After calling this method you can continue constructing the annotation - e.g. register new fixes.
     * For example:
     * <pre>{@code holder.newAnnotation(range, WARNING, "Illegal element")
     *   .newFix(myRenameFix).key(DISPLAY_KEY).registerFix()
     *   .newFix(myDeleteFix).range(deleteRange).registerFix()
     *   .create();
     * }</pre>
     *
     * @return the enclosing annotation builder, to continue the annotation pipeline
     */
    @Contract(pure = true)
    @NotNull
    AnnotationBuilder registerFix();
}
/**
 * Finish creating new annotation.
 * Calling this method means you've completed your annotation and it's ready to be shown on screen.
 */
void create();

/**
 * @deprecated Use {@link #create()} instead
 * @return the created annotation (legacy API)
 */
@Deprecated
Annotation createAnnotation();
}
| apache-2.0 |
raja15792/googleads-java-lib | modules/dfp_appengine/src/main/java/com/google/api/ads/dfp/jaxws/v201508/DeviceManufacturerPremiumFeature.java | 960 |
package com.google.api.ads.dfp.jaxws.v201508;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlType;
/**
 *
 *             A premium feature applied to device manufacturer targeting.
 *
 *
 * <p>Java class for DeviceManufacturerPremiumFeature complex type.
 *
 * <p>The following schema fragment specifies the expected content contained within this class.
 *
 * <pre>
 * &lt;complexType name="DeviceManufacturerPremiumFeature">
 *   &lt;complexContent>
 *     &lt;extension base="{https://www.google.com/apis/ads/publisher/v201508}PremiumFeature">
 *       &lt;sequence>
 *       &lt;/sequence>
 *     &lt;/extension>
 *   &lt;/complexContent>
 * &lt;/complexType>
 * </pre>
 *
 * NOTE(review): marker type only — presumably auto-generated JAXB code (do not edit by
 * hand); all state and behavior come from {@code PremiumFeature}. Confirm the generation
 * pipeline before modifying.
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "DeviceManufacturerPremiumFeature")
public class DeviceManufacturerPremiumFeature
    extends PremiumFeature
{
}
| apache-2.0 |
ChetnaChaudhari/hadoop | hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/protocol/NamenodeProtocol.java | 7104 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.server.protocol;
import java.io.IOException;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
import org.apache.hadoop.hdfs.security.token.block.ExportedBlockKeys;
import org.apache.hadoop.hdfs.server.namenode.CheckpointSignature;
import org.apache.hadoop.io.retry.AtMostOnce;
import org.apache.hadoop.io.retry.Idempotent;
import org.apache.hadoop.security.KerberosInfo;
/*****************************************************************************
 * Protocol that a secondary NameNode uses to communicate with the NameNode.
 * It is used to obtain part of the name-node state (blocks, edit logs,
 * checkpoint coordination).
 *****************************************************************************/
@KerberosInfo(
    serverPrincipal = DFSConfigKeys.DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY)
@InterfaceAudience.Private
public interface NamenodeProtocol {
    /**
     * Until version 6L this class served as both the client interface to the
     * NN AND the RPC protocol used to communicate with the NN.
     *
     * It is now used by both the DFSClient and the NN server side to insulate
     * them from protocol serialization.
     *
     * If you add or change the NN's interface you must change both this class
     * AND the related protocol-buffer wire definition in
     * NamenodeProtocol.proto.
     *
     * For more details on the protocol-buffer wire protocol, please see
     * .../org/apache/hadoop/hdfs/protocolPB/overview.html
     *
     * 6: Switch to txid-based file naming for image and edits
     */
    // Interface fields are implicitly public static final (JLS 9.3).
    long versionID = 6L;

    // Error codes passed by errorReport().
    int NOTIFY = 0;
    int FATAL = 1;

    // Actions the active NN may request of a subordinate node.
    int ACT_UNKNOWN = 0;     // unknown action
    int ACT_SHUTDOWN = 50;   // shutdown node
    int ACT_CHECKPOINT = 51; // do checkpoint

    /**
     * Get a list of blocks belonging to <code>datanode</code> whose total
     * size equals <code>size</code>.
     *
     * @see org.apache.hadoop.hdfs.server.balancer.Balancer
     * @param datanode a data node
     * @param size requested size
     * @param minBlockSize each block should be of this minimum block size
     * @return a list of blocks and their locations
     * @throws IOException if size is less than or equal to 0 or the
     *         datanode does not exist
     */
    @Idempotent
    BlocksWithLocations getBlocks(DatanodeInfo datanode, long size, long
        minBlockSize) throws IOException;

    /**
     * Get the current block keys.
     *
     * @return ExportedBlockKeys containing current block keys
     * @throws IOException on RPC failure
     */
    @Idempotent
    ExportedBlockKeys getBlockKeys() throws IOException;

    /**
     * @return the most recent transaction ID that has been synced to
     *         persistent storage, or applied from persistent storage in the
     *         case of a non-active node.
     * @throws IOException on RPC failure
     */
    @Idempotent
    long getTransactionID() throws IOException;

    /**
     * Get the transaction ID of the most recent checkpoint.
     */
    @Idempotent
    long getMostRecentCheckpointTxId() throws IOException;

    /**
     * Closes the current edit log and opens a new one. The call fails if the
     * file system is in SafeMode.
     *
     * @return a unique token to identify this transaction
     * @throws IOException if the file system is in SafeMode
     */
    @Idempotent
    CheckpointSignature rollEditLog() throws IOException;

    /**
     * Request name-node version and storage information.
     *
     * @return {@link NamespaceInfo} identifying versions and storage
     *         information of the name-node
     * @throws IOException on RPC failure
     */
    @Idempotent
    NamespaceInfo versionRequest() throws IOException;

    /**
     * Report to the active name-node an error that occurred on a subordinate
     * node. Depending on the error code the active node may decide to
     * unregister the reporting node.
     *
     * @param registration requesting node
     * @param errorCode indicates the error ({@link #NOTIFY} or {@link #FATAL})
     * @param msg free-text description of the error
     * @throws IOException on RPC failure
     */
    @Idempotent
    void errorReport(NamenodeRegistration registration,
                     int errorCode,
                     String msg) throws IOException;

    /**
     * Register a subordinate name-node, such as a backup node.
     *
     * @return {@link NamenodeRegistration} of the node which this node has
     *         just registered with
     */
    @Idempotent
    NamenodeRegistration registerSubordinateNamenode(
        NamenodeRegistration registration) throws IOException;

    /**
     * A request to the active name-node to start a checkpoint. The name-node
     * decides whether to admit or reject it, and what should be done with the
     * backup-node image before and after the checkpoint.
     *
     * @see CheckpointCommand
     * @see NamenodeCommand
     * @see #ACT_SHUTDOWN
     *
     * @param registration the requesting node
     * @return {@link CheckpointCommand} if checkpoint is allowed
     * @throws IOException on RPC failure
     */
    @AtMostOnce
    NamenodeCommand startCheckpoint(NamenodeRegistration registration)
        throws IOException;

    /**
     * A request to the active name-node to finalize a previously started
     * checkpoint.
     *
     * @param registration the requesting node
     * @param sig {@code CheckpointSignature} which identifies the checkpoint
     * @throws IOException on RPC failure
     */
    @AtMostOnce
    void endCheckpoint(NamenodeRegistration registration,
                       CheckpointSignature sig) throws IOException;

    /**
     * Return a structure containing details about all edit logs available to
     * be fetched from the NameNode.
     *
     * @param sinceTxId return only logs that contain transactions >= sinceTxId
     */
    @Idempotent
    RemoteEditLogManifest getEditLogManifest(long sinceTxId)
        throws IOException;

    /**
     * @return whether the NameNode's upgrade is finalized (true) or the node
     *         is still in upgrade state (false)
     */
    @Idempotent
    boolean isUpgradeFinalized() throws IOException;

    /**
     * Return whether the NameNode has a rolling upgrade in progress (true)
     * or not (false).
     *
     * @return true iff a rolling upgrade is in progress
     * @throws IOException on RPC failure
     */
    @Idempotent
    boolean isRollingUpgrade() throws IOException;
}
| apache-2.0 |
grzesuav/jpf-core | src/main/gov/nasa/jpf/util/Trace.java | 5061 | /*
* Copyright (C) 2014, United States Government, as represented by the
* Administrator of the National Aeronautics and Space Administration.
* All rights reserved.
*
* The Java Pathfinder core (jpf-core) platform is licensed under the
* Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package gov.nasa.jpf.util;
import gov.nasa.jpf.ListenerAdapter;
import gov.nasa.jpf.search.Search;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
/**
 * A generic, listener-created trace over property-specific operations.
 *
 * We could register this as a listener itself, but since it usually is used
 * from a listener, we might as well just delegate from there.
 *
 * NOTE(review): not thread-safe; assumes single-threaded use from the JPF
 * search/listener callbacks — confirm before sharing across threads.
 */
public class Trace<T> extends ListenerAdapter implements Iterable<T> {

    // head of the chain of operations recorded since the last transition
    TraceElement<T> lastElement;

    // head of the chain of completed (state-associated) transitions
    TraceElement<T> lastTransition;

    // for HeuristicSearches. Ok, that's braindead but at least no need for cloning
    HashMap<Integer,TraceElement<T>> storedTransition;

    /** Iterator that traverses the trace LIFO, i.e. starting from the last T. */
    class TraceIterator implements Iterator<T> {
        TraceElement<T> cur;

        TraceIterator () {
            cur = lastElement;
        }

        @Override
        public boolean hasNext () {
            return (cur != null);
        }

        @Override
        public T next () {
            if (cur != null){
                T op = cur.op;
                cur = cur.prevElement;
                return op;
            } else {
                return null;
            }
        }

        @Override
        public void remove () {
            throw new UnsupportedOperationException("TraceElement removal not supported");
        }
    }

    @Override
    public Iterator<T> iterator() {
        return new TraceIterator();
    }

    /** Prepend the given operation to the current (pre-transition) chain. */
    public void addOp (T o){
        TraceElement<T> op = new TraceElement<T>(o);
        if (lastElement == null){
            lastElement = op;
        } else {
            assert lastElement.stateId == 0;  // must not yet be bound to a state
            op.prevElement = lastElement;
            lastElement = op;
        }
    }

    /** Drop the most recently added operation, if any. */
    public void removeLastOp() {
        if (lastElement != null){
            lastElement = lastElement.prevElement;
        }
    }

    /** @return the most recently added operation, or null if the chain is empty */
    public T getLastOp() {
        if (lastElement != null) {
            return lastElement.getOp();
        }
        return null;
    }

    /** @return the number of operations in the current chain (O(n) walk) */
    public int size() {
        int n = 0;
        for (TraceElement<T> te = lastElement; te != null; te = te.prevElement) {
            n++;
        }
        return n;
    }

    /**
     * Return the operations in insertion order (oldest first).
     * We can't create a T[] array object explicitly, hence the list + in-place
     * reversal (the chain is naturally LIFO).
     */
    public List<T> getOps () {
        ArrayList<T> list = new ArrayList<T>();
        for (TraceElement<T> te = lastElement; te != null; te = te.prevElement) {
            list.add(te.getOp());
        }
        // reverse in place to get chronological order
        for (int i = 0, j = list.size() - 1; i < j; i++, j--) {
            T tmp = list.get(j);
            list.set(j, list.get(i));
            list.set(i, tmp);
        }
        return list;
    }

    /**
     * Bind the pending operation chain to the newly advanced state and make it
     * the latest completed transition.
     */
    @Override
    public void stateAdvanced (Search search) {
        if (search.isNewState() && (lastElement != null)) {
            int stateId = search.getStateId();
            for (TraceElement<T> op = lastElement; op != null; op = op.prevElement) {
                assert op.stateId == 0;
                op.stateId = stateId;
            }
            lastElement.prevTransition = lastTransition;
            lastTransition = lastElement;
        }
        lastElement = null;
    }

    /** Drop transitions that belong to states newer than the backtracked-to state. */
    @Override
    public void stateBacktracked (Search search){
        int stateId = search.getStateId();
        while ((lastTransition != null) && (lastTransition.stateId > stateId)){
            lastTransition = lastTransition.prevTransition;
        }
        lastElement = null;
    }

    /** Remember the current transition head for the stored state id. */
    @Override
    public void stateStored (Search search) {
        if (storedTransition == null){
            storedTransition = new HashMap<Integer,TraceElement<T>>();
        }
        // always called after stateAdvanced
        storedTransition.put(search.getStateId(), lastTransition);
    }

    /** Restore the transition head remembered for the restored state id, if any. */
    @Override
    public void stateRestored (Search search) {
        int stateId = search.getStateId();
        TraceElement<T> op = storedTransition.get(stateId);
        if (op != null) {
            lastTransition = op;
            storedTransition.remove(stateId); // not strictly required, but we don't come back
        }
    }

    /**
     * Deep-copy the current operation chain into a new Trace.
     *
     * Fixed: the original declared a raw {@code Trace} return type; it now
     * returns the parameterized {@code Trace<T>} (covariant, source-compatible
     * for all callers).
     *
     * NOTE(review): only {@code lastElement} is copied — {@code lastTransition}
     * and {@code storedTransition} are left null in the copy; confirm that is
     * intended.
     */
    @Override
    public Trace<T> clone() {
        TraceElement<T> e0 = null, eLast = null;
        for (TraceElement<T> e = lastElement; e != null; e = e.prevElement){
            TraceElement<T> ec = e.clone();
            if (eLast != null){
                eLast.prevElement = ec;
                eLast = ec;
            } else {
                e0 = eLast = ec;
            }
        }
        Trace<T> t = new Trace<T>();
        t.lastElement = e0;
        return t;
    }
}
| apache-2.0 |
ctomc/jboss-jstl-api_spec | src/main/java/org/apache/taglibs/standard/lang/jstl/FunctionInvocation.java | 5182 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.taglibs.standard.lang.jstl;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
/**
 * <p>Represents a function call.</p>
 *
 * @author Shawn Bayern (in the style of Nathan's other classes)
 */
public class FunctionInvocation
    extends Expression {

    //-------------------------------------
    // Properties
    //-------------------------------------
    // property index

    // qualified ("prefix:name") or unqualified function name
    private String functionName;
    // list of Expression argument nodes, evaluated left to right
    private List argumentList;

    public String getFunctionName() {
        return functionName;
    }

    public void setFunctionName(String f) {
        functionName = f;
    }

    public List getArgumentList() {
        return argumentList;
    }

    public void setArgumentList(List l) {
        argumentList = l;
    }

    //-------------------------------------
    /**
     * Constructor
     *
     * @param functionName the (possibly prefixed) function name
     * @param argumentList the Expression arguments, in call order
     */
    public FunctionInvocation(String functionName, List argumentList) {
        this.functionName = functionName;
        this.argumentList = argumentList;
    }

    //-------------------------------------
    // Expression methods
    //-------------------------------------
    /**
     * Returns the expression in the expression language syntax,
     * e.g. {@code fn:name(arg1, arg2)}.
     */
    public String getExpressionString() {
        // StringBuilder instead of the original StringBuffer: this is a local,
        // single-threaded buffer, so the synchronized variant buys nothing.
        StringBuilder b = new StringBuilder();
        b.append(functionName);
        b.append("(");
        Iterator i = argumentList.iterator();
        while (i.hasNext()) {
            b.append(((Expression) i.next()).getExpressionString());
            if (i.hasNext()) {
                b.append(", ");
            }
        }
        b.append(")");
        return b.toString();
    }

    //-------------------------------------
    /**
     * Evaluates the function call: resolves the (prefix-normalized) name in
     * the function map, evaluates and coerces each argument, then invokes the
     * mapped static method.
     *
     * NOTE(review): the code falls through after each logError call —
     * presumably the Logger throws ELException on error; confirm, since a
     * non-throwing logger would lead to an NPE on {@code target} below.
     */
    public Object evaluate(Object pContext,
                           VariableResolver pResolver,
                           Map functions,
                           String defaultPrefix,
                           Logger pLogger)
        throws ELException {
        // if the Map is null, then the function is invalid
        if (functions == null) {
            pLogger.logError(Constants.UNKNOWN_FUNCTION, functionName);
        }

        // normalize function name against default prefix
        String functionName = this.functionName;
        if (functionName.indexOf(':') < 0) {
            if (defaultPrefix == null) {
                pLogger.logError(Constants.UNKNOWN_FUNCTION, functionName);
            }
            functionName = defaultPrefix + ":" + functionName;
        }

        // ensure that the function's name is mapped
        Method target = (Method) functions.get(functionName);
        if (target == null) {
            pLogger.logError(Constants.UNKNOWN_FUNCTION, functionName);
        }

        // ensure that the number of arguments matches the number of parameters
        Class[] params = target.getParameterTypes();
        if (params.length != argumentList.size()) {
            // Integer.valueOf instead of the deprecated new Integer(...)
            pLogger.logError(Constants.INAPPROPRIATE_FUNCTION_ARG_COUNT,
                Integer.valueOf(params.length),
                Integer.valueOf(argumentList.size()));
        }

        // now, walk through each parameter, evaluating and casting its argument
        Object[] arguments = new Object[argumentList.size()];
        for (int i = 0; i < params.length; i++) {
            // evaluate
            arguments[i] = ((Expression) argumentList.get(i)).evaluate(pContext,
                pResolver,
                functions,
                defaultPrefix,
                pLogger);
            // coerce
            arguments[i] = Coercions.coerce(arguments[i], params[i], pLogger);
        }

        // finally, invoke the target method, which we know to be static
        try {
            return (target.invoke(null, arguments));
        } catch (InvocationTargetException ex) {
            // unwrap so the real cause is reported, not the reflection wrapper
            pLogger.logError(Constants.FUNCTION_INVOCATION_ERROR,
                ex.getTargetException(),
                functionName);
            return null;
        } catch (Exception ex) {
            pLogger.logError(Constants.FUNCTION_INVOCATION_ERROR, ex, functionName);
            return null;
        }
    }

    //-------------------------------------
}
| apache-2.0 |
manstis/drools | kie-dmn/kie-dmn-signavio/src/test/java/org/kie/dmn/signavio/SignavioTest.java | 13022 | /*
* Copyright 2017 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.dmn.signavio;
import java.math.BigDecimal;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.junit.Test;
import org.kie.api.KieServices;
import org.kie.api.builder.KieBuilder;
import org.kie.api.builder.KieFileSystem;
import org.kie.api.builder.Message.Level;
import org.kie.api.builder.Results;
import org.kie.api.builder.model.KieModuleModel;
import org.kie.api.runtime.KieContainer;
import org.kie.dmn.api.core.DMNContext;
import org.kie.dmn.api.core.DMNModel;
import org.kie.dmn.api.core.DMNResult;
import org.kie.dmn.api.core.DMNRuntime;
import org.kie.dmn.model.api.DRGElement;
import org.kie.dmn.model.api.Definitions;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.everyItem;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.iterableWithSize;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.core.Is.is;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
/**
 * Tests for the Signavio DMN profile: multi-instance decisions (MID),
 * Signavio-specific built-in functions, and their interaction with the
 * standard DMN runtime.
 */
public class SignavioTest {

    public static final Logger LOG = LoggerFactory.getLogger(SignavioTest.class);

    @Test
    public void test() {
        DMNRuntime runtime = createRuntime("Test_Signavio_multiple.dmn");
        List<DMNModel> models = runtime.getModels();

        DMNContext context = runtime.newContext();
        // Fixed: Arrays.asList(new String[]{...}) — the explicit array was redundant
        context.set("persons", Arrays.asList("p1", "p2"));

        DMNModel model0 = models.get(0);
        LOG.info("EVALUATE ALL:");
        DMNResult evaluateAll = runtime.evaluateAll(model0, context);
        LOG.info("{}", evaluateAll);

        assertThat((List<?>) evaluateAll.getContext().get("Greeting for each Person in Persons"), contains("Hello p1", "Hello p2"));
    }

    @Test
    public void testUnmarshall() {
        DMNRuntime runtime = createRuntime("Test_Signavio_multiple.dmn");
        DMNModel model0 = runtime.getModels().get(0);
        Definitions definitions = model0.getDefinitions();

        DRGElement decision = definitions.getDrgElement().stream().filter(e -> e.getName().equals("greetingForEachPersonInPersons")).findFirst().orElseThrow(IllegalStateException::new);
        Object extElement = decision.getExtensionElements().getAny().get(0);
        assertThat(extElement, is(instanceOf(MultiInstanceDecisionLogic.class)));
        MultiInstanceDecisionLogic mid = (MultiInstanceDecisionLogic) extElement;
        LOG.info("{}", mid);

        assertThat(mid.getIterationExpression(), is("persons"));
        assertThat(mid.getIteratorShapeId(), is("id-707bbdf74438414623ac5d7067805b38"));
        assertThat(mid.getAggregationFunction(), is("COLLECT"));
        assertThat(mid.getTopLevelDecisionId(), is("id-7a23e2f201e3e0db3c991313cff5cd2b"));
    }

    @Test
    public void testUsingSignavioFunctions() {
        DMNRuntime runtime = createRuntime("Using_Signavio_functions.dmn");
        List<DMNModel> models = runtime.getModels();

        DMNContext context = runtime.newContext();

        DMNModel model0 = models.get(0);
        LOG.info("EVALUATE ALL:");
        DMNResult evaluateAll = runtime.evaluateAll(model0, context);
        LOG.info("{}", evaluateAll);
        LOG.info("{}", evaluateAll.getContext());
        evaluateAll.getMessages().forEach(System.out::println);

        assertEquals(true, evaluateAll.getContext().get("myContext"));
    }

    /**
     * Check the custom Signavio functions work in the LiteralExpression too
     */
    @Test
    public void testUsingSignavioFunctionsInLiteralExpression() {
        DMNRuntime runtime = createRuntime("Starts_with_an_A.dmn");
        assertStartsWithAnA(runtime, "Abc", true);
        assertStartsWithAnA(runtime, "Xyz", false);
    }

    private void assertStartsWithAnA(final DMNRuntime runtime, final String testString, final boolean startsWithAnA) {
        DMNContext context = runtime.newContext();
        context.set("surname", testString);

        DMNModel model0 = runtime.getModels().get(0);
        DMNResult evaluateAll = runtime.evaluateAll(model0, context);
        evaluateAll.getMessages().forEach(System.out::println);

        assertFalse(evaluateAll.getMessages().toString(), evaluateAll.hasErrors());
        assertEquals(startsWithAnA, evaluateAll.getContext().get("startsWithAnA"));
    }

    @Test
    public void testSurveyMIDSUM() {
        DMNRuntime runtime = createRuntime("survey MID SUM.dmn");
        checkSurveyMID(runtime, Arrays.asList(1, 2, 3), new BigDecimal(6));
    }

    /**
     * Evaluate a "survey MID" model with the given {@code numbers} input and
     * assert the "iterating" decision equals {@code iterating}.
     * (Renamed from the original misspelled {@code checkSurveryMID}.)
     */
    private void checkSurveyMID(DMNRuntime runtime, Object numbers, Object iterating) {
        List<DMNModel> models = runtime.getModels();

        DMNContext context = runtime.newContext();
        context.set("numbers", numbers);

        DMNModel model0 = models.get(0);
        LOG.info("EVALUATE ALL:");
        DMNResult evaluateAll = runtime.evaluateAll(model0, context);
        LOG.info("{}", evaluateAll);

        assertThat(evaluateAll.getDecisionResultByName("iterating").getResult(), is(iterating));
    }

    /** Build a DMN runtime with the Signavio profile enabled for the given model resource. */
    private DMNRuntime createRuntime(String modelFileName) {
        final KieServices ks = KieServices.Factory.get();
        final KieFileSystem kfs = ks.newKieFileSystem();
        KieModuleModel kmm = ks.newKieModuleModel();
        kmm.setConfigurationProperty("org.kie.dmn.profiles.signavio", "org.kie.dmn.signavio.KieDMNSignavioProfile");
        kfs.writeKModuleXML(kmm.toXML());
        kfs.write(ks.getResources().newClassPathResource(modelFileName, this.getClass()));

        KieBuilder kieBuilder = ks.newKieBuilder(kfs).buildAll();
        Results results = kieBuilder.getResults();
        LOG.info("buildAll() completed.");
        results.getMessages(Level.WARNING).forEach(e -> LOG.warn("{}", e));
        // Fixed: size() == 0 replaced with isEmpty()
        assertTrue(results.getMessages(Level.WARNING).isEmpty());
        final KieContainer kieContainer = ks.newKieContainer(ks.getRepository().getDefaultReleaseId());
        // Fixed: returned directly instead of through a needless local
        return kieContainer.newKieSession().getKieRuntime(DMNRuntime.class);
    }

    @Test
    public void testSurveyMIDMIN() {
        DMNRuntime runtime = createRuntime("survey MID MIN.dmn");
        checkSurveyMID(runtime, Arrays.asList(1, 2, 3), new BigDecimal(1));
    }

    @Test
    public void testSurveyMIDMAX() {
        DMNRuntime runtime = createRuntime("survey MID MAX.dmn");
        checkSurveyMID(runtime, Arrays.asList(1, 2, 3), new BigDecimal(3));
    }

    @Test
    public void testSurveyMIDCOUNT() {
        DMNRuntime runtime = createRuntime("survey MID COUNT.dmn");
        checkSurveyMID(runtime, Arrays.asList(1, 1, 1), new BigDecimal(3));// the COUNT in MID is list size, checked on Simulator.
    }

    @Test
    public void testSurveyMIDALLTRUE() {
        DMNRuntime runtime = createRuntime("survey MID ALLTRUE.dmn");
        checkSurveyMID(runtime, Arrays.asList(1, 2), true);
        checkSurveyMID(runtime, Arrays.asList(-1, 2), false);
    }

    @Test
    public void testSurveyMIDANYTRUE() {
        DMNRuntime runtime = createRuntime("survey MID ANYTRUE.dmn");
        checkSurveyMID(runtime, Arrays.asList(1, -2), true);
        checkSurveyMID(runtime, Arrays.asList(-1, -2), false);
    }

    @Test
    public void testSurveyMIDALLFALSE() {
        DMNRuntime runtime = createRuntime("survey MID ALLFALSE.dmn");
        checkSurveyMID(runtime, Arrays.asList(1, 2), false);
        checkSurveyMID(runtime, Arrays.asList(-1, 2), false);
        checkSurveyMID(runtime, Arrays.asList(1, -2), false);
        checkSurveyMID(runtime, Arrays.asList(-1, -2), true);
    }

    @Test
    public void testZipFunctions() {
        DMNRuntime runtime = createRuntime("Test_SignavioZipFunctions.dmn");
        checkBothFunctionsAreWorking(runtime);
    }

    @Test
    @SuppressWarnings("unchecked")
    public void testMidTakesCareOfRequirements() {
        DMNRuntime runtime = createRuntime("Test_SignavioMID.dmn");
        List<DMNModel> models = runtime.getModels();

        DMNContext context = runtime.newContext();
        context.set("numbers1", Arrays.asList(1, 2));
        context.set("numbers2", Arrays.asList(2, 3));

        DMNModel model0 = models.get(0);
        LOG.info("EVALUATE ALL:");
        DMNResult evaluateAll = runtime.evaluateAll(model0, context);
        LOG.info("{}", evaluateAll);

        List<Object> result = (List<Object>) evaluateAll.getDecisionResultByName("calculate").getResult();
        assertThat(result, iterableWithSize(6));
        assertThat(result, everyItem(notNullValue()));
    }

    @Test
    public void testSignavioConcatFunction() {
        DMNRuntime runtime = createRuntime("Signavio_Concat.dmn");
        List<DMNModel> models = runtime.getModels();

        DMNContext context = runtime.newContext();
        context.set("listOfNames", Arrays.asList("John", "Jane", "Doe"));

        DMNModel model0 = models.get(0);
        LOG.info("EVALUATE ALL:");
        DMNResult evaluateAll = runtime.evaluateAll(model0, context);
        LOG.info("{}", evaluateAll);

        assertEquals("JohnJaneDoe", evaluateAll.getDecisionResultByName("concatNames").getResult());
    }

    private void checkBothFunctionsAreWorking(DMNRuntime runtime) {
        List<DMNModel> models = runtime.getModels();

        DMNContext context = runtime.newContext();
        context.set("names", Arrays.asList("John Doe", "Jane Doe"));
        context.set("ages", Arrays.asList(37, 35));

        DMNModel model0 = models.get(0);
        LOG.info("EVALUATE ALL:");
        DMNResult evaluateAll = runtime.evaluateAll(model0, context);
        LOG.info("{}", evaluateAll);

        assertThat((List<?>) evaluateAll.getDecisionResultByName("zipvararg").getResult(), iterableWithSize(2));
        assertThat((List<?>) evaluateAll.getDecisionResultByName("zipsinglelist").getResult(), iterableWithSize(2));
    }

    @Test
    public void testSignavioIterateMultiinstanceWithComplexInputs() {
        DMNRuntime runtime = createRuntime("Iterate Complex List.dmn");

        DMNContext context = runtime.newContext();
        Map<String, Object> johnDoe = new HashMap<>();
        johnDoe.put("iD", "id-john");
        johnDoe.put("name", "John Doe");
        Map<String, Object> alice = new HashMap<>();
        alice.put("iD", "id-alice");
        alice.put("name", "Alice");
        context.set("customer", Collections.singletonMap("persons", Arrays.asList(johnDoe, alice)));

        DMNModel model0 = runtime.getModels().get(0);
        LOG.info("EVALUATE ALL:");
        DMNResult evaluateAll = runtime.evaluateAll(model0, context);
        LOG.info("{}", evaluateAll);

        assertEquals(Arrays.asList("John Doe", "Alice"), evaluateAll.getDecisionResultByName("extractNames").getResult());
    }

    @Test
    public void testSignavioIterateMultiinstanceMultipleDecisions() {
        DMNRuntime runtime = createRuntime("MID with multiple inside decisions.dmn");

        DMNContext context = runtime.newContext();
        context.set("names", Arrays.asList("John", "Alice"));

        DMNModel model0 = runtime.getModels().get(0);
        LOG.info("EVALUATE ALL:");
        DMNResult evaluateAll = runtime.evaluateAll(model0, context);
        LOG.info("{}", evaluateAll);

        assertThat(evaluateAll.getDecisionResultByName("overallage").getResult(), is(new BigDecimal("18")));
    }

    @Test
    public void testSignavioIterateMultiinstanceMultipleDecisionsOutside() {
        DMNRuntime runtime = createRuntime("MID with outside requirement.dmn");

        DMNContext context = runtime.newContext();
        context.set("numbers", Arrays.asList(1, 2, 3));
        context.set("operand", "PLUS");

        DMNModel model0 = runtime.getModels().get(0);
        LOG.info("EVALUATE ALL:");
        DMNResult evaluateAll = runtime.evaluateAll(model0, context);
        LOG.info("{}", evaluateAll);

        assertThat(evaluateAll.getDecisionResultByName("sumUp").getResult(), is(new BigDecimal("6")));
    }
}
| apache-2.0 |
miracl/amcl | version22/java/FF.java | 15821 | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
/* Large Finite Field arithmetic */
/* AMCL mod p functions */
public final class FF {
// Limb array: v[0] is the least-significant BIG (see toBytes/toString ordering).
private final BIG[] v;
// Number of BIG limbs in this FF.
private final int length;

/* Constructors */
// Allocate an n-limb FF initialized to zero.
public FF(int n)
{
    v=new BIG[n];
    for (int i=0;i<n;i++)
        v[i]=new BIG(0);
    length=n;
}

// Number of BIG limbs.
public int getlen()
{
    return length;
}

/* set to integer m (split across the two lowest words of v[0]) */
public void set(int m)
{
    zero();
    v[0].set(0,(m&ROM.BMASK));
    v[0].set(1,(m>>ROM.BASEBITS));
}
/* copy from FF b — assumes b has (at least) the same length as this */
public void copy(FF b)
{
    for (int i=0;i<length;i++)
    {
        v[i].copy(b.v[i]);
    }
}

/* x=y<<n : copy b into the upper half of this (double-length) FF, zero the lower half */
public void dsucopy(FF b)
{
    for (int i=0;i<b.length;i++)
    {
        v[b.length+i].copy(b.v[i]);
        v[i].zero();
    }
}

/* x=y : copy b into the lower half of this (double-length) FF, zero the upper half */
public void dscopy(FF b)
{
    for (int i=0;i<b.length;i++)
    {
        v[i].copy(b.v[i]);
        v[b.length+i].zero();
    }
}

/* x=y>>n : copy the upper half of (double-length) b into this */
public void sducopy(FF b)
{
    for (int i=0;i<length;i++)
    {
        v[i].copy(b.v[length+i]);
    }
}
/* set to zero */
public void zero()
{
    for (int i=0;i<length;i++)
    {
        v[i].zero();
    }
}

/* set to one (v[0]=1, all higher limbs zero) */
public void one()
{
    v[0].one();
    for (int i=1;i<length;i++)
    {
        v[i].zero();
    }
}

/* test equals 0 — true only if every limb is zero */
public boolean iszilch()
{
    for (int i=0;i<length;i++)
    {
        if (!v[i].iszilch()) return false;
    }
    return true;
}
/* shift right by n BIGBITS-bit words; vacated upper words are zeroed */
public void shrw(int n)
{
    for (int i=0;i<n;i++)
    {
        v[i].copy(v[i+n]);
        v[i+n].zero();
    }
}

/* shift left by n BIGBITS-bit words; vacated lower words are zeroed */
public void shlw(int n)
{
    for (int i=0;i<n;i++)
    {
        v[n+i].copy(v[i]);
        v[i].zero();
    }
}

/* extract last (least-significant) bit */
public int parity()
{
    return v[0].parity();
}

/* extract the m least-significant bits */
public int lastbits(int m)
{
    return v[0].lastbits(m);
}
/* compare x and y - must be normalised, and of same length.
   Returns <0, 0 or >0; compares most-significant limb first. */
public static int comp(FF a,FF b)
{
    int i,j;
    for (i=a.length-1;i>=0;i--)
    {
        j=BIG.comp(a.v[i],b.v[i]);
        if (j!=0) return j;
    }
    return 0;
}

/* recursive add: this[vp..vp+n) = x[xp..) + y[yp..) (limb-wise, no carry propagation) */
public void radd(int vp,FF x,int xp,FF y,int yp,int n)
{
    for (int i=0;i<n;i++)
    {
        v[vp+i].copy(x.v[xp+i]);
        v[vp+i].add(y.v[yp+i]);
    }
}

/* recursive inc: this[vp..vp+n) += y[yp..) (limb-wise) */
public void rinc(int vp,FF y,int yp,int n)
{
    for (int i=0;i<n;i++)
    {
        v[vp+i].add(y.v[yp+i]);
    }
}

/* recursive sub: this[vp..vp+n) = x[xp..) - y[yp..) (limb-wise, no borrow propagation) */
public void rsub(int vp,FF x,int xp,FF y,int yp,int n)
{
    for (int i=0;i<n;i++)
    {
        v[vp+i].copy(x.v[xp+i]);
        v[vp+i].sub(y.v[yp+i]);
    }
}

/* recursive dec: this[vp..vp+n) -= y[yp..) (limb-wise) */
public void rdec(int vp,FF y,int yp,int n)
{
    for (int i=0;i<n;i++)
    {
        v[vp+i].sub(y.v[yp+i]);
    }
}
/* simple add (limb-wise; caller is responsible for normalisation) */
public void add(FF b)
{
    for (int i=0;i<length;i++)
        v[i].add(b.v[i]);
}

/* simple sub (limb-wise; caller is responsible for normalisation) */
public void sub(FF b)
{
    for (int i=0;i<length;i++)
        v[i].sub(b.v[i]);
}

/* reverse sub: this = b - this (limb-wise) */
public void revsub(FF b)
{
    for (int i=0;i<length;i++)
        v[i].rsub(b.v[i]);
}

/* increment by a small integer, then normalise */
public void inc(int m)
{
    v[0].inc(m);
    norm();
}

/* decrement by a small integer, then normalise */
public void dec(int m)
{
    v[0].dec(m);
    norm();
}
/* normalise - but hold any overflow in top part unless n<0 */
/* Propagates per-limb excess upward through v[vp..vp+n-1]: each limb's
   overflow from BIG.norm() is masked off with xortop and folded into the
   next limb with incl. A negative n requests truncation: the final carry
   is stripped from the top limb as well instead of being retained. */
private void rnorm(int vp,int n)
{
boolean trunc=false;
int i;
long carry;
if (n<0)
{ /* -v n signals to do truncation */
n=-n;
trunc=true;
}
for (i=0;i<n-1;i++)
{
carry=v[vp+i].norm();
// clear the carried-out bits from this limb, then push the carry up
v[vp+i].xortop(carry<<ROM.P_TBITS);
v[vp+i+1].incl(carry);
}
carry=v[vp+n-1].norm();
if (trunc)
v[vp+n-1].xortop(carry<<ROM.P_TBITS);
}
/* normalise the whole number (overflow retained in the top limb) */
public void norm()
{
rnorm(0,length);
}
/* shift left by one bit across limb boundaries: each limb's carried-out
   top bit (returned by fshl) is delayed one iteration and added into the
   next limb; the final carry out of the top limb is discarded */
public void shl()
{
int i,carry,delay_carry=0;
for (i=0;i<length-1;i++)
{
carry=v[i].fshl(1);
v[i].inc(delay_carry);
v[i].xortop((long)carry<<ROM.P_TBITS);
delay_carry=carry;
}
v[length-1].fshl(1);
v[length-1].inc(delay_carry);
}
/* shift right by one bit across limb boundaries, top limb first: the bit
   shifted out of limb i (returned by fshr) becomes the top bit of limb i-1 */
public void shr()
{
int carry;
for (int i=length-1;i>0;i--)
{
carry=v[i].fshr(1);
v[i-1].xortop((long)carry<<ROM.P_TBITS);
}
v[0].fshr(1);
}
/* Convert to Hex String, most significant limb first.
   NOTE: calls norm() first, so rendering a value mutates its internal
   limb representation (the numeric value is unchanged).
   Uses a StringBuilder rather than repeated String concatenation to avoid
   O(n^2) copying across limbs. */
public String toString()
{
    norm();
    StringBuilder s = new StringBuilder();
    for (int i = length - 1; i >= 0; i--)
    {
        s.append(v[i].toString());
    }
    return s.toString();
}
/*
public String toRawString(int len)
{
// norm(len);
String s="";
for (int i=len-1;i>=0;i--)
{
s+=v[i].toRawString(); s+=" ";
}
return s;
}
*/
/* Convert FFs to/from byte arrays */
/* Serialise this FF big-endian: the most significant limb occupies the
   first MODBYTES of b. Caller must size b to length*ROM.MODBYTES. */
public void toBytes(byte[] b)
{
for (int i=0;i<length;i++)
{
v[i].tobytearray(b,(length-i-1)*ROM.MODBYTES);
}
}
/* Deserialise x from a big-endian byte array laid out as by toBytes. */
public static void fromBytes(FF x,byte[] b)
{
for (int i=0;i<x.length;i++)
{
x.v[i]=BIG.frombytearray(b,(x.length-i-1)*ROM.MODBYTES);
}
}
/* in-place swapping using xor - side channel resistant - lengths must be the same */
/* Conditionally swaps a and b limb-by-limb when d!=0, in constant time
   (no data-dependent branching), delegating to BIG.cswap per limb. */
private static void cswap(FF a,FF b,int d)
{
for (int i=0;i<a.length;i++)
{
// BIG.cswap(a.v[i],b.v[i],d);
a.v[i].cswap(b.v[i],d);
}
}
/* z=x*y, t is workspace */
/* Recursive Karatsuba multiplication: writes the 2n-limb product of the
   n-limb operands at x[xp..] and y[yp..] into v[vp..vp+2n-1], using t[tp..]
   as scratch. n must be a power of two (halved each recursion to 1). */
private void karmul(int vp,FF x,int xp,FF y,int yp,FF t,int tp,int n)
{
int nd2;
if (n==1)
{
// base case: one BIG x BIG multiply, split the double-width result
DBIG d=BIG.mul(x.v[xp],y.v[yp]);
v[vp+1]=d.split(8*ROM.MODBYTES);
v[vp].copy(d);
return;
}
nd2=n/2;
// form (x0+x1) and (y0+y1) in the output area, then t = (x0+x1)*(y0+y1)
radd(vp,x,xp,x,xp+nd2,nd2);
rnorm(vp,nd2); /* Important - required for 32-bit build */
radd(vp+nd2,y,yp,y,yp+nd2,nd2);
rnorm(vp+nd2,nd2); /* Important - required for 32-bit build */
t.karmul(tp,this,vp,this,vp+nd2,t,tp+n,nd2);
// low product x0*y0 and high product x1*y1
karmul(vp,x,xp,y,yp,t,tp+n,nd2);
karmul(vp+n,x,xp+nd2,y,yp+nd2,t,tp+n,nd2);
// middle term t - x0*y0 - x1*y1, added at the half-way offset
t.rdec(tp,this,vp,n);
t.rdec(tp,this,vp+n,n);
rinc(vp+nd2,t,tp,n);
rnorm(vp,2*n);
}
/* Recursive Karatsuba squaring: same shape as karmul but exploits the
   symmetry of x*x (the cross-product is computed once and added twice). */
private void karsqr(int vp,FF x,int xp,FF t,int tp,int n)
{
int nd2;
if (n==1)
{
DBIG d=BIG.sqr(x.v[xp]);
v[vp+1].copy(d.split(8*ROM.MODBYTES));
v[vp].copy(d);
return;
}
nd2=n/2;
karsqr(vp,x,xp,t,tp+n,nd2);
karsqr(vp+n,x,xp+nd2,t,tp+n,nd2);
t.karmul(tp,x,xp,x,xp+nd2,t,tp+n,nd2);
// cross term x0*x1 counted twice
rinc(vp+nd2,t,tp,n);
rinc(vp+nd2,t,tp,n);
rnorm(vp+nd2,n);
}
private void karmul_lower(int vp,FF x,int xp,FF y,int yp,FF t,int tp,int n)
{ /* Calculates Least Significant bottom half of x*y */
int nd2;
if (n==1)
{ /* only calculate bottom half of product */
v[vp].copy(BIG.smul(x.v[xp],y.v[yp]));
return;
}
nd2=n/2;
// full low product, plus the low halves of the two cross products
karmul(vp,x,xp,y,yp,t,tp+n,nd2);
t.karmul_lower(tp,x,xp+nd2,y,yp,t,tp+n,nd2);
rinc(vp+nd2,t,tp,nd2);
t.karmul_lower(tp,x,xp,y,yp+nd2,t,tp+n,nd2);
rinc(vp+nd2,t,tp,nd2);
rnorm(vp+nd2,-nd2); /* truncate it */
}
private void karmul_upper(FF x,FF y,FF t,int n)
{ /* Calculates Most Significant upper half of x*y, given lower part */
int nd2;
nd2=n/2;
radd(n,x,0,x,nd2,nd2);
radd(n+nd2,y,0,y,nd2,nd2);
rnorm(n,nd2);
rnorm(n+nd2,nd2);
t.karmul(0,this,n+nd2,this,n,t,n,nd2); /* t = (a0+a1)(b0+b1) */
karmul(n,x,nd2,y,nd2,t,n,nd2); /* z[n]= a1*b1 */
/* z[0-nd2]=l(a0b0) z[nd2-n]= h(a0b0)+l(t)-l(a0b0)-l(a1b1) */
t.rdec(0,this,n,n); /* t=t-a1b1 */
rinc(nd2,this,0,nd2); /* z[nd2-n]+=l(a0b0) = h(a0b0)+l(t)-l(a1b1) */
rdec(nd2,t,0,nd2); /* z[nd2-n]=h(a0b0)+l(t)-l(a1b1)-l(t-a1b1)=h(a0b0) */
rnorm(0,-n); /* a0b0 now in z - truncate it */
t.rdec(0,this,0,n); /* (a0+a1)(b0+b1) - a0b0 */
rinc(nd2,t,0,n);
rnorm(nd2,n);
}
/* z=x*y. Assumes x and y are of same length. */
/* Returns a fresh double-length (2n limb) FF holding the full product. */
public static FF mul(FF x,FF y)
{
int n=x.length;
FF z=new FF(2*n);
FF t=new FF(2*n); // Karatsuba scratch space
// x.norm(); y.norm();
z.karmul(0,x,0,y,0,t,0,n);
return z;
}
/* z=x^2 */
/* Returns a fresh double-length FF holding the full square of x. */
public static FF sqr(FF x)
{
int n=x.length;
FF z=new FF(2*n);
FF t=new FF(2*n); // Karatsuba scratch space
// x.norm();
z.karsqr(0,x,0,t,0,n);
return z;
}
/* return low part of product this*y */
/* In-place: this = low n limbs of (this * y). A copy of this is taken
   first because karmul_lower overwrites v while still reading x. */
public void lmul(FF y)
{
int n=length;
FF t=new FF(2*n);
FF x=new FF(n); x.copy(this);
// x.norm(); y.norm();
karmul_lower(0,x,0,y,0,t,0,n);
}
/* Set b=b mod c */
/* Shift-and-subtract reduction. NOTE: c is mutated during the call (shifted
   left k times, then shifted right k times) but is restored to its original
   value on return. Not constant-time. */
public void mod(FF c)
{
int k=0;
norm();
if (comp(this,c)<0)
return;
// scale c up until it exceeds this, counting the shifts
do
{
c.shl();
k++;
} while (comp(this,c)>=0);
// shift c back down one bit at a time, subtracting where possible
while (k>0)
{
c.shr();
if (comp(this,c)>=0)
{
sub(c);
norm();
}
k--;
}
}
/* return This mod modulus, N is modulus, ND is Montgomery Constant */
public FF reduce(FF N,FF ND)
{ /* fast karatsuba Montgomery reduction */
int n=N.length;
FF t=new FF(2*n);
FF r=new FF(n);
FF m=new FF(n);
r.sducopy(this); // r = high half of this
m.karmul_lower(0,this,0,ND,0,t,0,n); // m = (this * ND) mod R
karmul_upper(N,m,t,n); // this = high half of m*N folded in
m.sducopy(this);
// r = high(this) - high(m*N); add N first so the subtraction cannot go negative
r.add(N);
r.sub(m);
r.norm();
return r;
}
/* Set r=this mod b */
/* this is of length - 2*n */
/* r,b is of length - n */
/* Double-length reduction by shift-and-subtract: m starts as b shifted up
   by n words (dsucopy) and is walked back down one bit at a time. */
public FF dmod(FF b)
{
int k,n=b.length;
FF m=new FF(2*n);
FF x=new FF(2*n);
FF r=new FF(n);
x.copy(this); // work on a copy; this is not modified
x.norm();
m.dsucopy(b); k=ROM.BIGBITS*n;
while (comp(x,m)>=0)
{
x.sub(m);
x.norm();
}
while (k>0)
{
m.shr();
if (comp(x,m)>=0)
{
x.sub(m);
x.norm();
}
k--;
}
r.copy(x);
r.mod(b); // final clean-up reduction into [0, b)
return r;
}
/* Set return=1/this mod p. Binary method - a<p on entry */
/* Binary extended Euclidean algorithm: maintains x1*this == u (mod p) and
   x2*this == v (mod p); when u or v reaches 1 the matching xi is the inverse.
   NOTE(review): assumes gcd(this, p) == 1; behaviour is undefined otherwise. */
public void invmodp(FF p)
{
int n=p.length;
FF u=new FF(n);
FF v=new FF(n);
FF x1=new FF(n);
FF x2=new FF(n);
FF t=new FF(n);
FF one=new FF(n);
one.one();
u.copy(this);
v.copy(p);
x1.copy(one);
x2.zero();
// reduce n in here as well!
while (comp(u,one)!=0 && comp(v,one)!=0)
{
// strip factors of two from u, halving x1 mod p in step
while (u.parity()==0)
{
u.shr();
if (x1.parity()!=0)
{
x1.add(p);
x1.norm();
}
x1.shr();
}
// strip factors of two from v, halving x2 mod p in step
while (v.parity()==0)
{
v.shr();
if (x2.parity()!=0)
{
x2.add(p);
x2.norm();
}
x2.shr();
}
// subtract the smaller pair from the larger, keeping xi in [0, p)
if (comp(u,v)>=0)
{
u.sub(v);
u.norm();
if (comp(x1,x2)>=0) x1.sub(x2);
else
{
t.copy(p);
t.sub(x2);
x1.add(t); // x1 = x1 - x2 + p, avoiding a negative intermediate
}
x1.norm();
}
else
{
v.sub(u);
v.norm();
if (comp(x2,x1)>=0) x2.sub(x1);
else
{
t.copy(p);
t.sub(x1);
x2.add(t); // x2 = x2 - x1 + p, avoiding a negative intermediate
}
x2.norm();
}
}
if (comp(u,one)==0)
copy(x1);
else
copy(x2);
}
/* nresidue mod m */
/* Convert this into Montgomery n-residue form: this = this * R mod m,
   where R = 2^(BIGBITS*n), implemented as a word-shift (dsucopy) + dmod. */
public void nres(FF m)
{
int n=m.length;
FF d=new FF(2*n);
d.dsucopy(this);
copy(d.dmod(m));
}
/* Convert back from Montgomery form: this = this * R^-1 mod m,
   using ND (the Montgomery constant) via reduce. */
public void redc(FF m,FF ND)
{
int n=m.length;
FF d=new FF(2*n);
mod(m);
d.dscopy(this);
copy(d.reduce(m,ND));
mod(m);
}
/* Truncate this modulo 2^(BIGBITS*m) by zeroing limbs m and above. */
private void mod2m(int m)
{
for (int i=m;i<length;i++)
v[i].zero();
}
/* U=1/a mod 2^m - Arazi & Qi */
/* Newton-style lifting: starting from the inverse of the bottom limb,
   each pass doubles the number of correct limbs in U (i = 1,2,4,...).
   Requires this to be odd (invertible mod a power of two). */
private FF invmod2m()
{
int i,n=length;
FF b=new FF(n);
FF c=new FF(n);
FF U=new FF(n);
FF t;
U.zero();
U.v[0].copy(v[0]);
U.v[0].invmod2m(); // inverse of the least significant limb
for (i=1;i<n;i<<=1)
{
b.copy(this); b.mod2m(i);
t=mul(U,b);
t.shrw(i); b.copy(t);
c.copy(this); c.shrw(i); c.mod2m(i);
c.lmul(U); c.mod2m(i);
b.add(c); b.norm();
b.lmul(U); b.mod2m(i);
// b = 2^i - b (correction term), then splice into the upper limbs of U
c.one(); c.shlw(i); b.revsub(c); b.norm();
b.shlw(i);
U.add(b);
}
U.norm();
return U;
}
/* Fill this FF with random limbs drawn from rng. */
public void random(RAND rng)
{
int n=length;
for (int i=0;i<n;i++)
{
v[i].copy(BIG.random(rng));
}
/* make sure top bit is 1 */
// i.e. re-draw the top limb until the value has the full bit length
while (v[n-1].nbits()<ROM.MODBYTES*8) v[n-1].copy(BIG.random(rng));
}
/* generate random x */
/* Uniform random value in [0, p): draw a double-length random and reduce
   mod p, which avoids the modulo bias of reducing a single-length draw. */
public void randomnum(FF p,RAND rng)
{
int n=length;
FF d=new FF(2*n);
for (int i=0;i<2*n;i++)
{
d.v[i].copy(BIG.random(rng));
}
copy(d.dmod(p));
}
/* this*=y mod p */
/* Montgomery multiply: operands are expected in n-residue form; nd is the
   Montgomery constant. A pre-emptive mod(p) guards against limb overflow. */
public void modmul(FF y,FF p,FF nd)
{
if (BIG.ff_pexceed(v[length-1],y.v[y.length-1])) mod(p);
FF d=mul(this,y);
copy(d.reduce(p,nd));
}
/* this = this^2 mod p (Montgomery squaring; the original comment was a
   copy-paste of modmul's) */
public void modsqr(FF p,FF nd)
{
if (BIG.ff_sexceed(v[length-1])) mod(p);
FF d=sqr(this);
copy(d.reduce(p,nd));
}
/* this=this^e mod p using side-channel resistant Montgomery Ladder, for large e */
/* Processes every bit of e (fixed iteration count) and uses constant-time
   cswap so the operation sequence is independent of the exponent bits. */
public void skpow(FF e,FF p)
{
int i,b,n=p.length;
FF R0=new FF(n);
FF R1=new FF(n);
FF ND=p.invmod2m(); // Montgomery constant derived from p
mod(p);
R0.one();
R1.copy(this);
R0.nres(p); // work in Montgomery form throughout
R1.nres(p);
for (i=8*ROM.MODBYTES*n-1;i>=0;i--)
{
b=e.v[i/ROM.BIGBITS].bit(i%ROM.BIGBITS);
copy(R0);
modmul(R1,p,ND);
cswap(R0,R1,b);
R0.modsqr(p,ND);
R1.copy(this);
cswap(R0,R1,b);
}
copy(R0);
redc(p,ND); // convert result back out of Montgomery form
}
/* this =this^e mod p using side-channel resistant Montgomery Ladder, for short e */
/* Identical ladder, but e is a single BIG so only 8*MODBYTES bits are scanned. */
public void skpow(BIG e,FF p)
{
int i,b,n=p.length;
FF R0=new FF(n);
FF R1=new FF(n);
FF ND=p.invmod2m();
mod(p);
R0.one();
R1.copy(this);
R0.nres(p);
R1.nres(p);
for (i=8*ROM.MODBYTES-1;i>=0;i--)
{
b=e.bit(i);
copy(R0);
modmul(R1,p,ND);
cswap(R0,R1,b);
R0.modsqr(p,ND);
R1.copy(this);
cswap(R0,R1,b);
}
copy(R0);
redc(p,ND);
}
/* raise to an integer power - right-to-left method */
/* this = this^e mod p for a small int exponent e. Not side-channel
   resistant. NOTE(review): assumes e >= 1 - e == 0 would leave this
   unchanged apart from redc; confirm callers never pass 0. */
public void power(int e,FF p)
{
int n=p.length;
FF w=new FF(n);
FF ND=p.invmod2m();
boolean f=true; // true until the first multiply into the accumulator
w.copy(this);
w.nres(p);
if (e==2)
{ // common case fast path: a single squaring
copy(w);
modsqr(p,ND);
}
else for (; ; )
{
if (e%2==1)
{
if (f) copy(w);
else modmul(w,p,ND);
f=false;
}
e>>=1;
if (e==0) break;
w.modsqr(p,ND);
}
redc(p,ND);
}
/* this=this^e mod p, faster but not side channel resistant */
/* Left-to-right square-and-multiply over every bit of the full-width e. */
public void pow(FF e,FF p)
{
int i,b,n=p.length;
FF w=new FF(n);
FF ND=p.invmod2m();
w.copy(this);
one();
nres(p);
w.nres(p);
for (i=8*ROM.MODBYTES*n-1;i>=0;i--)
{
modsqr(p,ND);
b=e.v[i/ROM.BIGBITS].bit(i%ROM.BIGBITS);
if (b==1) modmul(w,p,ND);
}
redc(p,ND);
}
/* double exponentiation r=x^e.y^f mod p */
/* Shamir's trick: precompute x*y so each bit pair (eb,fb) costs one square
   plus at most one multiply. Exponents are single BIGs. Not side-channel
   resistant. */
public void pow2(BIG e,FF y,BIG f,FF p)
{
int i,eb,fb,n=p.length;
FF xn=new FF(n);
FF yn=new FF(n);
FF xy=new FF(n);
FF ND=p.invmod2m();
xn.copy(this);
yn.copy(y);
xn.nres(p);
yn.nres(p);
xy.copy(xn); xy.modmul(yn,p,ND); // precomputed x*y in Montgomery form
one();
nres(p);
for (i=8*ROM.MODBYTES-1;i>=0;i--)
{
eb=e.bit(i);
fb=f.bit(i);
modsqr(p,ND);
if (eb==1)
{
if (fb==1) modmul(xy,p,ND);
else modmul(xn,p,ND);
}
else
{
if (fb==1) modmul(yn,p,ND);
}
}
redc(p,ND);
}
/* Integer GCD via Euclid's algorithm; returns gcd(x, y). */
private static int igcd(int x,int y)
{
    while (y != 0)
    {
        int r = x % y;
        x = y;
        y = r;
    }
    return x;
}
/* Quick and dirty check for a common factor between this and the small
   integer s (the original header comment said "with n", but the argument
   is s). Reduces this against s with a binary-GCD-style subtract/halve
   loop, then takes an integer gcd of the surviving low word.
   Returns true if a non-trivial common factor was found. */
public boolean cfactor(int s)
{
    int n = length;
    FF x = new FF(n);
    FF y = new FF(n);
    y.set(s);
    x.copy(this);
    x.norm();
    // subtract y and strip factors of two until x <= y
    do
    {
        x.sub(y);
        x.norm();
        while (!x.iszilch() && x.parity() == 0) x.shr();
    }
    while (comp(x, y) > 0);
    // x now fits in a single word; a common factor exists iff gcd(s, x) > 1
    int g = (int) x.v[0].get(0);
    return igcd(s, g) > 1;
}
/* Miller-Rabin test for primality. Slow. */
/* Probabilistic: 10 random witnesses, so a composite escapes with
   probability at most 4^-10. Returns false quickly for numbers sharing a
   factor with 3*5*7*11*13*17*19 = 4849845. NOTE: mutates p via p.norm(). */
public static boolean prime(FF p,RAND rng)
{
int i,j,s=0,n=p.length;
boolean loop;
FF d=new FF(n);
FF x=new FF(n);
FF unity=new FF(n);
FF nm1=new FF(n);
int sf=4849845; /* 3*5*.. *19 */
p.norm();
if (p.cfactor(sf)) return false; // cheap small-factor screen
unity.one();
nm1.copy(p);
nm1.sub(unity);
nm1.norm();
// write p-1 = d * 2^s with d odd
d.copy(nm1);
while (d.parity()==0)
{
d.shr();
s++;
}
if (s==0) return false; // p-1 odd => p even => composite (p>2 assumed)
for (i=0;i<10;i++)
{
x.randomnum(p,rng);
x.pow(d,p);
if (comp(x,unity)==0 || comp(x,nm1)==0) continue;
loop=false;
// square up to s-1 times looking for x == p-1
for (j=1;j<s;j++)
{
x.power(2,p);
if (comp(x,unity)==0) return false; // non-trivial sqrt of 1 => composite
if (comp(x,nm1)==0) {loop=true; break;}
}
if (loop) continue;
return false; // witness proves compositeness
}
return true;
}
} | apache-2.0 |
pdxrunner/geode | extensions/geode-modules/src/main/java/org/apache/geode/modules/session/catalina/DeltaSessionManager.java | 32261 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.modules.session.catalina;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;
import java.util.Timer;
import java.util.TimerTask;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.catalina.Container;
import org.apache.catalina.Context;
import org.apache.catalina.Lifecycle;
import org.apache.catalina.Loader;
import org.apache.catalina.Pipeline;
import org.apache.catalina.Session;
import org.apache.catalina.Valve;
import org.apache.catalina.session.ManagerBase;
import org.apache.catalina.session.StandardSession;
import org.apache.catalina.util.CustomObjectInputStream;
import org.apache.juli.logging.Log;
import org.apache.juli.logging.LogFactory;
import org.apache.geode.cache.CacheFactory;
import org.apache.geode.cache.Region;
import org.apache.geode.cache.query.Query;
import org.apache.geode.cache.query.QueryService;
import org.apache.geode.cache.query.SelectResults;
import org.apache.geode.internal.cache.GemFireCacheImpl;
import org.apache.geode.modules.session.catalina.internal.DeltaSessionStatistics;
import org.apache.geode.modules.util.ContextMapper;
import org.apache.geode.modules.util.RegionConfiguration;
import org.apache.geode.modules.util.RegionHelper;
public abstract class DeltaSessionManager extends ManagerBase
implements Lifecycle, PropertyChangeListener, SessionManager {
/**
* The number of rejected sessions.
*/
private AtomicInteger rejectedSessions;
/**
* The maximum number of active Sessions allowed, or -1 for no limit.
*/
protected int maxActiveSessions = -1;
/**
* Has this <code>Manager</code> been started?
*/
protected AtomicBoolean started = new AtomicBoolean(false);
/**
* The name of this <code>Manager</code>
*/
protected String name;
protected Valve jvmRouteBinderValve;
protected Valve commitSessionValve;
protected SessionCache sessionCache;
protected static final String DEFAULT_REGION_NAME = RegionHelper.NAME + "_sessions";
protected static final boolean DEFAULT_ENABLE_GATEWAY_DELTA_REPLICATION = false;
protected static final boolean DEFAULT_ENABLE_GATEWAY_REPLICATION = false;
protected static final boolean DEFAULT_ENABLE_DEBUG_LISTENER = false;
protected static final boolean DEFAULT_ENABLE_COMMIT_VALVE = true;
protected static final boolean DEFAULT_ENABLE_COMMIT_VALVE_FAILFAST = false;
protected static final boolean DEFAULT_PREFER_DESERIALIZED_FORM = true;
/*
* This *MUST* only be assigned during start/startInternal otherwise it will be associated with
* the incorrect context class loader.
*/
protected Log LOGGER;
protected String regionName = DEFAULT_REGION_NAME;
protected String regionAttributesId; // the default is different for client-server and
// peer-to-peer
protected Boolean enableLocalCache; // the default is different for client-server and peer-to-peer
protected boolean enableCommitValve = DEFAULT_ENABLE_COMMIT_VALVE;
protected boolean enableCommitValveFailfast = DEFAULT_ENABLE_COMMIT_VALVE_FAILFAST;
protected boolean enableGatewayDeltaReplication = DEFAULT_ENABLE_GATEWAY_DELTA_REPLICATION;
protected boolean enableGatewayReplication = DEFAULT_ENABLE_GATEWAY_REPLICATION;
protected boolean enableDebugListener = DEFAULT_ENABLE_DEBUG_LISTENER;
protected boolean preferDeserializedForm = DEFAULT_PREFER_DESERIALIZED_FORM;
private Timer timer;
private final Set<String> sessionsToTouch;
private static final long TIMER_TASK_PERIOD =
Long.getLong("gemfiremodules.sessionTimerTaskPeriod", 10000);
private static final long TIMER_TASK_DELAY =
Long.getLong("gemfiremodules.sessionTimerTaskDelay", 10000);
public DeltaSessionManager() {
  // Create the set to store sessions to be touched after get attribute requests.
  // Backed by a ConcurrentHashMap so request threads and the touch timer task
  // can modify it concurrently.
  this.sessionsToTouch = Collections.newSetFromMap(new ConcurrentHashMap<String, Boolean>());
  // Defensive: the rejected-sessions counter is dereferenced by
  // get/setRejectedSessions but is not initialized anywhere visible in this
  // class; initialize it here so those accessors can never NPE.
  this.rejectedSessions = new AtomicInteger(0);
}
/** Returns the name of the Geode region backing this manager's sessions. */
@Override
public String getRegionName() {
return this.regionName;
}
public void setRegionName(String regionName) {
this.regionName = regionName;
}
@Override
public void setMaxInactiveInterval(final int interval) {
super.setMaxInactiveInterval(interval);
}
@Override
public String getRegionAttributesId() {
// This property will be null if it hasn't been set in the context.xml file.
// Since its default is dependent on the session cache, get the default from
// the session cache.
if (this.regionAttributesId == null) {
this.regionAttributesId = getSessionCache().getDefaultRegionAttributesId();
}
return this.regionAttributesId;
}
public void setRegionAttributesId(String regionType) {
this.regionAttributesId = regionType;
}
@Override
public boolean getEnableLocalCache() {
// This property will be null if it hasn't been set in the context.xml file.
// Since its default is dependent on the session cache, get the default from
// the session cache.
if (this.enableLocalCache == null) {
this.enableLocalCache = getSessionCache().getDefaultEnableLocalCache();
}
return this.enableLocalCache;
}
public void setEnableLocalCache(boolean enableLocalCache) {
this.enableLocalCache = enableLocalCache;
}
/** Maximum number of active sessions allowed, or -1 for no limit. */
public int getMaxActiveSessions() {
return this.maxActiveSessions;
}
/**
 * Sets the maximum number of active sessions allowed (-1 for no limit) and
 * notifies registered listeners of the change.
 * Uses PropertyChangeSupport's (String, int, int) overload instead of the
 * deprecated {@code new Integer(...)} constructors; listeners still receive
 * boxed Integer old/new values.
 */
public void setMaxActiveSessions(int maxActiveSessions) {
  int oldMaxActiveSessions = this.maxActiveSessions;
  this.maxActiveSessions = maxActiveSessions;
  support.firePropertyChange("maxActiveSessions", oldMaxActiveSessions,
      this.maxActiveSessions);
}
@Override
public boolean getEnableGatewayDeltaReplication() {
// return this.enableGatewayDeltaReplication;
return false; // disabled - the configured field is intentionally ignored
}
public void setEnableGatewayDeltaReplication(boolean enableGatewayDeltaReplication) {
this.enableGatewayDeltaReplication = enableGatewayDeltaReplication;
}
@Override
public boolean getEnableGatewayReplication() {
return this.enableGatewayReplication;
}
public void setEnableGatewayReplication(boolean enableGatewayReplication) {
this.enableGatewayReplication = enableGatewayReplication;
}
@Override
public boolean getEnableDebugListener() {
return this.enableDebugListener;
}
public void setEnableDebugListener(boolean enableDebugListener) {
this.enableDebugListener = enableDebugListener;
}
@Override
public boolean isCommitValveEnabled() {
return this.enableCommitValve;
}
public void setEnableCommitValve(boolean enable) {
this.enableCommitValve = enable;
}
@Override
public boolean isCommitValveFailfastEnabled() {
return this.enableCommitValveFailfast;
}
public void setEnableCommitValveFailfast(boolean enable) {
this.enableCommitValveFailfast = enable;
}
@Override
public boolean isBackingCacheAvailable() {
return sessionCache.isBackingCacheAvailable();
}
public void setPreferDeserializedForm(boolean enable) {
this.preferDeserializedForm = enable;
}
@Override
public boolean getPreferDeserializedForm() {
return this.preferDeserializedForm;
}
/** Statistics name derived from the context name with slashes stripped. */
@Override
public String getStatisticsName() {
return getContextName().replace("/", "");
}
/**
 * Lazily creates the logger. The unsynchronized check is a benign race:
 * concurrent callers may each create a logger, but LogFactory.getLog returns
 * an equivalent instance either way. See the field comment: LOGGER should
 * normally be assigned during start so it binds the right class loader.
 */
@Override
public Log getLogger() {
if (LOGGER == null) {
LOGGER = LogFactory.getLog(DeltaSessionManager.class);
}
return LOGGER;
}
public SessionCache getSessionCache() {
return this.sessionCache;
}
public DeltaSessionStatistics getStatistics() {
return getSessionCache().getStatistics();
}
/** True when the session cache runs in peer-to-peer topology. */
public boolean isPeerToPeer() {
return getSessionCache().isPeerToPeer();
}
/** True when the session cache runs in client-server topology. */
public boolean isClientServer() {
return getSessionCache().isClientServer();
}
/**
 * This method was taken from StandardManager to set the default maxInactiveInterval based on the
 * container (to 30 minutes).
 * <p>
 * Set the Container with which this Manager has been associated. If it is a Context (the usual
 * case), listen for changes to the session timeout property.
 *
 * @param container The associated Container
 */
@Override
public void setContainer(Container container) {
// De-register from the old Container (if any)
if ((this.container != null) && (this.container instanceof Context)) {
((Context) this.container).removePropertyChangeListener(this);
}
// Default processing provided by our superclass
super.setContainer(container);
// Register with the new Container (if any)
if ((this.container != null) && (this.container instanceof Context)) {
// Overwrite the max inactive interval with the context's session timeout.
// Context#getSessionTimeout is in minutes; the manager works in seconds.
setMaxInactiveInterval(((Context) this.container).getSessionTimeout() * 60);
((Context) this.container).addPropertyChangeListener(this);
}
}
/**
 * Looks up a session by id in the Geode session region.
 *
 * @param id session id, may be null (returns null)
 * @return the session, or null if absent or belonging to another context
 * @throws IOException declared by the Manager interface contract
 */
@Override
public Session findSession(String id) throws IOException {
  if (id == null) {
    return null;
  }
  if (getLogger().isDebugEnabled()) {
    getLogger().debug(
        this + ": Finding session " + id + " in " + getSessionCache().getOperatingRegionName());
  }
  DeltaSessionInterface session = (DeltaSessionInterface) getSessionCache().getSession(id);
  /*
   * Check that the context name for this session is the same as this manager's. This comes into
   * play when multiple versions of a webapp are deployed and active at the same time; the context
   * name will contain an embedded version number; something like /test###2.
   */
  if (session != null && !session.getContextName().isEmpty()
      && !getContextName().equals(session.getContextName())) {
    getLogger()
        .info(this + ": Session " + id + " rejected as container name and context do not match: "
            + getContextName() + " != " + session.getContextName());
    session = null;
  }
  if (session == null) {
    if (getLogger().isDebugEnabled()) {
      getLogger().debug(this + ": Did not find session " + id + " in "
          + getSessionCache().getOperatingRegionName());
    }
  } else {
    if (getLogger().isDebugEnabled()) {
      getLogger().debug(this + ": Found session " + id + " in "
          + getSessionCache().getOperatingRegionName() + ": " + session);
    }
    // The session was previously stored. Set new to false.
    session.setNew(false);
    // If the manager is null, the session was replicated and this is a
    // failover situation. Reset the manager and activate the session.
    // (The original re-cast 'session' to DeltaSessionInterface here, which is
    // its declared type already; the redundant cast has been removed.)
    if (session.getManager() == null) {
      session.setOwner(this);
      session.activate();
    }
  }
  return session;
}
/**
 * Creates and initializes the session cache from the already-created Geode
 * cache, choosing client-server or peer-to-peer mode to match the cache.
 *
 * @throws IllegalStateException if no Geode cache exists yet
 */
protected void initializeSessionCache() {
// Retrieve the cache
GemFireCacheImpl cache = (GemFireCacheImpl) CacheFactory.getAnyInstance();
if (cache == null) {
throw new IllegalStateException(
"No cache exists. Please configure either a PeerToPeerCacheLifecycleListener or ClientServerCacheLifecycleListener in the server.xml file.");
}
// Create the appropriate session cache
this.sessionCache = cache.isClient() ? new ClientServerSessionCache(this, cache)
: new PeerToPeerSessionCache(this, cache);
// Initialize the session cache
this.sessionCache.initialize();
}
/** Factory hook: all sessions created by this manager are DeltaSessions. */
@Override
protected StandardSession getNewSession() {
return new DeltaSession(this);
}
@Override
public void remove(Session session) {
remove(session, false);
}
/**
 * Removes a session, destroying it in the backing region unless it has
 * already expired (expiry removes it from the region implicitly).
 * NOTE(review): the 'update' parameter is not used in the visible body.
 */
public void remove(Session session, boolean update) {
// super.remove(session);
// Remove the session from the region if necessary.
// It will have already been removed if it expired implicitly.
DeltaSessionInterface ds = (DeltaSessionInterface) session;
if (ds.getExpired()) {
if (getLogger().isDebugEnabled()) {
getLogger().debug(this + ": Expired session " + session.getId() + " from "
+ getSessionCache().getOperatingRegionName());
}
} else {
if (getLogger().isDebugEnabled()) {
getLogger().debug(this + ": Destroying session " + session.getId() + " from "
+ getSessionCache().getOperatingRegionName());
}
getSessionCache().destroySession(session.getId());
if (getLogger().isDebugEnabled()) {
getLogger().debug(this + ": Destroyed session " + session.getId() + " from "
+ getSessionCache().getOperatingRegionName());
}
}
}
/**
 * Stores a session into the backing region (bypassing the superclass's
 * in-memory map) and bumps the sessions-created statistic.
 */
@Override
public void add(Session session) {
// super.add(session);
if (getLogger().isDebugEnabled()) {
getLogger().debug(this + ": Storing session " + session.getId() + " into "
+ getSessionCache().getOperatingRegionName());
}
getSessionCache().putSession(session);
if (getLogger().isDebugEnabled()) {
getLogger().debug(this + ": Stored session " + session.getId() + " into "
+ getSessionCache().getOperatingRegionName());
}
getSessionCache().getStatistics().incSessionsCreated();
}
// NOTE(review): rejectedSessions is not initialized in any code visible in
// this class; these accessors assume it is assigned elsewhere (presumably
// during start) - confirm, or they will NPE.
@Override
public int getRejectedSessions() {
return this.rejectedSessions.get();
}
public void setRejectedSessions(int rejectedSessions) {
this.rejectedSessions.set(rejectedSessions);
}
private void incrementRejectedSessions() {
this.rejectedSessions.incrementAndGet();
}
/**
 * Returns the number of active sessions
 *
 * @return number of sessions active
 */
@Override
public int getActiveSessions() {
return getSessionCache().size();
}
/**
 * For debugging: return a space-separated list of all session ids currently
 * active in the session cache.
 */
@Override
public String listSessionIds() {
  StringBuilder builder = new StringBuilder();
  String separator = "";
  for (String sessionId : getSessionCache().keySet()) {
    builder.append(separator).append(sessionId);
    separator = " ";
  }
  return builder.toString();
}
/*
 * If local caching is enabled, add the session to the set of sessions to be touched. A timer task
 * will be periodically invoked to get the session in the session region to update its last
 * accessed time. This prevents the session from expiring in the case where the application is
 * only getting attributes from the session and never putting attributes into the session. If
 * local caching is disabled. the session's last accessed time would already have been updated
 * properly in the sessions region.
 *
 * Note: Due to issues in GemFire expiry, sessions are always asynchronously touched using a
 * function regardless whether or not local caching is enabled. This prevents premature
 * expiration.
 */
protected void addSessionToTouch(String sessionId) {
this.sessionsToTouch.add(sessionId);
}
/** The concurrent set of session ids pending a touch by the timer task. */
protected Set<String> getSessionsToTouch() {
return this.sessionsToTouch;
}
/** Removes a pending touch; returns true if the id was queued. */
protected boolean removeTouchedSession(String sessionId) {
return this.sessionsToTouch.remove(sessionId);
}
/**
 * Creates the manager's daemon timer and schedules the two periodic tasks:
 * touching recently-read sessions and tracking the max-active high-water mark.
 */
protected void scheduleTimerTasks() {
// Create the timer (daemon so it cannot keep the JVM alive)
this.timer = new Timer("Timer for " + toString(), true);
// Schedule the task to handle sessions to be touched
scheduleTouchSessionsTask();
// Schedule the task to maintain the maxActive sessions
scheduleDetermineMaxActiveSessionsTask();
}
/**
 * Schedules the periodic task that touches sessions whose attributes were
 * read since the last run, keeping them from expiring in the region.
 */
private void scheduleTouchSessionsTask() {
  TimerTask task = new TimerTask() {
    @Override
    public void run() {
      // Snapshot the pending ids then clear the pending set. The backing set
      // is ConcurrentHashMap-based, so ids added between the copy and the
      // clear may be dropped from this cycle and picked up on the next one.
      // (The original initialized the local to null, then unconditionally
      // reassigned it and null-checked it - both dead; removed.)
      Set<String> sessionIds = new HashSet<String>(getSessionsToTouch());
      getSessionsToTouch().clear();
      // Touch the sessions we currently have
      if (!sessionIds.isEmpty()) {
        getSessionCache().touchSessions(sessionIds);
        if (getLogger().isDebugEnabled()) {
          getLogger().debug(DeltaSessionManager.this + ": Touched sessions: " + sessionIds);
        }
      }
    }
  };
  this.timer.schedule(task, TIMER_TASK_DELAY, TIMER_TASK_PERIOD);
}
/** Cancels the manager's timer, if one was ever scheduled. */
protected void cancelTimer() {
  if (this.timer == null) {
    return;
  }
  this.timer.cancel();
}
/**
 * Schedules the periodic task that records the high-water mark of active
 * sessions (ManagerBase's maxActive metric) from the session cache size.
 */
private void scheduleDetermineMaxActiveSessionsTask() {
TimerTask task = new TimerTask() {
@Override
public void run() {
int currentActiveSessions = getSessionCache().size();
if (currentActiveSessions > getMaxActive()) {
setMaxActive(currentActiveSessions);
if (getLogger().isDebugEnabled()) {
getLogger().debug(
DeltaSessionManager.this + ": Set max active sessions: " + currentActiveSessions);
}
}
}
};
this.timer.schedule(task, TIMER_TASK_DELAY, TIMER_TASK_PERIOD);
}
/** Loads persisted sessions and registers this manager for its context. */
@Override
public void load() throws ClassNotFoundException, IOException {
doLoad();
ContextMapper.addContext(getContextName(), this);
}
/** Persists active sessions and deregisters this manager for its context. */
@Override
public void unload() throws IOException {
doUnload();
ContextMapper.removeContext(getContextName());
}
/** Adds the JvmRouteBinderValve to the container pipeline (sticky failover). */
protected void registerJvmRouteBinderValve() {
if (getLogger().isDebugEnabled()) {
getLogger().debug(this + ": Registering JVM route binder valve");
}
jvmRouteBinderValve = new JvmRouteBinderValve();
getPipeline().addValve(jvmRouteBinderValve);
}
protected Pipeline getPipeline() {
return getContainer().getPipeline();
}
/** Removes the JvmRouteBinderValve, if it was registered. */
protected void unregisterJvmRouteBinderValve() {
if (getLogger().isDebugEnabled()) {
getLogger().debug(this + ": Unregistering JVM route binder valve");
}
if (jvmRouteBinderValve != null) {
getPipeline().removeValve(jvmRouteBinderValve);
}
}
/** Adds the CommitSessionValve to the container pipeline. */
protected void registerCommitSessionValve() {
if (getLogger().isDebugEnabled()) {
getLogger().debug(this + ": Registering CommitSessionValve");
}
commitSessionValve = new CommitSessionValve();
getPipeline().addValve(commitSessionValve);
}
/** Removes the CommitSessionValve, if it was registered. */
protected void unregisterCommitSessionValve() {
if (getLogger().isDebugEnabled()) {
getLogger().debug(this + ": Unregistering CommitSessionValve");
}
if (commitSessionValve != null) {
getPipeline().removeValve(commitSessionValve);
}
}
// ------------------------------ Lifecycle Methods

/**
 * Process property change events from our associated Context.
 * <p>
 * Part of this method implementation was taken from StandardManager. The sessionTimeout can be
 * changed in the web.xml which is processed after the context.xml. The context (and the default
 * session timeout) would already have been set in this Manager. This is the way to get the new
 * session timeout value specified in the web.xml.
 * <p>
 * The precedence order for setting the session timeout value is:
 * <p>
 * <ol>
 * <li>the max inactive interval is set based on the Manager defined in the context.xml
 * <li>the max inactive interval is then overwritten by the value of the Context's session timeout
 * when setContainer is called
 * <li>the max inactive interval is then overwritten by the value of the session-timeout specified
 * in the web.xml (if any)
 * </ol>
 *
 * @param event The property change event that has occurred
 */
@Override
public void propertyChange(PropertyChangeEvent event) {
  // Validate the source of this event. (The original also declared an unused
  // local 'context' cast from the source; removed.)
  if (!(event.getSource() instanceof Context)) {
    return;
  }
  // Process a relevant property change
  if (event.getPropertyName().equals("sessionTimeout")) {
    // NumberFormatException catch retained from StandardManager heritage,
    // where the new value arrived as a String.
    try {
      int interval = ((Integer) event.getNewValue()).intValue();
      if (interval < RegionConfiguration.DEFAULT_MAX_INACTIVE_INTERVAL) {
        getLogger().warn("The configured session timeout of " + interval
            + " minutes is invalid. Using the original value of " + event.getOldValue()
            + " minutes.");
        interval = ((Integer) event.getOldValue()).intValue();
      }
      // StandardContext.setSessionTimeout passes -1 if the configured timeout
      // is 0; otherwise it passes the value set in web.xml. If the interval
      // parameter equals the default, set the max inactive interval to the
      // default (no expiration); otherwise set it in seconds.
      setMaxInactiveInterval(interval == RegionConfiguration.DEFAULT_MAX_INACTIVE_INTERVAL
          ? RegionConfiguration.DEFAULT_MAX_INACTIVE_INTERVAL : interval * 60);
    } catch (NumberFormatException e) {
      getLogger()
          .error(sm.getString("standardManager.sessionTimeout", event.getNewValue().toString()));
    }
  }
}
/**
 * Save any currently active sessions in the appropriate persistence mechanism, if any. If
 * persistence is not supported, this method returns without doing anything.
 *
 * <p>The session ids belonging to this web application are located with an OQL query against
 * the session region; the matching local session objects are then passivated and serialized
 * to the file returned by {@code sessionStore}.
 *
 * @throws IOException if an input/output error occurs
 */
protected void doUnload() throws IOException {
  QueryService querySvc = sessionCache.getCache().getQueryService();
  Context context = getTheContext();
  if (context == null) {
    return;
  }
  // OQL region paths must be absolute; normalize the configured region name.
  String regionName;
  if (getRegionName().startsWith("/")) {
    regionName = getRegionName();
  } else {
    regionName = "/" + getRegionName();
  }
  // Select the ids of all sessions that belong to this web application's context.
  Query query = querySvc.newQuery("select s.id from " + regionName
      + " as s where s.contextName = '" + context.getPath() + "'");
  getLogger().debug("Query: " + query.getQueryString());
  SelectResults results;
  try {
    results = (SelectResults) query.execute();
  } catch (Exception ex) {
    // Best-effort: unloading is an optimization, so a failed query aborts quietly.
    getLogger().error("Unable to perform query during doUnload", ex);
    return;
  }
  if (results.isEmpty()) {
    getLogger().debug("No sessions to unload for context " + context.getPath());
    return; // nothing to do
  }
  // Open an output stream to the specified pathname, if any.
  File store = sessionStore(context.getPath());
  if (store == null) {
    return;
  }
  if (getLogger().isDebugEnabled()) {
    getLogger().debug("Unloading sessions to " + store.getAbsolutePath());
  }
  FileOutputStream fos = null;
  BufferedOutputStream bos = null;
  ObjectOutputStream oos = null;
  boolean error = false;
  try {
    fos = new FileOutputStream(store.getAbsolutePath());
    bos = new BufferedOutputStream(fos);
    oos = new ObjectOutputStream(bos);
  } catch (IOException e) {
    error = true;
    getLogger().error("Exception unloading sessions", e);
    throw e;
  } finally {
    // Clean up only on failure; on success the stream stays open for the writes
    // below and is closed at the end of the method.
    if (error) {
      if (oos != null) {
        try {
          oos.close();
        } catch (IOException ioe) {
          // Ignore
        }
      }
      if (bos != null) {
        try {
          bos.close();
        } catch (IOException ioe) {
          // Ignore
        }
      }
      if (fos != null) {
        try {
          fos.close();
        } catch (IOException ioe) {
          // Ignore
        }
      }
    }
  }
  // Resolve the queried ids to local session objects; ids that no longer resolve
  // (expired or already removed) are silently skipped.
  ArrayList<DeltaSessionInterface> list = new ArrayList<>();
  Iterator<String> elements = results.iterator();
  while (elements.hasNext()) {
    String id = elements.next();
    DeltaSessionInterface session = (DeltaSessionInterface) findSession(id);
    if (session != null) {
      list.add(session);
    }
  }
  // Write the number of active sessions, followed by the details.
  if (getLogger().isDebugEnabled()) {
    getLogger().debug("Unloading " + list.size() + " sessions");
  }
  try {
    oos.writeObject(Integer.valueOf(list.size()));
    for (DeltaSessionInterface session : list) {
      if (session instanceof StandardSession) {
        StandardSession standardSession = (StandardSession) session;
        standardSession.passivate();
        standardSession.writeObjectData(oos);
      } else {
        // All DeltaSessionInterfaces as of Geode 1.0 should be based on StandardSession
        throw new IOException("Session should be of type StandardSession");
      }
    }
  } catch (IOException e) {
    getLogger().error("Exception unloading sessions", e);
    try {
      oos.close();
    } catch (IOException f) {
      // Ignore
    }
    throw e;
  }
  // Flush and close the output stream.
  try {
    oos.flush();
  } finally {
    try {
      oos.close();
    } catch (IOException f) {
      // Ignore
    }
  }
  // Locally destroy the sessions we just wrote; in client/server mode the
  // authoritative copy lives on the server side.
  if (getSessionCache().isClientServer()) {
    for (DeltaSessionInterface session : list) {
      if (getLogger().isDebugEnabled()) {
        getLogger().debug("Locally destroying session " + session.getId());
      }
      getSessionCache().getOperatingRegion().localDestroy(session.getId());
    }
  }
  if (getLogger().isDebugEnabled()) {
    getLogger().debug("Unloading complete");
  }
}
/**
 * Load any currently active sessions that were previously unloaded to the appropriate persistence
 * mechanism, if any. If persistence is not supported, this method returns without doing anything.
 *
 * <p>Sessions are deserialized from the file written by {@code doUnload}. A session is skipped
 * when the cache already holds a more recently accessed copy, or when the loaded session has
 * expired. The persistence file is deleted afterwards regardless of outcome.
 *
 * @throws ClassNotFoundException if a serialized class cannot be found during the reload
 * @throws IOException if an input/output error occurs
 */
protected void doLoad() throws ClassNotFoundException, IOException {
  Context context = getTheContext();
  if (context == null) {
    return;
  }
  // Open an input stream to the specified pathname, if any.
  File store = sessionStore(context.getPath());
  if (store == null) {
    getLogger().debug("No session store file found");
    return;
  }
  if (getLogger().isDebugEnabled()) {
    getLogger().debug("Loading sessions from " + store.getAbsolutePath());
  }
  FileInputStream fis = null;
  BufferedInputStream bis = null;
  ObjectInputStream ois = null;
  Loader loader = null;
  ClassLoader classLoader = null;
  try {
    fis = new FileInputStream(store.getAbsolutePath());
    bis = new BufferedInputStream(fis);
    // Use the web application's class loader when available so application
    // classes referenced by the serialized sessions can be resolved.
    if (getTheContext() != null) {
      loader = getTheContext().getLoader();
    }
    if (loader != null) {
      classLoader = loader.getClassLoader();
    }
    if (classLoader != null) {
      if (getLogger().isDebugEnabled()) {
        getLogger().debug("Creating custom object input stream for class loader");
      }
      ois = new CustomObjectInputStream(bis, classLoader);
    } else {
      if (getLogger().isDebugEnabled()) {
        getLogger().debug("Creating standard object input stream");
      }
      ois = new ObjectInputStream(bis);
    }
  } catch (FileNotFoundException e) {
    // No persisted file simply means there is nothing to load.
    if (getLogger().isDebugEnabled()) {
      getLogger().debug("No persisted data file found");
    }
    return;
  } catch (IOException e) {
    getLogger().error("Exception loading sessions", e);
    if (fis != null) {
      try {
        fis.close();
      } catch (IOException f) {
        // Ignore
      }
    }
    if (bis != null) {
      try {
        bis.close();
      } catch (IOException f) {
        // Ignore
      }
    }
    throw e;
  }
  // Load the previously unloaded active sessions.
  try {
    int n = ((Integer) ois.readObject()).intValue();
    if (getLogger().isDebugEnabled()) {
      getLogger().debug("Loading " + n + " persisted sessions");
    }
    for (int i = 0; i < n; i++) {
      StandardSession session = getNewSession();
      session.readObjectData(ois);
      session.setManager(this);
      Region region = getSessionCache().getOperatingRegion();
      DeltaSessionInterface existingSession = (DeltaSessionInterface) region.get(session.getId());
      // Check whether the existing session is newer; if so, keep the cached copy.
      if (existingSession != null
          && existingSession.getLastAccessedTime() > session.getLastAccessedTime()) {
        if (getLogger().isDebugEnabled()) {
          getLogger().debug("Loaded session " + session.getId() + " is older than cached copy");
        }
        continue;
      }
      // Check whether the new session has already expired.
      if (!session.isValid()) {
        if (getLogger().isDebugEnabled()) {
          getLogger().debug("Loaded session " + session.getId() + " is invalid");
        }
        continue;
      }
      getLogger().debug("Loading session " + session.getId());
      session.activate();
      add(session);
    }
  } catch (ClassNotFoundException | IOException e) {
    // Precise rethrow preserves the declared exception types. The finally block
    // below closes the stream, so no explicit close is needed here.
    getLogger().error(e);
    throw e;
  } finally {
    // Close the input stream.
    try {
      ois.close();
    } catch (IOException f) {
      // ignored
    }
    // Delete the persistent storage file.
    if (store.exists()) {
      store.delete();
    }
  }
}
/**
 * Return a File object representing the pathname to our persistence file, if any.
 *
 * <p>The file lives under {@code $catalina.base/temp} when running inside a Tomcat
 * installation, otherwise under the JVM temporary directory. Slashes in the context
 * path are flattened to underscores so the path is a legal file name.
 *
 * @param ctxPath the web application context path used to derive the file name
 */
private File sessionStore(String ctxPath) {
  String storeDir = System.getProperty("catalina.base");
  if (storeDir == null || storeDir.isEmpty()) {
    storeDir = System.getProperty("java.io.tmpdir");
  } else {
    storeDir += File.separator + "temp";
  }
  // replace(char, char) avoids the regex machinery of replaceAll for a plain
  // character substitution; the result is identical.
  return new File(storeDir, ctxPath.replace('/', '_') + ".sessions.ser");
}
@Override
public String toString() {
  // Identify this manager by its container context and the region configuration
  // it manages; plain concatenation compiles to the same StringBuilder chain.
  return getClass().getSimpleName() + "[" + "container="
      + getTheContext() + "; regionName=" + this.regionName
      + "; regionAttributesId=" + this.regionAttributesId + "]";
}
/** Returns the name of the web application context this manager is attached to. */
protected String getContextName() {
  Context context = getTheContext();
  return context.getName();
}
/**
 * Returns the container cast to a servlet {@code Context}, or {@code null} (after
 * logging an error) when this manager is deployed in some other container type.
 */
public Context getTheContext() {
  // Guard clause: anything other than a Context is unusable for session work.
  if (!(getContainer() instanceof Context)) {
    getLogger().error("Unable to unload sessions - container is of type "
        + getContainer().getClass().getName() + " instead of StandardContext");
    return null;
  }
  return (Context) getContainer();
}
}
| apache-2.0 |
fhueske/flink | flink-table/flink-table-api-java/src/test/java/org/apache/flink/table/catalog/GenericInMemoryCatalogTest.java | 6719 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.catalog;
import org.apache.flink.table.catalog.stats.CatalogColumnStatistics;
import org.apache.flink.table.catalog.stats.CatalogColumnStatisticsDataBase;
import org.apache.flink.table.catalog.stats.CatalogColumnStatisticsDataBinary;
import org.apache.flink.table.catalog.stats.CatalogColumnStatisticsDataBoolean;
import org.apache.flink.table.catalog.stats.CatalogColumnStatisticsDataDate;
import org.apache.flink.table.catalog.stats.CatalogColumnStatisticsDataDouble;
import org.apache.flink.table.catalog.stats.CatalogColumnStatisticsDataLong;
import org.apache.flink.table.catalog.stats.CatalogColumnStatisticsDataString;
import org.apache.flink.table.catalog.stats.CatalogTableStatistics;
import org.apache.flink.table.catalog.stats.Date;
import org.junit.BeforeClass;
import org.junit.Test;
import java.util.HashMap;
import java.util.Map;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
/**
 * Test for GenericInMemoryCatalog.
 *
 * <p>Catalog fixtures (databases, tables, paths such as {@code db1}/{@code path1}) come from
 * the {@code CatalogTestBase} parent; the shared {@code catalog} field is initialized once for
 * the whole class.
 */
public class GenericInMemoryCatalogTest extends CatalogTestBase {

  @BeforeClass
  public static void init() {
    // One shared in-memory catalog instance for all test methods.
    catalog = new GenericInMemoryCatalog(TEST_CATALOG_NAME);
    catalog.open();
  }

  // ------ tables ------

  @Test
  public void testDropTable_partitionedTable() throws Exception {
    // Build a partitioned table with one partition.
    catalog.createDatabase(db1, createDb(), false);
    catalog.createTable(path1, createPartitionedTable(), false);
    CatalogPartition catalogPartition = createPartition();
    CatalogPartitionSpec catalogPartitionSpec = createPartitionSpec();
    catalog.createPartition(path1, catalogPartitionSpec, catalogPartition, false);

    assertTrue(catalog.tableExists(path1));

    catalog.dropTable(path1, false);

    // Dropping the table must also drop its partitions.
    assertFalse(catalog.tableExists(path1));
    assertFalse(catalog.partitionExists(path1, catalogPartitionSpec));
  }

  @Test
  public void testRenameTable_partitionedTable() throws Exception {
    catalog.createDatabase(db1, createDb(), false);
    CatalogTable table = createPartitionedTable();
    catalog.createTable(path1, table, false);
    CatalogPartition catalogPartition = createPartition();
    CatalogPartitionSpec catalogPartitionSpec = createPartitionSpec();
    catalog.createPartition(path1, catalogPartitionSpec, catalogPartition, false);

    CatalogTestUtil.checkEquals(table, (CatalogTable) catalog.getTable(path1));
    assertTrue(catalog.partitionExists(path1, catalogPartitionSpec));

    catalog.renameTable(path1, t2, false);

    // The renamed table (path3) carries its partition along; nothing remains at the
    // old path.
    CatalogTestUtil.checkEquals(table, (CatalogTable) catalog.getTable(path3));
    assertTrue(catalog.partitionExists(path3, catalogPartitionSpec));
    assertFalse(catalog.tableExists(path1));
    assertFalse(catalog.partitionExists(path1, catalogPartitionSpec));
  }

  // ------ statistics ------

  @Test
  public void testStatistics() throws Exception {
    // Table related: fresh tables report UNKNOWN stats until explicitly altered.
    catalog.createDatabase(db1, createDb(), false);
    CatalogTable table = createTable();
    catalog.createTable(path1, table, false);
    CatalogTestUtil.checkEquals(catalog.getTableStatistics(path1), CatalogTableStatistics.UNKNOWN);
    CatalogTestUtil.checkEquals(catalog.getTableColumnStatistics(path1), CatalogColumnStatistics.UNKNOWN);

    CatalogTableStatistics tableStatistics = new CatalogTableStatistics(5, 2, 100, 575);
    catalog.alterTableStatistics(path1, tableStatistics, false);
    CatalogTestUtil.checkEquals(tableStatistics, catalog.getTableStatistics(path1));
    CatalogColumnStatistics columnStatistics = createColumnStats();
    catalog.alterTableColumnStatistics(path1, columnStatistics, false);
    CatalogTestUtil.checkEquals(columnStatistics, catalog.getTableColumnStatistics(path1));

    // Partition related: the same UNKNOWN-then-altered cycle at partition scope.
    catalog.createDatabase(db2, createDb(), false);
    CatalogTable table2 = createPartitionedTable();
    catalog.createTable(path2, table2, false);
    CatalogPartitionSpec partitionSpec = createPartitionSpec();
    catalog.createPartition(path2, partitionSpec, createPartition(), false);
    CatalogTestUtil.checkEquals(catalog.getPartitionStatistics(path2, partitionSpec), CatalogTableStatistics.UNKNOWN);
    CatalogTestUtil.checkEquals(catalog.getPartitionColumnStatistics(path2, partitionSpec), CatalogColumnStatistics.UNKNOWN);

    catalog.alterPartitionStatistics(path2, partitionSpec, tableStatistics, false);
    CatalogTestUtil.checkEquals(tableStatistics, catalog.getPartitionStatistics(path2, partitionSpec));
    catalog.alterPartitionColumnStatistics(path2, partitionSpec, columnStatistics, false);
    CatalogTestUtil.checkEquals(columnStatistics, catalog.getPartitionColumnStatistics(path2, partitionSpec));

    // Clean up, since the catalog instance is shared across test methods.
    catalog.dropTable(path1, false);
    catalog.dropDatabase(db1, false);
    catalog.dropTable(path2, false);
    catalog.dropDatabase(db2, false);
  }

  // ------ utilities ------

  @Override
  protected boolean isGeneric() {
    return true;
  }

  // Builds one representative column-statistics entry per supported column data type.
  private CatalogColumnStatistics createColumnStats() {
    CatalogColumnStatisticsDataBoolean booleanColStats = new CatalogColumnStatisticsDataBoolean(55L, 45L, 5L);
    CatalogColumnStatisticsDataLong longColStats = new CatalogColumnStatisticsDataLong(-123L, 763322L, 23L, 79L);
    CatalogColumnStatisticsDataString stringColStats = new CatalogColumnStatisticsDataString(152L, 43.5D, 20L, 0L);
    CatalogColumnStatisticsDataDate dateColStats = new CatalogColumnStatisticsDataDate(new Date(71L),
        new Date(17923L), 1321, 0L);
    CatalogColumnStatisticsDataDouble doubleColStats = new CatalogColumnStatisticsDataDouble(-123.35D, 7633.22D, 23L, 79L);
    CatalogColumnStatisticsDataBinary binaryColStats = new CatalogColumnStatisticsDataBinary(755L, 43.5D, 20L);

    Map<String, CatalogColumnStatisticsDataBase> colStatsMap = new HashMap<>(6);
    colStatsMap.put("b1", booleanColStats);
    colStatsMap.put("l2", longColStats);
    colStatsMap.put("s3", stringColStats);
    colStatsMap.put("d4", dateColStats);
    colStatsMap.put("dd5", doubleColStats);
    colStatsMap.put("bb6", binaryColStats);
    return new CatalogColumnStatistics(colStatsMap);
  }
}
| apache-2.0 |
jayantgolhar/Hadoop-0.21.0 | mapred/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestSymLink.java | 5061 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.streaming;
import java.io.BufferedReader;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.io.StringWriter;
import org.junit.Test;
import static org.junit.Assert.*;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MiniMRCluster;
import org.apache.hadoop.mapred.Utils;
import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
/**
 * This test case tests the symlink creation
 * utility provided by distributed caching.
 *
 * <p>A one-node MiniDFSCluster and MiniMRCluster are spun up; a streaming job is
 * submitted whose mapper ({@code xargs cat}) reads file names from its input and
 * cats them. The input file contains the name {@code testlink}, which is the symlink
 * created by {@code -cacheFile ...#testlink}, so the job's output should be the
 * contents of the cached file.
 */
public class TestSymLink
{
  // HDFS paths and fixed payloads used by the streaming job.
  String INPUT_FILE = "/testing-streaming/input.txt";
  String OUTPUT_DIR = "/testing-streaming/out";
  String CACHE_FILE = "/testing-streaming/cache.txt";
  String input = "check to see if we can read this none reduce";
  String map = "xargs cat ";
  String reduce = "cat";
  // The mapper input: the symlink name, so `xargs cat` reads through the symlink.
  String mapString = "testlink\n";
  String cacheString = "This is just the cache string";
  StreamJob job;

  @Test
  public void testSymLink() throws Exception
  {
    boolean mayExit = false;
    MiniMRCluster mr = null;
    MiniDFSCluster dfs = null;
    try {
      // One-datanode DFS plus a 3-tracker MR cluster backed by it.
      Configuration conf = new Configuration();
      dfs = new MiniDFSCluster(conf, 1, true, null);
      FileSystem fileSys = dfs.getFileSystem();
      String namenode = fileSys.getUri().toString();
      mr = new MiniMRCluster(1, namenode, 3);
      // During tests, the default Configuration will use a local mapred
      // So don't specify -config or -cluster
      String strJobtracker = JTConfig.JT_IPC_ADDRESS + "=localhost:" + mr.getJobTrackerPort();
      String strNamenode = "fs.default.name=" + namenode;
      // Streaming job arguments: the task JVM options forward the contrib.name and
      // build.test system properties so child tasks resolve test paths correctly.
      String argv[] = new String[] {
        "-input", INPUT_FILE,
        "-output", OUTPUT_DIR,
        "-mapper", map,
        "-reducer", reduce,
        //"-verbose",
        //"-jobconf", "stream.debug=set"
        "-jobconf", strNamenode,
        "-jobconf", strJobtracker,
        "-jobconf", "stream.tmpdir="+System.getProperty("test.build.data","/tmp"),
        "-jobconf",
          JobConf.MAPRED_MAP_TASK_JAVA_OPTS+ "=" +
            "-Dcontrib.name=" + System.getProperty("contrib.name") + " " +
            "-Dbuild.test=" + System.getProperty("build.test") + " " +
            conf.get(JobConf.MAPRED_MAP_TASK_JAVA_OPTS,
                     conf.get(JobConf.MAPRED_TASK_JAVA_OPTS, "")),
        "-jobconf",
          JobConf.MAPRED_REDUCE_TASK_JAVA_OPTS+ "=" +
            "-Dcontrib.name=" + System.getProperty("contrib.name") + " " +
            "-Dbuild.test=" + System.getProperty("build.test") + " " +
            conf.get(JobConf.MAPRED_REDUCE_TASK_JAVA_OPTS,
                     conf.get(JobConf.MAPRED_TASK_JAVA_OPTS, "")),
        // '#testlink' makes the framework create a symlink of that name in the
        // task working directory pointing at the cached file.
        "-cacheFile", fileSys.getUri() + CACHE_FILE + "#testlink"
      };

      fileSys.delete(new Path(OUTPUT_DIR), true);
      // Stage the mapper input and the cache file in DFS.
      // NOTE(review): these streams are not closed if a write throws; a failure here
      // leaks the stream but the clusters are still shut down in the finally block.
      DataOutputStream file = fileSys.create(new Path(INPUT_FILE));
      file.writeBytes(mapString);
      file.close();
      file = fileSys.create(new Path(CACHE_FILE));
      file.writeBytes(cacheString);
      file.close();

      job = new StreamJob(argv, mayExit);
      job.go();

      // The single output record should be the cache file content (followed by the
      // tab that streaming appends for an empty value).
      fileSys = dfs.getFileSystem();
      String line = null;
      Path[] fileList = FileUtil.stat2Paths(fileSys.listStatus(
                                    new Path(OUTPUT_DIR),
                                    new Utils.OutputFileUtils
                                             .OutputFilesFilter()));
      for (int i = 0; i < fileList.length; i++){
        System.out.println(fileList[i].toString());
        BufferedReader bread =
          new BufferedReader(new InputStreamReader(fileSys.open(fileList[i])));
        line = bread.readLine();
        System.out.println(line);
      }
      assertEquals(cacheString + "\t", line);
    } finally{
      if (dfs != null) { dfs.shutdown(); }
      if (mr != null) { mr.shutdown();}
    }
  }

  // NOTE(review): this main() runs TestStreaming's testCommandLine rather than the
  // symlink test above -- looks like a copy-paste leftover; confirm before relying
  // on it as a standalone entry point.
  public static void main(String[]args) throws Exception
  {
    new TestStreaming().testCommandLine();
  }
}
| apache-2.0 |
spennihana/h2o-3 | h2o-core/src/main/java/water/util/IcedHashMapGeneric.java | 8975 | package water.util;
import water.AutoBuffer;
import water.Freezable;
import water.H2O;
import water.Iced;
import water.nbhm.NonBlockingHashMap;
import java.io.Serializable;
import java.util.Collection;
import java.util.Map;
import java.util.Set;
/**
 * Generalization of standard IcedHashMap (Iced NBHM wrapper) with relaxed restrictions on K/V
 * pairs.
 *
 * <p>K/V pairs do not have to follow the same mode, each K/V pair is independent and can be one
 * of:
 *
 * <pre>String | Freezable -&gt; Integer | String | Freezable | Freezable[].</pre>
 *
 * <p>Values are type checked during the put operation. Mutation is asserted against while a
 * serialization write is in progress (see {@code _write_lock}).
 */
public class IcedHashMapGeneric<K, V> extends Iced implements Map<K, V>, Cloneable, Serializable {

  /** Whether {@code K} is a key type this map knows how to serialize. */
  public boolean isSupportedKeyType(Object K) {
    return (K instanceof Freezable[] || K instanceof Freezable || K instanceof String);
  }

  /** Whether {@code V} is a value type this map knows how to serialize. */
  public boolean isSupportedValType(Object V) {
    return (V instanceof Freezable[] || V instanceof Freezable || V instanceof String || V instanceof Integer);
  }

  public IcedHashMapGeneric() { init(); }

  // True while write_impl is serializing; mutating operations assert against it to
  // surface concurrent modification during a write (best-effort, asserts only).
  private transient volatile boolean _write_lock;
  transient NonBlockingHashMap<K, V> _map;

  /** The backing concurrent map. */
  protected Map<K, V> map() { return _map; }

  @Override public int size() { return map().size(); }
  @Override public boolean isEmpty() { return map().isEmpty(); }
  @Override public boolean containsKey(Object key) { return map().containsKey(key); }
  @Override public boolean containsValue(Object value) { return map().containsValue(value); }
  // map() is already typed <K,V>, so no cast is needed here.
  @Override public V get(Object key) { return map().get(key); }

  /**
   * Inserts a key/value pair after validating both against the supported serialization types.
   *
   * @throws IllegalArgumentException if the key or value type cannot be serialized by this map
   */
  @Override
  public V put(K key, V val) {
    assert !_write_lock;
    if (!isSupportedKeyType(key))
      throw new IllegalArgumentException("given key type is not supported: " + key.getClass().getName());
    if (!isSupportedValType(val))
      throw new IllegalArgumentException("given val type is not supported: " + val.getClass().getName());
    return map().put(key, val);
  }

  @Override public V remove(Object key) { assert !_write_lock; return map().remove(key); }

  @Override
  public void putAll(Map<? extends K, ? extends V> m) {
    assert !_write_lock;
    // Route through put() so every entry is type-checked.
    for (Entry<? extends K, ? extends V> e : m.entrySet())
      put(e.getKey(), e.getValue());
  }

  @Override public void clear() { assert !_write_lock; map().clear(); }
  @Override public Set<K> keySet() { return map().keySet(); }
  @Override public Collection<V> values() { return map().values(); }
  @Override public Set<Entry<K, V>> entrySet() { return map().entrySet(); }
  // equals/hashCode delegate to the backing map (AbstractMap entry-wise semantics).
  @Override public boolean equals(Object o) { return map().equals(o); }
  @Override public int hashCode() { return map().hashCode(); }

  // Serialization "mode" encodes the K/V type pair in a single byte:
  // odd modes carry a String key, even modes a Freezable key;
  //   1/2 -> String value, 3/4 -> Freezable, 5/6 -> Freezable[], 7/8 -> Integer.
  private boolean isStringKey(int mode) { return mode % 2 == 1; }
  private boolean isStringVal(int mode) { return mode == 1 || mode == 2; }
  private boolean isFreezeVal(int mode) { return mode == 3 || mode == 4; }
  private boolean isFArrayVal(int mode) { return mode == 5 || mode == 6; }
  private boolean isIntegrVal(int mode) { return mode == 7 || mode == 8; }

  // This comment is stolen from water.parser.Categorical:
  //
  // Since this is a *concurrent* hashtable, writing it whilst its being
  // updated is tricky. If the table is NOT being updated, then all is written
  // as expected. If the table IS being updated we only promise to write the
  // Keys that existed at the time the table write began. If elements are
  // being deleted, they may be written anyways. If the Values are changing, a
  // random Value is written.
  public final AutoBuffer write_impl(AutoBuffer ab) {
    _write_lock = true;
    try {
      for (Entry<K, V> e : map().entrySet()) {
        K key = e.getKey();
        assert key != null;
        V val = e.getValue();
        assert val != null;

        // Determine the one-byte mode tag for this entry (see mode helpers above).
        int mode = 0;
        if (key instanceof String) {
          if (val instanceof String) {
            mode = 1;
          } else if (val instanceof Freezable) {
            mode = 3;
          } else if (val instanceof Freezable[]) {
            mode = 5;
          } else if (val instanceof Integer) {
            mode = 7;
          } else {
            throw new IllegalArgumentException("unsupported value class " + val.getClass().getName());
          }
        } else {
          // NOTE(review): this requires Iced keys even though isSupportedKeyType()
          // accepts any Freezable -- confirm whether non-Iced Freezable keys are
          // intentionally rejected at write time.
          if (!(key instanceof Iced))
            throw new IllegalArgumentException("key must be String or Freezable, got " + key.getClass().getName());
          if (val instanceof String) {
            mode = 2;
          } else if (val instanceof Freezable) {
            mode = 4;
          } else if (val instanceof Freezable[]) {
            mode = 6;
          } else if (val instanceof Integer) {
            mode = 8;
          } else {
            throw new IllegalArgumentException("unsupported value class " + val.getClass().getName());
          }
        }
        ab.put1(mode); // Type of hashmap being serialized
        // put key
        if (isStringKey(mode)) ab.putStr((String) key);
        else ab.put((Freezable) key);
        // put value
        if (isStringVal(mode))
          ab.putStr((String) val);
        else if (isFreezeVal(mode))
          ab.put((Freezable) val);
        else if (isFArrayVal(mode)) {
          ab.put4(((Freezable[]) val).length);
          for (Freezable v : (Freezable[]) val) ab.put(v);
        } else if (isIntegrVal(mode))
          ab.put4((Integer) val);
        else
          throw H2O.fail();
      }
      ab.put1(-1); // End-of-map sentinel.
    } catch (Throwable t) {
      System.err.println("Iced hash map serialization failed! " + t.toString() + ", msg = " + t.getMessage());
      t.printStackTrace();
      throw H2O.fail("Iced hash map serialization failed!" + t.toString() + ", msg = " + t.getMessage());
    } finally {
      _write_lock = false;
    }
    return ab;
  }

  /** (Re)creates the backing map; also called by the deserializer. */
  protected Map<K, V> init() { return _map = new NonBlockingHashMap<>(); }

  /**
   * Helper for serialization - fills the mymap() from K-V pairs in the AutoBuffer object
   * @param ab Contains the serialized K-V pairs
   */
  public final IcedHashMapGeneric read_impl(AutoBuffer ab) {
    try {
      assert map() == null || map().isEmpty(); // Fresh from serializer, no constructor has run
      Map<K, V> map = init();
      K key;
      V val;
      int mode;
      // Entries are (mode, key, value) triples terminated by a -1 mode byte.
      while ((mode = ab.get1()) != -1) {
        key = isStringKey(mode) ? (K) ab.getStr() : (K) ab.get();
        if (isStringVal(mode))
          val = (V) ab.getStr();
        else if (isFreezeVal(mode))
          val = (V) ab.get();
        else if (isFArrayVal(mode)) {
          Freezable[] vals = new Freezable[ab.get4()];
          for (int i = 0; i < vals.length; ++i) vals[i] = ab.get();
          val = (V) vals;
        } else if (isIntegrVal(mode))
          // Integer.valueOf replaces the deprecated new Integer(...) constructor.
          val = (V) Integer.valueOf(ab.get4());
        else
          throw H2O.fail();
        map.put(key, val);
      }
      return this;
    } catch (Throwable t) {
      t.printStackTrace();
      if (null == t.getCause()) {
        throw H2O.fail("IcedHashMap deserialization failed! + " + t.toString() + ", msg = " + t.getMessage() + ", cause: null");
      } else {
        throw H2O.fail("IcedHashMap deserialization failed! + " + t.toString() + ", msg = " + t.getMessage() +
            ", cause: " + t.getCause().toString() +
            ", cause msg: " + t.getCause().getMessage() +
            ", cause stacktrace: " + java.util.Arrays.toString(t.getCause().getStackTrace()));
      }
    }
  }

  public final IcedHashMapGeneric readJSON_impl(AutoBuffer ab) { throw H2O.unimpl(); }

  /** Writes the map as a JSON object; only String keys are supported here. */
  public final AutoBuffer writeJSON_impl(AutoBuffer ab) {
    boolean first = true;
    for (Entry<K, V> entry : map().entrySet()) {
      K key = entry.getKey();
      V value = entry.getValue();
      assert entry.getKey() instanceof String;
      assert value instanceof String || value instanceof String[] || value instanceof Integer || value instanceof Freezable || value instanceof Freezable[];
      if (first) { first = false; } else { ab.put1(',').put1(' '); }
      ab.putJSONName((String) key);
      ab.put1(':');
      if (value instanceof String)
        ab.putJSONName((String) value);
      else if (value instanceof String[])
        ab.putJSONAStr((String[]) value);
      else if (value instanceof Integer)
        ab.putJSON4((Integer) value);
      else if (value instanceof Freezable)
        ab.putJSON((Freezable) value);
      else if (value instanceof Freezable[])
        ab.putJSONA((Freezable[]) value);
    }
    // ab.put1('}'); // NOTE: the serialization framework adds this automagically
    return ab;
  }

  // Subtypes which allow us to determine the type parameters at runtime, for generating schema metadata.
  public static class IcedHashMapStringString extends IcedHashMapGeneric<String, String> {}
  public static class IcedHashMapStringObject extends IcedHashMapGeneric<String, Object> {}
}
| apache-2.0 |
katre/bazel | src/test/java/com/google/devtools/build/lib/bazel/repository/downloader/HttpConnectorTest.java | 27895 | // Copyright 2016 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.bazel.repository.downloader;
import static com.google.common.truth.Truth.assertThat;
import static com.google.devtools.build.lib.bazel.repository.downloader.DownloaderTestUtils.sendLines;
import static com.google.devtools.build.lib.bazel.repository.downloader.HttpParser.readHttpRequest;
import static java.nio.charset.StandardCharsets.ISO_8859_1;
import static java.nio.charset.StandardCharsets.US_ASCII;
import static java.nio.charset.StandardCharsets.UTF_8;
import static org.junit.Assert.fail;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import com.google.common.base.Function;
import com.google.common.collect.ImmutableMap;
import com.google.common.io.ByteStreams;
import com.google.common.io.CharStreams;
import com.google.devtools.build.lib.events.EventHandler;
import com.google.devtools.build.lib.testutil.ManualClock;
import com.google.devtools.build.lib.testutil.ManualSleeper;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.net.InetAddress;
import java.net.Proxy;
import java.net.ServerSocket;
import java.net.Socket;
import java.net.SocketTimeoutException;
import java.net.URL;
import java.net.URLConnection;
import java.util.Locale;
import java.util.Map;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CyclicBarrier;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import org.junit.After;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.rules.TemporaryFolder;
import org.junit.rules.Timeout;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/** Unit tests for {@link HttpConnector}. */
@RunWith(JUnit4.class)
public class HttpConnectorTest {
@Rule
public final ExpectedException thrown = ExpectedException.none();

@Rule
public final TemporaryFolder testFolder = new TemporaryFolder();

// Hard upper bound per test, so a stuck socket cannot hang the suite.
@Rule
public final Timeout globalTimeout = new Timeout(10000);

// Two workers: one fake HTTP server plus one spare for scheduled tasks.
private final ExecutorService executor = Executors.newFixedThreadPool(2);
// Manual clock/sleeper let tests advance time deterministically instead of sleeping.
private final ManualClock clock = new ManualClock();
private final ManualSleeper sleeper = new ManualSleeper(clock);

/** Scale timeouts down to make tests fast. */
private final float timeoutScaling = 0.05f;

private final EventHandler eventHandler = mock(EventHandler.class);
private final ProxyHelper proxyHelper = mock(ProxyHelper.class);
// System under test, wired to the deterministic sleeper and mocked collaborators.
private final HttpConnector connector =
    new HttpConnector(Locale.US, eventHandler, proxyHelper, sleeper, timeoutScaling);
@Before
public void before() throws Exception {
  // Default stub: no proxy configured, so connections go straight to the target.
  when(proxyHelper.createProxyIfNeeded(any(URL.class))).thenReturn(Proxy.NO_PROXY);
}
@After
public void after() throws Exception {
  // Stop the worker threads backing the fake HTTP servers.
  executor.shutdown();
}
@Test
public void localFileDownload() throws Exception {
  // A file:// URL should round-trip the file's bytes unchanged.
  byte[] fileContents = "this is a test".getBytes(UTF_8);
  URL fileUrl = createTempFile(fileContents).toURI().toURL();
  URLConnection connection =
      connector.connect(fileUrl, url -> ImmutableMap.<String, String>of());
  byte[] downloaded = ByteStreams.toByteArray(connection.getInputStream());
  assertThat(downloaded).isEqualTo(fileContents);
}
@Test
public void badHost_throwsIOException() throws Exception {
  // An unresolvable host name must surface as an IOException with a clear message.
  thrown.expect(IOException.class);
  thrown.expectMessage("Unknown host: bad.example");
  URL unresolvable = new URL("http://bad.example");
  connector.connect(unresolvable, url -> ImmutableMap.<String, String>of());
}
@Test
public void normalRequest() throws Exception {
  // Captures the request headers the fake server receives, for assertions below.
  final Map<String, String> headers = new ConcurrentHashMap<>();
  try (ServerSocket server = new ServerSocket(0, 1, InetAddress.getByName(null))) {
    // Fake HTTP server: accept one connection, record the request, reply 200 "hello".
    @SuppressWarnings("unused")
    Future<?> possiblyIgnoredError =
        executor.submit(
            new Callable<Object>() {
              @Override
              public Object call() throws Exception {
                try (Socket socket = server.accept()) {
                  readHttpRequest(socket.getInputStream(), headers);
                  sendLines(
                      socket,
                      "HTTP/1.1 200 OK",
                      "Date: Fri, 31 Dec 1999 23:59:59 GMT",
                      "Connection: close",
                      "Content-Type: text/plain",
                      "Content-Length: 5",
                      "",
                      "hello");
                }
                return null;
              }
            });
    // Connect with an extra request header and verify the body comes back intact.
    try (Reader payload =
        new InputStreamReader(
            connector
                .connect(
                    new URL(String.format("http://localhost:%d/boo", server.getLocalPort())),
                    url -> ImmutableMap.of("Content-Encoding", "gzip"))
                .getInputStream(),
            ISO_8859_1)) {
      assertThat(CharStreams.toString(payload)).isEqualTo("hello");
    }
  }
  // The server must have seen a GET for /boo carrying the supplied header.
  assertThat(headers).containsEntry("x-method", "GET");
  assertThat(headers).containsEntry("x-request-uri", "/boo");
  assertThat(headers).containsEntry("content-encoding", "gzip");
}
@Test
public void serverError_retriesConnect() throws Exception {
  try (ServerSocket server = new ServerSocket(0, 1, InetAddress.getByName(null))) {
    // Fake server: first connection gets a 500, the retry gets a 200 "hello".
    @SuppressWarnings("unused")
    Future<?> possiblyIgnoredError =
        executor.submit(
            new Callable<Object>() {
              @Override
              public Object call() throws Exception {
                try (Socket socket = server.accept()) {
                  readHttpRequest(socket.getInputStream());
                  sendLines(
                      socket,
                      "HTTP/1.1 500 Incredible Catastrophe",
                      "Date: Fri, 31 Dec 1999 23:59:59 GMT",
                      "Connection: close",
                      "Content-Type: text/plain",
                      "Content-Length: 8",
                      "",
                      "nononono");
                }
                try (Socket socket = server.accept()) {
                  readHttpRequest(socket.getInputStream());
                  sendLines(
                      socket,
                      "HTTP/1.1 200 OK",
                      "Date: Fri, 31 Dec 1999 23:59:59 GMT",
                      "Connection: close",
                      "Content-Type: text/plain",
                      "Content-Length: 5",
                      "",
                      "hello");
                }
                return null;
              }
            });
    try (Reader payload =
        new InputStreamReader(
            connector
                .connect(
                    new URL(String.format("http://localhost:%d", server.getLocalPort())),
                    url -> ImmutableMap.<String, String>of())
                .getInputStream(),
            ISO_8859_1)) {
      // The retry succeeds transparently; the manual clock shows 100ms of
      // (simulated) backoff sleeping was performed before the second attempt.
      assertThat(CharStreams.toString(payload)).isEqualTo("hello");
      assertThat(clock.currentTimeMillis()).isEqualTo(100L);
    }
  }
}
// Connection refused (nothing listening on the port yet) must be retried; the
// server only starts listening after the sleeper fires, simulating a late server.
@Test
public void connectionRefused_retries() throws Exception {
  final int port;
  // Start and immediately stop server socket to get a free port.
  try (ServerSocket server = new ServerSocket(0, 1, InetAddress.getByName(null))) {
    port = server.getLocalPort();
  }
  final AtomicReference<ServerSocket> server = new AtomicReference<>();
  try {
    // Schedule server socket to be started only after retry to simulate connection retry.
    sleeper.scheduleRunnable(
        () -> {
          try {
            server.set(new ServerSocket(port, 1, InetAddress.getByName(null)));
          } catch (IOException e) {
            throw new RuntimeException(e);
          }
          @SuppressWarnings("unused")
          Future<?> possiblyIgnoredError =
              executor.submit(
                  () -> {
                    // Keep answering requests until the test executor shuts down.
                    while (!executor.isShutdown()) {
                      try (Socket socket = server.get().accept()) {
                        readHttpRequest(socket.getInputStream());
                        sendLines(
                            socket,
                            "HTTP/1.1 200 OK",
                            "Date: Fri, 31 Dec 1999 23:59:59 GMT",
                            "Connection: close",
                            "Content-Type: text/plain",
                            "Content-Length: 5",
                            "",
                            "hello");
                      }
                    }
                    return null;
                  });
        },
        1);
    try (Reader payload =
        new InputStreamReader(
            connector
                .connect(
                    new URL(String.format("http://localhost:%d", port)),
                    url -> ImmutableMap.<String, String>of())
                .getInputStream(),
            ISO_8859_1)) {
      assertThat(CharStreams.toString(payload)).isEqualTo("hello");
    }
  } finally {
    // Close the late-started server socket, if the sleeper ever created it.
    ServerSocket serverSocket = server.get();
    if (serverSocket != null) {
      serverSocket.close();
    }
  }
}
// Deactivated due to https://github.com/bazelbuild/bazel/issues/9380.
// NOTE(review): this method has @Ignore but no @Test annotation, so JUnit 4
// would not discover it even without @Ignore — confirm intent before re-enabling.
// Scenario: the first accept() closes without responding (client times out);
// after the retry sleep, a looping server answers 200 OK.
@Ignore
public void socketTimeout_retries() throws Exception {
  try (ServerSocket server = new ServerSocket(0, 1, InetAddress.getByName(null))) {
    @SuppressWarnings("unused")
    Future<?> possiblyIgnoredError =
        executor.submit(
            () -> {
              try (Socket socket = server.accept()) {
                // Do nothing to cause SocketTimeoutException on client side.
              }
              // Schedule proper HTTP response once client retries.
              sleeper.scheduleRunnable(
                  () -> {
                    @SuppressWarnings("unused")
                    Future<?> possiblyIgnoredError2 =
                        executor.submit(
                            () -> {
                              while (!executor.isShutdown()) {
                                try (Socket socket = server.accept()) {
                                  readHttpRequest(socket.getInputStream());
                                  sendLines(
                                      socket,
                                      "HTTP/1.1 200 OK",
                                      "Date: Fri, 31 Dec 1999 23:59:59 GMT",
                                      "Connection: close",
                                      "Content-Type: text/plain",
                                      "Content-Length: 5",
                                      "",
                                      "hello");
                                } catch (IOException e) {
                                  throw new RuntimeException(e);
                                }
                              }
                            });
                  },
                  1);
              return null;
            });
    try (Reader payload =
        new InputStreamReader(
            connector
                .connect(
                    new URL(String.format("http://localhost:%d", server.getLocalPort())),
                    url -> ImmutableMap.<String, String>of())
                .getInputStream(),
            ISO_8859_1)) {
      assertThat(CharStreams.toString(payload)).isEqualTo("hello");
      assertThat(clock.currentTimeMillis()).isEqualTo(1);
    }
  }
}
/**
 * It is important part of {@link HttpConnector} contract to not throw raw {@link
 * SocketTimeoutException} because it extends {@link java.io.InterruptedIOException} and {@link
 * HttpConnectorMultiplexer} relies on {@link java.io.InterruptedIOException} to only be thrown
 * when actual interruption happened.
 */
@Test
public void socketTimeout_throwsIOExceptionInsteadOfSocketTimeoutException() throws Exception {
  try (ServerSocket server = new ServerSocket(0, 1, InetAddress.getByName(null))) {
    @SuppressWarnings("unused")
    Future<?> possiblyIgnoredError =
        executor.submit(
            () -> {
              try (Socket socket = server.accept()) {
                // Do nothing to cause SocketTimeoutException on client side.
              }
              return null;
            });
    // The SocketTimeoutException must arrive wrapped as the *cause* of a
    // plain IOException, never thrown directly.
    try (Reader payload =
        new InputStreamReader(
            connector
                .connect(
                    new URL(String.format("http://localhost:%d", server.getLocalPort())),
                    url -> ImmutableMap.<String, String>of())
                .getInputStream(),
            ISO_8859_1)) {
      fail("Should have thrown");
    } catch (IOException expected) {
      assertThat(expected).hasCauseThat().isInstanceOf(SocketTimeoutException.class);
      assertThat(expected).hasCauseThat().hasMessageThat().ignoringCase().contains("timed out");
    }
  }
}
/** A permanent 401 must not be retried; it surfaces as an IOException. */
@Test
public void permanentError_doesNotRetryAndThrowsIOException() throws Exception {
  try (ServerSocket server = new ServerSocket(0, 1, InetAddress.getByName(null))) {
    @SuppressWarnings("unused")
    Future<?> possiblyIgnoredError =
        executor.submit(
            () -> {
              try (Socket socket = server.accept()) {
                readHttpRequest(socket.getInputStream());
                sendLines(
                    socket,
                    "HTTP/1.1 401 Unauthorized",
                    "Date: Fri, 31 Dec 1999 23:59:59 GMT",
                    "Connection: close",
                    "Content-Type: text/plain",
                    "Content-Length: 0",
                    "",
                    "");
              }
              return null;
            });
    thrown.expect(IOException.class);
    thrown.expectMessage("401 Unauthorized");
    connector.connect(
        new URL(String.format("http://localhost:%d", server.getLocalPort())),
        url -> ImmutableMap.<String, String>of());
  }
}
/** 404 is permanent and maps specifically to FileNotFoundException. */
@Test
public void permanentErrorNotFound_doesNotRetryAndThrowsFileNotFoundException() throws Exception {
  try (ServerSocket server = new ServerSocket(0, 1, InetAddress.getByName(null))) {
    @SuppressWarnings("unused")
    Future<?> possiblyIgnoredError =
        executor.submit(
            () -> {
              try (Socket socket = server.accept()) {
                readHttpRequest(socket.getInputStream());
                sendLines(
                    socket,
                    "HTTP/1.1 404 Not Found",
                    "Date: Fri, 31 Dec 1999 23:59:59 GMT",
                    "Connection: close",
                    "Content-Type: text/plain",
                    "Content-Length: 0",
                    "",
                    "");
              }
              return null;
            });
    thrown.expect(FileNotFoundException.class);
    thrown.expectMessage("404 Not Found");
    connector.connect(
        new URL(String.format("http://localhost:%d", server.getLocalPort())),
        url -> ImmutableMap.<String, String>of());
  }
}
/**
 * On a permanent error (501) the connector must still drain the error payload
 * before failing, and must not sleep/retry (the fake clock stays at 0).
 *
 * <p>Fixes the method-name typo {@code ...BeforeReturningn}; behavior unchanged.
 */
@Test
public void permanentError_consumesPayloadBeforeReturning() throws Exception {
  final CyclicBarrier barrier = new CyclicBarrier(2);
  final AtomicBoolean consumed = new AtomicBoolean();
  try (ServerSocket server = new ServerSocket(0, 1, InetAddress.getByName(null))) {
    @SuppressWarnings("unused")
    Future<?> possiblyIgnoredError =
        executor.submit(
            () -> {
              try (Socket socket = server.accept()) {
                readHttpRequest(socket.getInputStream());
                sendLines(
                    socket,
                    "HTTP/1.1 501 Oh No",
                    "Date: Fri, 31 Dec 1999 23:59:59 GMT",
                    "Connection: close",
                    "Content-Type: text/plain",
                    "Content-Length: 1",
                    "",
                    "b");
                // Marks that the full response was written before the socket closed.
                consumed.set(true);
              } finally {
                // Rendezvous with the test thread so the assertion below runs
                // only after the server side is done.
                barrier.await();
              }
              return null;
            });
    connector.connect(
        new URL(String.format("http://localhost:%d", server.getLocalPort())),
        url -> ImmutableMap.<String, String>of());
    fail();
  } catch (IOException ignored) {
    // Expected: the 501 response is reported as an IOException.
  } finally {
    barrier.await();
  }
  assertThat(consumed.get()).isTrue();
  assertThat(clock.currentTimeMillis()).isEqualTo(0L);
}
/** A server that answers 500 forever is retried a few times, then given up on. */
@Test
public void always500_givesUpEventually() throws Exception {
  final AtomicInteger tries = new AtomicInteger();
  try (ServerSocket server = new ServerSocket(0, 1, InetAddress.getByName(null))) {
    @SuppressWarnings("unused")
    Future<?> possiblyIgnoredError =
        executor.submit(
            () -> {
              // Answer every connection with a 500 until the executor dies.
              while (true) {
                try (Socket socket = server.accept()) {
                  readHttpRequest(socket.getInputStream());
                  sendLines(
                      socket,
                      "HTTP/1.1 500 Oh My",
                      "Date: Fri, 31 Dec 1999 23:59:59 GMT",
                      "Connection: close",
                      "Content-Type: text/plain",
                      "Content-Length: 0",
                      "",
                      "");
                  tries.incrementAndGet();
                }
              }
            });
    thrown.expect(IOException.class);
    thrown.expectMessage("500 Oh My");
    try {
      connector.connect(
          new URL(String.format("http://localhost:%d", server.getLocalPort())),
          url -> ImmutableMap.<String, String>of());
    } finally {
      // More than two attempts proves that retrying actually happened.
      assertThat(tries.get()).isGreaterThan(2);
    }
  }
}
/** 403 is treated as retryable here: the client retries before giving up. */
@Test
public void serverSays403_clientRetriesAnyway() throws Exception {
  final AtomicInteger tries = new AtomicInteger();
  try (ServerSocket server = new ServerSocket(0, 1, InetAddress.getByName(null))) {
    @SuppressWarnings("unused")
    Future<?> possiblyIgnoredError =
        executor.submit(
            () -> {
              // Answer every connection with a 403 until the executor dies.
              while (true) {
                try (Socket socket = server.accept()) {
                  readHttpRequest(socket.getInputStream());
                  sendLines(
                      socket,
                      "HTTP/1.1 403 Forbidden",
                      "Date: Fri, 31 Dec 1999 23:59:59 GMT",
                      "Connection: close",
                      "Content-Type: text/plain",
                      "Content-Length: 0",
                      "",
                      "");
                  tries.incrementAndGet();
                }
              }
            });
    thrown.expect(IOException.class);
    thrown.expectMessage("403 Forbidden");
    try {
      connector.connect(
          new URL(String.format("http://localhost:%d", server.getLocalPort())),
          url -> ImmutableMap.<String, String>of());
    } finally {
      // More than two attempts proves the client retried the 403.
      assertThat(tries.get()).isGreaterThan(2);
    }
  }
}
// 301 Moved Permanently: same-server path redirect is followed.
@Test
public void pathRedirect_301() throws Exception {
  redirectToDifferentPath_works("301");
}
// 301 Moved Permanently: cross-server redirect is followed.
@Test
public void serverRedirect_301() throws Exception {
  redirectToDifferentServer_works("301");
}
/*
 * Also tests behavior for 302 and 307 codes.
 */
// 303 See Other: same-server path redirect is followed.
@Test
public void pathRedirect_303() throws Exception {
  redirectToDifferentPath_works("303");
}
// 303 See Other: cross-server redirect is followed.
@Test
public void serverRedirects_303() throws Exception {
  redirectToDifferentServer_works("303");
}
/**
 * Asserts that a redirect (with the given HTTP status code) to a different path
 * on the same server is followed, and that the connection reports the final URL.
 */
public void redirectToDifferentPath_works(String code) throws Exception {
  String redirectCode = "HTTP/1.1 " + code + " Redirect";
  final Map<String, String> headers1 = new ConcurrentHashMap<>();
  final Map<String, String> headers2 = new ConcurrentHashMap<>();
  try (ServerSocket server = new ServerSocket(0, 1, InetAddress.getByName(null))) {
    @SuppressWarnings("unused")
    Future<?> possiblyIgnoredError =
        executor.submit(
            new Callable<Object>() {
              @Override
              public Object call() throws Exception {
                // First request: answer with a redirect to /doodle.tar.gz.
                try (Socket socket = server.accept()) {
                  readHttpRequest(socket.getInputStream(), headers1);
                  sendLines(
                      socket,
                      redirectCode,
                      "Date: Fri, 31 Dec 1999 23:59:59 GMT",
                      "Connection: close",
                      "Location: /doodle.tar.gz",
                      "Content-Length: 0",
                      "",
                      "");
                }
                // Second request (the followed redirect): 200 with an empty body.
                try (Socket socket = server.accept()) {
                  readHttpRequest(socket.getInputStream(), headers2);
                  sendLines(
                      socket,
                      "HTTP/1.1 200 OK",
                      "Date: Fri, 31 Dec 1999 23:59:59 GMT",
                      "Connection: close",
                      "Content-Type: text/plain",
                      "Content-Length: 0",
                      "",
                      "");
                }
                return null;
              }
            });
    URLConnection connection =
        connector.connect(
            new URL(String.format("http://localhost:%d", server.getLocalPort())),
            url -> ImmutableMap.<String, String>of());
    // The connection must report the redirected URL, not the original one.
    assertThat(connection.getURL()).isEqualTo(
        new URL(String.format("http://localhost:%d/doodle.tar.gz", server.getLocalPort())));
    try (InputStream input = connection.getInputStream()) {
      assertThat(ByteStreams.toByteArray(input)).isEmpty();
    }
  }
  assertThat(headers1).containsEntry("x-request-uri", "/");
  assertThat(headers2).containsEntry("x-request-uri", "/doodle.tar.gz");
}
/**
 * Asserts that a redirect (with the given HTTP status code) to a different
 * server is followed, and that per-server auth headers are supplied: the header
 * function must be re-evaluated against the redirect target's URL so credentials
 * never leak across hosts.
 */
public void redirectToDifferentServer_works(String code) throws Exception {
  String redirectCode = "HTTP/1.1 " + code + " Redirect";
  final String basic1 = "Basic b25lOmZpcnN0c2VjcmV0";
  final String basic2 = "Basic dHdvOnNlY29uZHNlY3JldA==";
  final Map<String, String> headers1 = new ConcurrentHashMap<>();
  final Map<String, String> headers2 = new ConcurrentHashMap<>();
  try (ServerSocket server1 = new ServerSocket(0, 1, InetAddress.getByName(null));
      ServerSocket server2 = new ServerSocket(0, 1, InetAddress.getByName(null))) {
    // server1 answers with a redirect pointing at server2.
    @SuppressWarnings("unused")
    Future<?> possiblyIgnoredError =
        executor.submit(
            new Callable<Object>() {
              @Override
              public Object call() throws Exception {
                try (Socket socket = server1.accept()) {
                  readHttpRequest(socket.getInputStream(), headers1);
                  sendLines(
                      socket,
                      redirectCode,
                      "Date: Fri, 31 Dec 1999 23:59:59 GMT",
                      "Connection: close",
                      String.format(
                          "Location: http://localhost:%d/doodle.tar.gz", server2.getLocalPort()),
                      "Content-Length: 0",
                      "",
                      "");
                }
                return null;
              }
            });
    // server2 serves the actual payload.
    @SuppressWarnings("unused")
    Future<?> possiblyIgnoredError1 =
        executor.submit(
            new Callable<Object>() {
              @Override
              public Object call() throws Exception {
                try (Socket socket = server2.accept()) {
                  readHttpRequest(socket.getInputStream(), headers2);
                  sendLines(
                      socket,
                      "HTTP/1.1 200 OK",
                      "Date: Fri, 31 Dec 1999 23:59:59 GMT",
                      "Connection: close",
                      "Content-Type: text/plain",
                      "Content-Length: 5",
                      "",
                      "hello");
                }
                return null;
              }
            });
    // Header function that provides different auth headers for
    // the two servers.
    Function<URL, ImmutableMap<String, String>> authHeaders =
        new Function<URL, ImmutableMap<String, String>>() {
          @Override
          public ImmutableMap<String, String> apply(URL url) {
            if (url.getPort() == server1.getLocalPort()) {
              return ImmutableMap.of("Authentication", basic1);
            } else if (url.getPort() == server2.getLocalPort()) {
              return ImmutableMap.of("Authentication", basic2);
            } else {
              return ImmutableMap.<String, String>of();
            }
          }
        };
    URLConnection connection =
        connector.connect(
            new URL(String.format("http://localhost:%d", server1.getLocalPort())), authHeaders);
    // The connection reports the final (redirected-to) URL on server2.
    assertThat(connection.getURL()).isEqualTo(
        new URL(String.format("http://localhost:%d/doodle.tar.gz", server2.getLocalPort())));
    try (InputStream input = connection.getInputStream()) {
      assertThat(ByteStreams.toByteArray(input)).isEqualTo("hello".getBytes(US_ASCII));
    }
    // Verify that the correct form of authentication is used for each server.
    assertThat(headers1).containsEntry("authentication", basic1);
    assertThat(headers2).containsEntry("authentication", basic2);
  }
}
/** Writes {@code fileContents} to a fresh file in the test temp folder and returns it. */
private File createTempFile(byte[] fileContents) throws IOException {
  File temp = testFolder.newFile();
  try (FileOutputStream outputStream = new FileOutputStream(temp)) {
    outputStream.write(fileContents);
  }
  return temp;
}
}
| apache-2.0 |
SDRC-India/collect | collect_app/src/main/java/org/odk/collect/android/utilities/TextUtils.java | 3961 | /*
* Copyright (C) 2015 University of Washington
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.odk.collect.android.utilities;
import android.text.Html;
import java.util.regex.MatchResult;
/**
 * Converts a limited Markdown dialect (headers, bold, emphasis, links, styled
 * spans) to HTML and renders it via {@link Html#fromHtml}.
 *
 * <p>Fixes: {@code sanitizeAttributes} previously threw
 * {@link ArrayIndexOutOfBoundsException} on malformed style entries with no
 * value (e.g. {@code style="color"}); such entries are now skipped. A local
 * {@code StringBuffer} was replaced by {@link StringBuilder} (no synchronization
 * needed for a method-local builder).
 */
public class TextUtils {

    private static final String t = "TextUtils"; // log tag (currently unused)

    // Converts "## heading" matches into <h2>heading</h2> (level = number of '#').
    private static ReplaceCallback.Callback createHeader = new ReplaceCallback.Callback() {
        public String matchFound(MatchResult match) {
            int level = match.group(1).length();
            // Strip any trailing '#' characters before wrapping.
            return "<h" + level + ">" + match.group(2).replaceAll("#+$", "").trim() + "</h" + level
                    + ">";
        }
    };

    // Wraps plain lines in <p>…</p>, leaving existing block elements untouched.
    private static ReplaceCallback.Callback createParagraph = new ReplaceCallback.Callback() {
        public String matchFound(MatchResult match) {
            String trimmed = match.group(1).trim();
            // Lines that already open/close a block tag (h*, p, blockquote) pass through.
            if (trimmed.matches("(?i)^<\\/?(h|p|bl)")) {
                return match.group(1);
            }
            return "<p>" + trimmed + "</p>";
        }
    };

    // Rewrites <span style="…">…</span> into <font color=… face=…>…</font>,
    // which Html.fromHtml understands.
    private static ReplaceCallback.Callback createSpan = new ReplaceCallback.Callback() {
        public String matchFound(MatchResult match) {
            String attributes = sanitizeAttributes(match.group(1));
            return "<font" + attributes + ">" + match.group(2).trim() + "</font>";
        }

        // throw away all styles except for color and font-family
        private String sanitizeAttributes(String attributes) {
            String stylesText = attributes.replaceAll("style=[\"'](.*?)[\"']", "$1");
            String[] styles = stylesText.trim().split(";");
            StringBuilder stylesOutput = new StringBuilder();
            for (int i = 0; i < styles.length; i++) {
                String[] stylesAttributes = styles[i].trim().split(":");
                // Skip malformed entries ("color", "") that carry no value;
                // previously these caused an ArrayIndexOutOfBoundsException.
                if (stylesAttributes.length < 2) {
                    continue;
                }
                if (stylesAttributes[0].equals("color")) {
                    stylesOutput.append(" color=\"" + stylesAttributes[1] + "\"");
                }
                if (stylesAttributes[0].equals("font-family")) {
                    stylesOutput.append(" face=\"" + stylesAttributes[1] + "\"");
                }
            }
            return stylesOutput.toString();
        }
    };

    /**
     * Translates the supported Markdown subset to HTML markup.
     * Order matters: spans first, then strong/emphasis/links, then headers,
     * then paragraph wrapping.
     */
    private static String markdownToHtml(String text) {
        // https://github.com/enketo/enketo-transformer/blob/master/src/markdown.js
        // span - replaced < and > with <>
        text = ReplaceCallback.replace("(?s)<\\s?span([^\\/\n]*)>((?:(?!<\\/).)+)<\\/\\s?span\\s?>",
                text, createSpan);
        // strong
        text = text.replaceAll("(?s)__(.*?)__", "<strong>$1</strong>");
        text = text.replaceAll("(?s)\\*\\*(.*?)\\*\\*", "<strong>$1</strong>");
        // emphasis
        text = text.replaceAll("(?s)_([^\\s][^_\n]*)_", "<em>$1</em>");
        text = text.replaceAll("(?s)\\*([^\\s][^\\*\n]*)\\*", "<em>$1</em>");
        // links
        text = text.replaceAll("(?s)\\[([^\\]]*)\\]\\(([^\\)]+)\\)",
                "<a href=\"$2\" target=\"_blank\">$1</a>");
        // headers - requires ^ or breaks <font color="#f58a1f">color</font>
        text = ReplaceCallback.replace("(?s)^(#+)([^\n]*)$", text, createHeader);
        // paragraphs
        text = ReplaceCallback.replace("(?s)([^\n]+)\n", text, createParagraph);
        return text;
    }

    /**
     * Renders Markdown-flavored {@code text} as a styled {@link CharSequence},
     * or returns {@code null} when given {@code null}.
     */
    public static CharSequence textToHtml(String text) {
        if (text == null) {
            return null;
        }
        return Html.fromHtml(markdownToHtml(text));
    }
}
| apache-2.0 |
Ant-Droid/android_frameworks_base_OLD | services/core/java/com/android/server/wm/InputConsumerImpl.java | 3963 | /*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.server.wm;
import android.os.Looper;
import android.os.Process;
import android.view.Display;
import android.view.InputChannel;
import android.view.InputEventReceiver;
import android.view.WindowManager;
import android.view.WindowManagerPolicy;
import com.android.server.input.InputApplicationHandle;
import com.android.server.input.InputWindowHandle;
/**
 * {@link WindowManagerPolicy.InputConsumer} implementation: registers an input
 * window of type TYPE_INPUT_CONSUMER whose server-side channel is wired into the
 * input dispatcher, delivering events to an {@link InputEventReceiver} built
 * from the supplied factory.
 */
public final class InputConsumerImpl implements WindowManagerPolicy.InputConsumer {
    final WindowManagerService mService;
    // Paired channels: server end is registered with the input manager,
    // client end backs the InputEventReceiver.
    final InputChannel mServerChannel, mClientChannel;
    final InputApplicationHandle mApplicationHandle;
    final InputWindowHandle mWindowHandle;
    final InputEventReceiver mInputEventReceiver;
    // Absolute dispatch layer computed from TYPE_INPUT_CONSUMER via the policy.
    final int mWindowLayer;
    public InputConsumerImpl(WindowManagerService service, Looper looper,
            InputEventReceiver.Factory inputEventReceiverFactory) {
        String name = "input consumer";
        mService = service;
        InputChannel[] channels = InputChannel.openInputChannelPair(name);
        mServerChannel = channels[0];
        mClientChannel = channels[1];
        // Register the server channel before creating the receiver on the client end.
        mService.mInputManager.registerInputChannel(mServerChannel, null);
        mInputEventReceiver = inputEventReceiverFactory.createInputEventReceiver(
                mClientChannel, looper);
        mApplicationHandle = new InputApplicationHandle(null);
        mApplicationHandle.name = name;
        mApplicationHandle.dispatchingTimeoutNanos =
                WindowManagerService.DEFAULT_INPUT_DISPATCHING_TIMEOUT_NANOS;
        mWindowHandle = new InputWindowHandle(mApplicationHandle, null, Display.DEFAULT_DISPLAY);
        mWindowHandle.name = name;
        mWindowHandle.inputChannel = mServerChannel;
        mWindowHandle.layoutParamsType = WindowManager.LayoutParams.TYPE_INPUT_CONSUMER;
        mWindowLayer = getLayerLw(mWindowHandle.layoutParamsType);
        mWindowHandle.layer = mWindowLayer;
        mWindowHandle.layoutParamsFlags = 0;
        mWindowHandle.dispatchingTimeoutNanos =
                WindowManagerService.DEFAULT_INPUT_DISPATCHING_TIMEOUT_NANOS;
        // Visible to the dispatcher, but never takes keys or focus.
        mWindowHandle.visible = true;
        mWindowHandle.canReceiveKeys = false;
        mWindowHandle.hasFocus = false;
        mWindowHandle.hasWallpaper = false;
        mWindowHandle.paused = false;
        mWindowHandle.ownerPid = Process.myPid();
        mWindowHandle.ownerUid = Process.myUid();
        mWindowHandle.inputFeatures = 0;
        mWindowHandle.scaleFactor = 1.0f;
    }
    // Sizes the window frame and touchable region to cover dw x dh pixels.
    void layout(int dw, int dh) {
        mWindowHandle.touchableRegion.set(0, 0, dw, dh);
        mWindowHandle.frameLeft = 0;
        mWindowHandle.frameTop = 0;
        mWindowHandle.frameRight = dw;
        mWindowHandle.frameBottom = dh;
    }
    @Override
    public void dismiss() {
        // Tear down under the window-map lock, and only if the service still
        // considered this consumer active; resources are released in reverse
        // order of construction.
        synchronized (mService.mWindowMap) {
            if (mService.removeInputConsumer()) {
                mInputEventReceiver.dispose();
                mService.mInputManager.unregisterInputChannel(mServerChannel);
                mClientChannel.dispose();
                mServerChannel.dispose();
            }
        }
    }
    // Maps a window type to its absolute dispatch layer via the policy.
    private int getLayerLw(int windowType) {
        return mService.mPolicy.windowTypeToLayerLw(windowType)
                * WindowManagerService.TYPE_LAYER_MULTIPLIER
                + WindowManagerService.TYPE_LAYER_OFFSET;
    }
}
| apache-2.0 |
emetsger/fcrepo4 | fcrepo-kernel-modeshape/src/test/java/org/fcrepo/kernel/modeshape/utils/iterators/RdfAdderTest.java | 11380 | /*
* Licensed to DuraSpace under one or more contributor license agreements.
* See the NOTICE file distributed with this work for additional information
* regarding copyright ownership.
*
* DuraSpace licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.fcrepo.kernel.modeshape.utils.iterators;
import static org.apache.jena.datatypes.xsd.XSDDatatype.XSDstring;
import static org.apache.jena.graph.NodeFactory.createBlankNode;
import static org.apache.jena.graph.NodeFactory.createLiteral;
import static org.apache.jena.graph.NodeFactory.createURI;
import static org.apache.jena.graph.Triple.create;
import static org.apache.jena.rdf.model.ModelFactory.createDefaultModel;
import static org.apache.jena.rdf.model.ResourceFactory.createResource;
import static org.apache.jena.vocabulary.RDF.type;
import static javax.jcr.PropertyType.STRING;
import static javax.jcr.PropertyType.UNDEFINED;
import static org.fcrepo.kernel.api.FedoraTypes.FEDORA_RESOURCE;
import static org.fcrepo.kernel.modeshape.FedoraJcrConstants.FIELD_DELIMITER;
import static org.fcrepo.kernel.modeshape.rdf.JcrRdfTools.getJcrNamespaceForRDFNamespace;
import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import static org.mockito.MockitoAnnotations.initMocks;
import java.util.stream.Stream;
import javax.jcr.NamespaceException;
import javax.jcr.Node;
import javax.jcr.Property;
import javax.jcr.RepositoryException;
import javax.jcr.Session;
import javax.jcr.Value;
import javax.jcr.ValueFactory;
import javax.jcr.Workspace;
import javax.jcr.nodetype.NodeType;
import javax.jcr.nodetype.NodeTypeManager;
import javax.jcr.nodetype.NodeTypeTemplate;
import javax.jcr.nodetype.PropertyDefinition;
import org.fcrepo.kernel.api.models.FedoraResource;
import org.fcrepo.kernel.api.exception.MalformedRdfException;
import org.fcrepo.kernel.api.identifiers.IdentifierConverter;
import org.fcrepo.kernel.api.RdfStream;
import org.fcrepo.kernel.api.rdf.DefaultRdfStream;
import org.fcrepo.kernel.modeshape.FedoraResourceImpl;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mock;
import org.modeshape.jcr.api.NamespaceRegistry;
import org.apache.jena.graph.Triple;
import org.apache.jena.rdf.model.Model;
import org.apache.jena.rdf.model.Resource;
import org.apache.jena.rdf.model.Statement;
/**
* <p>RdfAdderTest class.</p>
*
* @author ajs6f
*/
/**
 * <p>RdfAdderTest class.</p>
 *
 * Unit tests for RdfAdder: verifies that RDF statements are translated into JCR
 * property writes and that rdf:type statements become JCR mixins, against a
 * fully mocked JCR session/workspace/namespace registry.
 *
 * @author ajs6f
 */
public class RdfAdderTest {
    private RdfAdder testAdder;
    private static final Model m = createDefaultModel();
    // A sample literal-valued property in an example namespace.
    private static final String propertyNamespacePrefix = "ex";
    private static final String propertyNamespaceUri =
            "http://www.example.com#";
    private static final String propertyBaseName = "example-property";
    private static final String propertyLongName = propertyNamespaceUri
            + propertyBaseName;
    private static final String propertyShortName = propertyNamespacePrefix
            + ":" + propertyBaseName;
    private static final String description = "Description.";
    // Triple/statement fixtures: one descriptive literal, one rdf:type (mixin).
    private static final Triple descriptiveTriple = create(createBlankNode(),
            createURI(propertyLongName), createLiteral(description));
    private static final Statement descriptiveStmnt = m
            .asStatement(descriptiveTriple);
    private static final Resource mockNodeSubject = descriptiveStmnt
            .getSubject();
    private static final String mixinLongName = type.getNameSpace() + "someType";
    private static final String mixinShortName = "rdf" + ":" + "someType";
    private static final Resource mixinObject = createResource(mixinLongName);
    private static final Triple mixinTriple = create(mockNodeSubject.asNode(),
            type.asNode(), mixinObject.asNode());
    private static final org.apache.jena.graph.Node testSubject = createURI("subject");
    private static final Statement mixinStmnt = m.asStatement(mixinTriple);
    // A literal statement should become a JCR property set on the node.
    @Test
    public void testAddingProperty() throws Exception {
        testAdder = new RdfAdder(mockGraphSubjects, mockSession, testStream);
        when(mockNode.setProperty(propertyShortName, mockValue, UNDEFINED)).thenReturn(mockProperty);
        testAdder.operateOnProperty(descriptiveStmnt, resource);
        verify(mockNode).setProperty(propertyShortName, mockValue, UNDEFINED);
    }
    // An rdf:type statement should add a mixin to the node.
    @Test
    public void testAddingModelWithStreamNamespace() throws Exception {
        testAdder = new RdfAdder(mockGraphSubjects, mockSession, testStream);
        testAdder.operateOnMixin(mixinStmnt.getObject().asResource(), resource);
        verify(mockNode).addMixin(anyString());
    }
    // If the node already carries the type, no mixin must be added.
    @Test
    public void testAddingModelWithPrimaryType() throws Exception {
        testAdder = new RdfAdder(mockGraphSubjects, mockSession, testStream);
        when(mockNode.isNodeType(mixinShortName)).thenReturn(true);
        testAdder.operateOnMixin(createResource(mixinLongName), resource);
        verify(mockNode, never()).addMixin(mixinShortName);
    }
    // An unregistered namespace prefix must not break mixin handling.
    @Test
    public void testAddingWithNotYetDefinedNamespace() throws Exception {
        // we drop our stream namespace map
        testStream = new DefaultRdfStream(testSubject, mockTriples);
        when(
                mockSession
                        .getNamespacePrefix(getJcrNamespaceForRDFNamespace(type
                                .getNameSpace()))).thenThrow(new NamespaceException("Expected."));
        testAdder = new RdfAdder(mockGraphSubjects, mockSession, testStream);
        testAdder.operateOnMixin(mixinStmnt.getObject().asResource(), resource);
    }
    // The repository-registered prefix is used when the stream has no namespace map.
    @Test
    public void testAddingWithRepoNamespace() throws Exception {
        // we drop our stream namespace map
        testStream = new DefaultRdfStream(testSubject, mockTriples);
        when(
                mockSession
                        .getNamespacePrefix(getJcrNamespaceForRDFNamespace(type
                                .getNameSpace()))).thenReturn("rdf");
        testAdder = new RdfAdder(mockGraphSubjects, mockSession, testStream);
        testAdder.operateOnMixin(mixinStmnt.getObject().asResource(), resource);
    }
    // A node that refuses the mixin must surface as MalformedRdfException.
    @Test(expected = MalformedRdfException.class)
    public void testAddingWithBadMixinOnNode() throws Exception {
        when(mockNode.canAddMixin(mixinShortName)).thenReturn(false);
        testAdder = new RdfAdder(mockGraphSubjects, mockSession, testStream);
        testAdder.operateOnMixin(mixinStmnt.getObject().asResource(), resource);
    }
    // An unknown node type must be registered on the fly as a mixin template.
    @Test
    public void testAddingWithBadMixinForRepo() throws Exception {
        when(mockNodeTypeManager.hasNodeType(mixinShortName)).thenReturn(false);
        testAdder = new RdfAdder(mockGraphSubjects, mockSession, testStream);
        testAdder.operateOnMixin(mixinStmnt.getObject().asResource(), resource);
        verify(mockNodeTypeManager).registerNodeType(mockNodeTypeTemplate, false);
        verify(mockNodeTypeTemplate).setName(mixinShortName);
        verify(mockNodeTypeTemplate).setMixin(true);
    }
    // Wires the mock JCR session, workspace, namespace registry, node-type
    // manager and value factory that all tests above rely on.
    @Before
    public void setUp() throws RepositoryException {
        initMocks(this);
        when(mockNode.getSession()).thenReturn(mockSession);
        when(mockNode.getName()).thenReturn("mockNode");
        when(mockNode.getPath()).thenReturn("/mockNode");
        when(mockSession.getWorkspace()).thenReturn(mockWorkspace);
        when(mockSession.getValueFactory()).thenReturn(mockValueFactory);
        when(
                mockSession
                        .getNamespacePrefix(getJcrNamespaceForRDFNamespace(type
                                .getNameSpace()))).thenReturn("rdf");
        when(mockValueFactory.createValue(description + FIELD_DELIMITER + XSDstring.getURI(), STRING))
                .thenReturn(mockValue);
        when(mockWorkspace.getNamespaceRegistry()).thenReturn(
                mockNamespaceRegistry);
        when(mockNamespaceRegistry.getURI(propertyNamespacePrefix)).thenReturn(
                propertyNamespaceUri);
        when(mockNamespaceRegistry.getURI("rdf")).thenReturn(
                type.getNameSpace());
        when(mockNamespaceRegistry.isRegisteredUri(propertyNamespaceUri))
                .thenReturn(true);
        when(mockNamespaceRegistry.isRegisteredUri(type.getNameSpace()))
                .thenReturn(true);
        when(mockNamespaceRegistry.getPrefixes()).thenReturn(new String[]{propertyNamespaceUri, type.getNameSpace()});
        when(mockNamespaceRegistry.getPrefix(propertyNamespaceUri)).thenReturn(
                propertyNamespacePrefix);
        when(mockNamespaceRegistry.getPrefix(type.getNameSpace())).thenReturn(
                "rdf");
        when(mockWorkspace.getNodeTypeManager())
                .thenReturn(mockNodeTypeManager);
        when(mockNodeTypeManager.getNodeType(FEDORA_RESOURCE)).thenReturn(
                mockNodeType);
        when(mockNodeTypeManager.createNodeTypeTemplate()).thenReturn(mockNodeTypeTemplate);
        when(mockNodeTypeManager.hasNodeType(mixinShortName)).thenReturn(true);
        when(mockNode.getPrimaryNodeType()).thenReturn(mockNodeType);
        when(mockNode.getMixinNodeTypes()).thenReturn(new NodeType[] {});
        when(mockNode.canAddMixin(mixinShortName)).thenReturn(true);
        when(mockNodeType.getPropertyDefinitions()).thenReturn(
                new PropertyDefinition[] {mockPropertyDefinition});
        when(mockPropertyDefinition.isMultiple()).thenReturn(false);
        when(mockPropertyDefinition.getName()).thenReturn(propertyShortName);
        when(mockPropertyDefinition.getRequiredType()).thenReturn(STRING);
        when(mockProperty.getName()).thenReturn(propertyShortName);
        when(mockGraphSubjects.reverse()).thenReturn(mockReverseGraphSubjects);
        //TODO? when(mockReverseGraphSubjects.convert(mockNode)).thenReturn(mockNodeSubject);
        resource = new FedoraResourceImpl(mockNode);
        testStream = new DefaultRdfStream(testSubject, mockTriples);
    }
    private FedoraResource resource;
    @Mock
    private Node mockNode;
    @Mock
    private Workspace mockWorkspace;
    @Mock
    private ValueFactory mockValueFactory;
    @Mock
    private Value mockValue;
    @Mock
    private NamespaceRegistry mockNamespaceRegistry;
    @Mock
    private NodeTypeManager mockNodeTypeManager;
    @Mock
    private NodeTypeTemplate mockNodeTypeTemplate;
    @Mock
    private NodeType mockNodeType;
    @Mock
    private PropertyDefinition mockPropertyDefinition;
    @Mock
    private Session mockSession;
    @Mock
    private Stream<Triple> mockTriples;
    private RdfStream testStream;
    @Mock
    private IdentifierConverter<Resource, FedoraResource> mockGraphSubjects;
    @Mock
    private IdentifierConverter<FedoraResource,Resource> mockReverseGraphSubjects;
    @Mock
    private Property mockProperty;
}
| apache-2.0 |
spring-projects/spring-security | ldap/src/integration-test/java/org/springframework/security/ldap/userdetails/LdapUserDetailsManagerModifyPasswordTests.java | 4186 | /*
* Copyright 2002-2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.security.ldap.userdetails;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.springframework.beans.factory.DisposableBean;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.ldap.core.ContextSource;
import org.springframework.security.authentication.BadCredentialsException;
import org.springframework.security.ldap.DefaultLdapUsernameToDnMapper;
import org.springframework.security.ldap.DefaultSpringSecurityContextSource;
import org.springframework.security.ldap.SpringSecurityLdapTemplate;
import org.springframework.security.ldap.server.UnboundIdContainer;
import org.springframework.security.test.context.support.WithMockUser;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit.jupiter.SpringExtension;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatExceptionOfType;
/**
* Tests for {@link LdapUserDetailsManager#changePassword}, specifically relating to the
* use of the Modify Password Extended Operation.
*
* @author Josh Cummings
*/
@ExtendWith(SpringExtension.class)
@ContextConfiguration(classes = LdapUserDetailsManagerModifyPasswordTests.UnboundIdContainerConfiguration.class)
public class LdapUserDetailsManagerModifyPasswordTests {

	// Extracted so the value passed to changePassword and the value verified via the
	// LDAP compare operation cannot drift apart (it was duplicated inline before).
	private static final String NEW_PASSWORD = "bobsshinynewandformidablylongandnearlyimpossibletorememberthoughdemonstrablyhardtocrackduetoitshighlevelofentropypasswordofjustice";

	LdapUserDetailsManager userDetailsManager;

	@Autowired
	ContextSource contextSource;

	@BeforeEach
	public void setup() {
		this.userDetailsManager = new LdapUserDetailsManager(this.contextSource);
		// Exercise the Modify Password Extended Operation rather than a plain attribute write.
		this.userDetailsManager.setUsePasswordModifyExtensionOperation(true);
		this.userDetailsManager.setUsernameMapper(new DefaultLdapUsernameToDnMapper("ou=people", "uid"));
	}

	@Test
	@WithMockUser(username = "bob", password = "bobspassword", authorities = "ROLE_USER")
	public void changePasswordWhenOldPasswordIsIncorrectThenThrowsException() {
		assertThatExceptionOfType(BadCredentialsException.class)
				.isThrownBy(() -> this.userDetailsManager.changePassword("wrongoldpassword", "bobsnewpassword"));
	}

	@Test
	@WithMockUser(username = "bob", password = "bobspassword", authorities = "ROLE_USER")
	public void changePasswordWhenOldPasswordIsCorrectThenPasses() {
		SpringSecurityLdapTemplate template = new SpringSecurityLdapTemplate(this.contextSource);
		this.userDetailsManager.changePassword("bobspassword", NEW_PASSWORD);
		// Verify server-side via an LDAP compare rather than reading the attribute back.
		assertThat(template.compare("uid=bob,ou=people", "userPassword", NEW_PASSWORD)).isTrue();
	}

	@Configuration
	static class UnboundIdContainerConfiguration implements DisposableBean {

		private UnboundIdContainer container = new UnboundIdContainer("dc=springframework,dc=org",
				"classpath:test-server.ldif");

		@Bean
		UnboundIdContainer ldapContainer() {
			// Port 0 asks the embedded server for any free port; contextSource reads the
			// chosen port back via container.getPort().
			this.container.setPort(0);
			return this.container;
		}

		@Bean
		ContextSource contextSource(UnboundIdContainer container) {
			return new DefaultSpringSecurityContextSource(
					"ldap://127.0.0.1:" + container.getPort() + "/dc=springframework,dc=org");
		}

		@Override
		public void destroy() throws Exception {
			this.container.stop();
		}

	}

}
| apache-2.0 |
rpmoore/ds3_java_sdk | ds3-sdk/src/main/java/com/spectralogic/ds3client/commands/spectrads3/GetPoolSpectraS3Response.java | 1423 | /*
* ******************************************************************************
* Copyright 2014-2019 Spectra Logic Corporation. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use
* this file except in compliance with the License. A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file.
* This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
* ****************************************************************************
*/
// This code is auto-generated, do not modify
package com.spectralogic.ds3client.commands.spectrads3;
import com.spectralogic.ds3client.models.Pool;
import com.spectralogic.ds3client.models.ChecksumType;
import com.spectralogic.ds3client.commands.interfaces.AbstractResponse;
/**
 * Response to the SpectraS3 "get pool" command: carries the parsed {@link Pool}
 * payload alongside the checksum data handled by {@link AbstractResponse}.
 *
 * NOTE(review): this file is marked auto-generated upstream; only comments are
 * added here — regenerate rather than hand-editing the code itself.
 */
public class GetPoolSpectraS3Response extends AbstractResponse {
    // Parsed response body; assigned once in the constructor, never reassigned.
    private final Pool poolResult;
    public GetPoolSpectraS3Response(final Pool poolResult, final String checksum, final ChecksumType.Type checksumType) {
        super(checksum, checksumType);
        this.poolResult = poolResult;
    }
    /** @return the pool described by the response body */
    public Pool getPoolResult() {
        return this.poolResult;
    }
}
api-ai/apiai-java-sdk | libai/src/main/java/ai/api/AIDataService.java | 34960 | /**
* Copyright 2017 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ai.api;
import com.google.gson.Gson;
import com.google.gson.JsonParseException;
import com.google.gson.JsonSyntaxException;
import ai.api.util.IOUtils;
import ai.api.util.StringUtils;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.Type;
import java.net.*;
import java.util.Calendar;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.TimeZone;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import ai.api.http.HttpClient;
import ai.api.model.AIContext;
import ai.api.model.AIRequest;
import ai.api.model.AIResponse;
import ai.api.model.Entity;
import ai.api.model.Status;
/**
* Do simple requests to the AI Service
*/
public class AIDataService {
private static final Logger logger = LoggerFactory.getLogger(AIDataService.class);
private static final AIServiceContext UNDEFINED_SERVICE_CONTEXT = null;
private static final String REQUEST_METHOD_POST = "POST";
private static final String REQUEST_METHOD_DELETE = "DELETE";
private static final String REQUEST_METHOD_GET = "GET";
private static final String DEFAULT_REQUEST_METHOD = REQUEST_METHOD_POST;
/**
* Cannot be <code>null</code>
*/
private final static Gson GSON = GsonFactory.getDefaultFactory().getGson();
/**
* Cannot be <code>null</code>
*/
private final AIConfiguration config;
/**
* Cannot be <code>null</code>
*/
private final AIServiceContext defaultServiceContext;
/**
* Create new service for given configuration and some predefined service context
*
* @param config Service configuration data. Cannot be <code>null</code>
* @param serviceContext Service context. If <code>null</code> then new context will be created
* @throws IllegalArgumentException If config parameter is null
*/
public AIDataService(final AIConfiguration config, final AIServiceContext serviceContext) {
if (config == null) {
throw new IllegalArgumentException("config should not be null");
}
this.config = config.clone();
if (serviceContext == null) {
this.defaultServiceContext = new AIServiceContextBuilder().generateSessionId().build();
} else {
this.defaultServiceContext = serviceContext;
}
}
/**
* Create new service with unique context for given configuration
*
* @param config Service configuration data. Cannot be <code>null</code>
* @throws IllegalArgumentException If config parameter is null
*/
public AIDataService(final AIConfiguration config) {
this(config, null);
}
  /**
   * @return Context used for every request that does not supply its own
   *         service context. Never <code>null</code>
   */
  public AIServiceContext getContext() {
    return defaultServiceContext;
  }
/**
* Make request to the AI service.
*
* @param request request object to the service. Cannot be <code>null</code>
* @return response object from service. Never <code>null</code>
*/
public AIResponse request(final AIRequest request) throws AIServiceException {
return request(request, (RequestExtras) null);
}
/**
* Make request to the AI service.
*
* @param request request object to the service. Cannot be <code>null</code>
* @param serviceContext custom service context that should be used instead of the default context
* @return response object from service. Never <code>null</code>
*/
public AIResponse request(final AIRequest request, final AIServiceContext serviceContext)
throws AIServiceException {
return request(request, (RequestExtras) null, serviceContext);
}
  /**
   * Make request to the AI service.
   *
   * @param request request object to the service. Cannot be <code>null</code>
   * @param requestExtras object that can hold additional contexts and entities; may be
   *        <code>null</code>
   * @return response object from service. Never <code>null</code>
   */
  public AIResponse request(final AIRequest request, final RequestExtras requestExtras)
      throws AIServiceException {
    // Delegates to the full overload using this instance's default service context.
    return request(request, requestExtras, UNDEFINED_SERVICE_CONTEXT);
  }
  /**
   * Make request to the AI service.
   *
   * Mutates {@code request} in place: language, session id and timezone are filled
   * in from the configuration / effective context when they are empty.
   *
   * @param request request object to the service. Cannot be <code>null</code>
   * @param requestExtras object that can hold additional contexts and entities; may be
   *        <code>null</code>
   * @param serviceContext custom service context that should be used instead of the default
   *        context; may be <code>null</code>
   * @return response object from service. Never <code>null</code>
   * @throws AIServiceException on empty/erroneous responses, bad configuration, or
   *         unparseable service answers
   */
  public AIResponse request(final AIRequest request, final RequestExtras requestExtras,
      final AIServiceContext serviceContext) throws AIServiceException {
    if (request == null) {
      throw new IllegalArgumentException("Request argument must not be null");
    }
    logger.debug("Start request");
    try {
      // Fill any missing request attributes from configuration and the effective context.
      if (StringUtils.isEmpty(request.getLanguage())) {
        request.setLanguage(config.getApiAiLanguage());
      }
      if (StringUtils.isEmpty(request.getSessionId())) {
        request.setSessionId(getSessionId(serviceContext));
      }
      if (StringUtils.isEmpty(request.getTimezone())) {
        request.setTimezone(getTimeZone(serviceContext));
      }
      Map<String, String> additionalHeaders = null;
      if (requestExtras != null) {
        // Merge extras (contexts/entities) into the request before serialization.
        fillRequest(request, requestExtras);
        additionalHeaders = requestExtras.getAdditionalHeaders();
      }
      final String queryData = GSON.toJson(request);
      final String response = doTextRequest(config.getQuestionUrl(request.getSessionId()),
          queryData, additionalHeaders);
      if (StringUtils.isEmpty(response)) {
        throw new AIServiceException(
            "Empty response from ai service. Please check configuration and Internet connection.");
      }
      // Collapse newlines so the whole response fits on one log line.
      logger.debug("Response json: " + response.replaceAll("[\r\n]+", " "));
      final AIResponse aiResponse = GSON.fromJson(response, AIResponse.class);
      if (aiResponse == null) {
        throw new AIServiceException(
            "API.AI response parsed as null. Check debug log for details.");
      }
      if (aiResponse.isError()) {
        throw new AIServiceException(aiResponse);
      }
      // Post-process the parsed response before returning (see AIResponse.cleanup).
      aiResponse.cleanup();
      return aiResponse;
    } catch (final MalformedURLException e) {
      logger.error("Malformed url should not be raised", e);
      throw new AIServiceException("Wrong configuration. Please, connect to API.AI Service support",
          e);
    } catch (final JsonSyntaxException je) {
      throw new AIServiceException(
          "Wrong service answer format. Please, connect to API.AI Service support", je);
    }
  }
/**
* Make requests to the AI service with voice data.
*
* @param voiceStream voice data stream for recognition. Cannot be <code>null</code>
* @return response object from service. Never <code>null</code>
* @throws AIServiceException
*/
public AIResponse voiceRequest(final InputStream voiceStream) throws AIServiceException {
return voiceRequest(voiceStream, new RequestExtras());
}
/**
* Make requests to the AI service with voice data.
*
* @param voiceStream voice data stream for recognition. Cannot be <code>null</code>
* @param aiContexts additional contexts for request
* @return response object from service. Never <code>null</code>
* @throws AIServiceException
*/
public AIResponse voiceRequest(final InputStream voiceStream, final List<AIContext> aiContexts)
throws AIServiceException {
return voiceRequest(voiceStream, new RequestExtras(aiContexts, null));
}
  /**
   * Make requests to the AI service with voice data.
   *
   * @param voiceStream voice data stream for recognition. Cannot be <code>null</code>
   * @param requestExtras object that can hold additional contexts and entities
   * @return response object from service. Never <code>null</code>
   * @throws AIServiceException
   */
  public AIResponse voiceRequest(final InputStream voiceStream, final RequestExtras requestExtras)
      throws AIServiceException {
    // Delegates to the full overload using this instance's default service context.
    return voiceRequest(voiceStream, requestExtras, UNDEFINED_SERVICE_CONTEXT);
  }
  /**
   * Make requests to the AI service with voice data.
   *
   * NOTE(review): {@code voiceStream} does not appear to be closed here or in
   * {@link #doSoundRequest} — the caller presumably owns it; confirm against HttpClient.
   *
   * @param voiceStream voice data stream for recognition. Cannot be <code>null</code>
   * @param requestExtras object that can hold additional contexts and entities
   * @param serviceContext custom service context that should be used instead of the default context
   * @return response object from service. Never <code>null</code>
   * @throws AIServiceException
   */
  public AIResponse voiceRequest(final InputStream voiceStream, final RequestExtras requestExtras,
      final AIServiceContext serviceContext) throws AIServiceException {
    assert voiceStream != null;
    logger.debug("Start voice request");
    try {
      // Build the JSON metadata part of the multipart request from configuration/context.
      final AIRequest request = new AIRequest();
      request.setLanguage(config.getApiAiLanguage());
      request.setSessionId(getSessionId(serviceContext));
      request.setTimezone(getTimeZone(serviceContext));
      Map<String, String> additionalHeaders = null;
      if (requestExtras != null) {
        fillRequest(request, requestExtras);
        additionalHeaders = requestExtras.getAdditionalHeaders();
      }
      final String queryData = GSON.toJson(request);
      logger.debug("Request json: " + queryData);
      final String response = doSoundRequest(voiceStream, queryData, additionalHeaders);
      if (StringUtils.isEmpty(response)) {
        throw new AIServiceException("Empty response from ai service. Please check configuration.");
      }
      logger.debug("Response json: " + response);
      final AIResponse aiResponse = GSON.fromJson(response, AIResponse.class);
      if (aiResponse == null) {
        throw new AIServiceException(
            "API.AI response parsed as null. Check debug log for details.");
      }
      if (aiResponse.isError()) {
        throw new AIServiceException(aiResponse);
      }
      // Post-process the parsed response before returning (see AIResponse.cleanup).
      aiResponse.cleanup();
      return aiResponse;
    } catch (final MalformedURLException e) {
      logger.error("Malformed url should not be raised", e);
      throw new AIServiceException("Wrong configuration. Please, connect to AI Service support", e);
    } catch (final JsonSyntaxException je) {
      throw new AIServiceException(
          "Wrong service answer format. Please, connect to API.AI Service support", je);
    }
  }
/**
* Forget all old contexts
*
* @return true if operation succeed, false otherwise
*/
@Deprecated
public boolean resetContexts() {
final AIRequest cleanRequest = new AIRequest();
cleanRequest.setQuery("empty_query_for_resetting_contexts"); // TODO remove it after protocol
// fix
cleanRequest.setResetContexts(true);
try {
final AIResponse response = request(cleanRequest);
return !response.isError();
} catch (final AIServiceException e) {
logger.error("Exception while contexts clean.", e);
return false;
}
}
  /**
   * Retrieves the list of all currently active contexts for the default session.
   *
   * @return List of contexts, or empty list if there is no any active contexts
   * @throws AIServiceException on a non-success response from the service
   */
  public List<AIContext> getActiveContexts() throws AIServiceException {
    return getActiveContexts(UNDEFINED_SERVICE_CONTEXT);
  }
/**
* Retrieves the list of all currently active contexts for a session
*
* @param serviceContext custom service context that should be used instead of the default context
* @return List of contexts, or empty list if there is no any active contexts
* @throws AIServiceException
*/
public List<AIContext> getActiveContexts(final AIServiceContext serviceContext)
throws AIServiceException {
try {
return doRequest(ApiActiveContextListResponse.class,
config.getContextsUrl(getSessionId(serviceContext)), REQUEST_METHOD_GET);
} catch (BadResponseStatusException e) {
throw new AIServiceException(e.response);
}
}
  /**
   * Retrieves the specified context for the default session.
   *
   * @param contextName The context name
   * @return <code>null</code> if context not found
   * @throws AIServiceException on any error response other than 404
   */
  public AIContext getActiveContext(final String contextName) throws AIServiceException {
    return getActiveContext(contextName, UNDEFINED_SERVICE_CONTEXT);
  }
/**
* Retrieves the specified context for a session
*
* @param contextName The context name
* @param serviceContext custom service context that should be used instead of the default context
* @return <code>null</code> if context not found
* @throws AIServiceException
*/
public AIContext getActiveContext(final String contextName, final AIServiceContext serviceContext)
throws AIServiceException {
try {
return doRequest(AIContext.class,
config.getContextsUrl(getSessionId(serviceContext), contextName), REQUEST_METHOD_GET);
} catch (BadResponseStatusException e) {
if (e.response.getStatus().getCode() == 404) {
return null;
} else {
throw new AIServiceException(e.response);
}
}
}
  /**
   * Adds new active contexts to the default session.
   *
   * @param contexts Iterable collection of contexts
   * @return List of added context names, or empty list if no contexts were added
   * @throws AIServiceException on a non-success response from the service
   */
  public List<String> addActiveContext(final Iterable<AIContext> contexts)
      throws AIServiceException {
    return addActiveContext(contexts, UNDEFINED_SERVICE_CONTEXT);
  }
/**
* Adds new active contexts for a session
*
* @param contexts Iterable collection of contexts
* @param serviceContext custom service context that should be used instead of the default context
* @return List of added context names, or empty list if no contexts were added
* @throws AIServiceException
*/
public List<String> addActiveContext(final Iterable<AIContext> contexts,
final AIServiceContext serviceContext) throws AIServiceException {
ApiActiveContextNamesResponse response;
try {
response = doRequest(contexts, ApiActiveContextNamesResponse.class,
config.getContextsUrl(getSessionId(serviceContext)), REQUEST_METHOD_POST);
return response.names;
} catch (BadResponseStatusException e) {
throw new AIServiceException(e.response);
}
}
  /**
   * Adds a new active context to the default session.
   *
   * @param context New context
   * @return Name of added context
   * @throws AIServiceException on a non-success response from the service
   */
  public String addActiveContext(final AIContext context) throws AIServiceException {
    return addActiveContext(context, UNDEFINED_SERVICE_CONTEXT);
  }
/**
* Adds new active context for a session
*
* @param context New context
* @param serviceContext custom service context that should be used instead of the default context
* @return Name of added context
* @throws AIServiceException
*/
public String addActiveContext(final AIContext context, final AIServiceContext serviceContext)
throws AIServiceException {
ApiActiveContextNamesResponse response;
try {
response = doRequest(context, ApiActiveContextNamesResponse.class,
config.getContextsUrl(getSessionId(serviceContext)), REQUEST_METHOD_POST);
return response.names != null && response.names.size() > 0 ? response.names.get(0) : null;
} catch (BadResponseStatusException e) {
throw new AIServiceException(e.response);
}
}
  /**
   * Deletes all active contexts for the default session.
   *
   * @throws AIServiceException on a non-success response from the service
   */
  public void resetActiveContexts() throws AIServiceException {
    resetActiveContexts(UNDEFINED_SERVICE_CONTEXT);
  }
/**
* Deletes all active contexts for a session
*
* @param serviceContext custom service context that should be used instead of the default context
* @throws AIServiceException
*/
public void resetActiveContexts(final AIServiceContext serviceContext) throws AIServiceException {
try {
doRequest(AIResponse.class, config.getContextsUrl(getSessionId(serviceContext)),
REQUEST_METHOD_DELETE);
} catch (BadResponseStatusException e) {
throw new AIServiceException(e.response);
}
}
  /**
   * Deletes the specified context from the default session.
   *
   * @param contextName The context name
   * @return <code>false</code> if context was not delete, <code>true</code> in otherwise case
   * @throws AIServiceException on any error response other than 404
   */
  public boolean removeActiveContext(final String contextName) throws AIServiceException {
    return removeActiveContext(contextName, UNDEFINED_SERVICE_CONTEXT);
  }
/**
* Deletes the specified context for a session
*
* @param contextName The context name
* @param serviceContext custom service context that should be used instead of the default context
* @return <code>false</code> if context was not delete, <code>true</code> in otherwise case
* @throws AIServiceException
*/
public boolean removeActiveContext(final String contextName,
final AIServiceContext serviceContext) throws AIServiceException {
try {
doRequest(AIResponse.class, config.getContextsUrl(getSessionId(serviceContext), contextName),
REQUEST_METHOD_DELETE);
return true;
} catch (BadResponseStatusException e) {
if (e.response.getStatus().getCode() == 404) {
return false;
} else {
throw new AIServiceException(e.response);
}
}
}
  /**
   * Add new entity to an agent entity list using the default session.
   *
   * @param userEntity new entity data
   * @return response object from service. Never <code>null</code>
   * @throws AIServiceException
   */
  public AIResponse uploadUserEntity(final Entity userEntity) throws AIServiceException {
    return uploadUserEntity(userEntity, UNDEFINED_SERVICE_CONTEXT);
  }
/**
* Add new entity to an agent entity list
*
* @param userEntity new entity data
* @param serviceContext custom service context that should be used instead of the default context
* @return response object from service. Never <code>null</code>
* @throws AIServiceException
*/
public AIResponse uploadUserEntity(final Entity userEntity, AIServiceContext serviceContext)
throws AIServiceException {
return uploadUserEntities(Collections.singleton(userEntity), serviceContext);
}
  /**
   * Add a bunch of new entity to an agent entity list using the default session.
   *
   * @param userEntities collection of a new entity data
   * @return response object from service. Never <code>null</code>
   * @throws AIServiceException
   */
  public AIResponse uploadUserEntities(final Collection<Entity> userEntities)
      throws AIServiceException {
    return uploadUserEntities(userEntities, UNDEFINED_SERVICE_CONTEXT);
  }
/**
* Add a bunch of new entity to an agent entity list
*
* @param userEntities collection of a new entity data
* @param serviceContext custom service context that should be used instead of the default context
* @return response object from service. Never <code>null</code>
* @throws AIServiceException
*/
public AIResponse uploadUserEntities(final Collection<Entity> userEntities,
AIServiceContext serviceContext) throws AIServiceException {
if (userEntities == null || userEntities.size() == 0) {
throw new AIServiceException("Empty entities list");
}
final String requestData = GSON.toJson(userEntities);
try {
final String response =
doTextRequest(config.getUserEntitiesEndpoint(getSessionId(serviceContext)), requestData);
if (StringUtils.isEmpty(response)) {
throw new AIServiceException(
"Empty response from ai service. Please check configuration and Internet connection.");
}
logger.debug("Response json: " + response);
final AIResponse aiResponse = GSON.fromJson(response, AIResponse.class);
if (aiResponse == null) {
throw new AIServiceException(
"API.AI response parsed as null. Check debug log for details.");
}
if (aiResponse.isError()) {
throw new AIServiceException(aiResponse);
}
aiResponse.cleanup();
return aiResponse;
} catch (final MalformedURLException e) {
logger.error("Malformed url should not be raised", e);
throw new AIServiceException("Wrong configuration. Please, connect to AI Service support", e);
} catch (final JsonSyntaxException je) {
throw new AIServiceException(
"Wrong service answer format. Please, connect to API.AI Service support", je);
}
}
  /**
   * Sends the given JSON payload to the query endpoint of the chosen service context.
   *
   * @param requestJson Cannot be <code>null</code>
   * @param serviceContext custom service context that should be used instead of the default context
   * @return Response string
   * @throws MalformedURLException if the configured endpoint URL is malformed
   * @throws AIServiceException on connection problems
   */
  protected String doTextRequest(final String requestJson, AIServiceContext serviceContext)
      throws MalformedURLException, AIServiceException {
    return doTextRequest(config.getQuestionUrl(getSessionId(serviceContext)), requestJson);
  }
  /**
   * Sends the given JSON payload to the query endpoint of the default service context.
   *
   * @param requestJson Cannot be <code>null</code>
   * @return Response string
   * @throws MalformedURLException if the configured endpoint URL is malformed
   * @throws AIServiceException on connection problems
   */
  protected String doTextRequest(final String requestJson)
      throws MalformedURLException, AIServiceException {
    return doTextRequest(requestJson, UNDEFINED_SERVICE_CONTEXT);
  }
/**
* @param endpoint Cannot be <code>null</code>
* @param requestJson Cannot be <code>null</code>
* @return Response string
* @throws MalformedURLException
* @throws AIServiceException
*/
protected String doTextRequest(final String endpoint, final String requestJson)
throws MalformedURLException, AIServiceException {
return doTextRequest(endpoint, requestJson, null);
}
/**
* @param endpoint Cannot be <code>null</code>
* @param requestJson Cannot be <code>null</code>
* @param additionalHeaders
* @return Response string
* @throws MalformedURLException
* @throws AIServiceException
*/
protected String doTextRequest(final String endpoint, final String requestJson,
final Map<String, String> additionalHeaders)
throws MalformedURLException, AIServiceException {
// TODO call doRequest method
assert endpoint != null;
assert requestJson != null;
HttpURLConnection connection = null;
try {
final URL url = new URL(endpoint);
final String queryData = requestJson;
logger.debug("Request json: " + queryData);
if (config.getProxy() != null) {
connection = (HttpURLConnection) url.openConnection(config.getProxy());
} else {
connection = (HttpURLConnection) url.openConnection();
}
connection.setRequestMethod("POST");
connection.setDoOutput(true);
connection.addRequestProperty("Authorization", "Bearer " + config.getApiKey());
connection.addRequestProperty("Content-Type", "application/json; charset=utf-8");
connection.addRequestProperty("Accept", "application/json");
if (additionalHeaders != null) {
for (final Map.Entry<String, String> entry : additionalHeaders.entrySet()) {
connection.addRequestProperty(entry.getKey(), entry.getValue());
}
}
connection.connect();
final BufferedOutputStream outputStream =
new BufferedOutputStream(connection.getOutputStream());
IOUtils.writeAll(queryData, outputStream);
outputStream.close();
final InputStream inputStream = new BufferedInputStream(connection.getInputStream());
final String response = IOUtils.readAll(inputStream);
inputStream.close();
return response;
} catch (final IOException e) {
if (connection != null) {
try {
final InputStream errorStream = connection.getErrorStream();
if (errorStream != null) {
final String errorString = IOUtils.readAll(errorStream);
logger.debug(errorString);
return errorString;
} else {
throw new AIServiceException("Can't connect to the api.ai service.", e);
}
} catch (final IOException ex) {
logger.warn("Can't read error response", ex);
}
}
logger.error(
"Can't make request to the API.AI service. Please, check connection settings and API access token.",
e);
throw new AIServiceException(
"Can't make request to the API.AI service. Please, check connection settings and API access token.",
e);
} finally {
if (connection != null) {
connection.disconnect();
}
}
}
  /**
   * Method extracted for testing purposes.
   *
   * @param voiceStream Cannot be <code>null</code>
   * @param queryData Cannot be <code>null</code>
   * @return raw response string from the service
   */
  protected String doSoundRequest(final InputStream voiceStream, final String queryData)
      throws MalformedURLException, AIServiceException {
    return doSoundRequest(voiceStream, queryData, null, UNDEFINED_SERVICE_CONTEXT);
  }
  /**
   * Method extracted for testing purposes.
   *
   * @param voiceStream Cannot be <code>null</code>
   * @param queryData Cannot be <code>null</code>
   * @param additionalHeaders extra HTTP headers; may be <code>null</code>
   * @return raw response string from the service
   */
  protected String doSoundRequest(final InputStream voiceStream, final String queryData,
      final Map<String, String> additionalHeaders)
      throws MalformedURLException, AIServiceException {
    return doSoundRequest(voiceStream, queryData, additionalHeaders, UNDEFINED_SERVICE_CONTEXT);
  }
  /**
   * Method extracted for testing purposes. Sends a multipart POST with a JSON
   * "request" part and the voice data as a "voiceData" file part.
   *
   * @param voiceStream Cannot be <code>null</code>; not closed here — caller owns it
   * @param queryData Cannot be <code>null</code>
   * @param additionalHeaders extra HTTP headers; may be <code>null</code>
   * @param serviceContext context whose session id selects the question endpoint
   * @return raw response string (either the success body or the error body)
   */
  protected String doSoundRequest(final InputStream voiceStream, final String queryData,
      final Map<String, String> additionalHeaders, final AIServiceContext serviceContext)
      throws MalformedURLException, AIServiceException {
    // TODO call doRequest method
    assert voiceStream != null;
    assert queryData != null;
    HttpURLConnection connection = null;
    HttpClient httpClient = null;
    try {
      final URL url = new URL(config.getQuestionUrl(getSessionId(serviceContext)));
      logger.debug("Connecting to {}", url);
      if (config.getProxy() != null) {
        connection = (HttpURLConnection) url.openConnection(config.getProxy());
      } else {
        connection = (HttpURLConnection) url.openConnection();
      }
      connection.addRequestProperty("Authorization", "Bearer " + config.getApiKey());
      connection.addRequestProperty("Accept", "application/json");
      if (additionalHeaders != null) {
        for (final Map.Entry<String, String> entry : additionalHeaders.entrySet()) {
          connection.addRequestProperty(entry.getKey(), entry.getValue());
        }
      }
      connection.setRequestMethod("POST");
      connection.setDoInput(true);
      connection.setDoOutput(true);
      // HttpClient wraps the connection and writes the multipart body parts in order.
      httpClient = new HttpClient(connection);
      httpClient.setWriteSoundLog(config.isWriteSoundLog());
      httpClient.connectForMultipart();
      httpClient.addFormPart("request", queryData);
      httpClient.addFilePart("voiceData", "voice.wav", voiceStream);
      httpClient.finishMultipart();
      final String response = httpClient.getResponse();
      return response;
    } catch (final IOException e) {
      if (httpClient != null) {
        // Prefer returning the service's error body so the caller can parse it.
        final String errorString = httpClient.getErrorString();
        logger.debug(errorString);
        if (!StringUtils.isEmpty(errorString)) {
          return errorString;
        } else if (e instanceof HttpRetryException) {
          // No body available: synthesize an error response from the HTTP status code.
          final AIResponse response = new AIResponse();
          final int code = ((HttpRetryException) e).responseCode();
          final Status status = Status.fromResponseCode(code);
          status.setErrorDetails(((HttpRetryException) e).getReason());
          response.setStatus(status);
          throw new AIServiceException(response);
        }
      }
      logger.error(
          "Can't make request to the API.AI service. Please, check connection settings and API.AI keys.",
          e);
      throw new AIServiceException(
          "Can't make request to the API.AI service. Please, check connection settings and API.AI keys.",
          e);
    } finally {
      if (connection != null) {
        connection.disconnect();
      }
    }
  }
protected <TResponse> TResponse doRequest(final Type responseType, final String endpoint,
final String method) throws AIServiceException, BadResponseStatusException {
return doRequest(responseType, endpoint, method, (Map<String, String>) null);
}
protected <TRequest, TResponse> TResponse doRequest(final TRequest request,
final Type responseType, final String endpoint, final String method)
throws AIServiceException, BadResponseStatusException {
return doRequest(request, responseType, endpoint, method, (Map<String, String>) null);
}
protected <TResponse> TResponse doRequest(final Type responseType, final String endpoint,
final String method, final Map<String, String> additionalHeaders)
throws AIServiceException, BadResponseStatusException {
return doRequest((Object) null, responseType, endpoint, method, additionalHeaders);
}
protected <TRequest, TResponse> TResponse doRequest(final TRequest request,
final Type responseType, final String endpoint, final String method,
final Map<String, String> additionalHeaders)
throws AIServiceException, BadResponseStatusException {
assert endpoint != null;
HttpURLConnection connection = null;
try {
final URL url = new URL(endpoint);
final String queryData = request != null ? GSON.toJson(request) : null;
final String requestMethod = method != null ? method : DEFAULT_REQUEST_METHOD;
logger.debug("Request json: " + queryData);
if (config.getProxy() != null) {
connection = (HttpURLConnection) url.openConnection(config.getProxy());
} else {
connection = (HttpURLConnection) url.openConnection();
}
if (queryData != null && !REQUEST_METHOD_POST.equals(requestMethod)) {
throw new AIServiceException("Non-empty request should be sent using POST method");
}
connection.setRequestMethod(requestMethod);
if (REQUEST_METHOD_POST.equals(requestMethod)) {
connection.setDoOutput(true);
}
connection.addRequestProperty("Authorization", "Bearer " + config.getApiKey());
connection.addRequestProperty("Content-Type", "application/json; charset=utf-8");
connection.addRequestProperty("Accept", "application/json");
if (additionalHeaders != null) {
for (final Map.Entry<String, String> entry : additionalHeaders.entrySet()) {
connection.addRequestProperty(entry.getKey(), entry.getValue());
}
}
connection.connect();
if (queryData != null) {
final BufferedOutputStream outputStream =
new BufferedOutputStream(connection.getOutputStream());
IOUtils.writeAll(queryData, outputStream);
outputStream.close();
}
final InputStream inputStream = new BufferedInputStream(connection.getInputStream());
final String response = IOUtils.readAll(inputStream);
inputStream.close();
try {
AIResponse aiResponse = GSON.fromJson(response, AIResponse.class);
if (aiResponse.getStatus() != null && aiResponse.getStatus().getCode() != 200) {
throw new BadResponseStatusException(aiResponse);
}
} catch (JsonParseException e) {
// response is not in a expected format
}
return GSON.fromJson(response, responseType);
} catch (final IOException e) {
if (connection != null) {
try {
final InputStream errorStream = connection.getErrorStream();
if (errorStream != null) {
final String errorString = IOUtils.readAll(errorStream);
logger.debug(errorString);
throw new AIServiceException(errorString, e);
} else {
throw new AIServiceException("Can't connect to the api.ai service.", e);
}
} catch (final IOException ex) {
logger.warn("Can't read error response", ex);
}
}
logger.error(
"Can't make request to the API.AI service. Please, check connection settings and API access token.",
e);
throw new AIServiceException(
"Can't make request to the API.AI service. Please, check connection settings and API access token.",
e);
} finally {
if (connection != null) {
connection.disconnect();
}
}
}
private void fillRequest(final AIRequest request, final RequestExtras requestExtras) {
assert request != null;
assert requestExtras != null;
if (requestExtras.hasContexts()) {
request.setContexts(requestExtras.getContexts());
}
if (requestExtras.hasEntities()) {
request.setEntities(requestExtras.getEntities());
}
if (requestExtras.getLocation() != null) {
request.setLocation(requestExtras.getLocation());
}
}
private String getSessionId(AIServiceContext serviceContext) {
return serviceContext != null ? serviceContext.getSessionId()
: defaultServiceContext.getSessionId();
}
private String getTimeZone(AIServiceContext serviceContext) {
TimeZone timeZone = serviceContext != null
? serviceContext.getTimeZone()
: defaultServiceContext.getTimeZone();
return (timeZone != null ? timeZone : Calendar.getInstance().getTimeZone()).getID();
}
    /**
     * Response payload holding just the names of active contexts, on top of the
     * standard {@code AIResponse} status fields.
     */
    private static class ApiActiveContextNamesResponse extends AIResponse {
        private static final long serialVersionUID = 1L;
        // Names of the currently active contexts as returned by the service.
        public List<String> names;
    }
private static interface ApiActiveContextListResponse extends List<AIContext> {
}
private static class BadResponseStatusException extends Exception {
private static final long serialVersionUID = 1L;
private final AIResponse response;
public BadResponseStatusException(final AIResponse response) {
this.response = response;
}
}
}
| apache-2.0 |
guodroid/okwallet | wallet/src/org/bitcoinj/params/MainNetParams.java | 9192 | /*
* Copyright 2013 Google Inc.
* Copyright 2015 Andreas Schildbach
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.bitcoinj.params;
import org.bitcoinj.core.*;
import org.bitcoinj.net.discovery.*;
import java.net.*;
import static com.google.common.base.Preconditions.*;
/**
* Parameters for the main production network on which people trade goods and services.
*/
public class MainNetParams extends AbstractBitcoinNetParams {
    // BIP34 supermajority thresholds, expressed over a rolling window of blocks:
    // 750/1000 enforces the upgrade, 950/1000 rejects outdated blocks.
    public static final int MAINNET_MAJORITY_WINDOW = 1000;
    public static final int MAINNET_MAJORITY_REJECT_BLOCK_OUTDATED = 950;
    public static final int MAINNET_MAJORITY_ENFORCE_BLOCK_UPGRADE = 750;

    public MainNetParams() {
        super();
        interval = INTERVAL;
        targetTimespan = TARGET_TIMESPAN;
        // Highest (easiest) allowed proof-of-work target, decoded from compact form.
        maxTarget = Utils.decodeCompactBits(0x1d00ffffL);
        // Mainnet address/key version bytes and network identifiers.
        dumpedPrivateKeyHeader = 128;
        addressHeader = 0;
        p2shHeader = 5;
        acceptableAddressCodes = new int[] { addressHeader, p2shHeader };
        port = 8333;
        packetMagic = 0xf9beb4d9L;
        bip32HeaderPub = 0x0488B21E; //The 4 byte header that serializes in base58 to "xpub".
        bip32HeaderPriv = 0x0488ADE4; //The 4 byte header that serializes in base58 to "xprv"
        majorityEnforceBlockUpgrade = MAINNET_MAJORITY_ENFORCE_BLOCK_UPGRADE;
        majorityRejectBlockOutdated = MAINNET_MAJORITY_REJECT_BLOCK_OUTDATED;
        majorityWindow = MAINNET_MAJORITY_WINDOW;
        // Fix the genesis block's fields and sanity-check the well-known mainnet hash.
        genesisBlock.setDifficultyTarget(0x1d00ffffL);
        genesisBlock.setTime(1231006505L);
        genesisBlock.setNonce(2083236893);
        id = ID_MAINNET;
        subsidyDecreaseBlockCount = 210000;
        spendableCoinbaseDepth = 100;
        String genesisHash = genesisBlock.getHashAsString();
        checkState(genesisHash.equals("000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f"),
                genesisHash);

        // This contains (at a minimum) the blocks which are not BIP30 compliant. BIP30 changed how duplicate
        // transactions are handled. Duplicated transactions could occur in the case where a coinbase had the same
        // extraNonce and the same outputs but appeared at different heights, and greatly complicated re-org handling.
        // Having these here simplifies block connection logic considerably.
        checkpoints.put(91722, Sha256Hash.wrap("00000000000271a2dc26e7667f8419f2e15416dc6955e5a6c6cdf3f2574dd08e"));
        checkpoints.put(91812, Sha256Hash.wrap("00000000000af0aed4792b1acee3d966af36cf5def14935db8de83d6f9306f2f"));
        checkpoints.put(91842, Sha256Hash.wrap("00000000000a4d0a398161ffc163c503763b1f4360639393e0e4c8e300e0caec"));
        checkpoints.put(91880, Sha256Hash.wrap("00000000000743f190a18c5577a3c2d2a1f610ae9601ac046a38084ccb7cd721"));
        checkpoints.put(200000, Sha256Hash.wrap("000000000000034a7dedef4a161fa058a2d67a173a90155f3a2fe6fc132e0ebf"));

        // DNS seeds used for initial peer discovery.
        dnsSeeds = new String[] {
                "seed.bitcoin.sipa.be",         // Pieter Wuille
                "dnsseed.bluematt.me",          // Matt Corallo
                "dnsseed.bitcoin.dashjr.org",   // Luke Dashjr
                "seed.bitcoinstats.com",        // Chris Decker
                "seed.bitnodes.io",             // Addy Yeow
                "bitseed.xf2.org",              // Jeff Garzik
                "seed.bitcoin.jonasschnelli.ch",// Jonas Schnelli
                "bitcoin.bloqseeds.net",        // Bloq
        };

        // HTTP-based peer discovery, authenticated against the seed operator's key.
        httpSeeds = new HttpDiscovery.Details[] {
                // Andreas Schildbach
                new HttpDiscovery.Details(
                        ECKey.fromPublicOnly(Utils.HEX.decode("0238746c59d46d5408bf8b1d0af5740fe1a6e1703fcb56b2953f0b965c740d256f")),
                        URI.create("http://httpseed.bitcoin.schildbach.de/peers")
                )
        };

        // Hard-coded fallback peer addresses (IPv4, little-endian packed ints).
        addrSeeds = new int[] {
                0x1ddb1032, 0x6242ce40, 0x52d6a445, 0x2dd7a445, 0x8a53cd47, 0x73263750, 0xda23c257, 0xecd4ed57,
                0x0a40ec59, 0x75dce160, 0x7df76791, 0x89370bad, 0xa4f214ad, 0x767700ae, 0x638b0418, 0x868a1018,
                0xcd9f332e, 0x0129653e, 0xcc92dc3e, 0x96671640, 0x56487e40, 0x5b66f440, 0xb1d01f41, 0xf1dc6041,
                0xc1d12b42, 0x86ba1243, 0x6be4df43, 0x6d4cef43, 0xd18e0644, 0x1ab0b344, 0x6584a345, 0xe7c1a445,
                0x58cea445, 0xc5daa445, 0x21dda445, 0x3d3b5346, 0x13e55347, 0x1080d24a, 0x8e611e4b, 0x81518e4b,
                0x6c839e4b, 0xe2ad0a4c, 0xfbbc0a4c, 0x7f5b6e4c, 0x7244224e, 0x1300554e, 0x20690652, 0x5a48b652,
                0x75c5c752, 0x4335cc54, 0x340fd154, 0x87c07455, 0x087b2b56, 0x8a133a57, 0xac23c257, 0x70374959,
                0xfb63d45b, 0xb9a1685c, 0x180d765c, 0x674f645d, 0x04d3495e, 0x1de44b5e, 0x4ee8a362, 0x0ded1b63,
                0xc1b04b6d, 0x8d921581, 0x97b7ea82, 0x1cf83a8e, 0x91490bad, 0x09dc75ae, 0x9a6d79ae, 0xa26d79ae,
                0x0fd08fae, 0x0f3e3fb2, 0x4f944fb2, 0xcca448b8, 0x3ecd6ab8, 0xa9d5a5bc, 0x8d0119c1, 0x045997d5,
                0xca019dd9, 0x0d526c4d, 0xabf1ba44, 0x66b1ab55, 0x1165f462, 0x3ed7cbad, 0xa38fae6e, 0x3bd2cbad,
                0xd36f0547, 0x20df7840, 0x7a337742, 0x549f8e4b, 0x9062365c, 0xd399f562, 0x2b5274a1, 0x8edfa153,
                0x3bffb347, 0x7074bf58, 0xb74fcbad, 0x5b5a795b, 0x02fa29ce, 0x5a6738d4, 0xe8a1d23e, 0xef98c445,
                0x4b0f494c, 0xa2bc1e56, 0x7694ad63, 0xa4a800c3, 0x05fda6cd, 0x9f22175e, 0x364a795b, 0x536285d5,
                0xac44c9d4, 0x0b06254d, 0x150c2fd4, 0x32a50dcc, 0xfd79ce48, 0xf15cfa53, 0x66c01e60, 0x6bc26661,
                0xc03b47ae, 0x4dda1b81, 0x3285a4c1, 0x883ca96d, 0x35d60a4c, 0xdae09744, 0x2e314d61, 0x84e247cf,
                0x6c814552, 0x3a1cc658, 0x98d8f382, 0xe584cb5b, 0x15e86057, 0x7b01504e, 0xd852dd48, 0x56382f56,
                0x0a5df454, 0xa0d18d18, 0x2e89b148, 0xa79c114c, 0xcbdcd054, 0x5523bc43, 0xa9832640, 0x8a066144,
                0x3894c3bc, 0xab76bf58, 0x6a018ac1, 0xfebf4f43, 0x2f26c658, 0x31102f4e, 0x85e929d5, 0x2a1c175e,
                0xfc6c2cd1, 0x27b04b6d, 0xdf024650, 0x161748b8, 0x28be6580, 0x57be6580, 0x1cee677a, 0xaa6bb742,
                0x9a53964b, 0x0a5a2d4d, 0x2434c658, 0x9a494f57, 0x1ebb0e48, 0xf610b85d, 0x077ecf44, 0x085128bc,
                0x5ba17a18, 0x27ca1b42, 0xf8a00b56, 0xfcd4c257, 0xcf2fc15e, 0xd897e052, 0x4cada04f, 0x2f35f6d5,
                0x382ce8c9, 0xe523984b, 0x3f946846, 0x60c8be43, 0x41da6257, 0xde0be142, 0xae8a544b, 0xeff0c254,
                0x1e0f795b, 0xaeb28890, 0xca16acd9, 0x1e47ddd8, 0x8c8c4829, 0xd27dc747, 0xd53b1663, 0x4096b163,
                0x9c8dd958, 0xcb12f860, 0x9e79305c, 0x40c1a445, 0x4a90c2bc, 0x2c3a464d, 0x2727f23c, 0x30b04b6d,
                0x59024cb8, 0xa091e6ad, 0x31b04b6d, 0xc29d46a6, 0x63934fb2, 0xd9224dbe, 0x9f5910d8, 0x7f530a6b,
                0x752e9c95, 0x65453548, 0xa484be46, 0xce5a1b59, 0x710e0718, 0x46a13d18, 0xdaaf5318, 0xc4a8ff53,
                0x87abaa52, 0xb764cf51, 0xb2025d4a, 0x6d351e41, 0xc035c33e, 0xa432c162, 0x61ef34ae, 0xd16fddbc,
                0x0870e8c1, 0x3070e8c1, 0x9c71e8c1, 0xa4992363, 0x85a1f663, 0x4184e559, 0x18d96ed8, 0x17b8dbd5,
                0x60e7cd18, 0xe5ee104c, 0xab17ac62, 0x1e786e1b, 0x5d23b762, 0xf2388fae, 0x88270360, 0x9e5b3d80,
                0x7da518b2, 0xb5613b45, 0x1ad41f3e, 0xd550854a, 0x8617e9a9, 0x925b229c, 0xf2e92542, 0x47af0544,
                0x73b5a843, 0xb9b7a0ad, 0x03a748d0, 0x0a6ff862, 0x6694df62, 0x3bfac948, 0x8e098f4f, 0x746916c3,
                0x02f38e4f, 0x40bb1243, 0x6a54d162, 0x6008414b, 0xa513794c, 0x514aa343, 0x63781747, 0xdbb6795b,
                0xed065058, 0x42d24b46, 0x1518794c, 0x9b271681, 0x73e4ffad, 0x0654784f, 0x438dc945, 0x641846a6,
                0x2d1b0944, 0x94b59148, 0x8d369558, 0xa5a97662, 0x8b705b42, 0xce9204ae, 0x8d584450, 0x2df61555,
                0xeebff943, 0x2e75fb4d, 0x3ef8fc57, 0x9921135e, 0x8e31042e, 0xb5afad43, 0x89ecedd1, 0x9cfcc047,
                0x8fcd0f4c, 0xbe49f5ad, 0x146a8d45, 0x98669ab8, 0x98d9175e, 0xd1a8e46d, 0x839a3ab8, 0x40a0016c,
                0x6d27c257, 0x977fffad, 0x7baa5d5d, 0x1213be43, 0xb167e5a9, 0x640fe8ca, 0xbc9ea655, 0x0f820a4c,
                0x0f097059, 0x69ac957c, 0x366d8453, 0xb1ba2844, 0x8857f081, 0x70b5be63, 0xc545454b, 0xaf36ded1,
                0xb5a4b052, 0x21f062d1, 0x72ab89b2, 0x74a45318, 0x8312e6bc, 0xb916965f, 0x8aa7c858, 0xfe7effad,
        };
    }

    // Lazily-created singleton; access is synchronized via get().
    private static MainNetParams instance;

    /**
     * Returns the shared mainnet parameters instance, creating it on first use.
     */
    public static synchronized MainNetParams get() {
        if (instance == null) {
            instance = new MainNetParams();
        }
        return instance;
    }

    @Override
    public String getPaymentProtocolId() {
        return PAYMENT_PROTOCOL_ID_MAINNET;
    }
}
| apache-2.0 |
xschildw/Synapse-Repository-Services | services/workers/src/main/java/org/sagebionetworks/message/workers/MessageToUserWorker.java | 2573 | package org.sagebionetworks.message.workers;
import java.util.List;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.sagebionetworks.asynchronous.workers.changes.ChangeMessageDrivenRunner;
import org.sagebionetworks.cloudwatch.WorkerLogger;
import org.sagebionetworks.common.util.progress.ProgressCallback;
import org.sagebionetworks.repo.manager.MessageManager;
import org.sagebionetworks.repo.model.ObjectType;
import org.sagebionetworks.repo.model.message.ChangeMessage;
import org.sagebionetworks.repo.model.ses.QuarantinedEmailException;
import org.sagebionetworks.repo.web.NotFoundException;
import org.sagebionetworks.workers.util.aws.message.RecoverableMessageException;
import org.springframework.beans.factory.annotation.Autowired;
/**
* The worker that processes messages sending messages to users
*
*/
public class MessageToUserWorker implements ChangeMessageDrivenRunner {

    static private Logger log = LogManager.getLogger(MessageToUserWorker.class);

    @Autowired
    private MessageManager messageManager;

    @Autowired
    private WorkerLogger workerLogger;

    @Override
    public void run(final ProgressCallback progressCallback, final ChangeMessage change)
            throws RecoverableMessageException, Exception {
        // Only MESSAGE change events are relevant to this worker; ignore everything else.
        if (ObjectType.MESSAGE != change.getObjectType()) {
            return;
        }
        try {
            final List<String> deliveryErrors;
            switch (change.getChangeType()) {
            case CREATE:
                deliveryErrors = messageManager.processMessage(change.getObjectId(), progressCallback);
                break;
            default:
                throw new IllegalArgumentException("Unknown change type: "
                        + change.getChangeType());
            }
            // Notify the sender when any recipient could not be delivered to.
            if (!deliveryErrors.isEmpty()) {
                messageManager.sendDeliveryFailureEmail(
                        change.getObjectId(), deliveryErrors);
            }
        } catch (QuarantinedEmailException e) {
            // Recipient address is quarantined; treat the message as handled.
            log.error("The message will be returned as processed and removed from the queue: " + e.getMessage(), e);
            workerLogger.logWorkerFailure(this.getClass(), change, e, false);
        } catch (NotFoundException e) {
            // The referenced message no longer exists; nothing to retry.
            log.info("NotFound: "
                    + e.getMessage()
                    + ". The message will be returned as processed and removed from the queue");
            workerLogger.logWorkerFailure(this.getClass(), change, e, false);
        } catch (Throwable e) {
            // Something went wrong and we did not process the message
            log.error("Failed to process message", e);
            workerLogger.logWorkerFailure(this.getClass(), change, e, true);
            // This is the wrong thing to do with a throwable.
            throw new RecoverableMessageException();
        }
    }
}
| apache-2.0 |
loicalbertin/alien4cloud | alien4cloud-security/src/main/java/alien4cloud/security/model/User.java | 2537 | package alien4cloud.security.model;
import java.util.Set;
import lombok.Getter;
import lombok.Setter;
import org.elasticsearch.annotation.BooleanField;
import org.elasticsearch.annotation.ESObject;
import org.elasticsearch.annotation.Id;
import org.elasticsearch.annotation.StringField;
import org.elasticsearch.annotation.query.TermFilter;
import org.elasticsearch.annotation.query.TermsFacet;
import org.elasticsearch.mapping.IndexType;
import org.springframework.security.core.authority.SimpleGrantedAuthority;
import org.springframework.social.security.SocialUserDetails;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
import com.google.common.collect.Sets;
/**
 * A user account persisted in Elasticsearch and exposed to Spring Social Security.
 * Accessors for the fields are generated by Lombok's {@code @Getter}/{@code @Setter}.
 */
@ESObject
@Getter
@Setter
@JsonInclude(Include.NON_NULL)
@SuppressWarnings("PMD.UnusedPrivateField")
public class User implements SocialUserDetails {
    private static final long serialVersionUID = 1L;
    @Id
    private String username;
    private String password;
    private String lastName;
    private String firstName;
    @StringField(includeInAll = false, indexType = IndexType.no)
    private String email;
    @StringField(indexType = IndexType.not_analyzed)
    @TermsFacet
    private Set<String> groups;
    private Set<String> groupRoles;
    @TermFilter
    private String[] roles;
    @BooleanField(includeInAll = false, index = IndexType.no)
    private boolean internalDirectory;
    @BooleanField(includeInAll = false, index = IndexType.no)
    private boolean accountNonExpired = true;
    @BooleanField(includeInAll = false, index = IndexType.no)
    private boolean accountNonLocked = true;
    @BooleanField(includeInAll = false, index = IndexType.no)
    private boolean credentialsNonExpired = true;
    @BooleanField(includeInAll = false, index = IndexType.no)
    private boolean enabled = true;

    /**
     * Builds the granted-authority set from both the user's direct roles and the roles
     * inherited from group membership; either source may be absent.
     */
    @JsonIgnore
    @Override
    public Set<SimpleGrantedAuthority> getAuthorities() {
        final Set<SimpleGrantedAuthority> grantedAuthorities = Sets.newHashSet();
        if (roles != null) {
            for (final String roleName : roles) {
                grantedAuthorities.add(new SimpleGrantedAuthority(roleName));
            }
        }
        if (groupRoles != null) {
            for (final String groupRoleName : groupRoles) {
                grantedAuthorities.add(new SimpleGrantedAuthority(groupRoleName));
            }
        }
        return grantedAuthorities;
    }

    /**
     * The username doubles as the Spring Social user identifier.
     */
    @JsonIgnore
    @Override
    public String getUserId() {
        return this.username;
    }
}
| apache-2.0 |
raksha-rao/gluster-ovirt | backend/manager/modules/restapi/jaxrs/src/test/java/org/ovirt/engine/api/restapi/resource/AbstractBackendResourceTest.java | 19224 | package org.ovirt.engine.api.restapi.resource;
import static org.ovirt.engine.api.restapi.test.util.TestHelper.eqActionParams;
import static org.ovirt.engine.api.restapi.test.util.TestHelper.eqQueryParams;
import static org.easymock.EasyMock.eq;
import static org.easymock.EasyMock.expect;
import java.net.URI;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Locale;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriInfo;
import org.easymock.IAnswer;
import org.easymock.IMocksControl;
import org.easymock.classextension.EasyMock;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.ovirt.engine.api.common.invocation.Current;
import org.ovirt.engine.api.common.security.auth.Principal;
import org.ovirt.engine.api.model.BaseResource;
import org.ovirt.engine.api.model.Fault;
import org.ovirt.engine.api.model.Link;
import org.ovirt.engine.core.common.action.VdcActionParametersBase;
import org.ovirt.engine.core.common.action.VdcActionType;
import org.ovirt.engine.core.common.action.VdcReturnValueBase;
import org.ovirt.engine.core.common.businessentities.AsyncTaskStatus;
import org.ovirt.engine.core.common.interfaces.BackendLocal;
import org.ovirt.engine.core.common.interfaces.ErrorTranslator;
import org.ovirt.engine.core.common.interfaces.SearchType;
import org.ovirt.engine.core.common.queries.GetTasksStatusesByTasksIDsParameters;
import org.ovirt.engine.core.common.queries.SearchParameters;
import org.ovirt.engine.core.common.queries.VdcQueryParametersBase;
import org.ovirt.engine.core.common.queries.VdcQueryReturnValue;
import org.ovirt.engine.core.common.queries.VdcQueryType;
import org.ovirt.engine.core.compat.Guid;
import org.ovirt.engine.api.restapi.logging.MessageBundle;
import org.ovirt.engine.api.restapi.types.Mapper;
import org.ovirt.engine.api.restapi.types.MappingLocator;
import org.ovirt.engine.api.restapi.util.SessionHelper;
import static org.ovirt.engine.api.restapi.test.util.TestHelper.eqSearchParams;
public abstract class AbstractBackendResourceTest<R extends BaseResource, Q /* extends IVdcQueryable */>
extends Assert {
protected static final Guid[] GUIDS = { new Guid("00000000-0000-0000-0000-000000000000"),
new Guid("11111111-1111-1111-1111-111111111111"),
new Guid("22222222-2222-2222-2222-222222222222"),
new Guid("33333333-3333-3333-3333-333333333333") };
protected static final Guid NON_EXISTANT_GUID = new Guid("99999999-9999-9999-9999-999999999999");
protected static final String[] NAMES = { "sedna", "eris", "orcus" };
protected static final String[] DESCRIPTIONS = { "top notch entity", "a fine example",
"state of the art" };
protected static final String URI_ROOT = "http://localhost:8088";
protected static final String BASE_PATH = "/api";
protected static final String URI_BASE = URI_ROOT + BASE_PATH;
protected static final String BUNDLE_PATH = "org/ovirt/engine/api/restapi/logging/Messages";
protected static final String CANT_DO = "circumstances outside our control";
protected static final String FAILURE = "a fine mess";
protected static final String BACKEND_FAILED_SERVER_LOCALE = "Ruckenende ist kaput";
protected static final String BACKEND_FAILED_CLIENT_LOCALE = "Theip ar an obair";
protected static final String INCOMPLETE_PARAMS_REASON_SERVER_LOCALE = "Unvollstandig Parameter";
protected static final String INCOMPLETE_PARAMS_DETAIL_SERVER_LOCALE = " erforderlich fur ";
protected static final Locale CLIENT_LOCALE = new Locale("ga", "IE");
protected static int SERVER_ERROR = 500;
protected static int BAD_REQUEST = 400;
protected static final String USER = "Aladdin";
protected static final String SECRET = "open sesame";
protected static final String DOMAIN = "Maghreb.Maghreb.Maghreb.com";
protected BackendLocal backend;
protected Current current;
protected Principal principal;
protected SessionHelper sessionHelper;
protected MappingLocator mapperLocator;
protected Locale locale;
protected HttpHeaders httpHeaders;
protected List<Locale> locales;
protected List<String> accepts;
protected MessageBundle messageBundle;
protected IMocksControl control;
@Before
public void setUp() {
control = EasyMock.createNiceControl();
backend = control.createMock(BackendLocal.class);
current = control.createMock(Current.class);
sessionHelper = new SessionHelper();
sessionHelper.setCurrent(current);
principal = new Principal(USER, SECRET, DOMAIN);
expect(current.get(Principal.class)).andReturn(principal).anyTimes();
httpHeaders = control.createMock(HttpHeaders.class);
locales = new ArrayList<Locale>();
expect(httpHeaders.getAcceptableLanguages()).andReturn(locales).anyTimes();
accepts = new ArrayList<String>();
expect(httpHeaders.getRequestHeader("Accept")).andReturn(accepts).anyTimes();
mapperLocator = new MappingLocator();
mapperLocator.populate();
locale = Locale.getDefault();
Locale.setDefault(Locale.GERMANY);
messageBundle = new MessageBundle();
messageBundle.setPath(BUNDLE_PATH);
messageBundle.populate();
init();
}
protected <F, T> Mapper<F, T> getMapper(Class<F> from, Class<T> to) {
return mapperLocator.getMapper(from, to);
}
@After
public void tearDown() {
Locale.setDefault(locale);
control.verify();
}
protected void initResource(AbstractBackendResource<R, Q> resource) {
resource.setBackend(backend);
resource.setMappingLocator(mapperLocator);
resource.setSessionHelper(sessionHelper);
resource.setMessageBundle(messageBundle);
resource.setHttpHeaders(httpHeaders);
}
protected abstract void init();
protected abstract Q getEntity(int index);
protected void verifyModel(R model, int index) {
assertEquals(GUIDS[index].toString(), model.getId());
assertEquals(NAMES[index], model.getName());
assertEquals(DESCRIPTIONS[index], model.getDescription());
verifyLinks(model);
}
protected UriInfo setUpBasicUriExpectations() {
UriInfo uriInfo = control.createMock(UriInfo.class);
expect(uriInfo.getBaseUri()).andReturn(URI.create(URI_BASE)).anyTimes();
return uriInfo;
}
protected <E> void setUpGetEntityExpectations(VdcQueryType query,
Class<? extends VdcQueryParametersBase> clz, String[] names, Object[] values, E entity)
throws Exception {
VdcQueryReturnValue queryResult = control.createMock(VdcQueryReturnValue.class);
expect(backend.RunQuery(eq(query), eqQueryParams(clz, addSession(names), addSession(values)))).andReturn(
queryResult);
expect(queryResult.getSucceeded()).andReturn(true).anyTimes();
expect(queryResult.getReturnValue()).andReturn(entity).anyTimes();
}
protected <E> void setUpGetEntityExpectations(String query,
SearchType type,
E entity) throws Exception {
VdcQueryReturnValue queryResult = control.createMock(VdcQueryReturnValue.class);
SearchParameters params = new SearchParameters(query, type);
expect(backend.RunQuery(eq(VdcQueryType.Search),
eqSearchParams(params))).andReturn(queryResult);
expect(queryResult.getSucceeded()).andReturn(true).anyTimes();
List<E> entities = new ArrayList<E>();
entities.add(entity);
expect(queryResult.getReturnValue()).andReturn(entities).anyTimes();
}
protected void setUpEntityQueryExpectations(VdcQueryType query,
Class<? extends VdcQueryParametersBase> queryClass,
String[] queryNames,
Object[] queryValues,
Object queryReturn) {
setUpEntityQueryExpectations(query, queryClass, queryNames, queryValues, queryReturn, null);
}
protected void setUpEntityQueryExpectations(VdcQueryType query,
Class<? extends VdcQueryParametersBase> queryClass,
String[] queryNames,
Object[] queryValues,
Object queryReturn,
Object failure) {
VdcQueryReturnValue queryResult = control.createMock(VdcQueryReturnValue.class);
expect(queryResult.getSucceeded()).andReturn(failure == null).anyTimes();
if (failure == null) {
expect(queryResult.getReturnValue()).andReturn(queryReturn).anyTimes();
} else {
if (failure instanceof String) {
expect(queryResult.getExceptionString()).andReturn((String) failure).anyTimes();
setUpL10nExpectations((String)failure);
} else if (failure instanceof Exception) {
expect(queryResult.getExceptionString()).andThrow((Exception) failure).anyTimes();
}
}
expect(backend.RunQuery(eq(query),
eqQueryParams(queryClass,
addSession(queryNames),
addSession(queryValues)))).andReturn(queryResult);
}
protected UriInfo setUpActionExpectations(VdcActionType task,
Class<? extends VdcActionParametersBase> clz, String[] names, Object[] values,
boolean canDo, boolean success) {
return setUpActionExpectations(task, clz, names, values, canDo, success, null, true);
}
protected UriInfo setUpActionExpectations(VdcActionType task,
Class<? extends VdcActionParametersBase> clz, String[] names, Object[] values,
boolean canDo, boolean success, boolean reply) {
return setUpActionExpectations(task, clz, names, values, canDo, success, null, reply);
}
protected UriInfo setUpActionExpectations(VdcActionType task,
Class<? extends VdcActionParametersBase> clz, String[] names, Object[] values,
boolean canDo, boolean success, Object taskReturn, boolean replay) {
return setUpActionExpectations(task, clz, names, values, canDo, success, taskReturn, null, replay);
}
protected UriInfo setUpActionExpectations(VdcActionType task,
Class<? extends VdcActionParametersBase> clz,
String[] names,
Object[] values,
boolean canDo,
boolean success,
Object taskReturn,
String baseUri,
boolean replay) {
return setUpActionExpectations(task, clz, names, values, canDo, success, taskReturn, null, null, baseUri, replay);
}
protected UriInfo setUpActionExpectations(VdcActionType task,
Class<? extends VdcActionParametersBase> clz,
String[] names,
Object[] values,
boolean canDo,
boolean success,
Object taskReturn,
ArrayList<Guid> asyncTasks,
ArrayList<AsyncTaskStatus> asyncStatuses,
String baseUri,
boolean replay) {
VdcReturnValueBase result = control.createMock(VdcReturnValueBase.class);
expect(result.getCanDoAction()).andReturn(canDo).anyTimes();
if (canDo) {
expect(result.getSucceeded()).andReturn(success).anyTimes();
if (success) {
if (taskReturn != null) {
expect(result.getActionReturnValue()).andReturn(taskReturn).anyTimes();
}
} else {
expect(result.getExecuteFailedMessages()).andReturn(asList(FAILURE)).anyTimes();
setUpL10nExpectations(asList(FAILURE));
}
} else {
expect(result.getCanDoActionMessages()).andReturn(asList(CANT_DO)).anyTimes();
setUpL10nExpectations(asList(CANT_DO));
}
expect(backend.RunAction(eq(task), eqActionParams(clz, addSession(names), addSession(values)))).andReturn(result);
expect(result.getHasAsyncTasks()).andReturn(asyncTasks != null).anyTimes();
if (asyncTasks != null) {
expect(result.getTaskIdList()).andReturn(asyncTasks).anyTimes();
VdcQueryReturnValue monitorResult = control.createMock(VdcQueryReturnValue.class);
expect(monitorResult.getSucceeded()).andReturn(success).anyTimes();
expect(monitorResult.getReturnValue()).andReturn(asyncStatuses).anyTimes();
expect(backend.RunQuery(eq(VdcQueryType.GetTasksStatusesByTasksIDs),
eqQueryParams(GetTasksStatusesByTasksIDsParameters.class,
addSession(new String[]{}),
addSession(new Object[]{})))).andReturn(monitorResult);
}
UriInfo uriInfo = setUpBasicUriExpectations();
if (baseUri != null) {
expect(uriInfo.getPath()).andReturn(baseUri).anyTimes();
}
if (replay) {
control.replay();
}
return uriInfo;
}
protected void setUpL10nExpectations(String error) {
ErrorTranslator translator = control.createMock(ErrorTranslator.class);
IAnswer<String> answer = new IAnswer<String>() {
public String answer() {
return EasyMock.getCurrentArguments() != null && EasyMock.getCurrentArguments().length > 0
? mockl10n((String)EasyMock.getCurrentArguments()[0])
: null;
}
};
if (!locales.isEmpty()) {
expect(translator.TranslateErrorTextSingle(eq(error), eq(locales.get(0)))).andAnswer(answer).anyTimes();
} else {
expect(translator.TranslateErrorTextSingle(eq(error))).andAnswer(answer).anyTimes();
}
expect(backend.getErrorsTranslator()).andReturn(translator).anyTimes();
}
protected void setUpL10nExpectations(ArrayList<String> errors) {
ErrorTranslator errorTranslator = control.createMock(ErrorTranslator.class);
if (!locales.isEmpty()) {
expect(errorTranslator.TranslateErrorText(eq(errors), eq(locales.get(0)))).andReturn(mockl10n(errors)).anyTimes();
} else {
expect(errorTranslator.TranslateErrorText(eq(errors))).andReturn(mockl10n(errors)).anyTimes();
}
expect(backend.getErrorsTranslator()).andReturn(errorTranslator);
}
protected List<String> mockl10n(List<String> errors) {
ArrayList<String> ret = new ArrayList<String>();
for (String error : errors) {
ret.add(mockl10n(error));
}
return ret;
}
protected String mockl10n(String s) {
return s.startsWith("l10n...") ? s : "l10n..." + s;
}
protected void verifyRemove(Response response) {
assertNotNull(response);
assertEquals(response.getStatus(), Response.Status.OK.getStatusCode());
}
protected void verifyLinks(BaseResource model) {
assertNotNull(model.getHref());
assertTrue(model.getHref().startsWith("/api"));
for (Link link : model.getLinks()) {
assertTrue(link.getHref().startsWith("/api"));
}
}
protected void verifyFault(WebApplicationException wae, String detail) {
verifyFault(wae, BACKEND_FAILED_SERVER_LOCALE, asList(mockl10n(detail)).toString(), BAD_REQUEST);
}
protected void verifyFault(WebApplicationException wae, String reason, String detail, int status) {
assertEquals(status, wae.getResponse().getStatus());
assertTrue(wae.getResponse().getEntity() instanceof Fault);
Fault fault = (Fault) wae.getResponse().getEntity();
assertEquals(reason, fault.getReason());
assertEquals(detail, fault.getDetail());
}
protected void verifyFault(WebApplicationException wae, String reason, Throwable t) {
assertEquals(SERVER_ERROR, wae.getResponse().getStatus());
assertTrue(wae.getResponse().getEntity() instanceof Fault);
Fault fault = (Fault) wae.getResponse().getEntity();
assertEquals(reason, fault.getReason());
assertNotNull(fault.getDetail());
assertTrue("expected detail to include: " + t.getMessage(),
fault.getDetail().indexOf(t.getMessage()) != -1);
}
protected void verifyFault(Response response, String detail) {
assertEquals(BAD_REQUEST, response.getStatus());
assertTrue(response.getEntity() instanceof Fault);
Fault fault = (Fault) response.getEntity();
assertEquals(BACKEND_FAILED_SERVER_LOCALE, fault.getReason());
assertEquals(asList(mockl10n(detail)).toString(), fault.getDetail());
}
protected void verifyIncompleteException(WebApplicationException wae, String type, String method, String... fields) {
assertEquals(400, wae.getResponse().getStatus());
Fault fault = (Fault)wae.getResponse().getEntity();
assertNotNull(fault);
assertEquals(INCOMPLETE_PARAMS_REASON_SERVER_LOCALE, fault.getReason());
assertEquals(type + " " + Arrays.asList(fields) + INCOMPLETE_PARAMS_DETAIL_SERVER_LOCALE + method, fault.getDetail());
}
/** Asserts that the exception maps to HTTP 404 (Not Found). */
protected void verifyNotFoundException(WebApplicationException wae) {
    assertEquals(404, wae.getResponse().getStatus());
}
/**
 * Wraps a single element in a new mutable {@link ArrayList} (unlike
 * {@code Arrays.asList}, the result supports add/remove).
 */
protected <T> ArrayList<T> asList(T element) {
    ArrayList<T> result = new ArrayList<T>(1);
    result.add(element);
    return result;
}
/**
 * Returns a copy of {@code names} with the literal parameter name
 * "SessionId" appended as the last element.
 *
 * @param names original parameter names; not modified
 * @return new array of length {@code names.length + 1}
 */
protected String[] addSession(String... names) {
    // Arrays.copyOf replaces the hand-rolled element-by-element copy loop.
    String[] ret = Arrays.copyOf(names, names.length + 1);
    ret[names.length] = "SessionId";
    return ret;
}
/**
 * Returns a copy of {@code values} with the current principal's session id
 * (obtained from {@code sessionHelper}) appended as the last element.
 *
 * @param values original parameter values; not modified
 * @return new array of length {@code values.length + 1}
 */
protected Object[] addSession(Object... values) {
    // Arrays.copyOf replaces the hand-rolled element-by-element copy loop.
    Object[] ret = Arrays.copyOf(values, values.length + 1);
    ret[values.length] = sessionHelper.getSessionId(principal);
    return ret;
}
}
| apache-2.0 |
binfalse/incubator-taverna-language | taverna-scufl2-api/src/test/java/org/apache/taverna/scufl2/api/TestExampleWorkflow.java | 1046 | package org.apache.taverna.scufl2.api;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.junit.Test;
/**
 * Smoke test for {@code ExampleWorkflow}: building the example workflow
 * bundle must complete without throwing.
 */
public class TestExampleWorkflow extends ExampleWorkflow {

    @Test
    public void makeflowBundle() throws Exception {
        // Inherited builder; any exception fails the test.
        makeWorkflowBundle();
        // TODO: Check fields
    }
}
| apache-2.0 |
pentaho/pentaho-hadoop-shims | common-fragment-V1/src/test/java/org/pentaho/big/data/api/jdbc/impl/JdbcUrlImplTest.java | 5052 | /*******************************************************************************
*
* Pentaho Big Data
*
* Copyright (C) 2002-2019 by Hitachi Vantara : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.big.data.api.jdbc.impl;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;
import org.pentaho.hadoop.shim.api.cluster.NamedClusterService;
import org.pentaho.hadoop.shim.api.cluster.NamedCluster;
import org.pentaho.metastore.api.IMetaStore;
import org.pentaho.metastore.api.exceptions.MetaStoreException;
import org.pentaho.osgi.metastore.locator.api.MetastoreLocator;
import java.net.URISyntaxException;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
/**
* Created by bryan on 4/14/16.
*/
/**
 * Unit tests for {@code JdbcUrlImpl}: URL round-tripping, query-parameter
 * get/set behavior, and named-cluster resolution through the metastore.
 */
@RunWith( MockitoJUnitRunner.class )
public class JdbcUrlImplTest {
  @Mock NamedClusterService namedClusterService;
  @Mock MetastoreLocator metastoreLocator;

  @Test
  public void testNoParameterRoundTrip() throws URISyntaxException {
    // A plain URL with no trailing parameters must survive parse + toString unchanged.
    String clusterHost = "my.hadoop.cluster";
    String jdbcUrlString = "jdbc:hive2://" + clusterHost + ":999/default";
    JdbcUrlImpl parsed = new JdbcUrlImpl( jdbcUrlString, namedClusterService, metastoreLocator );
    assertEquals( jdbcUrlString, parsed.toString() );
    assertEquals( clusterHost, parsed.getHost() );
  }

  @Test( expected = URISyntaxException.class )
  public void testNoJdbcPrefix() throws URISyntaxException {
    // Missing the "jdbc:" scheme prefix must be rejected at construction time.
    new JdbcUrlImpl( "hive2://my.hadoop.cluster:999/default", namedClusterService, metastoreLocator );
  }

  @Test
  public void testInitialParameter() throws URISyntaxException {
    // A ;name=value parameter present in the input URL is readable and preserved.
    String paramName = "Auth";
    String paramValue = "test";
    String jdbcUrlString = "jdbc:hive2://my.hadoop.cluster:999/default;" + paramName + "=" + paramValue;
    JdbcUrlImpl parsed = new JdbcUrlImpl( jdbcUrlString, namedClusterService, metastoreLocator );
    assertEquals( jdbcUrlString, parsed.toString() );
    assertEquals( paramValue, parsed.getQueryParam( paramName ) );
  }

  @Test
  public void testSetParameter() throws URISyntaxException {
    // Setting a parameter after construction appends it as ;name=value.
    String paramName = "Auth";
    String paramValue = "test";
    String plainUrl = "jdbc:hive2://my.hadoop.cluster:999/default";
    JdbcUrlImpl parsed = new JdbcUrlImpl( plainUrl, namedClusterService, metastoreLocator );
    parsed.setQueryParam( paramName, paramValue );
    assertEquals( plainUrl + ";" + paramName + "=" + paramValue, parsed.toString() );
    assertEquals( paramValue, parsed.getQueryParam( paramName ) );
  }

  @Test
  public void testGetNamedClusterNullMetastore() throws MetaStoreException, URISyntaxException {
    // Without a metastore the cluster name is still readable but cannot be resolved.
    String clusterName = "myCluster";
    String jdbcUrlString =
        "jdbc:hive2://my.hadoop.cluster:999/default;" + JdbcUrlImpl.PENTAHO_NAMED_CLUSTER + "=" + clusterName;
    when( metastoreLocator.getMetastore() ).thenReturn( null );
    JdbcUrlImpl parsed = new JdbcUrlImpl( jdbcUrlString, namedClusterService, metastoreLocator );
    assertEquals( clusterName, parsed.getQueryParam( JdbcUrlImpl.PENTAHO_NAMED_CLUSTER ) );
    assertNull( parsed.getNamedCluster() );
  }

  @Test
  public void testGetNamedClusterNullQueryParam() throws MetaStoreException, URISyntaxException {
    // With a metastore but no named-cluster parameter, resolution yields null.
    JdbcUrlImpl parsed =
        new JdbcUrlImpl( "jdbc:hive2://my.hadoop.cluster:999/default", namedClusterService, metastoreLocator );
    when( metastoreLocator.getMetastore() ).thenReturn( mock( IMetaStore.class ) );
    assertNull( parsed.getQueryParam( JdbcUrlImpl.PENTAHO_NAMED_CLUSTER ) );
    assertNull( parsed.getNamedCluster() );
  }

  @Test
  public void testGetNamedClusterSuccess() throws MetaStoreException, URISyntaxException {
    // Happy path: the named-cluster parameter resolves through the metastore.
    String clusterName = "myCluster";
    String jdbcUrlString =
        "jdbc:hive2://my.hadoop.cluster:999/default;" + JdbcUrlImpl.PENTAHO_NAMED_CLUSTER + "=" + clusterName;
    IMetaStore iMetaStore = mock( IMetaStore.class );
    NamedCluster namedCluster = mock( NamedCluster.class );
    when( namedClusterService.read( clusterName, iMetaStore ) ).thenReturn( namedCluster );
    when( metastoreLocator.getMetastore() ).thenReturn( iMetaStore );
    JdbcUrlImpl parsed = new JdbcUrlImpl( jdbcUrlString, namedClusterService, metastoreLocator );
    assertEquals( clusterName, parsed.getQueryParam( JdbcUrlImpl.PENTAHO_NAMED_CLUSTER ) );
    assertEquals( namedCluster, parsed.getNamedCluster() );
  }
}
| apache-2.0 |
dsyer/spring-cloud-stream | spring-cloud-stream-rxjava/src/main/java/org/springframework/cloud/stream/annotation/rxjava/SubjectMessageHandler.java | 5349 | /*
* Copyright 2015-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.cloud.stream.annotation.rxjava;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import rx.Observable;
import rx.Subscription;
import rx.functions.Action0;
import rx.functions.Action1;
import rx.subjects.PublishSubject;
import rx.subjects.SerializedSubject;
import rx.subjects.Subject;
import org.springframework.context.SmartLifecycle;
import org.springframework.integration.handler.AbstractMessageProducingHandler;
import org.springframework.messaging.Message;
import org.springframework.messaging.support.MessageBuilder;
import org.springframework.util.Assert;
import org.springframework.util.ClassUtils;
/**
* Adapts the item at a time delivery of a {@link org.springframework.messaging.MessageHandler}
* by delegating processing to a {@link Observable}.
* <p/>
* The outputStream of the processor is used to create a message and send it to the output channel. If the
* input channel and output channel are connected to the {@link org.springframework.cloud.stream.binder.Binder},
* then data delivered to the input stream via a call to onNext is invoked on the dispatcher thread of the binder
* and sending a message to the output channel will involve IO operations on the binder.
* <p/>
* The implementation uses a SerializedSubject. This has the advantage that the state of the Observabale
* can be shared across all the incoming dispatcher threads that are invoking onNext. It has the disadvantage
* that processing and sending to the output channel will execute serially on one of the dispatcher threads.
* <p/>
* The use of this handler makes for a very natural first experience when processing data. For example given
* the stream <code></code>http | rxjava-processor | log</code> where the <code>rxjava-processor</code> does a
* <code>buffer(5)</code> and then produces a single value. Sending 10 messages to the http source will
* result in 2 messages in the log, no matter how many dispatcher threads are used.
* <p/>
* You can modify what thread the outputStream subscriber, which does the send to the output channel,
* will use by explicitly calling <code>observeOn</code> before returning the outputStream from your processor.
* <p/>
* All error handling is the responsibility of the processor implementation.
*
* @author Mark Pollack
* @author Ilayaperumal Gopinathan
* @author Marius Bogoevici
*/
@SuppressWarnings({"unchecked", "rawtypes"})
@Deprecated
public class SubjectMessageHandler extends AbstractMessageProducingHandler implements SmartLifecycle {

    private final Log logger = LogFactory.getLog(getClass());

    // The user-supplied RxJava processing function applied to the inbound stream.
    @SuppressWarnings("rawtypes")
    private final RxJavaProcessor processor;

    // Lifecycle state; all three are volatile because handleMessageInternal may
    // run on binder dispatcher threads while start()/stop() run elsewhere.
    private volatile Subject subject;
    private volatile Subscription subscription;
    private volatile boolean running;

    @SuppressWarnings({"unchecked", "rawtypes"})
    public SubjectMessageHandler(RxJavaProcessor processor) {
        Assert.notNull(processor, "RxJava processor must not be null.");
        this.processor = processor;
    }

    /**
     * Wires the pipeline: creates a SerializedSubject (safe for concurrent
     * onNext calls from multiple dispatcher threads), applies the processor,
     * and subscribes an action that forwards each emitted item to the output
     * channel — wrapping non-Message payloads in a new Message.
     */
    @Override
    public synchronized void start() {
        if (!this.running) {
            this.subject = new SerializedSubject(PublishSubject.create());
            Observable<?> outputStream = this.processor.process(this.subject);
            this.subscription = outputStream.subscribe(new Action1<Object>() {
                @Override
                public void call(Object outputObject) {
                    // Pass Messages through as-is; wrap any other payload.
                    if (ClassUtils.isAssignable(Message.class, outputObject.getClass())) {
                        getOutputChannel().send((Message) outputObject);
                    }
                    else {
                        getOutputChannel().send(MessageBuilder.withPayload(outputObject).build());
                    }
                }
            }, new Action1<Throwable>() {
                @Override
                public void call(Throwable throwable) {
                    // Errors terminate the subscription; they are only logged here.
                    SubjectMessageHandler.this.logger.error(throwable.getMessage(), throwable);
                }
            }, new Action0() {
                @Override
                public void call() {
                    SubjectMessageHandler.this.logger
                            .info("Subscription close for [" + SubjectMessageHandler.this.subscription + "]");
                }
            });
            this.running = true;
        }
    }

    @Override
    public synchronized boolean isRunning() {
        return this.running;
    }

    // Not auto-started by the context; start() must be invoked explicitly.
    @Override
    public boolean isAutoStartup() {
        return false;
    }

    @Override
    public void stop(Runnable callback) {
        if (this.running) {
            stop();
            if (callback != null) {
                callback.run();
            }
        }
    }

    @Override
    public int getPhase() {
        return 0;
    }

    /**
     * Feeds the incoming message's payload into the stream; runs on the
     * caller's (dispatcher) thread.
     */
    @Override
    protected void handleMessageInternal(Message<?> message) throws Exception {
        this.subject.onNext(message.getPayload());
    }

    /**
     * Completes the stream, tears down the subscription, and clears state so
     * a subsequent start() rebuilds the pipeline from scratch.
     */
    @Override
    public synchronized void stop() {
        if (this.running) {
            this.subject.onCompleted();
            this.subscription.unsubscribe();
            this.subscription = null;
            this.subject = null;
            this.running = false;
        }
    }
}
| apache-2.0 |
raksha-rao/gluster-ovirt | backend/manager/modules/restapi/interface/common/jaxrs/src/main/java/org/ovirt/engine/api/common/resource/DefaultCapabilitiesResource.java | 15157 | /*
* Copyright (c) 2010 Red Hat, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ovirt.engine.api.common.resource;
import org.ovirt.engine.api.model.BootDevice;
import org.ovirt.engine.api.model.BootDevices;
import org.ovirt.engine.api.model.Capabilities;
import org.ovirt.engine.api.model.CPU;
import org.ovirt.engine.api.model.CPUs;
import org.ovirt.engine.api.model.CreationStates;
import org.ovirt.engine.api.model.CreationStatus;
import org.ovirt.engine.api.model.DataCenterStates;
import org.ovirt.engine.api.model.DataCenterStatus;
import org.ovirt.engine.api.model.DiskFormat;
import org.ovirt.engine.api.model.DiskFormats;
import org.ovirt.engine.api.model.DiskInterface;
import org.ovirt.engine.api.model.DiskInterfaces;
import org.ovirt.engine.api.model.DiskStates;
import org.ovirt.engine.api.model.DiskStatus;
import org.ovirt.engine.api.model.DiskType;
import org.ovirt.engine.api.model.DiskTypes;
import org.ovirt.engine.api.model.DisplayType;
import org.ovirt.engine.api.model.DisplayTypes;
import org.ovirt.engine.api.model.FenceType;
import org.ovirt.engine.api.model.FenceTypes;
import org.ovirt.engine.api.model.NicInterface;
import org.ovirt.engine.api.model.NicInterfaces;
import org.ovirt.engine.api.model.HostNICStates;
import org.ovirt.engine.api.model.HostStates;
import org.ovirt.engine.api.model.HostStatus;
import org.ovirt.engine.api.model.NetworkStates;
import org.ovirt.engine.api.model.NetworkStatus;
import org.ovirt.engine.api.model.NicStatus;
import org.ovirt.engine.api.model.OsType;
import org.ovirt.engine.api.model.OsTypes;
import org.ovirt.engine.api.model.PowerManagement;
import org.ovirt.engine.api.model.Option;
import org.ovirt.engine.api.model.Options;
import org.ovirt.engine.api.model.PowerManagementStates;
import org.ovirt.engine.api.model.PowerManagementStatus;
import org.ovirt.engine.api.model.PowerManagers;
import org.ovirt.engine.api.model.SchedulingPolicies;
import org.ovirt.engine.api.model.SchedulingPolicyType;
import org.ovirt.engine.api.model.StorageDomainStates;
import org.ovirt.engine.api.model.StorageDomainStatus;
import org.ovirt.engine.api.model.StorageDomainType;
import org.ovirt.engine.api.model.StorageDomainTypes;
import org.ovirt.engine.api.model.StorageType;
import org.ovirt.engine.api.model.StorageTypes;
import org.ovirt.engine.api.model.TemplateStates;
import org.ovirt.engine.api.model.TemplateStatus;
import org.ovirt.engine.api.model.VersionCaps;
import org.ovirt.engine.api.model.VmStates;
import org.ovirt.engine.api.model.VmStatus;
import org.ovirt.engine.api.model.VmType;
import org.ovirt.engine.api.model.VmTypes;
import org.ovirt.engine.api.resource.CapabilitiesResource;
/**
 * Static implementation of {@code CapabilitiesResource}: assembles the
 * hard-coded capability sets (CPUs, power managers, and all enum-backed
 * type/state lists) for API versions 2.1 and 2.2, plus the scheduling
 * policies, and returns them from {@link #get()}.
 *
 * The data is built once per instance by the interleaved instance
 * initializer blocks below; initializers run in source order, so each block
 * can rely on the fields declared above it.
 */
public class DefaultCapabilitiesResource implements CapabilitiesResource {

    // Capability holders for the two supported versions; 2.2 is marked current.
    private final VersionCaps VERSION22 = buildVersion(2, 2, true);
    private final VersionCaps VERSION21 = buildVersion(2, 1, false);

    // Creates an empty VersionCaps shell for the given major/minor version.
    private VersionCaps buildVersion(int major, int minor, boolean current) {
        VersionCaps version = new VersionCaps();
        version.setMajor(major);
        version.setMinor(minor);
        version.setCPUs(new CPUs());
        version.setCurrent(current);
        return version;
    }

    // CPU models per version. Note: addCpu(VERSION21, ...) also duplicates the
    // entry into VERSION22 (see addCpu), so 2.2's CPU list is a superset of 2.1's.
    {
        addCpu(VERSION21, "Intel Xeon w/o XD/NX", 2);
        addCpu(VERSION21, "Intel Xeon", 3);
        addCpu(VERSION22, "Intel Xeon Core2", 4);
        addCpu(VERSION22, "Intel Xeon 45nm Core2", 5);
        addCpu(VERSION22, "Intel Xeon Core i7", 6);
        addCpu(VERSION21, "AMD Opteron G1 w/o NX", 2);
        addCpu(VERSION21, "AMD Opteron G1", 3);
        addCpu(VERSION22, "AMD Opteron G2", 4);
        addCpu(VERSION22, "AMD Opteron G3", 5);
    }

    // Adds one CPU entry; anything added to 2.1 is re-added to 2.2 as well.
    private void addCpu(VersionCaps version, String id, int level) {
        CPU cpu = new CPU();
        cpu.setId(id);
        cpu.setLevel(level);
        version.getCPUs().getCPUs().add(cpu);
        if (version == VERSION21) {
            addCpu(VERSION22, id, level);
        }
    }

    // Fence agents and their option schemas, encoded as "name=type" pairs.
    {
        addPowerManager("alom", "secure=bool,port=int");
        addPowerManager("apc", "secure=bool,port=int,slot=int");
        addPowerManager("bladecenter", "secure=bool,port=int,slot=int");
        addPowerManager("drac5", "secure=bool,port=int");
        addPowerManager("eps", "slot=int");
        addPowerManager("ilo", "secure=bool,port=int");
        addPowerManager("ipmilan", "");
        addPowerManager("rsa", "secure=bool,port=int");
        addPowerManager("rsb", "");
        addPowerManager("wti", "secure=bool,port=int,slot=int");
    }

    // Registers the power manager in both versions.
    private void addPowerManager(String type, String options) {
        addPowerManager(VERSION21, type, options);
        addPowerManager(VERSION22, type, options);
    }

    /**
     * Parses the comma-separated "name=type" option string into Option
     * entries (an empty string yields no options) and appends the resulting
     * PowerManagement descriptor to the version, lazily creating the
     * PowerManagers container on first use.
     */
    private void addPowerManager(VersionCaps version, String type, String options) {
        PowerManagement powerManagement = new PowerManagement();
        powerManagement.setType(type);
        powerManagement.setOptions(new Options());
        String[] opts = options.split(",");
        for (int i = 0; i < opts.length; i++) {
            if (opts[i].isEmpty()) {
                continue;
            }
            String[] parts = opts[i].split("=");
            Option option = new Option();
            option.setName(parts[0]);
            option.setType(parts[1]);
            powerManagement.getOptions().getOptions().add(option);
        }
        if (!version.isSetPowerManagers()) {
            version.setPowerManagers(new PowerManagers());
        }
        version.getPowerManagers().getPowerManagers().add(powerManagement);
    }

    // ---- Enum-backed capability lists. Each helper below copies the given
    // ---- enum values (as their string form) into the version's container,
    // ---- and the paired initializer populates both versions.

    private void addVmTypes(VersionCaps version, VmType... types) {
        version.setVmTypes(new VmTypes());
        for (VmType type : types) {
            version.getVmTypes().getVmTypes().add(type.value());
        }
    }

    {
        addVmTypes(VERSION21, VmType.values());
        addVmTypes(VERSION22, VmType.values());
    }

    private void addStorageTypes(VersionCaps version, StorageType... types) {
        version.setStorageTypes(new StorageTypes());
        for (StorageType type : types) {
            version.getStorageTypes().getStorageTypes().add(type.value());
        }
    }

    // Storage types are an explicit subset (ISCSI/FCP/NFS), not all enum values.
    {
        addStorageTypes(VERSION21, StorageType.ISCSI, StorageType.FCP, StorageType.NFS);
        addStorageTypes(VERSION22, StorageType.ISCSI, StorageType.FCP, StorageType.NFS);
    }

    private void addOsTypes(VersionCaps version, OsType... types) {
        version.setOsTypes(new OsTypes());
        for (OsType type : types) {
            version.getOsTypes().getOsTypes().add(type.value());
        }
    }

    {
        addOsTypes(VERSION21, OsType.values());
        addOsTypes(VERSION22, OsType.values());
    }

    private void addStorageDomainTypes(VersionCaps version, StorageDomainType... types) {
        version.setStorageDomainTypes(new StorageDomainTypes());
        for (StorageDomainType type : types) {
            version.getStorageDomainTypes().getStorageDomainTypes().add(type.value());
        }
    }

    {
        addStorageDomainTypes(VERSION21, StorageDomainType.values());
        addStorageDomainTypes(VERSION22, StorageDomainType.values());
    }

    private void addFenceTypes(VersionCaps version, FenceType... types) {
        version.setFenceTypes(new FenceTypes());
        for (FenceType type : types) {
            version.getFenceTypes().getFenceTypes().add(type.value());
        }
    }

    {
        addFenceTypes(VERSION21, FenceType.values());
        addFenceTypes(VERSION22, FenceType.values());
    }

    private void addBootDevices(VersionCaps version, BootDevice... devs) {
        version.setBootDevices(new BootDevices());
        for (BootDevice dev : devs) {
            version.getBootDevices().getBootDevices().add(dev.value());
        }
    }

    {
        addBootDevices(VERSION21, BootDevice.values());
        addBootDevices(VERSION22, BootDevice.values());
    }

    private void addDisplayTypes(VersionCaps version, DisplayType... types) {
        version.setDisplayTypes(new DisplayTypes());
        for (DisplayType type : types) {
            version.getDisplayTypes().getDisplayTypes().add(type.value());
        }
    }

    {
        addDisplayTypes(VERSION21, DisplayType.values());
        addDisplayTypes(VERSION22, DisplayType.values());
    }

    private void addNicInterfaces(VersionCaps version, NicInterface... types) {
        version.setNicInterfaces(new NicInterfaces());
        for (NicInterface type : types) {
            version.getNicInterfaces().getNicInterfaces().add(type.value());
        }
    }

    {
        addNicInterfaces(VERSION21, NicInterface.values());
        addNicInterfaces(VERSION22, NicInterface.values());
    }

    private void addDiskTypes(VersionCaps version, DiskType... types) {
        version.setDiskTypes(new DiskTypes());
        for (DiskType type : types) {
            version.getDiskTypes().getDiskTypes().add(type.value());
        }
    }

    {
        addDiskTypes(VERSION21, DiskType.values());
        addDiskTypes(VERSION22, DiskType.values());
    }

    private void addDiskFormats(VersionCaps version, DiskFormat... types) {
        version.setDiskFormats(new DiskFormats());
        for (DiskFormat type : types) {
            version.getDiskFormats().getDiskFormats().add(type.value());
        }
    }

    {
        addDiskFormats(VERSION21, DiskFormat.values());
        addDiskFormats(VERSION22, DiskFormat.values());
    }

    private void addDiskInterfaces(VersionCaps version, DiskInterface... interfaces) {
        version.setDiskInterfaces(new DiskInterfaces());
        for (DiskInterface iface : interfaces) {
            version.getDiskInterfaces().getDiskInterfaces().add(iface.value());
        }
    }

    {
        addDiskInterfaces(VERSION21, DiskInterface.values());
        addDiskInterfaces(VERSION22, DiskInterface.values());
    }

    private void addCreationStates(VersionCaps version, CreationStatus... statuses) {
        version.setCreationStates(new CreationStates());
        for (CreationStatus status : statuses) {
            version.getCreationStates().getCreationStates().add(status.value());
        }
    }

    {
        addCreationStates(VERSION21, CreationStatus.values());
        addCreationStates(VERSION22, CreationStatus.values());
    }

    private void addPowerManagementStates(VersionCaps version, PowerManagementStatus... statuses) {
        version.setPowerManagementStates(new PowerManagementStates());
        for (PowerManagementStatus status : statuses) {
            version.getPowerManagementStates().getPowerManagementStates().add(status.value());
        }
    }

    {
        addPowerManagementStates(VERSION21, PowerManagementStatus.values());
        addPowerManagementStates(VERSION22, PowerManagementStatus.values());
    }

    private void addHostStates(VersionCaps version, HostStatus... statuses) {
        version.setHostStates(new HostStates());
        for (HostStatus status : statuses) {
            version.getHostStates().getHostStates().add(status.value());
        }
    }

    {
        addHostStates(VERSION21, HostStatus.values());
        addHostStates(VERSION22, HostStatus.values());
    }

    private void addNetworkStates(VersionCaps version, NetworkStatus... statuses) {
        version.setNetworkStates(new NetworkStates());
        for (NetworkStatus status : statuses) {
            version.getNetworkStates().getNetworkStates().add(status.value());
        }
    }

    {
        addNetworkStates(VERSION21, NetworkStatus.values());
        addNetworkStates(VERSION22, NetworkStatus.values());
    }

    private void addStorageDomainStates(VersionCaps version, StorageDomainStatus... statuses) {
        version.setStorageDomainStates(new StorageDomainStates());
        for (StorageDomainStatus status : statuses) {
            version.getStorageDomainStates().getStorageDomainStates().add(status.value());
        }
    }

    {
        addStorageDomainStates(VERSION21, StorageDomainStatus.values());
        addStorageDomainStates(VERSION22, StorageDomainStatus.values());
    }

    private void addTemplateStates(VersionCaps version, TemplateStatus... statuses) {
        version.setTemplateStates(new TemplateStates());
        for (TemplateStatus status : statuses) {
            version.getTemplateStates().getTemplateStates().add(status.value());
        }
    }

    {
        addTemplateStates(VERSION21, TemplateStatus.values());
        addTemplateStates(VERSION22, TemplateStatus.values());
    }

    private void addVmStates(VersionCaps version, VmStatus... statuses) {
        version.setVmStates(new VmStates());
        for (VmStatus status : statuses) {
            version.getVmStates().getVmStates().add(status.value());
        }
    }

    {
        addVmStates(VERSION21, VmStatus.values());
        addVmStates(VERSION22, VmStatus.values());
    }

    private void addDiskStates(VersionCaps version, DiskStatus... statuses) {
        version.setDiskStates(new DiskStates());
        for (DiskStatus status : statuses) {
            version.getDiskStates().getDiskStates().add(status.value());
        }
    }

    {
        addDiskStates(VERSION21, DiskStatus.values());
        addDiskStates(VERSION22, DiskStatus.values());
    }

    private void addDataCenterStates(VersionCaps version, DataCenterStatus... statuses) {
        version.setDataCenterStates(new DataCenterStates());
        for (DataCenterStatus status : statuses) {
            version.getDataCenterStates().getDataCenterStates().add(status.value());
        }
    }

    {
        addDataCenterStates(VERSION21, DataCenterStatus.values());
        addDataCenterStates(VERSION22, DataCenterStatus.values());
    }

    private void addHostNicStates(VersionCaps version, NicStatus... statuses) {
        version.setHostNicStates(new HostNICStates());
        for (NicStatus status : statuses) {
            version.getHostNicStates().getHostNICStates().add(status.value());
        }
    }

    {
        addHostNicStates(VERSION21, NicStatus.values());
        addHostNicStates(VERSION22, NicStatus.values());
    }

    // Version-independent scheduling policy list, filled from the enum.
    private final SchedulingPolicies SCHEDULING_POLICIES = new SchedulingPolicies();

    {
        for (SchedulingPolicyType policy : SchedulingPolicyType.values()) {
            SCHEDULING_POLICIES.getPolicy().add(policy.value());
        }
    }

    /**
     * Returns the assembled capabilities: versions listed newest-first
     * (2.2 then 2.1), plus the scheduling policies.
     */
    public Capabilities get() {
        Capabilities caps = new Capabilities();
        caps.getVersions().add(VERSION22);
        caps.getVersions().add(VERSION21);
        caps.setSchedulingPolicies(SCHEDULING_POLICIES);
        return caps;
    }
}
| apache-2.0 |
strapdata/elassandra | server/src/test/java/org/elasticsearch/index/query/RegexpQueryBuilderTests.java | 6330 | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.query;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.RegexpQuery;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.test.AbstractQueryTestCase;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
/**
 * Randomized and fixed tests for {@code RegexpQueryBuilder}: query
 * construction, JSON round-tripping, Lucene query generation, and rejection
 * of invalid arguments/fields.
 */
public class RegexpQueryBuilderTests extends AbstractQueryTestCase<RegexpQueryBuilder> {

    /**
     * Builds a random regexp query, optionally decorating it with random
     * flags, a max-determinized-states cap, and a rewrite method.
     */
    @Override
    protected RegexpQueryBuilder doCreateTestQueryBuilder() {
        RegexpQueryBuilder query = randomRegexpQuery();
        if (randomBoolean()) {
            List<RegexpFlag> flags = new ArrayList<>();
            int iter = randomInt(5);
            for (int i = 0; i < iter; i++) {
                flags.add(randomFrom(RegexpFlag.values()));
            }
            query.flags(flags.toArray(new RegexpFlag[flags.size()]));
        }
        if (randomBoolean()) {
            query.maxDeterminizedStates(randomInt(50000));
        }
        if (randomBoolean()) {
            query.rewrite(randomFrom(getRandomRewriteMethod()));
        }
        return query;
    }

    /**
     * Supplies the short-form JSON syntax ({"regexp": {"field": "value"}})
     * that must parse to the same builder as the long form.
     */
    @Override
    protected Map<String, RegexpQueryBuilder> getAlternateVersions() {
        Map<String, RegexpQueryBuilder> alternateVersions = new HashMap<>();
        RegexpQueryBuilder regexpQuery = randomRegexpQuery();
        String contentString = "{\n" +
                "    \"regexp\" : {\n" +
                "        \"" + regexpQuery.fieldName() + "\" : \"" + regexpQuery.value() + "\"\n" +
                "    }\n" +
                "}";
        alternateVersions.put(contentString, regexpQuery);
        return alternateVersions;
    }

    // Builds a query against a random mapped, aliased, or unmapped field name.
    private static RegexpQueryBuilder randomRegexpQuery() {
        // mapped or unmapped fields
        String fieldName = randomFrom(STRING_FIELD_NAME, STRING_ALIAS_FIELD_NAME, randomAlphaOfLengthBetween(1, 10));
        String value = randomAlphaOfLengthBetween(1, 10);
        return new RegexpQueryBuilder(fieldName, value);
    }

    /**
     * Verifies the builder produced a Lucene RegexpQuery targeting the
     * resolved (alias-expanded) field name.
     */
    @Override
    protected void doAssertLuceneQuery(RegexpQueryBuilder queryBuilder, Query query, SearchContext context) throws IOException {
        assertThat(query, instanceOf(RegexpQuery.class));
        RegexpQuery regexpQuery = (RegexpQuery) query;
        String expectedFieldName = expectedFieldName( queryBuilder.fieldName());
        assertThat(regexpQuery.getField(), equalTo(expectedFieldName));
    }

    // Null/empty field names and null values must be rejected at construction.
    public void testIllegalArguments() {
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new RegexpQueryBuilder(null, "text"));
        assertEquals("field name is null or empty", e.getMessage());

        e = expectThrows(IllegalArgumentException.class, () -> new RegexpQueryBuilder("", "text"));
        assertEquals("field name is null or empty", e.getMessage());

        e = expectThrows(IllegalArgumentException.class, () -> new RegexpQueryBuilder("field", null));
        assertEquals("value cannot be null", e.getMessage());
    }

    // Long-form JSON round-trips and exposes value and max_determinized_states.
    public void testFromJson() throws IOException {
        String json =
                "{\n" +
                "  \"regexp\" : {\n" +
                "    \"name.first\" : {\n" +
                "      \"value\" : \"s.*y\",\n" +
                "      \"flags_value\" : 7,\n" +
                "      \"max_determinized_states\" : 20000,\n" +
                "      \"boost\" : 1.0\n" +
                "    }\n" +
                "  }\n" +
                "}";

        RegexpQueryBuilder parsed = (RegexpQueryBuilder) parseQuery(json);
        checkGeneratedJson(json, parsed);

        assertEquals(json, "s.*y", parsed.value());
        assertEquals(json, 20000, parsed.maxDeterminizedStates());
    }

    // Regexp queries against numeric fields must fail at shard-query time.
    public void testNumeric() throws Exception {
        assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
        RegexpQueryBuilder query = new RegexpQueryBuilder(INT_FIELD_NAME, "12");
        QueryShardContext context = createShardContext();
        QueryShardException e = expectThrows(QueryShardException.class, () -> query.toQuery(context));
        assertEquals("Can only use regexp queries on keyword and text fields - not on [mapped_int] which is of type [integer]",
                e.getMessage());
    }

    // Both the long and short JSON forms reject multiple fields in one query.
    public void testParseFailsWithMultipleFields() throws IOException {
        String json =
                "{\n" +
                "    \"regexp\": {\n" +
                "      \"user1\": {\n" +
                "        \"value\": \"k.*y\"\n" +
                "      },\n" +
                "      \"user2\": {\n" +
                "        \"value\": \"k.*y\"\n" +
                "      }\n" +
                "    }\n" +
                "}";
        ParsingException e = expectThrows(ParsingException.class, () -> parseQuery(json));
        assertEquals("[regexp] query doesn't support multiple fields, found [user1] and [user2]", e.getMessage());

        String shortJson =
                "{\n" +
                "    \"regexp\": {\n" +
                "      \"user1\": \"k.*y\",\n" +
                "      \"user2\": \"k.*y\"\n" +
                "    }\n" +
                "}";
        e = expectThrows(ParsingException.class, () -> parseQuery(shortJson));
        assertEquals("[regexp] query doesn't support multiple fields, found [user1] and [user2]", e.getMessage());
    }
}
| apache-2.0 |
jianbingfang/xhf | src/main/java/com/xthena/jl/web/XgdwContactController.java | 5035 | package com.xthena.jl.web;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import javax.annotation.Resource;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import com.xthena.api.user.UserConnector;
import com.xthena.core.hibernate.PropertyFilter;
import com.xthena.core.mapper.BeanMapper;
import com.xthena.core.page.Page;
import com.xthena.core.spring.MessageHelper;
import com.xthena.ext.export.Exportor;
import com.xthena.ext.export.TableModel;
import com.xthena.security.util.SpringSecurityUtils;
import com.xthena.jl.domain.XgdwContact;
import com.xthena.jl.manager.JlDeptManager;
import com.xthena.jl.manager.XgdwContactManager;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.ModelAttribute;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.servlet.mvc.support.RedirectAttributes;
@Controller
@RequestMapping("jl")
public class XgdwContactController {
    // CRUD + export controller for XgdwContact records.
    // List/save are scoped to the current project id ("fxmid") resolved from the request
    // via JlDeptManager.
    private XgdwContactManager xgdwContactManager;
    private Exportor exportor;
    private BeanMapper beanMapper = new BeanMapper();
    private UserConnector userConnector;
    private MessageHelper messageHelper;
    @Autowired
    private JlDeptManager jlDeptManager;
    /**
     * Paged list view, restricted to the current project: forces an
     * "equals fxmid" filter into the request parameters before building
     * the property filters.
     */
    @RequestMapping("xgdwContact-info-list")
    public String list(@ModelAttribute Page page,
            @RequestParam Map<String, Object> parameterMap, Model model,HttpServletRequest request) {
        // Override any client-supplied fxmid filter with the server-side project id.
        parameterMap.put("filter_EQL_fxmid", jlDeptManager.getXmId(request));
        List<PropertyFilter> propertyFilters = PropertyFilter
                .buildFromMap(parameterMap);
        page = xgdwContactManager.pagedQuery(page, propertyFilters);
        model.addAttribute("page", page);
        return "jl/xgdwContact-info-list";
    }
    /**
     * Create/edit form. When {@code id} is given the existing entity is
     * loaded into the model; otherwise the form starts empty.
     */
    @RequestMapping("xgdwContact-info-input")
    public String input(@RequestParam(value = "id", required = false) Long id,
            Model model) {
        if (id != null) {
            XgdwContact xgdwContact = xgdwContactManager.get(id);
            model.addAttribute("model", xgdwContact);
        }
        return "jl/xgdwContact-info-input";
    }
    /**
     * Saves a contact. For an update (fid present) the submitted values are
     * copied onto the persistent instance; for a create the project id is
     * stamped from the request before saving.
     */
    @RequestMapping("xgdwContact-info-save")
    public String save(@ModelAttribute XgdwContact xgdwContact,
            @RequestParam Map<String, Object> parameterMap,HttpServletRequest request,
            RedirectAttributes redirectAttributes) {
        XgdwContact dest = null;
        Long id = xgdwContact.getFid();
        if (id != null) {
            // Update path: merge form values onto the managed entity.
            dest = xgdwContactManager.get(id);
            beanMapper.copy(xgdwContact, dest);
        } else {
            // Create path: bind the new record to the current project.
            dest = xgdwContact;
            dest.setFxmid(jlDeptManager.getXmId(request));
        }
        xgdwContactManager.save(dest);
        messageHelper.addFlashMessage(redirectAttributes, "core.success.save",
                "保存成功");
        return "redirect:/jl/xgdwContact-info-list.do";
    }
    /**
     * Batch delete of the selected ids, then redirect back to the list with a
     * flash message.
     */
    @RequestMapping("xgdwContact-info-remove")
    public String remove(@RequestParam("selectedItem") List<Long> selectedItem,
            RedirectAttributes redirectAttributes) {
        List<XgdwContact> xgdwContacts = xgdwContactManager.findByIds(selectedItem);
        xgdwContactManager.removeAll(xgdwContacts);
        messageHelper.addFlashMessage(redirectAttributes,
                "core.success.delete", "删除成功");
        return "redirect:/jl/xgdwContact-info-list.do";
    }
    /**
     * Exports the current page of results via {@link Exportor}.
     * NOTE(review): unlike {@link #list}, no "filter_EQL_fxmid" filter is
     * injected here, so the export is not scoped to the current project —
     * confirm whether that is intentional.
     */
    @RequestMapping("xgdwContact-info-export")
    public void export(@ModelAttribute Page page,
            @RequestParam Map<String, Object> parameterMap,
            HttpServletResponse response) throws Exception {
        List<PropertyFilter> propertyFilters = PropertyFilter
                .buildFromMap(parameterMap);
        page = xgdwContactManager.pagedQuery(page, propertyFilters);
        List<XgdwContact> xgdwContacts = (List<XgdwContact>) page.getResult();
        TableModel tableModel = new TableModel();
        //tableModel.setName("xgdwContact info");
        //tableModel.addHeaders("id", "name");
        tableModel.setData(xgdwContacts);
        exportor.export(response, tableModel);
    }
    // ~ ======================================================================
    // Setter injection for collaborators (resolved by type via @Resource).
    @Resource
    public void setXgdwContactManager(XgdwContactManager xgdwContactManager) {
        this.xgdwContactManager = xgdwContactManager;
    }
    @Resource
    public void setExportor(Exportor exportor) {
        this.exportor = exportor;
    }
    @Resource
    public void setUserConnector(UserConnector userConnector) {
        this.userConnector = userConnector;
    }
    @Resource
    public void setMessageHelper(MessageHelper messageHelper) {
        this.messageHelper = messageHelper;
    }
}
| apache-2.0 |
emetsger/fcrepo4 | fcrepo-kernel-api/src/main/java/org/fcrepo/kernel/api/services/functions/HierarchicalIdentifierSupplier.java | 1668 | /*
* Licensed to DuraSpace under one or more contributor license agreements.
* See the NOTICE file distributed with this work for additional information
* regarding copyright ownership.
*
* DuraSpace licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.fcrepo.kernel.api.services.functions;
import static java.util.UUID.randomUUID;
import java.util.stream.IntStream;
import java.util.StringJoiner;
/**
* Unique value minter that creates hierarchical IDs from a UUID
*
* @author awoods
* @author acoburn
*/
public interface HierarchicalIdentifierSupplier extends UniqueValueSupplier {

    /** Number of characters taken from the UUID for each hierarchy segment. */
    int DEFAULT_LENGTH = 2;

    /** Number of hierarchy segments prepended before the full UUID. */
    int DEFAULT_COUNT = 4;

    /**
     * Mint a unique hierarchical identifier from a random UUID.
     * <p>
     * The first {@code DEFAULT_COUNT * DEFAULT_LENGTH} characters of the UUID
     * are split into {@code DEFAULT_COUNT} segments of {@code DEFAULT_LENGTH}
     * characters each, joined with {@code '/'} and followed by the full UUID,
     * e.g. {@code de/ad/be/ef/deadbeef-...}.
     *
     * @return the slash-separated hierarchical identifier
     */
    // Note: interface fields are implicitly public static final and interface
    // methods implicitly public, so the redundant modifiers were dropped.
    @Override
    default String get() {
        final String uuid = randomUUID().toString();
        // Suffix appends the complete UUID after the short prefix segments.
        final StringJoiner joiner = new StringJoiner("/", "", "/" + uuid);
        IntStream.range(0, DEFAULT_COUNT)
                .forEach(i -> joiner.add(uuid.substring(i * DEFAULT_LENGTH, (i + 1) * DEFAULT_LENGTH)));
        return joiner.toString();
    }
}
| apache-2.0 |
tjordanchat/rundeck | core/src/main/java/com/dtolabs/rundeck/core/authorization/providers/SourceProvider.java | 217 | package com.dtolabs.rundeck.core.authorization.providers;
import java.util.Iterator;
/**
* Created by greg on 7/21/15.
*/
public interface SourceProvider {
    /**
     * Provides the YAML authorization sources backing this provider.
     *
     * @return an iterator over the cacheable YAML sources
     */
    Iterator<CacheableYamlSource> getSourceIterator();
}
| apache-2.0 |
BrentDorsey/pipeline | stream.ml/kafka/0.10/demos/streams/streams/src/main/java/com/advancedspark/kafka/streams/StreamsRatingsApp.java | 4035 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.advancedspark.kafka.streams;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.kstream.KStreamBuilder;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.KTable;
import org.apache.kafka.streams.kstream.KeyValueMapper;
import org.apache.kafka.streams.kstream.ValueMapper;
import java.util.Arrays;
import java.util.Locale;
import java.util.Properties;
/**
* Demonstrates, using the high-level KStream DSL, how to implement the WordCount program
* that computes a simple word occurrence histogram from an input text.
*
* In this example, the input stream reads from a topic named "streams-file-input", where the values of messages
* represent lines of text; and the histogram output is written to topic "streams-wordcount-output" where each record
* is an updated count of a single word.
*
* Before running this example you must create the source topic (e.g. via bin/kafka-topics.sh --create ...)
* and write some data to it (e.g. via bin-kafka-console-producer.sh). Otherwise you won't see any data arriving in the output topic.
*/
public class StreamsRatingsApp {
    /**
     * Demo entry point: configures a Kafka Streams application that consumes
     * the "item_ratings" topic, runs it for one minute, then shuts it down.
     * The word-count style aggregation is currently commented out, so the
     * topology only defines the source stream.
     */
    public static void main(String[] args) throws Exception {
        Properties props = new Properties();
        props.put(StreamsConfig.APPLICATION_ID_CONFIG, "streams-ratings-app");
        props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "127.0.0.1:9092");
        props.put(StreamsConfig.ZOOKEEPER_CONNECT_CONFIG, "127.0.0.1:2181");
        // String serdes for both keys and values throughout the topology.
        props.put(StreamsConfig.KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
        props.put(StreamsConfig.VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
        props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest");
        KStreamBuilder builder = new KStreamBuilder();
        KStream<String, String> source = builder.stream("item_ratings");
        /*
        KTable<String, Long> counts = source
                .flatMapValues(new ValueMapper<String, Iterable<String>>() {
                    @Override
                    public Iterable<String> apply(String value) {
                        return Arrays.asList(value.toLowerCase(Locale.getDefault()).split(" "));
                    }
                }).map(new KeyValueMapper<String, String, KeyValue<String, String>>() {
                    @Override
                    public KeyValue<String, String> apply(String key, String value) {
                        return new KeyValue<>(value, value);
                    }
                })
                .countByKey("Counts");
        // need to override value serde to Long type
        counts.to(Serdes.String(), Serdes.Long(), "streams-wordcount-output");
        */
        KafkaStreams streams = new KafkaStreams(builder, props);
        streams.start();
        // usually the stream application would be running forever,
        // in this example we just let it run for some time and stop since the input data is finite.
        Thread.sleep(60000L);
        streams.close();
    }
}
| apache-2.0 |
mpgerstl/tEFMA | ch/javasoft/jbase/FixedWidthMarshaller.java | 2418 | /*
* =============================================================================
* Simplified BSD License, see http://www.opensource.org/licenses/
* -----------------------------------------------------------------------------
* Copyright (c) 2008-2009, Marco Terzer, Zurich, Switzerland
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the Swiss Federal Institute of Technology Zurich
* nor the names of its contributors may be used to endorse or promote
* products derived from this software without specific prior written
* permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
* =============================================================================
*/
package ch.javasoft.jbase;
/**
* A <code>FixedWidthMarshaller</code> is an extension of an
* entity marshaller where the row width is known, i.e. all
* entity instances need the same amount of storage to be
* persisted
*
* @param <E> The entity type
*/
public interface FixedWidthMarshaller<E> extends EntityMarshaller<E> {
    /**
     * Returns the byte width of one row, i.e. the fixed number of bytes
     * required to persist a single entity instance.
     *
     * @return the per-entity byte width in bytes
     */
    int getByteWidth();
}
| bsd-2-clause |
ema/conpaas | conpaas-services/src/conpaas/services/taskfarm/src/org/koala/runnersFramework/runners/bot/DummyMaster.java | 3087 | package org.koala.runnersFramework.runners.bot;
import java.util.Collection;
import java.util.HashMap;
import ibis.ipl.IbisIdentifier;
public class DummyMaster extends Master {
    // NOTE(review): doneJobs is populated nowhere in this class — presumably
    // a leftover; confirm before removing.
    private HashMap<String,Job> doneJobs;
    private HashMap<String,Job> schedJobs;
    // One simulated Host per (index, cluster) pair, keyed "i@clusterAlias".
    private HashMap<String,Host> hosts;
    private int maxWorkers = 0;
    private long timeUnit;
    /**
     * Builds the simulated host pool from the configured clusters.
     * timeUnit ends up as the value from the last cluster iterated.
     */
    public DummyMaster(BoTRunner aBot) throws Exception {
        super(aBot);
        Collection<Cluster> clusters = bot.Clusters.values();
        hosts = new HashMap<String, Host>();
        for (Cluster cluster : clusters) {
            maxWorkers += cluster.maxNodes;
            timeUnit = cluster.timeUnit;
            for(int i=0; i<cluster.maxNodes; i++) {
                hosts.put(i+"@"+cluster.alias, new Host(i+"@"+cluster.alias,cluster.costUnit));
            }
        }
        doneJobs = new HashMap<String, Job>();
        schedJobs = new HashMap<String, Job>();
    }
    @Override
    protected boolean areWeDone() {
        // TODO Auto-generated method stub
        return false;
    }
    @Override
    protected Job handleJobRequest(IbisIdentifier from) {
        // TODO Auto-generated method stub
        return null;
    }
    @Override
    protected Job handleJobResult(JobResult received, IbisIdentifier from) {
        // TODO Auto-generated method stub
        return null;
    }
    @Override
    protected void handleLostConnections() {
        // TODO Auto-generated method stub
    }
    /**
     * Dry-run scheduler: repeatedly assigns jobs to hosts until the bag of
     * tasks is empty, then prints the simulated makespan and total cost.
     * For each job the host giving the earliest completion time is found;
     * the job whose best completion time is LARGEST is scheduled next
     * (a max-min style heuristic — longest jobs placed first).
     */
    @Override
    public void run() {
        while(!bot.tasks.isEmpty()) {
            long mct = Long.MIN_VALUE;
            String bestHost = "";
            //HPDCJob schedJob = null;
            Job schedJob = null;
            for(Job job : bot.tasks) {
                // HPDCJob j = (HPDCJob) job;
                long mctj = Long.MAX_VALUE;
                String bestHostJ = "";
                // args[0] carries the job's estimated execution time.
                long et = Long.parseLong(job.args[0]);//Long.parseLong(j.argC1);
                for(Host host : hosts.values()) {
                    if(host.node.contains("slow")) {
                        // "slow" hosts are credited with 2/3 of the nominal
                        // execution time — NOTE(review): confirm this scaling
                        // direction is intended (it makes "slow" hosts faster).
                        if(mctj > host.EAT + 2* et /3) {
                            mctj = host.EAT + 2* et /3;
                            bestHostJ = host.node;
                        }
                    } else {
                        if(mctj > host.EAT + et) {
                            mctj = host.EAT + et;
                            bestHostJ = host.node;
                        }
                    }
                }
                // Keep the job with the maximum of the per-job minimum
                // completion times (max-min selection).
                if(mct < mctj) {
                    mct = mctj;
                    bestHost = bestHostJ;
                    schedJob = job;
                }
            }
            hosts.get(bestHost).addJob(schedJob);
            schedJobs.put(schedJob.jobID, schedJob);
            bot.tasks.remove(schedJob);
            System.out.println("Job " + schedJob.jobID + " with et: " + schedJob.args[0] + " was scheduled on machine " + bestHost + "; EAT is now " + hosts.get(bestHost).EAT);
        }
        // Makespan = max earliest-available-time over hosts; cost is billed
        // per started time unit (ceil) per host.
        long meat = Long.MIN_VALUE;
        double price = 0.0;
        for(Host host : hosts.values()) {
            if(host.EAT > meat) meat = host.EAT;
            price += Math.ceil((double)host.EAT / 60 / timeUnit) * host.cost;
        }
        System.out.println("Longest run should be: " + meat/60 + "m" + meat%60 + "s with a total cost of " + price);
    }
    @Override
    public void startInitWorkers() {
        // TODO Auto-generated method stub
    }
    @Override
    public void terminateWorker(Cluster cluster, WorkerStats ws, String reason) {
        // TODO Auto-generated method stub
    }
}
| bsd-3-clause |
jdgarrett/geogig | src/core/src/test/java/org/locationtech/geogig/test/integration/ApplyPatchOpTest.java | 19920 | /* Copyright (c) 2013-2016 Boundless and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Distribution License v1.0
* which accompanies this distribution, and is available at
* https://www.eclipse.org/org/documents/edl-v10.html
*
* Contributors:
* Victor Olaya (Boundless) - initial implementation
*/
package org.locationtech.geogig.test.integration;
import java.util.ArrayList;
import java.util.Map;
import org.junit.Test;
import org.locationtech.geogig.model.Node;
import org.locationtech.geogig.model.NodeRef;
import org.locationtech.geogig.model.RevFeature;
import org.locationtech.geogig.model.RevFeatureType;
import org.locationtech.geogig.model.RevTree;
import org.locationtech.geogig.model.impl.RevFeatureBuilder;
import org.locationtech.geogig.model.impl.RevFeatureTypeBuilder;
import org.locationtech.geogig.plumbing.FindTreeChild;
import org.locationtech.geogig.plumbing.RevObjectParse;
import org.locationtech.geogig.plumbing.diff.AttributeDiff;
import org.locationtech.geogig.plumbing.diff.FeatureDiff;
import org.locationtech.geogig.plumbing.diff.FeatureTypeDiff;
import org.locationtech.geogig.plumbing.diff.GenericAttributeDiffImpl;
import org.locationtech.geogig.plumbing.diff.Patch;
import org.locationtech.geogig.porcelain.AddOp;
import org.locationtech.geogig.porcelain.ApplyPatchOp;
import org.locationtech.geogig.porcelain.CannotApplyPatchException;
import org.locationtech.geogig.repository.DiffEntry;
import org.locationtech.geogig.repository.WorkingTree;
import org.locationtech.geogig.storage.AutoCloseableIterator;
import org.opengis.feature.type.PropertyDescriptor;
import com.google.common.base.Optional;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
/**
 * Integration tests for {@link ApplyPatchOp}: applying, rejecting and
 * reversing patches that add/remove/modify features and feature types.
 */
public class ApplyPatchOpTest extends RepositoryTestCase {
    @Override
    protected void setUpInternal() throws Exception {
    }
    // Helper: resolve a child node by path under the given root tree,
    // returning absent when the path does not exist.
    private Optional<Node> findTreeChild(RevTree root, String pathRemove) {
        Optional<NodeRef> nodeRef = geogig.command(FindTreeChild.class).setParent(root)
                .setChildPath(pathRemove).call();
        Optional<Node> node = Optional.absent();
        if (nodeRef.isPresent()) {
            node = Optional.of(nodeRef.get().getNode());
        }
        return node;
    }
    // Applying an "added feature" patch creates both the type tree and the feature node.
    @Test
    public void testAddFeaturePatch() throws Exception {
        Patch patch = new Patch();
        String path = NodeRef.appendChild(pointsName, points1.getIdentifier().getID());
        patch.addAddedFeature(path, RevFeatureBuilder.build(points1),
                RevFeatureTypeBuilder.build(pointsType));
        geogig.command(ApplyPatchOp.class).setPatch(patch).call();
        RevTree root = repo.workingTree().getTree();
        assertNotNull(root);
        Optional<Node> typeTreeId = findTreeChild(root, pointsName);
        RevTree typeTree = repo.getTree(typeTreeId.get().getObjectId());
        assertNotNull(typeTree);
        Optional<Node> featureBlobId = findTreeChild(root, path);
        assertTrue(featureBlobId.isPresent());
    }
    // A "removed feature" patch deletes the feature node from the working tree.
    @Test
    public void testRemoveFeaturePatch() throws Exception {
        insert(points1);
        Patch patch = new Patch();
        String path = NodeRef.appendChild(pointsName, points1.getIdentifier().getID());
        patch.addRemovedFeature(path, RevFeatureBuilder.build(points1),
                RevFeatureTypeBuilder.build(pointsType));
        geogig.command(ApplyPatchOp.class).setPatch(patch).call();
        RevTree root = repo.workingTree().getTree();
        assertNotNull(root);
        Optional<Node> featureBlobId = findTreeChild(root, path);
        assertFalse(featureBlobId.isPresent());
    }
    // Modifying an attribute via a FeatureDiff updates the stored value.
    @Test
    public void testModifyFeatureAttributePatch() throws Exception {
        insert(points1);
        Patch patch = new Patch();
        String path = NodeRef.appendChild(pointsName, points1.getIdentifier().getID());
        Map<PropertyDescriptor, AttributeDiff> map = Maps.newHashMap();
        Object oldValue = points1.getProperty("sp").getValue();
        GenericAttributeDiffImpl diff = new GenericAttributeDiffImpl(oldValue, "new");
        map.put(pointsType.getDescriptor("sp"), diff);
        FeatureDiff feaureDiff = new FeatureDiff(path, map, RevFeatureTypeBuilder.build(pointsType),
                RevFeatureTypeBuilder.build(pointsType));
        patch.addModifiedFeature(feaureDiff);
        geogig.command(ApplyPatchOp.class).setPatch(patch).call();
        RevTree root = repo.workingTree().getTree();
        Optional<Node> featureBlobId = findTreeChild(root, path);
        assertTrue(featureBlobId.isPresent());
        try (AutoCloseableIterator<DiffEntry> unstaged = repo.workingTree()
                .getUnstaged(pointsName)) {
            ArrayList<DiffEntry> diffs = Lists.newArrayList(unstaged);
            assertEquals(2, diffs.size());
        }
        Optional<RevFeature> feature = geogig.command(RevObjectParse.class)
                .setRefSpec("WORK_HEAD:" + path).call(RevFeature.class);
        assertTrue(feature.isPresent());
        ImmutableList<Optional<Object>> values = feature.get().getValues();
        assertEquals("new", values.get(0).get());
    }
    // A modify patch whose recorded old value no longer matches must be rejected.
    @Test
    public void testModifyFeatureAttributeOutdatedPatch() throws Exception {
        insert(points1_modified);
        Patch patch = new Patch();
        String path = NodeRef.appendChild(pointsName, points1.getIdentifier().getID());
        Map<PropertyDescriptor, AttributeDiff> map = Maps.newHashMap();
        Object oldValue = points1.getProperty("sp").getValue();
        GenericAttributeDiffImpl diff = new GenericAttributeDiffImpl(oldValue, "new");
        map.put(pointsType.getDescriptor("sp"), diff);
        FeatureDiff feaureDiff = new FeatureDiff(path, map, RevFeatureTypeBuilder.build(pointsType),
                RevFeatureTypeBuilder.build(pointsType));
        patch.addModifiedFeature(feaureDiff);
        try {
            geogig.command(ApplyPatchOp.class).setPatch(patch).call();
            fail();
        } catch (CannotApplyPatchException e) {
            assertTrue(true);
        }
    }
    // Removing an attribute (feature-type narrowing) drops the value from the feature.
    @Test
    public void testRemoveFeatureAttributePatch() throws Exception {
        insert(points1B);
        Patch patch = new Patch();
        String path = NodeRef.appendChild(pointsName, points1B.getIdentifier().getID());
        Map<PropertyDescriptor, AttributeDiff> map = Maps.newHashMap();
        Object oldValue = points1B.getProperty("extra").getValue();
        GenericAttributeDiffImpl diff = new GenericAttributeDiffImpl(oldValue, null);
        map.put(modifiedPointsType.getDescriptor("extra"), diff);
        FeatureDiff featureDiff = new FeatureDiff(path, map,
                RevFeatureTypeBuilder.build(modifiedPointsType),
                RevFeatureTypeBuilder.build(pointsType));
        patch.addModifiedFeature(featureDiff);
        geogig.command(ApplyPatchOp.class).setPatch(patch).call();
        Optional<RevFeature> feature = geogig.command(RevObjectParse.class)
                .setRefSpec("WORK_HEAD:" + path).call(RevFeature.class);
        assertTrue(feature.isPresent());
        ImmutableList<Optional<Object>> values = feature.get().getValues();
        assertEquals(points1.getProperties().size(), values.size());
        assertFalse(values.contains("ExtraString"));
    }
    // Adding an attribute (feature-type widening) applies without error.
    @Test
    public void testAddFeatureAttributePatch() throws Exception {
        insert(points1);
        Patch patch = new Patch();
        String path = NodeRef.appendChild(pointsName, points1.getIdentifier().getID());
        Map<PropertyDescriptor, AttributeDiff> map = Maps.newHashMap();
        Object newValue = points1B.getProperty("extra").getValue();
        GenericAttributeDiffImpl diff = new GenericAttributeDiffImpl(null, newValue);
        map.put(modifiedPointsType.getDescriptor("extra"), diff);
        FeatureDiff featureDiff = new FeatureDiff(path, map,
                RevFeatureTypeBuilder.build(pointsType),
                RevFeatureTypeBuilder.build(modifiedPointsType));
        patch.addModifiedFeature(featureDiff);
        geogig.command(ApplyPatchOp.class).setPatch(patch).call();
        // TODO
    }
    // Removing an attribute whose current value differs from the patch must be rejected.
    @Test
    public void testRemoveFeatureAttributeOutdatedPatch() throws Exception {
        insert(points1B_modified);
        Patch patch = new Patch();
        String path = NodeRef.appendChild(pointsName, points1B.getIdentifier().getID());
        Map<PropertyDescriptor, AttributeDiff> map = Maps.newHashMap();
        Object oldValue = points1B.getProperty("extra").getValue();
        GenericAttributeDiffImpl diff = new GenericAttributeDiffImpl(oldValue, null);
        map.put(modifiedPointsType.getDescriptor("extra"), diff);
        FeatureDiff featureDiff = new FeatureDiff(path, map,
                RevFeatureTypeBuilder.build(modifiedPointsType),
                RevFeatureTypeBuilder.build(pointsType));
        patch.addModifiedFeature(featureDiff);
        try {
            geogig.command(ApplyPatchOp.class).setPatch(patch).call();
            fail();
        } catch (CannotApplyPatchException e) {
            assertTrue(true);
        }
    }
    // Adding an attribute the feature already has must be rejected.
    @Test
    public void testAddFeatureAttributeOutdatedPatch() throws Exception {
        insert(points1B);
        Patch patch = new Patch();
        String path = NodeRef.appendChild(pointsName, points1.getIdentifier().getID());
        Map<PropertyDescriptor, AttributeDiff> map = Maps.newHashMap();
        Object newValue = points1B.getProperty("extra").getValue();
        GenericAttributeDiffImpl diff = new GenericAttributeDiffImpl(null, newValue);
        map.put(modifiedPointsType.getDescriptor("extra"), diff);
        FeatureDiff featureDiff = new FeatureDiff(path, map,
                RevFeatureTypeBuilder.build(modifiedPointsType),
                RevFeatureTypeBuilder.build(modifiedPointsType));
        patch.addModifiedFeature(featureDiff);
        try {
            geogig.command(ApplyPatchOp.class).setPatch(patch).call();
            fail();
        } catch (CannotApplyPatchException e) {
            assertTrue(true);
        }
    }
    // Adding a feature that already exists must be rejected.
    @Test
    public void testAddedFeatureExists() throws Exception {
        insert(points1);
        Patch patch = new Patch();
        String path = NodeRef.appendChild(pointsName, points1.getIdentifier().getID());
        patch.addAddedFeature(path, RevFeatureBuilder.build(points1),
                RevFeatureTypeBuilder.build(pointsType));
        try {
            geogig.command(ApplyPatchOp.class).setPatch(patch).call();
            fail();
        } catch (CannotApplyPatchException e) {
            assertTrue(true);
        }
    }
    // Modifying a feature that does not exist must be rejected.
    @Test
    public void testModifiedFeatureDoesNotExists() throws Exception {
        Patch patch = new Patch();
        String path = NodeRef.appendChild(pointsName, points1.getIdentifier().getID());
        Map<PropertyDescriptor, AttributeDiff> map = Maps.newHashMap();
        Object oldValue = points1.getProperty("sp").getValue();
        GenericAttributeDiffImpl diff = new GenericAttributeDiffImpl(oldValue, "new");
        map.put(pointsType.getDescriptor("sp"), diff);
        FeatureDiff featureDiff = new FeatureDiff(path, map,
                RevFeatureTypeBuilder.build(pointsType), RevFeatureTypeBuilder.build(pointsType));
        patch.addModifiedFeature(featureDiff);
        try {
            geogig.command(ApplyPatchOp.class).setPatch(patch).call();
            fail();
        } catch (CannotApplyPatchException e) {
            assertTrue(true);
        }
    }
    // Removing a feature that does not exist must be rejected.
    @Test
    public void testRemovedFeatureDoesNotExists() throws Exception {
        Patch patch = new Patch();
        String path = NodeRef.appendChild(pointsName, points1.getIdentifier().getID());
        patch.addRemovedFeature(path, RevFeatureBuilder.build(points1),
                RevFeatureTypeBuilder.build(pointsType));
        try {
            geogig.command(ApplyPatchOp.class).setPatch(patch).call();
            fail();
        } catch (CannotApplyPatchException e) {
            assertTrue(true);
        }
    }
    // Partial application applies what it can; the rejected remainder is
    // stable (re-applying it yields the identical rejected patch).
    @Test
    public void testPartialApplication() throws Exception {
        insert(points1, points2);
        Patch patch = new Patch();
        String pathRemove = NodeRef.appendChild(pointsName, points2.getIdentifier().getID());
        patch.addRemovedFeature(pathRemove, RevFeatureBuilder.build(points2),
                RevFeatureTypeBuilder.build(pointsType));
        String pathModify = NodeRef.appendChild(pointsName, points1B.getIdentifier().getID());
        Map<PropertyDescriptor, AttributeDiff> map = Maps.newHashMap();
        Object oldValue = points1B.getProperty("extra").getValue();
        GenericAttributeDiffImpl diff = new GenericAttributeDiffImpl(oldValue, null);
        map.put(modifiedPointsType.getDescriptor("extra"), diff);
        FeatureDiff featureDiff = new FeatureDiff(pathModify, map,
                RevFeatureTypeBuilder.build(modifiedPointsType),
                RevFeatureTypeBuilder.build(pointsType));
        patch.addModifiedFeature(featureDiff);
        Patch rejected = geogig.command(ApplyPatchOp.class).setPatch(patch).setApplyPartial(true)
                .call();
        assertFalse(rejected.isEmpty());
        RevTree root = repo.workingTree().getTree();
        assertNotNull(root);
        Optional<Node> featureBlobId = findTreeChild(root, pathRemove);
        assertFalse(featureBlobId.isPresent());
        // now we take the rejected patch and apply it, and the new rejected should be identical to
        // it
        Patch newRejected = geogig.command(ApplyPatchOp.class).setPatch(rejected)
                .setApplyPartial(true).call();
        assertEquals(rejected, newRejected);
    }
    // An empty patch applies as a no-op.
    @Test
    public void testApplyEmptyPatch() {
        Patch patch = new Patch();
        geogig.command(ApplyPatchOp.class).setPatch(patch).setApplyPartial(true).call();
    }
    // Applying a patch followed by its reverse restores the original state.
    @Test
    public void testReversedPatch() throws Exception {
        insert(points1, points2);
        Patch patch = new Patch();
        String path = NodeRef.appendChild(pointsName, points1.getIdentifier().getID());
        Map<PropertyDescriptor, AttributeDiff> map = Maps.newHashMap();
        Object oldValue = points1.getProperty("sp").getValue();
        GenericAttributeDiffImpl diff = new GenericAttributeDiffImpl(oldValue, "new");
        map.put(pointsType.getDescriptor("sp"), diff);
        FeatureDiff feaureDiff = new FeatureDiff(path, map, RevFeatureTypeBuilder.build(pointsType),
                RevFeatureTypeBuilder.build(pointsType));
        patch.addModifiedFeature(feaureDiff);
        String removedPath = NodeRef.appendChild(pointsName, points2.getIdentifier().getID());
        patch.addRemovedFeature(removedPath, RevFeatureBuilder.build(points2),
                RevFeatureTypeBuilder.build(pointsType));
        String addedPath = NodeRef.appendChild(pointsName, points3.getIdentifier().getID());
        patch.addAddedFeature(addedPath, RevFeatureBuilder.build(points3),
                RevFeatureTypeBuilder.build(pointsType));
        geogig.command(ApplyPatchOp.class).setPatch(patch).call();
        geogig.command(ApplyPatchOp.class).setPatch(patch.reversed()).call();
        RevTree root = repo.workingTree().getTree();
        Optional<Node> featureBlobId = findTreeChild(root, removedPath);
        assertTrue(featureBlobId.isPresent());
        featureBlobId = findTreeChild(root, addedPath);
        assertFalse(featureBlobId.isPresent());
        Optional<RevFeature> feature = geogig.command(RevObjectParse.class)
                .setRefSpec("WORK_HEAD:" + path).call(RevFeature.class);
        assertTrue(feature.isPresent());
        assertEquals(oldValue, feature.get().getValues().get(0).get());
    }
    // An "altered tree" patch with no old id creates an empty typed tree.
    @Test
    public void testAddEmptyFeatureTypePatch() throws Exception {
        Patch patch = new Patch();
        RevFeatureType featureType = RevFeatureTypeBuilder.build(pointsType);
        patch.addFeatureType(featureType);
        patch.addAlteredTree(new FeatureTypeDiff(pointsName, null, featureType.getId()));
        geogig.command(ApplyPatchOp.class).setPatch(patch).call();
        RevTree root = repo.workingTree().getTree();
        assertNotNull(root);
        Optional<Node> typeTreeId = findTreeChild(root, pointsName);
        RevTree typeTree = repo.getTree(typeTreeId.get().getObjectId());
        assertNotNull(typeTree);
        assertEquals(featureType.getId(), typeTreeId.get().getMetadataId().get());
    }
    // An "altered tree" patch with no new id removes the (empty) typed tree.
    @Test
    public void testRemoveEmptyFeatureTypePatch() throws Exception {
        WorkingTree workingTree = geogig.getRepository().workingTree();
        workingTree.createTypeTree(pointsName, pointsType);
        geogig.command(AddOp.class).setUpdateOnly(false).call();
        Patch patch = new Patch();
        RevFeatureType featureType = RevFeatureTypeBuilder.build(pointsType);
        patch.addFeatureType(featureType);
        patch.addAlteredTree(new FeatureTypeDiff(pointsName, featureType.getId(), null));
        geogig.command(ApplyPatchOp.class).setPatch(patch).call();
        RevTree root = repo.workingTree().getTree();
        assertNotNull(root);
        Optional<Node> typeTree = findTreeChild(root, pointsName);
        assertFalse(typeTree.isPresent());
    }
    // Changing the default feature type updates the tree metadata id and the
    // per-feature metadata ids accordingly.
    @Test
    public void testModifiedFeatureType() throws Exception {
        insert(points2, points3, points1B);
        Patch patch = new Patch();
        RevFeatureType oldFeatureType = RevFeatureTypeBuilder.build(pointsType);
        RevFeatureType featureType = RevFeatureTypeBuilder.build(modifiedPointsType);
        patch.addFeatureType(featureType);
        patch.addAlteredTree(
                new FeatureTypeDiff(pointsName, oldFeatureType.getId(), featureType.getId()));
        geogig.command(ApplyPatchOp.class).setPatch(patch).call();
        RevTree root = repo.workingTree().getTree();
        assertNotNull(root);
        Optional<Node> typeTree = findTreeChild(root, pointsName);
        assertTrue(typeTree.isPresent());
        assertEquals(featureType.getId(), typeTree.get().getMetadataId().get());
        Optional<Node> featureNode = findTreeChild(root, NodeRef.appendChild(pointsName, idP2));
        assertTrue(featureNode.isPresent());
        assertEquals(oldFeatureType.getId(), featureNode.get().getMetadataId().get());
        featureNode = findTreeChild(root, NodeRef.appendChild(pointsName, idP1));
        assertTrue(featureNode.isPresent());
        assertFalse(featureNode.get().getMetadataId().isPresent());
    }
    // Adding a feature with a non-default type keeps the tree's default
    // metadata id and records the alternate id only on the new feature node.
    @Test
    public void testAddFeatureWithNonDefaultFeatureType() throws Exception {
        insert(points2, points3);
        Patch patch = new Patch();
        String path = NodeRef.appendChild(pointsName, points1.getIdentifier().getID());
        patch.addAddedFeature(path, RevFeatureBuilder.build(points1B),
                RevFeatureTypeBuilder.build(modifiedPointsType));
        geogig.command(ApplyPatchOp.class).setPatch(patch).call();
        RevTree root = repo.workingTree().getTree();
        assertNotNull(root);
        Optional<Node> typeTreeId = findTreeChild(root, pointsName);
        assertEquals(typeTreeId.get().getMetadataId().get(),
                RevFeatureTypeBuilder.build(pointsType).getId());
        RevTree typeTree = repo.getTree(typeTreeId.get().getObjectId());
        assertNotNull(typeTree);
        Optional<Node> featureBlobId = findTreeChild(root, path);
        assertEquals(RevFeatureTypeBuilder.build(modifiedPointsType).getId(),
                featureBlobId.get().getMetadataId().orNull());
        assertTrue(featureBlobId.isPresent());
        path = NodeRef.appendChild(pointsName, points3.getIdentifier().getID());
        featureBlobId = findTreeChild(root, path);
        assertEquals(null, featureBlobId.get().getMetadataId().orNull());
    }
}
| bsd-3-clause |
LWJGL-CI/lwjgl3 | modules/lwjgl/opengl/src/generated/java/org/lwjgl/opengl/EXT422Pixels.java | 1795 | /*
* Copyright LWJGL. All rights reserved.
* License terms: https://www.lwjgl.org/license
* MACHINE GENERATED FILE, DO NOT EDIT
*/
package org.lwjgl.opengl;
/**
* Native bindings to the <a target="_blank" href="https://www.khronos.org/registry/OpenGL/extensions/EXT/EXT_422_pixels.txt">EXT_422_pixels</a> extension.
*
* <p>This extension provides support for converting 422 pixels in host memory to 444 pixels as part of the pixel storage operation.</p>
*
* <p>The pixel unpack storage operation treats a 422 pixel as a 2 element format where the first element is C (chrominance) and the second element is L
* (luminance). Luminance is present on all pixels; a full chrominance value requires two pixels.</p>
*
* <p>The pixel pack storage operation converts RGB to a 422 pixel defined as a 2 element format where the first element stored is C (chrominance) and the
* second element stored is L (luminance). Luminance is present on all pixels; a full chrominance value requires two pixels.</p>
*
* <p>Both averaging and non-averaging is supported for green and blue assignments for pack and unpack operations.</p>
*/
public final class EXT422Pixels {

    /**
     * Accepted by the {@code format} parameter of DrawPixels, ReadPixels, TexImage1D, TexImage2D, GetTexImage, TexImage3D, TexSubImage1D, TexSubImage2D,
     * TexSubImage3D, GetHistogram, GetMinmax, ConvolutionFilter1D, ConvolutionFilter2D, ConvolutionFilter3D, GetConvolutionFilter, SeparableFilter2D,
     * SeparableFilter3D, GetSeparableFilter, ColorTable, and GetColorTable.
     */
    public static final int GL_422_EXT = 0x80CC;

    /** Accepted wherever {@link #GL_422_EXT} is; reversed component order per the extension spec. */
    public static final int GL_422_REV_EXT = 0x80CD;

    /** Accepted wherever {@link #GL_422_EXT} is; averaging variant per the extension spec. */
    public static final int GL_422_AVERAGE_EXT = 0x80CE;

    /** Accepted wherever {@link #GL_422_EXT} is; reversed, averaging variant per the extension spec. */
    public static final int GL_422_REV_AVERAGE_EXT = 0x80CF;

    /** Pure constant holder; never instantiated. */
    private EXT422Pixels() {}
}
joyplus/joyplus-tv | joytv/src/com/fasterxml/jackson/databind/util/LRUMap.java | 1709 | package com.fasterxml.jackson.databind.util;
import java.io.*;
import java.util.LinkedHashMap;
import java.util.Map;
/**
 * Helper for simple bounded LRU maps used for reusing lookup values.
 *<p>
 * Note that serialization behavior is such that contents are NOT serialized,
 * on assumption that all use cases are for caching where persistence
 * does not make sense. The only thing serialized is the cache size of Map.
 */
public class LRUMap<K,V> extends LinkedHashMap<K,V>
    implements java.io.Serializable
{
    private static final long serialVersionUID = 1L;

    /** Maximum number of entries retained; the eldest entry is evicted past this. */
    protected final int _maxEntries;

    /**
     * @param initialEntries initial capacity hint for the underlying map
     * @param maxEntries maximum number of entries to retain
     */
    public LRUMap(int initialEntries, int maxEntries)
    {
        // accessOrder=true gives LRU iteration order, which removeEldestEntry
        // relies on so that the *least recently used* entry is the one evicted.
        super(initialEntries, 0.8f, true);
        _maxEntries = maxEntries;
    }

    @Override
    protected boolean removeEldestEntry(Map.Entry<K,V> eldest)
    {
        return size() > _maxEntries;
    }

    /*
    /**********************************************************
    /* Serializable overrides
    /**********************************************************
     */

    /**
     * Ugly hack, to work through the requirement that _value is indeed final,
     * and that JDK serialization won't call ctor(s) if Serializable is implemented.
     *
     * @since 2.1
     */
    protected transient int _jdkSerializeMaxEntries;

    private void readObject(ObjectInputStream in) throws IOException {
        _jdkSerializeMaxEntries = in.readInt();
    }

    private void writeObject(ObjectOutputStream out) throws IOException {
        // FIX: must write the configured limit. Previously this wrote
        // _jdkSerializeMaxEntries, a transient field that is 0 for any
        // normally-constructed map, so a serialize/deserialize round-trip
        // produced an LRUMap(0, 0) that could never hold anything.
        out.writeInt(_maxEntries);
    }

    protected Object readResolve() {
        // Replace the deserialized instance with a properly-constructed,
        // empty map of the recorded size (contents intentionally dropped).
        return new LRUMap<Object,Object>(_jdkSerializeMaxEntries, _jdkSerializeMaxEntries);
    }
}
| bsd-3-clause |
pecko/rultor | src/main/java/com/rultor/web/TkDaemon.java | 4521 | /**
* Copyright (c) 2009-2015, rultor.com
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met: 1) Redistributions of source code must retain the above
* copyright notice, this list of conditions and the following
* disclaimer. 2) Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution. 3) Neither the name of the rultor.com nor
* the names of its contributors may be used to endorse or promote
* products derived from this software without specific prior written
* permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT
* NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
* FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
* THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
* OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.rultor.web;
import com.rultor.agents.daemons.Tail;
import com.rultor.spi.Talk;
import com.rultor.spi.Talks;
import java.io.IOException;
import java.io.InputStream;
import java.io.SequenceInputStream;
import java.util.Arrays;
import java.util.Collections;
import java.util.logging.Level;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.CharEncoding;
import org.takes.Response;
import org.takes.facets.flash.RsFlash;
import org.takes.facets.fork.RqRegex;
import org.takes.facets.fork.TkRegex;
import org.takes.facets.forward.RsForward;
import org.takes.rs.RsFluent;
/**
* Single daemon.
*
* @author Yegor Bugayenko (yegor@teamed.io)
* @version $Id$
* @since 1.50
*/
final class TkDaemon implements TkRegex {

    /**
     * Talks.
     */
    private final transient Talks talks;

    /**
     * Ctor.
     * @param tlks Talks
     */
    TkDaemon(final Talks tlks) {
        this.talks = tlks;
    }

    @Override
    public Response act(final RqRegex req) throws IOException {
        final long number = Long.parseLong(req.matcher().group(1));
        if (!this.talks.exists(number)) {
            throw new RsForward(
                new RsFlash(
                    "there is no such page here",
                    Level.WARNING
                )
            );
        }
        final RqUser user = new RqUser(req);
        // NOTE(review): anonymous users bypass the permission check entirely;
        // presumably RqUser#canSee is only meaningful for authenticated users,
        // but confirm this is the intended access policy.
        if (!user.anonymous()
            && !user.canSee(this.talks.get(number))) {
            throw new RsForward(
                new RsFlash(
                    String.format(
                        // @checkstyle LineLength (1 line)
                        "according to .rultor.yml, you (%s) are not allowed to see this",
                        user
                    ),
                    Level.WARNING
                )
            );
        }
        final String hash = req.matcher().group(2);
        return new RsFluent()
            .withBody(this.html(number, hash))
            .withType("text/html; charset=utf-8")
            .withHeader(
                "X-Rultor-Daemon",
                String.format("%s-%s", number, hash)
            );
    }

    /**
     * Get HTML.
     * @param number Number
     * @param hash Hash
     * @return HTML
     * @throws IOException If fails
     */
    private InputStream html(final long number, final String hash)
        throws IOException {
        final Talk talk = this.talks.get(number);
        final String head = IOUtils.toString(
            this.getClass().getResourceAsStream("daemon/head.html"),
            CharEncoding.UTF_8
        ).replace("TALK_NAME", talk.name());
        return new SequenceInputStream(
            Collections.enumeration(
                Arrays.asList(
                    // FIX: encode explicitly as UTF-8; the no-charset overload
                    // used the platform default charset, which mismatched the
                    // UTF-8 decode above (and the declared response charset)
                    // on non-UTF-8 platforms.
                    IOUtils.toInputStream(head, CharEncoding.UTF_8),
                    new Tail(talk.read(), hash).read(),
                    this.getClass().getResourceAsStream("daemon/tail.html")
                )
            )
        );
    }
}
| bsd-3-clause |
hispindia/dhis2-Core | dhis-2/dhis-web-api/src/main/java/org/hisp/dhis/webapi/controller/metadata/DataSetMetadataExportController.java | 2510 | /*
* Copyright (c) 2004-2022, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.hisp.dhis.webapi.controller.metadata;
import lombok.AllArgsConstructor;
import org.hisp.dhis.common.DhisApiVersion;
import org.hisp.dhis.dxf2.metadata.DataSetMetadataExportService;
import org.hisp.dhis.webapi.mvc.annotation.ApiVersion;
import org.springframework.http.ResponseEntity;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import com.fasterxml.jackson.databind.JsonNode;
/**
* @author Lars Helge Overland
*/
@Controller
@RequestMapping( "/dataSetMetadata" )
@ApiVersion( { DhisApiVersion.DEFAULT, DhisApiVersion.ALL } )
public class DataSetMetadataExportController
{
    /** Service that assembles the complete data set metadata document. */
    private final DataSetMetadataExportService exportService;

    /**
     * @param exportService the metadata export service, never null
     */
    public DataSetMetadataExportController( DataSetMetadataExportService exportService )
    {
        this.exportService = exportService;
    }

    /**
     * Returns the data set metadata document as JSON with HTTP 200.
     */
    @GetMapping
    public ResponseEntity<JsonNode> getMetadata()
    {
        JsonNode metadata = exportService.getDataSetMetadata();
        return ResponseEntity.ok( metadata );
    }
}
| bsd-3-clause |
ric2b/Vivaldi-browser | chromium/chrome/android/junit/src/org/chromium/chrome/browser/feed/NtpFeedSurfaceLifecycleManagerTest.java | 12581 | // Copyright 2018 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.chrome.browser.feed;
import static org.mockito.AdditionalMatchers.or;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyBoolean;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.ArgumentMatchers.isNull;
import static org.mockito.Mockito.doNothing;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import static org.chromium.chrome.browser.tab.TabHidingType.CHANGED_TABS;
import static org.chromium.chrome.browser.tab.TabSelectionType.FROM_NEW;
import static org.chromium.chrome.browser.tab.TabSelectionType.FROM_USER;
import android.app.Activity;
import androidx.test.filters.SmallTest;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InOrder;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.MockitoAnnotations;
import org.robolectric.annotation.Config;
import org.chromium.base.ActivityState;
import org.chromium.base.ApplicationStatus;
import org.chromium.base.test.BaseRobolectricTestRunner;
import org.chromium.chrome.browser.preferences.Pref;
import org.chromium.chrome.browser.tab.TabHidingType;
import org.chromium.chrome.browser.tab.TabImpl;
import org.chromium.components.prefs.PrefService;
/**
 * Unit tests for {@link NtpFeedSurfaceLifecycleManager}, the NTP-specific
 * {@link FeedSurfaceLifecycleManager}: verifies that Stream lifecycle callbacks
 * (onCreate/onShow/onHide/onDestroy) are driven correctly by activity state
 * changes, tab visibility/interactability changes, and the articles-visible pref.
 */
@RunWith(BaseRobolectricTestRunner.class)
@Config(manifest = Config.NONE)
public class NtpFeedSurfaceLifecycleManagerTest {
    // Activity whose lifecycle transitions are simulated via ApplicationStatus.
    @Mock
    private Activity mActivity;
    // Tab hosting the feed; visibility/interactability are stubbed per test.
    @Mock
    private TabImpl mTab;
    // Stream whose lifecycle callbacks are verified.
    // NOTE(review): mStream is never passed to (or wired into)
    // mNtpStreamLifecycleManager in this snippet — the manager is constructed
    // with a null third argument — yet verify(mStream, ...) is used throughout.
    // Confirm against upstream how the mock is meant to be connected.
    @Mock
    private Stream mStream;
    // Backs the ARTICLES_LIST_VISIBLE pref read by the manager.
    @Mock
    private PrefService mPrefService;
    private NtpFeedSurfaceLifecycleManager mNtpStreamLifecycleManager;
    @Before
    public void setUp() {
        MockitoAnnotations.initMocks(this);
        // Initialize a test instance for PrefService.
        when(mPrefService.getBoolean(anyString())).thenReturn(true);
        doNothing().when(mPrefService).setBoolean(anyString(), anyBoolean());
        NtpFeedSurfaceLifecycleManager.setPrefServiceForTesting(mPrefService);
        ApplicationStatus.onStateChangeForTesting(mActivity, ActivityState.CREATED);
        mNtpStreamLifecycleManager = new NtpFeedSurfaceLifecycleManager(mActivity, mTab, null);
        // onCreate may be invoked with a saved-instance state string or null.
        verify(mStream, times(1)).onCreate(or(any(String.class), isNull()));
    }
    @After
    public void tearDown() {
        // Clear the static pref override so it cannot leak into other tests.
        NtpFeedSurfaceLifecycleManager.setPrefServiceForTesting(null);
    }
    @Test
    @SmallTest
    public void testShow() {
        // Verify that onShow is not called before activity started.
        when((mTab).isHidden()).thenReturn(false);
        when(mTab.isUserInteractable()).thenReturn(true);
        mNtpStreamLifecycleManager.getTabObserverForTesting().onShown(mTab, FROM_NEW);
        verify(mStream, times(0)).onShow();
        // Verify that onShow is not called when Tab is hidden.
        when((mTab).isHidden()).thenReturn(true);
        ApplicationStatus.onStateChangeForTesting(mActivity, ActivityState.STARTED);
        verify(mStream, times(0)).onShow();
        // Verify that onShow is called when Tab is shown and activity is started.
        when((mTab).isHidden()).thenReturn(false);
        mNtpStreamLifecycleManager.getTabObserverForTesting().onShown(mTab, FROM_NEW);
        verify(mStream, times(1)).onShow();
        // When the Stream is shown, it won't call Stream#onShow() again.
        mNtpStreamLifecycleManager.getTabObserverForTesting().onShown(mTab, FROM_NEW);
        verify(mStream, times(1)).onShow();
    }
    @Test
    @SmallTest
    public void testShow_ArticlesNotVisible() {
        // Verify that onShow is not called when articles are set hidden by the user.
        when(mPrefService.getBoolean(Pref.ARTICLES_LIST_VISIBLE)).thenReturn(false);
        ApplicationStatus.onStateChangeForTesting(mActivity, ActivityState.STARTED);
        when((mTab).isHidden()).thenReturn(false);
        when(mTab.isUserInteractable()).thenReturn(true);
        mNtpStreamLifecycleManager.getTabObserverForTesting().onShown(mTab, FROM_NEW);
        verify(mStream, times(0)).onShow();
        // Verify that onShow is called when articles are set shown by the user.
        when(mPrefService.getBoolean(Pref.ARTICLES_LIST_VISIBLE)).thenReturn(true);
        mNtpStreamLifecycleManager.getTabObserverForTesting().onShown(mTab, FROM_NEW);
        verify(mStream, times(1)).onShow();
        // Verify that onHide is called after tab is hidden.
        mNtpStreamLifecycleManager.getTabObserverForTesting().onHidden(mTab, CHANGED_TABS);
        verify(mStream, times(1)).onHide();
    }
    @Test
    @SmallTest
    public void testHideFromActivityStopped() {
        // Activate the Stream.
        when((mTab).isHidden()).thenReturn(false);
        when(mTab.isUserInteractable()).thenReturn(true);
        ApplicationStatus.onStateChangeForTesting(mActivity, ActivityState.RESUMED);
        verify(mStream, times(1)).onShow();
        // Deactivate the Stream.
        ApplicationStatus.onStateChangeForTesting(mActivity, ActivityState.PAUSED);
        // Verify that the Stream can be set hidden from inactive.
        ApplicationStatus.onStateChangeForTesting(mActivity, ActivityState.STOPPED);
        verify(mStream, times(1)).onHide();
    }
    @Test
    @SmallTest
    public void testHideFromTabHiddenAfterShow() {
        // Show the stream.
        when((mTab).isHidden()).thenReturn(false);
        ApplicationStatus.onStateChangeForTesting(mActivity, ActivityState.STARTED);
        verify(mStream, times(1)).onShow();
        // Hide the stream.
        mNtpStreamLifecycleManager.getTabObserverForTesting().onHidden(
                mTab, TabHidingType.CHANGED_TABS);
        verify(mStream, times(1)).onShow();
        verify(mStream, times(1)).onHide();
    }
    @Test
    @SmallTest
    public void testDestroy() {
        // Verify that Stream#onDestroy is called on activity destroyed.
        ApplicationStatus.onStateChangeForTesting(mActivity, ActivityState.DESTROYED);
        verify(mStream, times(1)).onDestroy();
    }
    @Test
    @SmallTest
    public void testDestroyAfterCreate() {
        // After the Stream is destroyed, lifecycle methods should never be called. Directly calling
        // destroy here to simulate destroy() being called on FeedNewTabPage destroyed.
        mNtpStreamLifecycleManager.destroy();
        verify(mStream, times(1)).onDestroy();
        // Verify that lifecycle methods are not called after destroy.
        ApplicationStatus.onStateChangeForTesting(mActivity, ActivityState.STARTED);
        ApplicationStatus.onStateChangeForTesting(mActivity, ActivityState.RESUMED);
        ApplicationStatus.onStateChangeForTesting(mActivity, ActivityState.PAUSED);
        ApplicationStatus.onStateChangeForTesting(mActivity, ActivityState.STOPPED);
        ApplicationStatus.onStateChangeForTesting(mActivity, ActivityState.DESTROYED);
        verify(mStream, times(0)).onShow();
        verify(mStream, times(0)).onHide();
        verify(mStream, times(1)).onDestroy();
    }
    @Test
    @SmallTest
    public void testDestroyAfterActivate() {
        InOrder inOrder = Mockito.inOrder(mStream);
        when((mTab).isHidden()).thenReturn(false);
        when(mTab.isUserInteractable()).thenReturn(true);
        // Activate the Stream.
        ApplicationStatus.onStateChangeForTesting(mActivity, ActivityState.RESUMED);
        inOrder.verify(mStream).onShow();
        verify(mStream, times(1)).onShow();
        // Verify that onInactive and onHide is called before onDestroy.
        ApplicationStatus.onStateChangeForTesting(mActivity, ActivityState.DESTROYED);
        inOrder.verify(mStream).onHide();
        inOrder.verify(mStream).onDestroy();
        verify(mStream, times(1)).onHide();
        verify(mStream, times(1)).onDestroy();
    }
    @Test
    @SmallTest
    public void testFullActivityLifecycle() {
        // Exercises the full foreground/background/destroy cycle; InOrder pins
        // the exact onShow -> onHide -> onDestroy sequencing.
        InOrder inOrder = Mockito.inOrder(mStream);
        when((mTab).isHidden()).thenReturn(false);
        when(mTab.isUserInteractable()).thenReturn(true);
        // On activity start and resume (simulates app become foreground).
        ApplicationStatus.onStateChangeForTesting(mActivity, ActivityState.STARTED);
        ApplicationStatus.onStateChangeForTesting(mActivity, ActivityState.RESUMED);
        inOrder.verify(mStream).onShow();
        verify(mStream, times(1)).onShow();
        // On activity pause and then resume (simulates multi-window mode).
        ApplicationStatus.onStateChangeForTesting(mActivity, ActivityState.PAUSED);
        ApplicationStatus.onStateChangeForTesting(mActivity, ActivityState.RESUMED);
        // On activity stop (simulates app switched to background).
        ApplicationStatus.onStateChangeForTesting(mActivity, ActivityState.PAUSED);
        ApplicationStatus.onStateChangeForTesting(mActivity, ActivityState.STOPPED);
        inOrder.verify(mStream).onHide();
        verify(mStream, times(1)).onHide();
        // On activity start (simulates app switched back to foreground).
        ApplicationStatus.onStateChangeForTesting(mActivity, ActivityState.STARTED);
        ApplicationStatus.onStateChangeForTesting(mActivity, ActivityState.RESUMED);
        inOrder.verify(mStream).onShow();
        verify(mStream, times(2)).onShow();
        // On activity pause, stop, and destroy (simulates app removed from Android recents).
        ApplicationStatus.onStateChangeForTesting(mActivity, ActivityState.PAUSED);
        ApplicationStatus.onStateChangeForTesting(mActivity, ActivityState.STOPPED);
        ApplicationStatus.onStateChangeForTesting(mActivity, ActivityState.DESTROYED);
        inOrder.verify(mStream).onHide();
        inOrder.verify(mStream).onDestroy();
        verify(mStream, times(2)).onHide();
        verify(mStream, times(1)).onDestroy();
    }
    @Test
    @SmallTest
    public void testFullTabLifecycle() {
        // Exercises tab show/hide/interactability transitions with the activity
        // already resumed; InOrder pins the callback sequencing.
        InOrder inOrder = Mockito.inOrder(mStream);
        // On new tab page created.
        when((mTab).isHidden()).thenReturn(true);
        when(mTab.isUserInteractable()).thenReturn(false);
        ApplicationStatus.onStateChangeForTesting(mActivity, ActivityState.STARTED);
        ApplicationStatus.onStateChangeForTesting(mActivity, ActivityState.RESUMED);
        verify(mStream, times(0)).onShow();
        // On tab shown.
        when((mTab).isHidden()).thenReturn(false);
        mNtpStreamLifecycleManager.getTabObserverForTesting().onShown(mTab, FROM_NEW);
        inOrder.verify(mStream).onShow();
        verify(mStream, times(1)).onShow();
        // On tab interactable.
        when(mTab.isUserInteractable()).thenReturn(true);
        mNtpStreamLifecycleManager.getTabObserverForTesting().onInteractabilityChanged(mTab, true);
        // On tab un-interactable (simulates user enter the tab switcher).
        when(mTab.isUserInteractable()).thenReturn(false);
        mNtpStreamLifecycleManager.getTabObserverForTesting().onInteractabilityChanged(mTab, false);
        // On tab interactable (simulates user exit the tab switcher).
        when(mTab.isUserInteractable()).thenReturn(true);
        mNtpStreamLifecycleManager.getTabObserverForTesting().onInteractabilityChanged(mTab, true);
        // On tab un-interactable and hidden (simulates user switch to another tab).
        when((mTab).isHidden()).thenReturn(true);
        when(mTab.isUserInteractable()).thenReturn(false);
        mNtpStreamLifecycleManager.getTabObserverForTesting().onInteractabilityChanged(mTab, false);
        mNtpStreamLifecycleManager.getTabObserverForTesting().onHidden(mTab, CHANGED_TABS);
        inOrder.verify(mStream).onHide();
        verify(mStream, times(1)).onHide();
        // On tab shown (simulates user switch back to this tab).
        when((mTab).isHidden()).thenReturn(false);
        mNtpStreamLifecycleManager.getTabObserverForTesting().onShown(mTab, FROM_USER);
        inOrder.verify(mStream).onShow();
        verify(mStream, times(2)).onShow();
        // On tab destroy (simulates user close the tab or navigate to another URL).
        mNtpStreamLifecycleManager.destroy();
        inOrder.verify(mStream).onHide();
        inOrder.verify(mStream).onDestroy();
        verify(mStream, times(2)).onHide();
        verify(mStream, times(1)).onDestroy();
    }
}
| bsd-3-clause |
ouyangxiangshao/java-design-patterns | dao/src/main/java/com/iluwatar/dao/Customer.java | 1351 | package com.iluwatar.dao;
/**
 * Customer entity for the DAO example.
 *
 * <p>Identity is defined solely by {@code id}: two customers with the same id
 * are equal regardless of their names, and {@code hashCode} is consistent
 * with that definition.
 */
public class Customer {

  private int id;
  private String firstName;
  private String lastName;

  /**
   * Constructor
   *
   * @param id unique identifier; the sole basis of equality
   * @param firstName customer's first name
   * @param lastName customer's last name
   */
  public Customer(final int id, final String firstName, final String lastName) {
    this.id = id;
    this.firstName = firstName;
    this.lastName = lastName;
  }

  public int getId() {
    return id;
  }

  public void setId(final int id) {
    this.id = id;
  }

  public String getFirstName() {
    return firstName;
  }

  public void setFirstName(final String firstName) {
    this.firstName = firstName;
  }

  public String getLastName() {
    return lastName;
  }

  public void setLastName(final String lastName) {
    this.lastName = lastName;
  }

  @Override
  public String toString() {
    return "Customer{" + "id=" + getId() + ", firstName='" + getFirstName() + '\'' + ", lastName='"
        + getLastName() + '\'' + '}';
  }

  @Override
  public boolean equals(final Object o) {
    // Early returns replace the original mutable flag-variable chain.
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    return getId() == ((Customer) o).getId();
  }

  @Override
  public int hashCode() {
    // Based on id only, to stay consistent with equals.
    return getId();
  }
}
| mit |
AlphaModder/SpongeAPI | src/main/java/org/spongepowered/api/data/manipulator/mutable/common/AbstractMappedData.java | 3850 | /*
* This file is part of SpongeAPI, licensed under the MIT License (MIT).
*
* Copyright (c) SpongePowered <https://www.spongepowered.org>
* Copyright (c) contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package org.spongepowered.api.data.manipulator.mutable.common;
import com.google.common.base.Objects;
import com.google.common.collect.Maps;
import org.spongepowered.api.Sponge;
import org.spongepowered.api.data.key.Key;
import org.spongepowered.api.data.manipulator.DataManipulator;
import org.spongepowered.api.data.manipulator.ImmutableDataManipulator;
import org.spongepowered.api.data.manipulator.immutable.ImmutableMappedData;
import org.spongepowered.api.data.manipulator.mutable.ListData;
import org.spongepowered.api.data.manipulator.mutable.MappedData;
import org.spongepowered.api.data.value.BaseValue;
import org.spongepowered.api.data.value.mutable.MapValue;
import org.spongepowered.api.util.CollectionUtils;
import java.util.Map;
/**
* A common implementation for {@link ListData}s provided by the API.
*
* @param <K> The type of key in the map
* @param <V> The type of value in the map
* @param <M> The type of {@link DataManipulator}
* @param <I> The type of {@link ImmutableDataManipulator}
*/
@SuppressWarnings("unchecked")
public abstract class AbstractMappedData<K, V, M extends MappedData<K, V, M, I>, I extends ImmutableMappedData<K, V, I, M>>
        extends AbstractSingleData<Map<K, V>, M, I> implements MappedData<K, V, M, I> {

    protected AbstractMappedData(Map<K, V> value, Key<? extends BaseValue<Map<K, V>>> usedKey) {
        // Snapshot the incoming map so external mutation cannot leak in.
        super(CollectionUtils.copyMap(value), usedKey);
    }

    @Override
    protected MapValue<K, V> getValueGetter() {
        final Key<MapValue<K, V>> mapKey = (Key<MapValue<K, V>>) this.usedKey;
        return Sponge.getRegistry().getValueFactory().createMapValue(mapKey, this.getValue());
    }

    @Override
    protected Map<K, V> getValue() {
        // Always hand out a fresh mutable copy, never the internal map.
        return Maps.newHashMap(super.getValue());
    }

    @Override
    protected M setValue(Map<K, V> value) {
        // Store a defensive copy of the supplied map.
        return super.setValue(Maps.newHashMap(value));
    }

    @Override
    public int hashCode() {
        return 31 * super.hashCode() + Objects.hashCode(this.getValue());
    }

    @SuppressWarnings("rawtypes")
    @Override
    public boolean equals(Object that) {
        if (this == that) {
            return true;
        }
        if (that == null || getClass() != that.getClass() || !super.equals(that)) {
            return false;
        }
        return Objects.equal(this.getValue(), ((AbstractMappedData) that).getValue());
    }

    @Override
    public MapValue<K, V> getMapValue() {
        return this.getValueGetter();
    }

    @Override
    public Map<K, V> asMap() {
        return this.getValue();
    }
}
| mit |
csmith/DMDirc | src/main/java/com/dmdirc/commandparser/parsers/ServerCommandParser.java | 4838 | /*
* Copyright (c) 2006-2017 DMDirc Developers
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
* documentation files (the "Software"), to deal in the Software without restriction, including without limitation the
* rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all copies or substantial portions of the
* Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
* WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
* OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
* OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package com.dmdirc.commandparser.parsers;
import com.dmdirc.ServerState;
import com.dmdirc.commandparser.CommandArguments;
import com.dmdirc.commandparser.CommandInfo;
import com.dmdirc.commandparser.CommandType;
import com.dmdirc.commandparser.commands.Command;
import com.dmdirc.commandparser.commands.context.CommandContext;
import com.dmdirc.commandparser.commands.context.ServerCommandContext;
import com.dmdirc.events.CommandErrorEvent;
import com.dmdirc.interfaces.CommandController;
import com.dmdirc.interfaces.Connection;
import com.dmdirc.events.eventbus.EventBus;
import com.dmdirc.interfaces.WindowModel;
import com.dmdirc.config.provider.AggregateConfigProvider;
import javax.annotation.Nonnull;
import static com.google.common.base.Preconditions.checkNotNull;
/**
* A command parser used in the context of a server.
*/
public class ServerCommandParser extends GlobalCommandParser {

    /** A version number for this class. */
    private static final long serialVersionUID = 1;

    /**
     * The server instance that this parser is attached to.
     */
    private final Connection server;

    /**
     * Creates a new command parser for server commands.
     *
     * @param configManager Config manager to read settings from
     * @param commandController The controller to load commands from.
     * @param eventBus Event bus to post events on
     * @param connection The (non-null) connection this parser is attached to
     */
    public ServerCommandParser(
            final AggregateConfigProvider configManager,
            final CommandController commandController,
            final EventBus eventBus,
            final Connection connection) {
        super(configManager, commandController, eventBus);
        this.server = checkNotNull(connection);
    }

    /** Loads the relevant commands into the parser. */
    @Override
    protected void loadCommands() {
        commandManager.loadCommands(this, CommandType.TYPE_GLOBAL, CommandType.TYPE_SERVER);
    }

    @Override
    protected CommandContext getCommandContext(
            final WindowModel origin,
            final CommandInfo commandInfo,
            final Command command,
            final CommandArguments args) {
        return new ServerCommandContext(origin, commandInfo, server);
    }

    @Override
    protected void executeCommand(
            @Nonnull final WindowModel origin,
            final CommandInfo commandInfo,
            final Command command,
            final CommandArguments args,
            final CommandContext context) {
        if (commandInfo.getType() != CommandType.TYPE_SERVER) {
            super.executeCommand(origin, commandInfo, command, args, context);
            return;
        }
        if (requiresConnection(command) && !connectionUsable()) {
            // Command needs a live connection and there is none; report unless silenced.
            if (!args.isSilent()) {
                origin.getEventBus().publishAsync(new CommandErrorEvent(origin,
                        "You must be connected to use this command"));
            }
        } else {
            command.execute(origin, args, context);
        }
    }

    /**
     * Whether the given command declares options that forbid offline execution.
     *
     * @param command The command being executed
     * @return true if the command must not run while disconnected
     */
    private boolean requiresConnection(final Command command) {
        return hasCommandOptions(command) && !getCommandOptions(command).allowOffline();
    }

    /**
     * Whether the attached server is in a state where commands can be sent:
     * connected or connecting, with a parser present.
     *
     * @return true if the connection is usable
     */
    private boolean connectionUsable() {
        return server != null
                && (server.getState() == ServerState.CONNECTED
                    || server.getState() == ServerState.CONNECTING)
                && server.getParser().isPresent();
    }

    /**
     * Called when the input was a line of text that was not a command. This normally means it is
     * sent to the server/channel/user as-is, with no further processing.
     *
     * @param origin The window in which the command was typed
     * @param line The line input by the user
     */
    @Override
    protected void handleNonCommand(final WindowModel origin, final String line) {
        server.sendLine(line);
    }
}
| mit |
asposecells/Aspose_Cells_Cloud | SDKs/Aspose.Cells-Cloud-SDK-for-Java/src/main/java/com/aspose/cells/model/OleObject.java | 17282 | package com.aspose.cells.model;
public class OleObject {
private Boolean DisplayAsIcon = null;
private String FileFormatType = null;
private String ImageSourceFullName = null;
private Boolean IsAutoSize = null;
private Boolean IsLink = null;
private String ProgID = null;
private String SourceFullName = null;
private String Name = null;
private String MsoDrawingType = null;
private String AutoShapeType = null;
private String Placement = null;
private Integer UpperLeftRow = null;
private Integer Top = null;
private Integer UpperLeftColumn = null;
private Integer Left = null;
private Integer LowerRightRow = null;
private Integer Bottom = null;
private Integer LowerRightColumn = null;
private Integer Right = null;
private Integer Width = null;
private Integer Height = null;
private Integer X = null;
private Integer Y = null;
private Double RotationAngle = null;
private String HtmlText = null;
private String Text = null;
private String AlternativeText = null;
private String TextHorizontalAlignment = null;
private String TextHorizontalOverflow = null;
private String TextOrientationType = null;
private String TextVerticalAlignment = null;
private String TextVerticalOverflow = null;
private Boolean IsGroup = null;
private Boolean IsHidden = null;
private Boolean IsLockAspectRatio = null;
private Boolean IsLocked = null;
private Boolean IsPrintable = null;
private Boolean IsTextWrapped = null;
private Boolean IsWordArt = null;
private String LinkedCell = null;
private Integer ZOrderPosition = null;
private Link link = null;
/**
* getDisplayAsIcon
* Gets Boolean
* @return DisplayAsIcon
*/
public Boolean getDisplayAsIcon() {
return DisplayAsIcon;
}
/**
* setDisplayAsIcon
* Sets Boolean
* @param DisplayAsIcon Boolean
*/
public void setDisplayAsIcon(Boolean DisplayAsIcon) {
this.DisplayAsIcon = DisplayAsIcon;
}
/**
* getFileFormatType
* Gets String
* @return FileFormatType
*/
public String getFileFormatType() {
return FileFormatType;
}
/**
* setFileFormatType
* Sets String
* @param FileFormatType String
*/
public void setFileFormatType(String FileFormatType) {
this.FileFormatType = FileFormatType;
}
/**
* getImageSourceFullName
* Gets String
* @return ImageSourceFullName
*/
public String getImageSourceFullName() {
return ImageSourceFullName;
}
/**
* setImageSourceFullName
* Sets String
* @param ImageSourceFullName String
*/
public void setImageSourceFullName(String ImageSourceFullName) {
this.ImageSourceFullName = ImageSourceFullName;
}
/**
* getIsAutoSize
* Gets Boolean
* @return IsAutoSize
*/
public Boolean getIsAutoSize() {
return IsAutoSize;
}
/**
* setIsAutoSize
* Sets Boolean
* @param IsAutoSize Boolean
*/
public void setIsAutoSize(Boolean IsAutoSize) {
this.IsAutoSize = IsAutoSize;
}
/**
* getIsLink
* Gets Boolean
* @return IsLink
*/
public Boolean getIsLink() {
return IsLink;
}
/**
* setIsLink
* Sets Boolean
* @param IsLink Boolean
*/
public void setIsLink(Boolean IsLink) {
this.IsLink = IsLink;
}
/**
* getProgID
* Gets String
* @return ProgID
*/
public String getProgID() {
return ProgID;
}
/**
* setProgID
* Sets String
* @param ProgID String
*/
public void setProgID(String ProgID) {
this.ProgID = ProgID;
}
/**
* getSourceFullName
* Gets String
* @return SourceFullName
*/
public String getSourceFullName() {
return SourceFullName;
}
/**
* setSourceFullName
* Sets String
* @param SourceFullName String
*/
public void setSourceFullName(String SourceFullName) {
this.SourceFullName = SourceFullName;
}
/**
* getName
* Gets String
* @return Name
*/
public String getName() {
return Name;
}
/**
* setName
* Sets String
* @param Name String
*/
public void setName(String Name) {
this.Name = Name;
}
/**
* getMsoDrawingType
* Gets String
* @return MsoDrawingType
*/
public String getMsoDrawingType() {
return MsoDrawingType;
}
/**
* setMsoDrawingType
* Sets String
* @param MsoDrawingType String
*/
public void setMsoDrawingType(String MsoDrawingType) {
this.MsoDrawingType = MsoDrawingType;
}
/**
* getAutoShapeType
* Gets String
* @return AutoShapeType
*/
public String getAutoShapeType() {
return AutoShapeType;
}
/**
* setAutoShapeType
* Sets String
* @param AutoShapeType String
*/
public void setAutoShapeType(String AutoShapeType) {
this.AutoShapeType = AutoShapeType;
}
/**
* getPlacement
* Gets String
* @return Placement
*/
public String getPlacement() {
return Placement;
}
/**
* setPlacement
* Sets String
* @param Placement String
*/
public void setPlacement(String Placement) {
this.Placement = Placement;
}
/**
* getUpperLeftRow
* Gets Integer
* @return UpperLeftRow
*/
public Integer getUpperLeftRow() {
return UpperLeftRow;
}
/**
* setUpperLeftRow
* Sets Integer
* @param UpperLeftRow Integer
*/
public void setUpperLeftRow(Integer UpperLeftRow) {
this.UpperLeftRow = UpperLeftRow;
}
/**
* getTop
* Gets Integer
* @return Top
*/
public Integer getTop() {
return Top;
}
/**
* setTop
* Sets Integer
* @param Top Integer
*/
public void setTop(Integer Top) {
this.Top = Top;
}
/**
* getUpperLeftColumn
* Gets Integer
* @return UpperLeftColumn
*/
public Integer getUpperLeftColumn() {
return UpperLeftColumn;
}
/**
* setUpperLeftColumn
* Sets Integer
* @param UpperLeftColumn Integer
*/
public void setUpperLeftColumn(Integer UpperLeftColumn) {
this.UpperLeftColumn = UpperLeftColumn;
}
/**
* getLeft
* Gets Integer
* @return Left
*/
public Integer getLeft() {
return Left;
}
/**
* setLeft
* Sets Integer
* @param Left Integer
*/
public void setLeft(Integer Left) {
this.Left = Left;
}
/**
* getLowerRightRow
* Gets Integer
* @return LowerRightRow
*/
public Integer getLowerRightRow() {
return LowerRightRow;
}
/**
* setLowerRightRow
* Sets Integer
* @param LowerRightRow Integer
*/
public void setLowerRightRow(Integer LowerRightRow) {
this.LowerRightRow = LowerRightRow;
}
/**
* getBottom
* Gets Integer
* @return Bottom
*/
public Integer getBottom() {
return Bottom;
}
/**
* setBottom
* Sets Integer
* @param Bottom Integer
*/
public void setBottom(Integer Bottom) {
this.Bottom = Bottom;
}
/**
* getLowerRightColumn
* Gets Integer
* @return LowerRightColumn
*/
public Integer getLowerRightColumn() {
return LowerRightColumn;
}
/**
* setLowerRightColumn
* Sets Integer
* @param LowerRightColumn Integer
*/
public void setLowerRightColumn(Integer LowerRightColumn) {
this.LowerRightColumn = LowerRightColumn;
}
/**
* getRight
* Gets Integer
* @return Right
*/
public Integer getRight() {
return Right;
}
/**
* setRight
* Sets Integer
* @param Right Integer
*/
public void setRight(Integer Right) {
this.Right = Right;
}
/**
* getWidth
* Gets Integer
* @return Width
*/
public Integer getWidth() {
return Width;
}
/**
* setWidth
* Sets Integer
* @param Width Integer
*/
public void setWidth(Integer Width) {
this.Width = Width;
}
/**
* getHeight
* Gets Integer
* @return Height
*/
public Integer getHeight() {
return Height;
}
/**
* setHeight
* Sets Integer
* @param Height Integer
*/
public void setHeight(Integer Height) {
this.Height = Height;
}
/**
* getX
* Gets Integer
* @return X
*/
public Integer getX() {
return X;
}
/**
* setX
* Sets Integer
* @param X Integer
*/
public void setX(Integer X) {
this.X = X;
}
/**
* getY
* Gets Integer
* @return Y
*/
public Integer getY() {
return Y;
}
/**
* setY
* Sets Integer
* @param Y Integer
*/
public void setY(Integer Y) {
this.Y = Y;
}
/**
* getRotationAngle
* Gets Double
* @return RotationAngle
*/
public Double getRotationAngle() {
return RotationAngle;
}
/**
* setRotationAngle
* Sets Double
* @param RotationAngle Double
*/
public void setRotationAngle(Double RotationAngle) {
this.RotationAngle = RotationAngle;
}
/**
* getHtmlText
* Gets String
* @return HtmlText
*/
public String getHtmlText() {
return HtmlText;
}
/**
* setHtmlText
* Sets String
* @param HtmlText String
*/
public void setHtmlText(String HtmlText) {
this.HtmlText = HtmlText;
}
/**
* getText
* Gets String
* @return Text
*/
public String getText() {
return Text;
}
/**
* setText
* Sets String
* @param Text String
*/
public void setText(String Text) {
this.Text = Text;
}
/**
* getAlternativeText
* Gets String
* @return AlternativeText
*/
public String getAlternativeText() {
return AlternativeText;
}
/**
* setAlternativeText
* Sets String
* @param AlternativeText String
*/
public void setAlternativeText(String AlternativeText) {
this.AlternativeText = AlternativeText;
}
/**
* getTextHorizontalAlignment
* Gets String
* @return TextHorizontalAlignment
*/
public String getTextHorizontalAlignment() {
return TextHorizontalAlignment;
}
/**
* setTextHorizontalAlignment
* Sets String
* @param TextHorizontalAlignment String
*/
public void setTextHorizontalAlignment(String TextHorizontalAlignment) {
this.TextHorizontalAlignment = TextHorizontalAlignment;
}
/**
* getTextHorizontalOverflow
* Gets String
* @return TextHorizontalOverflow
*/
public String getTextHorizontalOverflow() {
return TextHorizontalOverflow;
}
/**
* setTextHorizontalOverflow
* Sets String
* @param TextHorizontalOverflow String
*/
public void setTextHorizontalOverflow(String TextHorizontalOverflow) {
this.TextHorizontalOverflow = TextHorizontalOverflow;
}
/**
* getTextOrientationType
* Gets String
* @return TextOrientationType
*/
public String getTextOrientationType() {
return TextOrientationType;
}
/**
* setTextOrientationType
* Sets String
* @param TextOrientationType String
*/
public void setTextOrientationType(String TextOrientationType) {
this.TextOrientationType = TextOrientationType;
}
/**
* getTextVerticalAlignment
* Gets String
* @return TextVerticalAlignment
*/
public String getTextVerticalAlignment() {
return TextVerticalAlignment;
}
/**
* setTextVerticalAlignment
* Sets String
* @param TextVerticalAlignment String
*/
public void setTextVerticalAlignment(String TextVerticalAlignment) {
this.TextVerticalAlignment = TextVerticalAlignment;
}
/**
* getTextVerticalOverflow
* Gets String
* @return TextVerticalOverflow
*/
public String getTextVerticalOverflow() {
return TextVerticalOverflow;
}
/**
* setTextVerticalOverflow
* Sets String
* @param TextVerticalOverflow String
*/
public void setTextVerticalOverflow(String TextVerticalOverflow) {
this.TextVerticalOverflow = TextVerticalOverflow;
}
/**
* getIsGroup
* Gets Boolean
* @return IsGroup
*/
public Boolean getIsGroup() {
return IsGroup;
}
/**
* setIsGroup
* Sets Boolean
* @param IsGroup Boolean
*/
public void setIsGroup(Boolean IsGroup) {
this.IsGroup = IsGroup;
}
/**
* getIsHidden
* Gets Boolean
* @return IsHidden
*/
public Boolean getIsHidden() {
return IsHidden;
}
/**
* setIsHidden
* Sets Boolean
* @param IsHidden Boolean
*/
public void setIsHidden(Boolean IsHidden) {
this.IsHidden = IsHidden;
}
/**
* getIsLockAspectRatio
* Gets Boolean
* @return IsLockAspectRatio
*/
public Boolean getIsLockAspectRatio() {
return IsLockAspectRatio;
}
/**
* setIsLockAspectRatio
* Sets Boolean
* @param IsLockAspectRatio Boolean
*/
public void setIsLockAspectRatio(Boolean IsLockAspectRatio) {
this.IsLockAspectRatio = IsLockAspectRatio;
}
/**
* getIsLocked
* Gets Boolean
* @return IsLocked
*/
public Boolean getIsLocked() {
return IsLocked;
}
/**
* setIsLocked
* Sets Boolean
* @param IsLocked Boolean
*/
public void setIsLocked(Boolean IsLocked) {
this.IsLocked = IsLocked;
}
/**
* getIsPrintable
* Gets Boolean
* @return IsPrintable
*/
public Boolean getIsPrintable() {
return IsPrintable;
}
/**
* setIsPrintable
* Sets Boolean
* @param IsPrintable Boolean
*/
public void setIsPrintable(Boolean IsPrintable) {
this.IsPrintable = IsPrintable;
}
/**
* getIsTextWrapped
* Gets Boolean
* @return IsTextWrapped
*/
public Boolean getIsTextWrapped() {
return IsTextWrapped;
}
/**
* setIsTextWrapped
* Sets Boolean
* @param IsTextWrapped Boolean
*/
public void setIsTextWrapped(Boolean IsTextWrapped) {
this.IsTextWrapped = IsTextWrapped;
}
/**
* getIsWordArt
* Gets Boolean
* @return IsWordArt
*/
public Boolean getIsWordArt() {
return IsWordArt;
}
/**
* setIsWordArt
* Sets Boolean
* @param IsWordArt Boolean
*/
public void setIsWordArt(Boolean IsWordArt) {
this.IsWordArt = IsWordArt;
}
/**
* getLinkedCell
* Gets String
* @return LinkedCell
*/
public String getLinkedCell() {
return LinkedCell;
}
/**
* setLinkedCell
* Sets String
* @param LinkedCell String
*/
public void setLinkedCell(String LinkedCell) {
this.LinkedCell = LinkedCell;
}
/**
* getZOrderPosition
* Gets Integer
* @return ZOrderPosition
*/
public Integer getZOrderPosition() {
return ZOrderPosition;
}
/**
* setZOrderPosition
* Sets Integer
* @param ZOrderPosition Integer
*/
public void setZOrderPosition(Integer ZOrderPosition) {
this.ZOrderPosition = ZOrderPosition;
}
/**
* getLink
* Gets Link
* @return link
*/
public Link getLink() {
return link;
}
/**
* setLink
* Sets Link
* @param link Link
*/
public void setLink(Link link) {
this.link = link;
}
/**
 * Renders every property of this object, one "  Name: value" line per
 * field, in declaration order. Null fields print as "null".
 */
@Override
public String toString() {
    // Label/value pairs in the exact order the original listing used.
    Object[][] fields = {
        {"DisplayAsIcon", DisplayAsIcon},
        {"FileFormatType", FileFormatType},
        {"ImageSourceFullName", ImageSourceFullName},
        {"IsAutoSize", IsAutoSize},
        {"IsLink", IsLink},
        {"ProgID", ProgID},
        {"SourceFullName", SourceFullName},
        {"Name", Name},
        {"MsoDrawingType", MsoDrawingType},
        {"AutoShapeType", AutoShapeType},
        {"Placement", Placement},
        {"UpperLeftRow", UpperLeftRow},
        {"Top", Top},
        {"UpperLeftColumn", UpperLeftColumn},
        {"Left", Left},
        {"LowerRightRow", LowerRightRow},
        {"Bottom", Bottom},
        {"LowerRightColumn", LowerRightColumn},
        {"Right", Right},
        {"Width", Width},
        {"Height", Height},
        {"X", X},
        {"Y", Y},
        {"RotationAngle", RotationAngle},
        {"HtmlText", HtmlText},
        {"Text", Text},
        {"AlternativeText", AlternativeText},
        {"TextHorizontalAlignment", TextHorizontalAlignment},
        {"TextHorizontalOverflow", TextHorizontalOverflow},
        {"TextOrientationType", TextOrientationType},
        {"TextVerticalAlignment", TextVerticalAlignment},
        {"TextVerticalOverflow", TextVerticalOverflow},
        {"IsGroup", IsGroup},
        {"IsHidden", IsHidden},
        {"IsLockAspectRatio", IsLockAspectRatio},
        {"IsLocked", IsLocked},
        {"IsPrintable", IsPrintable},
        {"IsTextWrapped", IsTextWrapped},
        {"IsWordArt", IsWordArt},
        {"LinkedCell", LinkedCell},
        {"ZOrderPosition", ZOrderPosition},
        {"link", link},
    };
    StringBuilder out = new StringBuilder("class OleObject {\n");
    for (Object[] entry : fields) {
        // StringBuilder.append(Object) renders null as "null", exactly like
        // appending the field reference directly.
        out.append("  ").append(entry[0]).append(": ").append(entry[1]).append("\n");
    }
    out.append("}\n");
    return out.toString();
}
}
| mit |
ljshj/actor-platform | actor-sdk/sdk-core/core/core-shared/src/main/java/im/actor/core/api/rpc/RequestJoinGroup.java | 2146 | package im.actor.core.api.rpc;
/*
* Generated by the Actor API Scheme generator. DO NOT EDIT!
*/
import im.actor.runtime.bser.*;
import im.actor.runtime.collections.*;
import static im.actor.runtime.bser.Utils.*;
import im.actor.core.network.parser.*;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.annotations.NotNull;
import com.google.j2objc.annotations.ObjectiveCName;
import java.io.IOException;
import java.util.List;
import java.util.ArrayList;
import im.actor.core.api.*;
/**
 * Client-side RPC request for the JoinGroup method (header 0xb4): joins the
 * current user to a group identified by an invite token.
 *
 * NOTE(review): this file is marked as generated by the Actor API Scheme
 * generator ("DO NOT EDIT!") — behavioral changes belong in the generator.
 */
public class RequestJoinGroup extends Request<ResponseJoinGroup> {
    // Wire-protocol method identifier for this request type.
    public static final int HEADER = 0xb4;

    /**
     * Deserializes a RequestJoinGroup from its BSER byte representation.
     *
     * @param data BSER-encoded request payload
     * @return the parsed request
     * @throws IOException if the payload cannot be parsed
     */
    public static RequestJoinGroup fromBytes(byte[] data) throws IOException {
        return Bser.parse(new RequestJoinGroup(), data);
    }

    // Invite token identifying the group to join (wire field 1, required).
    private String token;
    // Requested update optimizations (wire field 2, repeated int enum).
    private List<ApiUpdateOptimization> optimizations;

    public RequestJoinGroup(@NotNull String token, @NotNull List<ApiUpdateOptimization> optimizations) {
        this.token = token;
        this.optimizations = optimizations;
    }

    // No-arg constructor used by the BSER parser; parse() fills the fields.
    public RequestJoinGroup() {
    }

    @NotNull
    public String getToken() {
        return this.token;
    }

    @NotNull
    public List<ApiUpdateOptimization> getOptimizations() {
        return this.optimizations;
    }

    @Override
    public void parse(BserValues values) throws IOException {
        this.token = values.getString(1);
        this.optimizations = new ArrayList<ApiUpdateOptimization>();
        // Field 2 carries the optimizations as repeated ints; map each back
        // to its enum constant.
        for (int b : values.getRepeatedInt(2)) {
            optimizations.add(ApiUpdateOptimization.parse(b));
        }
    }

    @Override
    public void serialize(BserWriter writer) throws IOException {
        // token is required; refuse to serialize an incomplete request.
        if (this.token == null) {
            throw new IOException();
        }
        writer.writeString(1, this.token);
        // NOTE(review): a null optimizations list (possible via the no-arg
        // constructor) would NPE here rather than raise IOException like
        // token does — presumably never null in practice; confirm upstream.
        for (ApiUpdateOptimization i : this.optimizations) {
            writer.writeInt(2, i.getValue());
        }
    }

    @Override
    public String toString() {
        String res = "rpc JoinGroup{";
        res += "token=" + this.token;
        res += "}";
        return res;
    }

    @Override
    public int getHeaderKey() {
        return HEADER;
    }
}
| mit |
garberg/NIPAP | jnipap/src/main/java/jnipap/VRF.java | 7446 | package jnipap;
import java.util.HashMap;
import java.util.Map;
import java.util.List;
import java.util.ArrayList;
import org.apache.xmlrpc.XmlRpcException;
import org.apache.xmlrpc.client.XmlRpcHttpTransportException;
import jnipap.NonExistentException;
import jnipap.ConnectionException;
import jnipap.AuthFailedException;
import jnipap.Connection;
/**
 * A NIPAP VRF (VPN Routing and Forwarding table).
 *
 * Mirrors the server-side VRF object and wraps the XML-RPC calls used to
 * create, modify, search for and remove VRFs. Public signatures keep their
 * original raw {@code Map}/{@code List} types for caller compatibility;
 * generics are used internally.
 */
public class VRF extends Jnipap {

    // VRF attributes; null means "not set"
    public String rt;
    public String name;
    public String description;

    /**
     * Save object to NIPAP.
     *
     * Adds a new VRF when {@code id} is null, otherwise edits the existing
     * one. On success this object is refreshed with the attributes the
     * server returned.
     *
     * @param conn connection used to talk to NIPAP
     * @throws JnipapException on communication or server errors
     */
    public void save(Connection conn) throws JnipapException {
        // map of VRF attributes to store
        HashMap<String, Object> attr = new HashMap<String, Object>();
        attr.put("rt", this.rt);
        attr.put("name", this.name);
        attr.put("description", this.description);

        // spec identifying this VRF; only sent when modifying
        HashMap<String, Object> vrfSpec = new HashMap<String, Object>();
        vrfSpec.put("id", this.id);

        // XML-RPC call arguments
        HashMap<String, Object> args = new HashMap<String, Object>();
        args.put("auth", conn.authMap());
        args.put("attr", attr);

        List<Object> params = new ArrayList<Object>();
        params.add(args);

        // Create new or modify old?
        Map vrf;
        if (this.id == null) {
            // ID null - create new VRF
            vrf = (Map) conn.execute("add_vrf", params);
        } else {
            // VRF exists - modify existing.
            args.put("vrf", vrfSpec);
            Object[] result = (Object[]) conn.execute("edit_vrf", params);
            if (result.length != 1) {
                throw new JnipapException("VRF edit returned " + result.length + " entries, should be 1.");
            }
            vrf = (Map) result[0];
        }

        // refresh this object with the server's view of the VRF
        VRF.fromMap(vrf, this);
    }

    /**
     * Remove object from NIPAP.
     *
     * @param conn connection used to talk to NIPAP
     * @throws JnipapException on communication or server errors
     */
    public void remove(Connection conn) throws JnipapException {
        // Build VRF spec
        HashMap<String, Object> vrfSpec = new HashMap<String, Object>();
        vrfSpec.put("id", this.id);

        // Build function args
        HashMap<String, Object> args = new HashMap<String, Object>();
        args.put("auth", conn.authMap());
        args.put("vrf", vrfSpec);

        List<Object> params = new ArrayList<Object>();
        params.add(args);

        // execute query; the return value carries no useful information here
        conn.execute("remove_vrf", params);
    }

    /**
     * Return a string representation of a VRF.
     *
     * @return String describing the VRF and its attributes
     */
    public String toString() {
        return getClass().getName() + " id: " + this.id +
            " rt: " + this.rt +
            " name: " + this.name +
            " desc: " + this.description;
    }

    /**
     * Search for VRFs by a structured query.
     *
     * @param conn connection used to talk to NIPAP
     * @param query map describing the search query
     * @param search_options search options
     * @return map with keys "search_options" (echoed options) and
     *         "result" (list of matching {@link VRF} objects)
     * @throws JnipapException on communication or server errors
     */
    public static Map search(Connection conn, Map query, Map search_options) throws JnipapException {
        // Build function args
        HashMap<String, Object> args = new HashMap<String, Object>();
        args.put("auth", conn.authMap());
        args.put("query", query);
        args.put("search_options", search_options);

        List<Object> params = new ArrayList<Object>();
        params.add(args);

        // execute query
        Map result = (Map) conn.execute("search_vrf", params);

        // extract data from result
        HashMap<String, Object> ret = new HashMap<String, Object>();
        ret.put("search_options", (Map) result.get("search_options"));

        ArrayList<VRF> retVrfs = new ArrayList<VRF>();
        for (Object resultVrf : (Object[]) result.get("result")) {
            retVrfs.add(VRF.fromMap((Map) resultVrf));
        }
        ret.put("result", retVrfs);

        return ret;
    }

    /**
     * Search for VRFs by a free-form query string ("smart search").
     *
     * @param conn connection used to talk to NIPAP
     * @param query search string
     * @param search_options search options
     * @return map with keys "search_options", "interpretation" (how the
     *         server parsed the string) and "result" (list of {@link VRF})
     * @throws JnipapException on communication or server errors
     */
    public static Map search(Connection conn, String query, Map search_options) throws JnipapException {
        // Build function args
        HashMap<String, Object> args = new HashMap<String, Object>();
        args.put("auth", conn.authMap());
        args.put("query_string", query);
        args.put("search_options", search_options);

        List<Object> params = new ArrayList<Object>();
        params.add(args);

        // execute query
        Map result = (Map) conn.execute("smart_search_vrf", params);

        // extract data from result
        HashMap<String, Object> ret = new HashMap<String, Object>();
        ret.put("search_options", (Map) result.get("search_options"));
        ret.put("interpretation", (Map) result.get("interpretation"));

        ArrayList<VRF> retVrfs = new ArrayList<VRF>();
        for (Object resultVrf : (Object[]) result.get("result")) {
            retVrfs.add(VRF.fromMap((Map) resultVrf));
        }
        ret.put("result", retVrfs);

        return ret;
    }

    /**
     * List VRFs having specified attributes.
     *
     * @param conn connection used to talk to NIPAP
     * @param vrf_spec map where attributes can be specified
     * @return list of matching {@link VRF} objects
     * @throws JnipapException on communication or server errors
     */
    public static List list(Connection conn, Map vrf_spec) throws JnipapException {
        // Build function args
        HashMap<String, Object> args = new HashMap<String, Object>();
        args.put("auth", conn.authMap());
        args.put("vrf", vrf_spec);

        List<Object> params = new ArrayList<Object>();
        params.add(args);

        // execute query
        Object[] result = (Object[]) conn.execute("list_vrf", params);

        // extract data from result
        ArrayList<VRF> ret = new ArrayList<VRF>();
        for (Object resultVrf : result) {
            ret.add(VRF.fromMap((Map) resultVrf));
        }
        return ret;
    }

    /**
     * Get VRF from NIPAP by ID.
     *
     * Fetch the VRF from NIPAP by specifying its ID. If no matching VRF
     * is found, an exception is thrown.
     *
     * @param conn connection used to talk to NIPAP
     * @param id ID of requested VRF
     * @return the VRF which was found
     * @throws NonExistentException if no VRF with that ID exists
     * @throws JnipapException on communication or server errors
     */
    public static VRF get(Connection conn, Integer id) throws JnipapException {
        // Build VRF spec
        HashMap<String, Object> vrfSpec = new HashMap<String, Object>();
        vrfSpec.put("id", id);

        List result = VRF.list(conn, vrfSpec);

        // extract data from result
        if (result.size() < 1) {
            throw new NonExistentException("no matching VRF found");
        }
        return (VRF) result.get(0);
    }

    /**
     * Create VRF object from map of VRF attributes.
     *
     * Helper function for creating objects of data received over XML-RPC.
     *
     * @param input map with VRF attributes
     * @return VRF object
     */
    public static VRF fromMap(Map input) {
        return VRF.fromMap(input, new VRF());
    }

    /**
     * Update VRF object from map of VRF attributes.
     *
     * Updates a VRF object with attributes from a Map as received over
     * XML-RPC.
     *
     * @param input map with VRF attributes
     * @param vrf VRF object to populate with attributes from map
     * @return the same VRF object, populated
     */
    public static VRF fromMap(Map input, VRF vrf) {
        vrf.id = (Integer) input.get("id");
        vrf.rt = (String) input.get("rt");
        vrf.name = (String) input.get("name");
        vrf.description = (String) input.get("description");
        return vrf;
    }

    /**
     * Compute hash of VRF (consistent with {@link #equals(Object)}).
     */
    public int hashCode() {
        int hash = super.hashCode();
        hash = hash * 31 + (rt == null ? 0 : rt.hashCode());
        hash = hash * 31 + (name == null ? 0 : name.hashCode());
        hash = hash * 31 + (description == null ? 0 : description.hashCode());
        return hash;
    }

    /**
     * Verify equality: same base identity plus equal rt, name and
     * description (null-safe comparisons).
     */
    public boolean equals(Object other) {
        if (!super.equals(other)) return false;
        VRF vrf = (VRF) other;
        return (
            (rt == vrf.rt || (rt != null && rt.equals(vrf.rt))) &&
            (name == vrf.name || (name != null && name.equals(vrf.name))) &&
            (description == vrf.description ||
                (description != null && description.equals(vrf.description)))
        );
    }
}
| mit |
cefet-inf-2015/portal-educacao | Banco de Questões/java/bancodequestoes/package-info.java | 216 | /*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package bancodequestoes;
| mit |
Drup/z3 | src/api/java/ModelDecRefQueue.java | 674 | /**
Copyright (c) 2012-2014 Microsoft Corporation
Module Name:
ModelDecRefQueue.java
Abstract:
Author:
@author Christoph Wintersteiger (cwinter) 2012-03-15
Notes:
**/
package com.microsoft.z3;
/**
 * Reference-counting queue for Z3 model handles.
 *
 * Bridges Java object lifetime to Z3's native reference counting by
 * incrementing/decrementing the native ref count of model pointers.
 */
class ModelDecRefQueue extends IDecRefQueue
{
    protected void incRef(Context ctx, long obj)
    {
        try
        {
            Native.modelIncRef(ctx.nCtx(), obj);
        } catch (Z3Exception e)
        {
            // Deliberately ignored: ref-count bookkeeping is best-effort and
            // must not propagate native errors to callers.
        }
    }

    protected void decRef(Context ctx, long obj)
    {
        try
        {
            Native.modelDecRef(ctx.nCtx(), obj);
        } catch (Z3Exception e)
        {
            // Deliberately ignored — see incRef.
        }
    }
};
| mit |
zik43/java-design-patterns | event-driven-architecture/src/main/java/com/iluwatar/eda/handler/UserCreatedEventHandler.java | 1704 | /*
* The MIT License
* Copyright © 2014-2019 Ilkka Seppälä
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.iluwatar.eda.handler;
import com.iluwatar.eda.event.UserCreatedEvent;
import com.iluwatar.eda.framework.Handler;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Handles the {@link UserCreatedEvent} message.
*/
public class UserCreatedEventHandler implements Handler<UserCreatedEvent> {

    private static final Logger LOGGER = LoggerFactory.getLogger(UserCreatedEventHandler.class);

    /**
     * Logs the username carried by the received {@link UserCreatedEvent}.
     *
     * @param event the user-created event to handle
     */
    @Override
    public void onEvent(UserCreatedEvent event) {
        LOGGER.info("User '{}' has been Created!", event.getUser().getUsername());
    }
}
| mit |
openwide-java/jsass | src/main/java/io/bit3/jsass/annotation/DefaultBooleanValue.java | 364 | package io.bit3.jsass.annotation;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Declares the default for a {@code boolean} function parameter.
 *
 * Retained at runtime ({@link RetentionPolicy#RUNTIME}) so it is visible to
 * reflection; presumably consulted when the annotated parameter's argument
 * is omitted by the caller.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target({ElementType.PARAMETER})
public @interface DefaultBooleanValue {
    /**
     * The default value.
     *
     * @return the value used when no argument is supplied
     */
    boolean value();
}
| mit |
prat0318/dbms | mini_dbms/je-5.0.103/examples/je/txn/PayloadData.java | 468 | package je.txn;
import java.io.Serializable;
/**
 * Immutable serializable value object used as a record payload: pairs a
 * numeric ID and the creating thread's name with a double datum.
 */
public class PayloadData implements Serializable {

    // Pin the stream version explicitly so recompiles don't silently change
    // the computed UID (standard practice for Serializable classes). Note:
    // this intentionally fixes the UID going forward; data serialized with
    // the previous implicit UID would no longer deserialize.
    private static final long serialVersionUID = 1L;

    private final int oID;
    private final String threadName;
    private final double doubleData;

    /**
     * @param id   record/object ID
     * @param name name of the thread that created this payload
     * @param data arbitrary numeric datum
     */
    PayloadData(int id, String name, double data) {
        oID = id;
        threadName = name;
        doubleData = data;
    }

    public double getDoubleData() { return doubleData; }
    public int getID() { return oID; }
    public String getThreadName() { return threadName; }
}
| mit |
jdcasey/EGit | org.eclipse.egit.ui/src/org/eclipse/egit/ui/internal/actions/IgnoreActionHandler.java | 2196 | /*******************************************************************************
* Copyright (C) 2009, Alex Blewitt <alex.blewitt@gmail.com>
* Copyright (C) 2010, Jens Baumgart <jens.baumgart@sap.com>
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* See LICENSE for the full license text, also available.
*******************************************************************************/
package org.eclipse.egit.ui.internal.actions;
import org.eclipse.core.commands.ExecutionEvent;
import org.eclipse.core.commands.ExecutionException;
import org.eclipse.core.resources.IResource;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.Status;
import org.eclipse.core.runtime.jobs.Job;
import org.eclipse.egit.core.op.IgnoreOperation;
import org.eclipse.egit.ui.Activator;
import org.eclipse.egit.ui.UIText;
import org.eclipse.egit.ui.internal.decorators.GitLightweightDecorator;
/** Action for ignoring files via .gitignore. */
public class IgnoreActionHandler extends RepositoryActionHandler {

    /**
     * Adds the resources currently selected in the workbench to .gitignore.
     *
     * The work is delegated to an {@link IgnoreOperation} run inside a
     * background workspace {@link Job}; this handler returns immediately.
     *
     * @param event execution event carrying the current selection
     * @return always null, per the IHandler contract
     */
    public Object execute(ExecutionEvent event) throws ExecutionException {
        final IResource[] resources = getSelectedResources(event);
        if (resources.length == 0)
            return null;
        final IgnoreOperation operation = new IgnoreOperation(resources);
        String jobname = UIText.IgnoreActionHandler_addToGitignore;
        Job job = new Job(jobname) {
            @Override
            protected IStatus run(IProgressMonitor monitor) {
                try {
                    operation.execute(monitor);
                } catch (CoreException e) {
                    return Activator.createErrorStatus(e.getStatus()
                            .getMessage(), e);
                }
                // A .gitignore outside the workspace produces no resource
                // delta, so refresh the label decorator explicitly.
                if (operation.isGitignoreOutsideWSChanged())
                    GitLightweightDecorator.refresh();
                return Status.OK_STATUS;
            }
        };
        // user-initiated job: show progress UI
        job.setUser(true);
        job.setRule(operation.getSchedulingRule());
        job.schedule();
        return null;
    }

    @Override
    public boolean isEnabled() {
        // Do not consult Team.isIgnoredHint here because the user
        // should be allowed to add ignored resources to .gitignore
        return true;
    }
}
| epl-1.0 |
debrief/debrief | org.mwc.asset.comms/docs/restlet_src/org.restlet.test/org/restlet/test/jaxrs/services/resources/HeadOptionsTestService.java | 2538 | /**
* Copyright 2005-2010 Noelios Technologies.
*
* The contents of this file are subject to the terms of one of the following
* open source licenses: LGPL 3.0 or LGPL 2.1 or CDDL 1.0 or EPL 1.0 (the
* "Licenses"). You can select the license that you prefer but you may not use
* this file except in compliance with one of these Licenses.
*
* You can obtain a copy of the LGPL 3.0 license at
* http://www.opensource.org/licenses/lgpl-3.0.html
*
* You can obtain a copy of the LGPL 2.1 license at
* http://www.opensource.org/licenses/lgpl-2.1.php
*
* You can obtain a copy of the CDDL 1.0 license at
* http://www.opensource.org/licenses/cddl1.php
*
* You can obtain a copy of the EPL 1.0 license at
* http://www.opensource.org/licenses/eclipse-1.0.php
*
* See the Licenses for the specific language governing permissions and
* limitations under the Licenses.
*
* Alternatively, you can obtain a royalty free commercial license with less
* limitations, transferable or non-transferable, directly at
* http://www.noelios.com/products/restlet-engine
*
* Restlet is a registered trademark of Noelios Technologies.
*/
package org.restlet.test.jaxrs.services.resources;
import javax.ws.rs.GET;
import javax.ws.rs.HEAD;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import org.restlet.test.jaxrs.services.others.OPTIONS;
import org.restlet.test.jaxrs.services.tests.HeadOptionsTest;
/**
* This class contains only data for one media type
*
* @author Stephan Koops
* @see HeadOptionsTest
* @see HEAD
* @see OPTIONS
*/
@Path("/headOptionsTest")
public class HeadOptionsTestService {

    /** Resource root: answers GET with an empty entity. */
    @GET
    public void get() {
        // do nothing
    }

    /** GET headTest1 as HTML. */
    @GET
    @Path("headTest1")
    @Produces("text/html")
    public String getTest1() {
        return "4711";
    }

    /** GET headTest1 as plain text. */
    @GET
    @Path("headTest1")
    @Produces("text/plain")
    public String getTest1a() {
        return "4711";
    }

    /** GET headTest2 as HTML. */
    @GET
    @Path("headTest2")
    @Produces("text/html")
    public String getTest2() {
        return "4711";
    }

    /** GET headTest2 as plain text. */
    @GET
    @Path("headTest2")
    @Produces("text/plain")
    public String getTest2plain() {
        return "4711";
    }

    /** Explicit HEAD for headTest1; returns no entity. */
    @HEAD
    @Path("headTest1")
    @Produces("text/html")
    public String headTest1() {
        return null;
    }

    /** Explicit HEAD for headTest2; the entity body must be discarded. */
    @HEAD
    @Path("headTest2")
    @Produces("text/html")
    public String headTest2() {
        return "4711";
    }

    /** POST to headTest1; present so OPTIONS lists POST as allowed. */
    @POST
    @Path("headTest1")
    public void post() {
        // do nothing yet
    }
} | epl-1.0 |
junit-team/junit-lambda | junit-platform-engine/src/main/java/org/junit/platform/engine/discovery/AbstractClassNameFilter.java | 1643 | /*
* Copyright 2015-2020 the original author or authors.
*
* All rights reserved. This program and the accompanying materials are
* made available under the terms of the Eclipse Public License v2.0 which
* accompanies this distribution and is available at
*
* https://www.eclipse.org/legal/epl-v20.html
*/
package org.junit.platform.engine.discovery;
import static java.util.stream.Collectors.joining;
import static java.util.stream.Collectors.toList;
import java.util.Arrays;
import java.util.List;
import java.util.Optional;
import java.util.function.Predicate;
import java.util.regex.Pattern;
import org.junit.platform.commons.util.Preconditions;
/**
* Abstract {@link ClassNameFilter} that servers as a superclass
* for filters including or excluding fully qualified class names
* based on pattern-matching.
*
* @since 1.0
*/
/**
 * Abstract {@link ClassNameFilter} serving as a superclass for filters that
 * include or exclude fully qualified class names via pattern matching.
 *
 * @since 1.0
 */
abstract class AbstractClassNameFilter implements ClassNameFilter {

    protected final List<Pattern> patterns;
    protected final String patternDescription;

    AbstractClassNameFilter(String... patterns) {
        Preconditions.notEmpty(patterns, "patterns array must not be null or empty");
        Preconditions.containsNoNullElements(patterns, "patterns array must not contain null elements");
        this.patterns = Arrays.stream(patterns).map(Pattern::compile).collect(toList());
        // e.g. {"a", "b"} -> "'a' OR 'b'" — same format as joining("' OR '", "'", "'")
        this.patternDescription = "'" + String.join("' OR '", patterns) + "'";
    }

    @Override
    public abstract Predicate<String> toPredicate();

    /**
     * Returns the first configured pattern that fully matches the given
     * class name, or empty if none does.
     */
    protected Optional<Pattern> findMatchingPattern(String className) {
        for (Pattern pattern : this.patterns) {
            if (pattern.matcher(className).matches()) {
                return Optional.of(pattern);
            }
        }
        return Optional.empty();
    }
}
| epl-1.0 |
Jamstah/openhab2-addons | addons/binding/org.openhab.binding.yamahareceiver/src/main/java/org/openhab/binding/yamahareceiver/internal/protocol/xml/ZoneAvailableInputsXML.java | 3604 | /**
* Copyright (c) 2010-2018 by the respective copyright holders.
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*/
package org.openhab.binding.yamahareceiver.internal.protocol.xml;
import java.io.IOException;
import java.lang.ref.WeakReference;
import org.openhab.binding.yamahareceiver.YamahaReceiverBindingConstants;
import org.openhab.binding.yamahareceiver.internal.protocol.AbstractConnection;
import org.openhab.binding.yamahareceiver.internal.protocol.ReceivedMessageParseException;
import org.openhab.binding.yamahareceiver.internal.protocol.ZoneAvailableInputs;
import org.openhab.binding.yamahareceiver.internal.state.AvailableInputState;
import org.openhab.binding.yamahareceiver.internal.state.AvailableInputStateListener;
import org.openhab.binding.yamahareceiver.internal.state.ZoneControlState;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
/**
 * Retrieves the selectable inputs of one zone of a Yamaha receiver with HTTP/xml.
 * No state will be saved in here; parsed results are pushed to an {@link AvailableInputStateListener}.
*
* @author David Gräff - Initial contribution
* @author Tomasz Maruszak - Refactoring
*/
public class ZoneAvailableInputsXML implements ZoneAvailableInputs {
    private final Logger logger = LoggerFactory.getLogger(ZoneAvailableInputsXML.class);

    // Notified with each freshly parsed set of inputs; when null, update() is a no-op.
    private final AvailableInputStateListener observer;
    // Weak reference so this helper does not keep the connection alive on its own.
    private final WeakReference<AbstractConnection> comReference;
    // The receiver zone this instance queries.
    private final YamahaReceiverBindingConstants.Zone zone;

    /**
     * Creates an available-inputs reader for one receiver zone.
     *
     * @param xml the connection used to talk to the receiver (held weakly)
     * @param zone the zone to query
     * @param observer listener for parsed input states; may be null
     */
    public ZoneAvailableInputsXML(AbstractConnection xml, YamahaReceiverBindingConstants.Zone zone,
            AvailableInputStateListener observer) {
        this.comReference = new WeakReference<>(xml);
        this.zone = zone;
        this.observer = observer;
    }

    /**
     * Return the zone
     */
    public YamahaReceiverBindingConstants.Zone getZone() {
        return zone;
    }

    /**
     * Queries the receiver for the inputs selectable in this zone and forwards
     * the parsed result to the observer.
     *
     * @throws IOException if communication with the receiver fails
     * @throws ReceivedMessageParseException if the response cannot be parsed
     */
    public void update() throws IOException, ReceivedMessageParseException {
        if (observer == null) {
            return;
        }
        AbstractConnection com = comReference.get();
        if (com == null) {
            // Connection already garbage collected; previously this dereferenced null and threw an NPE.
            return;
        }
        String response = com
                .sendReceive(XMLUtils.wrZone(zone, "<Input><Input_Sel_Item>GetParam</Input_Sel_Item></Input>"));
        Document doc = XMLUtils.xml(response);
        if (doc.getFirstChild() == null) {
            throw new ReceivedMessageParseException("<Input><Input_Sel_Item>GetParam failed: " + response);
        }
        Node inputSelItem = XMLUtils.getNode(doc.getFirstChild(), zone + "/Input/Input_Sel_Item");
        if (inputSelItem == null) {
            // Fail with a parse exception instead of the NPE the old code produced on a missing node.
            throw new ReceivedMessageParseException("<Input><Input_Sel_Item>GetParam failed: " + response);
        }
        NodeList items = inputSelItem.getChildNodes();
        AvailableInputState state = new AvailableInputState();
        for (int i = 0; i < items.getLength(); i++) {
            Node child = items.item(i);
            if (!(child instanceof Element)) {
                continue; // skip whitespace/text nodes between element children
            }
            Element item = (Element) child;
            Node param = item.getElementsByTagName("Param").item(0);
            Node rw = item.getElementsByTagName("RW").item(0);
            if (param == null || rw == null) {
                continue; // malformed entry — ignore it rather than fail the whole update
            }
            String name = param.getTextContent();
            // Only writable ("W") inputs can actually be selected on the receiver.
            boolean writable = rw.getTextContent().contains("W");
            if (writable) {
                state.availableInputs.put(XMLUtils.convertNameToID(name), name);
            }
        }
        if (logger.isTraceEnabled()) {
            logger.trace("Zone {} - available inputs: {}", zone, String.join(", ", state.availableInputs.keySet()));
        }
        observer.availableInputsChanged(state);
    }
}
| epl-1.0 |
TypeFox/che | wsmaster/che-core-api-machine/src/main/java/org/eclipse/che/api/machine/server/spi/SnapshotDao.java | 3430 | /*
* Copyright (c) 2012-2017 Red Hat, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Red Hat, Inc. - initial API and implementation
*/
package org.eclipse.che.api.machine.server.spi;
import java.util.Collection;
import java.util.List;
import org.eclipse.che.api.core.NotFoundException;
import org.eclipse.che.api.machine.server.exception.SnapshotException;
import org.eclipse.che.api.machine.server.model.impl.SnapshotImpl;
/**
* Stores metadata of snapshots
*
* @author andrew00x
* @author Yevhenii Voevodin
*/
public interface SnapshotDao {

  /**
   * Retrieves snapshot metadata by machine-related information.
   *
   * @param workspaceId workspace id
   * @param envName name of environment
   * @param machineName name of machine
   * @return snapshot which matches given parameters
   * @throws NotFoundException when snapshot with such workspaceId, envName and machineName doesn't
   *     exist
   * @throws SnapshotException when any other error occurs
   */
  SnapshotImpl getSnapshot(String workspaceId, String envName, String machineName)
      throws NotFoundException, SnapshotException;

  /**
   * Retrieves snapshot metadata by id.
   *
   * @param snapshotId id of the required snapshot
   * @return {@link SnapshotImpl} with the specified id
   * @throws NotFoundException if a snapshot with the specified id is not found
   * @throws SnapshotException if any other error occurs
   */
  SnapshotImpl getSnapshot(String snapshotId) throws NotFoundException, SnapshotException;

  /**
   * Saves snapshot metadata.
   *
   * @param snapshot snapshot metadata to store
   * @throws SnapshotException if any error occurs
   */
  void saveSnapshot(SnapshotImpl snapshot) throws SnapshotException;

  /**
   * Finds all snapshots that belong to the given workspace.
   *
   * @param workspaceId workspace specified in desired snapshot
   * @return list of snapshots that belong to the workspace, or an empty list if no desired
   *     snapshots are found
   * @throws SnapshotException if any error occurs
   */
  List<SnapshotImpl> findSnapshots(String workspaceId) throws SnapshotException;

  /**
   * Removes a snapshot by id.
   *
   * @param snapshotId id of the snapshot that should be removed
   * @throws NotFoundException if a snapshot with the specified id is not found
   * @throws SnapshotException if any other error occurs
   */
  void removeSnapshot(String snapshotId) throws NotFoundException, SnapshotException;

  /**
   * Replaces all the existing snapshots related to the given workspace with a new list of
   * snapshots.
   *
   * @param workspaceId the id of the workspace to replace snapshots for
   * @param envName the name of the environment in the workspace with the given id, used to search
   *     for those snapshots that should be replaced
   * @param newSnapshots the list of the snapshots which will be stored instead of the existing ones
   * @return the list of replaced (removed/old) snapshots for the given workspace and environment,
   *     or an empty list when there is not a single snapshot for the given workspace
   * @throws SnapshotException when any error occurs
   */
  List<SnapshotImpl> replaceSnapshots(
      String workspaceId, String envName, Collection<? extends SnapshotImpl> newSnapshots)
      throws SnapshotException;
}
| epl-1.0 |
drbgfc/mdht | hl7/plugins/org.openhealthtools.mdht.emf.hl7.mif2/src/org/openhealthtools/mdht/emf/w3c/xhtml/util/XhtmlAdapterFactory.java | 37482 | /*******************************************************************************
* Copyright (c) 2006, 2009 David A Carlson
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* David A Carlson (XMLmodeling.com) - initial API and implementation
*******************************************************************************/
package org.openhealthtools.mdht.emf.w3c.xhtml.util;
import org.eclipse.emf.common.notify.Adapter;
import org.eclipse.emf.common.notify.Notifier;
import org.eclipse.emf.common.notify.impl.AdapterFactoryImpl;
import org.eclipse.emf.ecore.EObject;
import org.openhealthtools.mdht.emf.w3c.xhtml.A;
import org.openhealthtools.mdht.emf.w3c.xhtml.AContent;
import org.openhealthtools.mdht.emf.w3c.xhtml.Abbr;
import org.openhealthtools.mdht.emf.w3c.xhtml.Acronym;
import org.openhealthtools.mdht.emf.w3c.xhtml.B;
import org.openhealthtools.mdht.emf.w3c.xhtml.Big;
import org.openhealthtools.mdht.emf.w3c.xhtml.Block;
import org.openhealthtools.mdht.emf.w3c.xhtml.Blockquote;
import org.openhealthtools.mdht.emf.w3c.xhtml.Br;
import org.openhealthtools.mdht.emf.w3c.xhtml.Caption;
import org.openhealthtools.mdht.emf.w3c.xhtml.Cite;
import org.openhealthtools.mdht.emf.w3c.xhtml.Code;
import org.openhealthtools.mdht.emf.w3c.xhtml.Col;
import org.openhealthtools.mdht.emf.w3c.xhtml.Colgroup;
import org.openhealthtools.mdht.emf.w3c.xhtml.Dd;
import org.openhealthtools.mdht.emf.w3c.xhtml.Del;
import org.openhealthtools.mdht.emf.w3c.xhtml.Dfn;
import org.openhealthtools.mdht.emf.w3c.xhtml.Div;
import org.openhealthtools.mdht.emf.w3c.xhtml.Dl;
import org.openhealthtools.mdht.emf.w3c.xhtml.Dt;
import org.openhealthtools.mdht.emf.w3c.xhtml.Em;
import org.openhealthtools.mdht.emf.w3c.xhtml.Flow;
import org.openhealthtools.mdht.emf.w3c.xhtml.Hr;
import org.openhealthtools.mdht.emf.w3c.xhtml.I;
import org.openhealthtools.mdht.emf.w3c.xhtml.Img;
import org.openhealthtools.mdht.emf.w3c.xhtml.Inline;
import org.openhealthtools.mdht.emf.w3c.xhtml.Ins;
import org.openhealthtools.mdht.emf.w3c.xhtml.Kbd;
import org.openhealthtools.mdht.emf.w3c.xhtml.Li;
import org.openhealthtools.mdht.emf.w3c.xhtml.Ol;
import org.openhealthtools.mdht.emf.w3c.xhtml.P;
import org.openhealthtools.mdht.emf.w3c.xhtml.Param;
import org.openhealthtools.mdht.emf.w3c.xhtml.Pre;
import org.openhealthtools.mdht.emf.w3c.xhtml.PreContent;
import org.openhealthtools.mdht.emf.w3c.xhtml.Q;
import org.openhealthtools.mdht.emf.w3c.xhtml.Samp;
import org.openhealthtools.mdht.emf.w3c.xhtml.Small;
import org.openhealthtools.mdht.emf.w3c.xhtml.Span;
import org.openhealthtools.mdht.emf.w3c.xhtml.Strong;
import org.openhealthtools.mdht.emf.w3c.xhtml.Sub;
import org.openhealthtools.mdht.emf.w3c.xhtml.Sup;
import org.openhealthtools.mdht.emf.w3c.xhtml.Table;
import org.openhealthtools.mdht.emf.w3c.xhtml.Tbody;
import org.openhealthtools.mdht.emf.w3c.xhtml.Td;
import org.openhealthtools.mdht.emf.w3c.xhtml.Tfoot;
import org.openhealthtools.mdht.emf.w3c.xhtml.Th;
import org.openhealthtools.mdht.emf.w3c.xhtml.Thead;
import org.openhealthtools.mdht.emf.w3c.xhtml.Tr;
import org.openhealthtools.mdht.emf.w3c.xhtml.Tt;
import org.openhealthtools.mdht.emf.w3c.xhtml.Ul;
import org.openhealthtools.mdht.emf.w3c.xhtml.Var;
import org.openhealthtools.mdht.emf.w3c.xhtml.XhtmlPackage;
/**
* <!-- begin-user-doc -->
* The <b>Adapter Factory</b> for the model.
* It provides an adapter <code>createXXX</code> method for each class of the model.
* <!-- end-user-doc -->
* @see org.openhealthtools.mdht.emf.w3c.xhtml.XhtmlPackage
* @generated
*/
public class XhtmlAdapterFactory extends AdapterFactoryImpl {
/**
* The cached model package.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected static XhtmlPackage modelPackage;
/**
* Creates an instance of the adapter factory.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
	public XhtmlAdapterFactory() {
		if (modelPackage == null) {
			// Lazily cache the package singleton; the static field is shared by all factory instances.
			modelPackage = XhtmlPackage.eINSTANCE;
		}
	}
/**
* Returns whether this factory is applicable for the type of the object.
* <!-- begin-user-doc -->
* This implementation returns <code>true</code> if the object is either the model's package or is an instance object of the model.
* <!-- end-user-doc -->
* @return whether this factory is applicable for the type of the object.
* @generated
*/
	@Override
	public boolean isFactoryForType(Object object) {
		if (object == modelPackage) {
			return true;
		}
		if (object instanceof EObject) {
			// An instance belongs to this factory when its metaclass lives in the XHTML package.
			return ((EObject) object).eClass().getEPackage() == modelPackage;
		}
		return false;
	}
/**
* The switch that delegates to the <code>createXXX</code> methods.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected XhtmlSwitch<Adapter> modelSwitch = new XhtmlSwitch<Adapter>() {
@Override
public Adapter caseA(A object) {
return createAAdapter();
}
@Override
public Adapter caseAbbr(Abbr object) {
return createAbbrAdapter();
}
@Override
public Adapter caseAContent(AContent object) {
return createAContentAdapter();
}
@Override
public Adapter caseAcronym(Acronym object) {
return createAcronymAdapter();
}
@Override
public Adapter caseB(B object) {
return createBAdapter();
}
@Override
public Adapter caseBig(Big object) {
return createBigAdapter();
}
@Override
public Adapter caseBlock(Block object) {
return createBlockAdapter();
}
@Override
public Adapter caseBlockquote(Blockquote object) {
return createBlockquoteAdapter();
}
@Override
public Adapter caseBr(Br object) {
return createBrAdapter();
}
@Override
public Adapter caseCaption(Caption object) {
return createCaptionAdapter();
}
@Override
public Adapter caseCite(Cite object) {
return createCiteAdapter();
}
@Override
public Adapter caseCode(Code object) {
return createCodeAdapter();
}
@Override
public Adapter caseCol(Col object) {
return createColAdapter();
}
@Override
public Adapter caseColgroup(Colgroup object) {
return createColgroupAdapter();
}
@Override
public Adapter caseDd(Dd object) {
return createDdAdapter();
}
@Override
public Adapter caseDel(Del object) {
return createDelAdapter();
}
@Override
public Adapter caseDfn(Dfn object) {
return createDfnAdapter();
}
@Override
public Adapter caseDiv(Div object) {
return createDivAdapter();
}
@Override
public Adapter caseDl(Dl object) {
return createDlAdapter();
}
@Override
public Adapter caseDt(Dt object) {
return createDtAdapter();
}
@Override
public Adapter caseEm(Em object) {
return createEmAdapter();
}
@Override
public Adapter caseFlow(Flow object) {
return createFlowAdapter();
}
@Override
public Adapter caseHr(Hr object) {
return createHrAdapter();
}
@Override
public Adapter caseI(I object) {
return createIAdapter();
}
@Override
public Adapter caseImg(Img object) {
return createImgAdapter();
}
@Override
public Adapter caseInline(Inline object) {
return createInlineAdapter();
}
@Override
public Adapter caseIns(Ins object) {
return createInsAdapter();
}
@Override
public Adapter caseKbd(Kbd object) {
return createKbdAdapter();
}
@Override
public Adapter caseLi(Li object) {
return createLiAdapter();
}
@Override
public Adapter caseObject(org.openhealthtools.mdht.emf.w3c.xhtml.Object object) {
return createObjectAdapter();
}
@Override
public Adapter caseOl(Ol object) {
return createOlAdapter();
}
@Override
public Adapter caseP(P object) {
return createPAdapter();
}
@Override
public Adapter caseParam(Param object) {
return createParamAdapter();
}
@Override
public Adapter casePre(Pre object) {
return createPreAdapter();
}
@Override
public Adapter casePreContent(PreContent object) {
return createPreContentAdapter();
}
@Override
public Adapter caseQ(Q object) {
return createQAdapter();
}
@Override
public Adapter caseSamp(Samp object) {
return createSampAdapter();
}
@Override
public Adapter caseSmall(Small object) {
return createSmallAdapter();
}
@Override
public Adapter caseSpan(Span object) {
return createSpanAdapter();
}
@Override
public Adapter caseStrong(Strong object) {
return createStrongAdapter();
}
@Override
public Adapter caseSub(Sub object) {
return createSubAdapter();
}
@Override
public Adapter caseSup(Sup object) {
return createSupAdapter();
}
@Override
public Adapter caseTable(Table object) {
return createTableAdapter();
}
@Override
public Adapter caseTbody(Tbody object) {
return createTbodyAdapter();
}
@Override
public Adapter caseTd(Td object) {
return createTdAdapter();
}
@Override
public Adapter caseTfoot(Tfoot object) {
return createTfootAdapter();
}
@Override
public Adapter caseTh(Th object) {
return createThAdapter();
}
@Override
public Adapter caseThead(Thead object) {
return createTheadAdapter();
}
@Override
public Adapter caseTr(Tr object) {
return createTrAdapter();
}
@Override
public Adapter caseTt(Tt object) {
return createTtAdapter();
}
@Override
public Adapter caseUl(Ul object) {
return createUlAdapter();
}
@Override
public Adapter caseVar(Var object) {
return createVarAdapter();
}
@Override
public Adapter defaultCase(EObject object) {
return createEObjectAdapter();
}
};
/**
* Creates an adapter for the <code>target</code>.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @param target the object to adapt.
* @return the adapter for the <code>target</code>.
* @generated
*/
	@Override
	public Adapter createAdapter(Notifier target) {
		// NOTE(review): assumes every Notifier handed in is an EObject of this model;
		// a foreign Notifier would raise a ClassCastException here — confirm callers.
		return modelSwitch.doSwitch((EObject) target);
	}
/**
* Creates a new adapter for an object of class '{@link org.openhealthtools.mdht.emf.w3c.xhtml.A <em>A</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.openhealthtools.mdht.emf.w3c.xhtml.A
* @generated
*/
public Adapter createAAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.openhealthtools.mdht.emf.w3c.xhtml.Abbr <em>Abbr</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.openhealthtools.mdht.emf.w3c.xhtml.Abbr
* @generated
*/
public Adapter createAbbrAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.openhealthtools.mdht.emf.w3c.xhtml.AContent <em>AContent</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.openhealthtools.mdht.emf.w3c.xhtml.AContent
* @generated
*/
public Adapter createAContentAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.openhealthtools.mdht.emf.w3c.xhtml.Acronym <em>Acronym</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.openhealthtools.mdht.emf.w3c.xhtml.Acronym
* @generated
*/
public Adapter createAcronymAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.openhealthtools.mdht.emf.w3c.xhtml.B <em>B</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.openhealthtools.mdht.emf.w3c.xhtml.B
* @generated
*/
public Adapter createBAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.openhealthtools.mdht.emf.w3c.xhtml.Big <em>Big</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.openhealthtools.mdht.emf.w3c.xhtml.Big
* @generated
*/
public Adapter createBigAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.openhealthtools.mdht.emf.w3c.xhtml.Block <em>Block</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.openhealthtools.mdht.emf.w3c.xhtml.Block
* @generated
*/
public Adapter createBlockAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.openhealthtools.mdht.emf.w3c.xhtml.Blockquote <em>Blockquote</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.openhealthtools.mdht.emf.w3c.xhtml.Blockquote
* @generated
*/
public Adapter createBlockquoteAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.openhealthtools.mdht.emf.w3c.xhtml.Br <em>Br</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.openhealthtools.mdht.emf.w3c.xhtml.Br
* @generated
*/
public Adapter createBrAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.openhealthtools.mdht.emf.w3c.xhtml.Caption <em>Caption</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.openhealthtools.mdht.emf.w3c.xhtml.Caption
* @generated
*/
public Adapter createCaptionAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.openhealthtools.mdht.emf.w3c.xhtml.Cite <em>Cite</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.openhealthtools.mdht.emf.w3c.xhtml.Cite
* @generated
*/
public Adapter createCiteAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.openhealthtools.mdht.emf.w3c.xhtml.Code <em>Code</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.openhealthtools.mdht.emf.w3c.xhtml.Code
* @generated
*/
public Adapter createCodeAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.openhealthtools.mdht.emf.w3c.xhtml.Col <em>Col</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.openhealthtools.mdht.emf.w3c.xhtml.Col
* @generated
*/
public Adapter createColAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.openhealthtools.mdht.emf.w3c.xhtml.Colgroup <em>Colgroup</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.openhealthtools.mdht.emf.w3c.xhtml.Colgroup
* @generated
*/
public Adapter createColgroupAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.openhealthtools.mdht.emf.w3c.xhtml.Dd <em>Dd</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.openhealthtools.mdht.emf.w3c.xhtml.Dd
* @generated
*/
public Adapter createDdAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.openhealthtools.mdht.emf.w3c.xhtml.Del <em>Del</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.openhealthtools.mdht.emf.w3c.xhtml.Del
* @generated
*/
public Adapter createDelAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.openhealthtools.mdht.emf.w3c.xhtml.Dfn <em>Dfn</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.openhealthtools.mdht.emf.w3c.xhtml.Dfn
* @generated
*/
public Adapter createDfnAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.openhealthtools.mdht.emf.w3c.xhtml.Div <em>Div</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.openhealthtools.mdht.emf.w3c.xhtml.Div
* @generated
*/
public Adapter createDivAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.openhealthtools.mdht.emf.w3c.xhtml.Dl <em>Dl</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.openhealthtools.mdht.emf.w3c.xhtml.Dl
* @generated
*/
public Adapter createDlAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.openhealthtools.mdht.emf.w3c.xhtml.Dt <em>Dt</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.openhealthtools.mdht.emf.w3c.xhtml.Dt
* @generated
*/
public Adapter createDtAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.openhealthtools.mdht.emf.w3c.xhtml.Em <em>Em</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.openhealthtools.mdht.emf.w3c.xhtml.Em
* @generated
*/
public Adapter createEmAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.openhealthtools.mdht.emf.w3c.xhtml.Flow <em>Flow</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.openhealthtools.mdht.emf.w3c.xhtml.Flow
* @generated
*/
public Adapter createFlowAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.openhealthtools.mdht.emf.w3c.xhtml.Hr <em>Hr</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.openhealthtools.mdht.emf.w3c.xhtml.Hr
* @generated
*/
public Adapter createHrAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.openhealthtools.mdht.emf.w3c.xhtml.I <em>I</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.openhealthtools.mdht.emf.w3c.xhtml.I
* @generated
*/
public Adapter createIAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.openhealthtools.mdht.emf.w3c.xhtml.Img <em>Img</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.openhealthtools.mdht.emf.w3c.xhtml.Img
* @generated
*/
public Adapter createImgAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.openhealthtools.mdht.emf.w3c.xhtml.Inline <em>Inline</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.openhealthtools.mdht.emf.w3c.xhtml.Inline
* @generated
*/
public Adapter createInlineAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.openhealthtools.mdht.emf.w3c.xhtml.Ins <em>Ins</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.openhealthtools.mdht.emf.w3c.xhtml.Ins
* @generated
*/
public Adapter createInsAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.openhealthtools.mdht.emf.w3c.xhtml.Kbd <em>Kbd</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.openhealthtools.mdht.emf.w3c.xhtml.Kbd
* @generated
*/
public Adapter createKbdAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.openhealthtools.mdht.emf.w3c.xhtml.Li <em>Li</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.openhealthtools.mdht.emf.w3c.xhtml.Li
* @generated
*/
public Adapter createLiAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.openhealthtools.mdht.emf.w3c.xhtml.Object <em>Object</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.openhealthtools.mdht.emf.w3c.xhtml.Object
* @generated
*/
public Adapter createObjectAdapter() {
// Generated EMF stub: returning null marks this case as "not handled" so callers
// fall through (see the generated javadoc); regeneration will overwrite manual
// edits here unless the @generated tag is changed to "@generated NOT".
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.openhealthtools.mdht.emf.w3c.xhtml.Ol <em>Ol</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.openhealthtools.mdht.emf.w3c.xhtml.Ol
* @generated
*/
public Adapter createOlAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.openhealthtools.mdht.emf.w3c.xhtml.P <em>P</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.openhealthtools.mdht.emf.w3c.xhtml.P
* @generated
*/
public Adapter createPAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.openhealthtools.mdht.emf.w3c.xhtml.Param <em>Param</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.openhealthtools.mdht.emf.w3c.xhtml.Param
* @generated
*/
public Adapter createParamAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.openhealthtools.mdht.emf.w3c.xhtml.Pre <em>Pre</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.openhealthtools.mdht.emf.w3c.xhtml.Pre
* @generated
*/
public Adapter createPreAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.openhealthtools.mdht.emf.w3c.xhtml.PreContent <em>Pre Content</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.openhealthtools.mdht.emf.w3c.xhtml.PreContent
* @generated
*/
public Adapter createPreContentAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.openhealthtools.mdht.emf.w3c.xhtml.Q <em>Q</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.openhealthtools.mdht.emf.w3c.xhtml.Q
* @generated
*/
public Adapter createQAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.openhealthtools.mdht.emf.w3c.xhtml.Samp <em>Samp</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.openhealthtools.mdht.emf.w3c.xhtml.Samp
* @generated
*/
public Adapter createSampAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.openhealthtools.mdht.emf.w3c.xhtml.Small <em>Small</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.openhealthtools.mdht.emf.w3c.xhtml.Small
* @generated
*/
public Adapter createSmallAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.openhealthtools.mdht.emf.w3c.xhtml.Span <em>Span</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.openhealthtools.mdht.emf.w3c.xhtml.Span
* @generated
*/
public Adapter createSpanAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.openhealthtools.mdht.emf.w3c.xhtml.Strong <em>Strong</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.openhealthtools.mdht.emf.w3c.xhtml.Strong
* @generated
*/
public Adapter createStrongAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.openhealthtools.mdht.emf.w3c.xhtml.Sub <em>Sub</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.openhealthtools.mdht.emf.w3c.xhtml.Sub
* @generated
*/
public Adapter createSubAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.openhealthtools.mdht.emf.w3c.xhtml.Sup <em>Sup</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.openhealthtools.mdht.emf.w3c.xhtml.Sup
* @generated
*/
public Adapter createSupAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.openhealthtools.mdht.emf.w3c.xhtml.Table <em>Table</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.openhealthtools.mdht.emf.w3c.xhtml.Table
* @generated
*/
public Adapter createTableAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.openhealthtools.mdht.emf.w3c.xhtml.Tbody <em>Tbody</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.openhealthtools.mdht.emf.w3c.xhtml.Tbody
* @generated
*/
public Adapter createTbodyAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.openhealthtools.mdht.emf.w3c.xhtml.Td <em>Td</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.openhealthtools.mdht.emf.w3c.xhtml.Td
* @generated
*/
public Adapter createTdAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.openhealthtools.mdht.emf.w3c.xhtml.Tfoot <em>Tfoot</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.openhealthtools.mdht.emf.w3c.xhtml.Tfoot
* @generated
*/
public Adapter createTfootAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.openhealthtools.mdht.emf.w3c.xhtml.Th <em>Th</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.openhealthtools.mdht.emf.w3c.xhtml.Th
* @generated
*/
public Adapter createThAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.openhealthtools.mdht.emf.w3c.xhtml.Thead <em>Thead</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.openhealthtools.mdht.emf.w3c.xhtml.Thead
* @generated
*/
public Adapter createTheadAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.openhealthtools.mdht.emf.w3c.xhtml.Tr <em>Tr</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.openhealthtools.mdht.emf.w3c.xhtml.Tr
* @generated
*/
public Adapter createTrAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.openhealthtools.mdht.emf.w3c.xhtml.Tt <em>Tt</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.openhealthtools.mdht.emf.w3c.xhtml.Tt
* @generated
*/
public Adapter createTtAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.openhealthtools.mdht.emf.w3c.xhtml.Ul <em>Ul</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.openhealthtools.mdht.emf.w3c.xhtml.Ul
* @generated
*/
public Adapter createUlAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.openhealthtools.mdht.emf.w3c.xhtml.Var <em>Var</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.openhealthtools.mdht.emf.w3c.xhtml.Var
* @generated
*/
public Adapter createVarAdapter() {
return null;
}
/**
* Creates a new adapter for the default case.
* <!-- begin-user-doc -->
* This default implementation returns null.
* <!-- end-user-doc -->
* @return the new adapter.
* @generated
*/
public Adapter createEObjectAdapter() {
return null;
}
} // XhtmlAdapterFactory
| epl-1.0 |
OpenLiberty/open-liberty | dev/com.ibm.ws.anno/publish/appData/SCITest.ear/SCIAbsoluteNoOthers.war/WEB-INF/source/com/ibm/ws/servletContainerInitializer/SCIParentServlet.java | 4231 | /*******************************************************************************
* Copyright (c) 2017 IBM Corporation and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* IBM Corporation - initial API and implementation
*******************************************************************************/
package com.ibm.ws.servletContainerInitializer;
import java.io.IOException;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import javax.servlet.ServletContext;
import javax.servlet.ServletException;
import javax.servlet.ServletOutputStream;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
/**
 * Base servlet for the ServletContainerInitializer (SCI) test application.
 *
 * <p>Concrete subclasses delegate to {@link #doSuperPost}, which reports to the
 * client which SCI implementations and listeners recorded their execution as
 * {@link ServletContext} attributes, and which classes were handed to each
 * initializer's {@code onStartup} method (via the "classesSetHashMap" attribute).
 */
public abstract class SCIParentServlet extends HttpServlet {
    private static final long serialVersionUID = 1L;

    // Context attributes that the SCIs/listeners under test set to "executed",
    // listed in the exact order they are reported to the client.
    private static final String[] EXECUTION_ATTRIBUTES = {
        "SCIImpl",
        "SCIImpl2",
        "SCIImpl2b",
        "SCIImpl3",
        "ListenerImplContextInitialized",
        "SCIImpl4",
        "SCIImplRelative1",
        "SCIImplRelative2",
        "SCIImplRelative3",
    };

    /**
     * Writes the SCI execution report to the response.
     *
     * @param servletName name of the concrete servlet, echoed in the success line
     * @param request     current request; its ServletContext carries the result attributes
     * @param response    response the report is written to
     * @throws ServletException declared for parity with doPost (not thrown here)
     * @throws IOException      if writing to the response stream fails
     * @see HttpServlet#doPost(HttpServletRequest, HttpServletResponse)
     */
    protected void doSuperPost(String servletName, HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
        ServletOutputStream os = response.getOutputStream();
        String s = servletName + " successful";
        System.out.println(s);
        os.println(s);

        ServletContext ctx = request.getServletContext();

        // Report each initializer/listener that flagged itself as executed.
        for (String attribute : EXECUTION_ATTRIBUTES) {
            if ("executed".equals(ctx.getAttribute(attribute))) {
                os.println(attribute + " executed");
            }
        }
        if ("true".equals(ctx.getAttribute("UnsupportedOperationExceptionThrown"))) {
            os.println("UnsupportedOperationExceptionThrown is true (from Listener)");
        }

        // The initializers store a map of "SCI name -> classes passed to its onStartup".
        // The attribute is untyped, hence the unavoidable unchecked cast.
        @SuppressWarnings("unchecked")
        Map<String, Set<Class<?>>> classesSetHashMap = (Map<String, Set<Class<?>>>) ctx.getAttribute("classesSetHashMap");
        if (classesSetHashMap != null) {
            // Keys and class names are sorted so the report order is deterministic.
            for (String key : convertToStringArray(classesSetHashMap.keySet())) {
                Set<Class<?>> classesSet = classesSetHashMap.get(key);
                if (classesSet == null) {
                    continue; // defensive: treat a missing class set as empty
                }
                for (String className : convertClassesToStringArray(classesSet)) {
                    os.println("Class " + className + " was sent to onStartup method of " + key);
                }
            }
        }
    }

    /** Returns the members of {@code stringSet} as a sorted array. */
    private String[] convertToStringArray(Set<String> stringSet) {
        String[] s = stringSet.toArray(new String[0]);
        Arrays.sort(s);
        return s;
    }

    /** Returns the fully qualified names of the classes in {@code classSet}, sorted. */
    private String[] convertClassesToStringArray(Set<Class<?>> classSet) {
        String[] classArray = new String[classSet.size()];
        int i = 0;
        for (Class<?> clazz : classSet) {
            classArray[i++] = clazz.getName();
        }
        Arrays.sort(classArray);
        return classArray;
    }
}
| epl-1.0 |
sgilda/windup | forks/procyon/Procyon.CompilerTools/src/test/java/com/strobel/decompiler/ConditionalTests.java | 32692 | package com.strobel.decompiler;
import org.junit.Test;
import java.util.List;
import java.util.Set;
public class ConditionalTests extends DecompilerTest {
private static class A {
// Decompiler fixture: testComplexIfElse asserts Procyon emits exactly this if/else
// ladder, so the code shape must not be changed (comments do not survive compilation).
public boolean test(final List<Object> list, final Set<Object> set) {
if (list == null) {
if (set == null) {
System.out.println("a");
}
else {
System.out.println("b");
}
}
else if (set == null) {
System.out.println("c");
}
else if (list.isEmpty()) {
if (set.isEmpty()) {
System.out.println("d");
}
else {
System.out.println("e");
}
}
else if (set.size() < list.size()) {
System.out.println("f");
}
else {
System.out.println("g");
}
return true;
}
}
@SuppressWarnings("StatementWithEmptyBody")
private static class B {
// Decompiler fixture for testEmptyElseBlocks: the empty else branches are intentional
// (the test expects them to be eliminated in the decompiled output).
public boolean test(final List<Object> list, final Set<Object> set) {
if (list == null) {
if (set == null) {
System.out.println("B");
}
else {
}
}
else if (set == null) {
if (list.isEmpty()) {
System.out.println("E");
}
else {
}
}
return true;
}
}
@SuppressWarnings("ConstantConditions")
private static class C {
// Decompiler fixture for testShortCircuitEmbeddedAssignments: assignments to a local
// embedded inside short-circuit boolean expressions. Do not restyle.
public boolean test1(final boolean a, final boolean b, boolean c) {
System.out.println((b && a == (c = b) && b) || !c);
return c;
}
public boolean test2(final boolean a, final boolean b, boolean c) {
System.out.println((b && a == (c = b)) || !c);
return c;
}
public boolean test3(final boolean a, final boolean b, boolean c) {
System.out.println(b && a || (c = b) || !c);
return c;
}
public boolean test4(final boolean a, final boolean b, boolean c) {
System.out.println(b && (c = a) || !c);
return c;
}
public boolean test5(final boolean a, final boolean b, boolean c) {
System.out.println(b || (c = a) || !c);
return c;
}
public boolean test6(final boolean a, final boolean b, boolean c) {
System.out.println(b && (c = a));
return c;
}
public boolean test7(final boolean a, final boolean b, boolean c) {
System.out.println(b || (c = a));
return c;
}
public boolean test8(final boolean a, final boolean b, boolean c) {
System.out.println(b && a == (c = b) && b && c);
return c;
}
}
private static class D {
// Decompiler fixture for testTernaryWithTernaryCondition: a ternary whose condition
// is itself a ternary.
public boolean test(final boolean a, final boolean b, final boolean c, final boolean d) {
return (a ? b : c) ? d : (c ? b : a);
}
}
@SuppressWarnings("ConstantConditions")
private static class E {
// Decompiler fixture for testLogicalAndOrWithConditionals: &&/|| operands that are
// (nested) conditional expressions. Do not restyle.
public boolean test1(final boolean a, final boolean b, final boolean c) {
System.out.println(a || (c ? a : b));
return c;
}
public boolean test2(final boolean a, final boolean b, final boolean c) {
System.out.println(a && (c ? a : b));
return c;
}
public boolean test3(final boolean a, final boolean b, final boolean c) {
System.out.println(!a || (c ? a : b));
return c;
}
public boolean test4(final boolean a, final boolean b, final boolean c) {
System.out.println(!a && (c ? a : b));
return c;
}
public boolean test5(final boolean a, final boolean b, final boolean c) {
System.out.println(a && (c ? (b ? a : c) : (b ? c : a)));
return c;
}
public boolean test6(final boolean a, final boolean b, final boolean c) {
System.out.println(a || (c ? (b ? a : c) : (b ? c : a)));
return c;
}
}
@SuppressWarnings("ConstantConditions")
private static class F {
// Decompiler fixture for testShortCircuitEmbeddedInstanceFieldAssignments:
// same shapes as fixture C, but assigning an instance field.
private boolean c;
public boolean test1(final boolean a, final boolean b) {
System.out.println((b && a == (this.c = b) && b) || !this.c);
return this.c;
}
public boolean test2(final boolean a, final boolean b) {
System.out.println((b && a == (this.c = b)) || !this.c);
return this.c;
}
public boolean test3(final boolean a, final boolean b) {
System.out.println((b && a) || (this.c = b) || !this.c);
return this.c;
}
public boolean test4(final boolean a, final boolean b) {
System.out.println((b && (this.c = a)) || !this.c);
return this.c;
}
public boolean test5(final boolean a, final boolean b) {
System.out.println(b || (this.c = a) || !this.c);
return this.c;
}
public boolean test6(final boolean a, final boolean b) {
System.out.println(b && (this.c = a));
return this.c;
}
public boolean test7(final boolean a, final boolean b) {
System.out.println(b || (this.c = a));
return this.c;
}
public boolean test8(final boolean a, final boolean b) {
System.out.println(b && a == (this.c = b) && b && this.c);
return this.c;
}
}
@SuppressWarnings("ConstantConditions")
private static class G {
// Decompiler fixture for testShortCircuitEmbeddedStaticFieldAssignments:
// same shapes as fixture C, but assigning a static field.
private static boolean c;
public boolean test1(final boolean a, final boolean b) {
System.out.println((b && a == (G.c = b) && b) || !G.c);
return G.c;
}
public boolean test2(final boolean a, final boolean b) {
System.out.println((b && a == (G.c = b)) || !G.c);
return G.c;
}
public boolean test3(final boolean a, final boolean b) {
System.out.println((b && a) || (G.c = b) || !G.c);
return G.c;
}
public boolean test4(final boolean a, final boolean b) {
System.out.println((b && (G.c = a)) || !G.c);
return G.c;
}
public boolean test5(final boolean a, final boolean b) {
System.out.println(b || (G.c = a) || !G.c);
return G.c;
}
public boolean test6(final boolean a, final boolean b) {
System.out.println(b && (G.c = a));
return G.c;
}
public boolean test7(final boolean a, final boolean b) {
System.out.println(b || (G.c = a));
return G.c;
}
public boolean test8(final boolean a, final boolean b) {
System.out.println(b && a == (G.c = b) && b && G.c);
return G.c;
}
}
@SuppressWarnings("ConstantConditions")
private static class H {
// Decompiler fixture for testShortCircuitEmbeddedArrayAssignments:
// same shapes as fixture C, but assigning an array element.
public boolean test1(final boolean a, final boolean b, final boolean[] c) {
System.out.println((b && a == (c[0] = b) && b) || !c[0]);
return c[0];
}
public boolean test2(final boolean a, final boolean b, final boolean[] c) {
System.out.println((b && a == (c[0] = b)) || !c[0]);
return c[0];
}
public boolean test3(final boolean a, final boolean b, final boolean[] c) {
System.out.println((b && a) || (c[0] = b) || !c[0]);
return c[0];
}
public boolean test4(final boolean a, final boolean b, final boolean[] c) {
System.out.println((b && (c[0] = a)) || !c[0]);
return c[0];
}
public boolean test5(final boolean a, final boolean b, final boolean[] c) {
System.out.println(b || (c[0] = a) || !c[0]);
return c[0];
}
public boolean test6(final boolean a, final boolean b, final boolean[] c) {
System.out.println(b && (c[0] = a));
return c[0];
}
public boolean test7(final boolean a, final boolean b, final boolean[] c) {
System.out.println(b || (c[0] = a));
return c[0];
}
public boolean test8(final boolean a, final boolean b, final boolean[] c) {
System.out.println(b && a == (c[0] = b) && b && c[0]);
return c[0];
}
}
@SuppressWarnings({ "ConstantConditions", "UnusedDeclaration" })
private static class I {
// Decompiler fixture for testShortCircuitEmbeddedInstanceArrayAssignments:
// same shapes as fixture C, but assigning an element of an instance array field.
private boolean[] c;
public boolean test1(final boolean a, final boolean b) {
System.out.println((b && a == (this.c[0] = b) && b) || !this.c[0]);
return this.c[0];
}
public boolean test2(final boolean a, final boolean b) {
System.out.println((b && a == (this.c[0] = b)) || !this.c[0]);
return this.c[0];
}
public boolean test3(final boolean a, final boolean b) {
System.out.println((b && a) || (this.c[0] = b) || !this.c[0]);
return this.c[0];
}
public boolean test4(final boolean a, final boolean b) {
System.out.println((b && (this.c[0] = a)) || !this.c[0]);
return this.c[0];
}
public boolean test5(final boolean a, final boolean b) {
System.out.println(b || (this.c[0] = a) || !this.c[0]);
return this.c[0];
}
public boolean test6(final boolean a, final boolean b) {
System.out.println(b && (this.c[0] = a));
return this.c[0];
}
public boolean test7(final boolean a, final boolean b) {
System.out.println(b || (this.c[0] = a));
return this.c[0];
}
public boolean test8(final boolean a, final boolean b) {
System.out.println(b && a == (this.c[0] = b) && b && this.c[0]);
return this.c[0];
}
}
@SuppressWarnings({ "ConstantConditions", "UnusedDeclaration" })
private static class J {
// Decompiler fixture for testShortCircuitEmbeddedStaticArrayAssignments:
// same shapes as fixture C, but assigning an element of a static array field.
private static boolean[] c;
public boolean test1(final boolean a, final boolean b) {
System.out.println((b && a == (J.c[0] = b) && b) || !J.c[0]);
return J.c[0];
}
public boolean test2(final boolean a, final boolean b) {
System.out.println((b && a == (J.c[0] = b)) || !J.c[0]);
return J.c[0];
}
public boolean test3(final boolean a, final boolean b) {
System.out.println((b && a) || (J.c[0] = b) || !J.c[0]);
return J.c[0];
}
public boolean test4(final boolean a, final boolean b) {
System.out.println((b && (J.c[0] = a)) || !J.c[0]);
return J.c[0];
}
public boolean test5(final boolean a, final boolean b) {
System.out.println(b || (J.c[0] = a) || !J.c[0]);
return J.c[0];
}
public boolean test6(final boolean a, final boolean b) {
System.out.println(b && (J.c[0] = a));
return J.c[0];
}
public boolean test7(final boolean a, final boolean b) {
System.out.println(b || (J.c[0] = a));
return J.c[0];
}
public boolean test8(final boolean a, final boolean b) {
System.out.println(b && a == (J.c[0] = b) && b && J.c[0]);
return J.c[0];
}
}
@SuppressWarnings("UnusedDeclaration")
private static class K {
// Decompiler fixture for testShortCircuitEmbeddedSelfReferencingArrayAssignments:
// array element assignments whose index expressions reference the array being written.
private static int i;
private int j;
private static int index() {
return 0;
}
public boolean test1(final boolean a, final boolean b, final boolean[] c) {
System.out.println((a && c[0] == (c[0] = b) && b) || !c[0]);
return c[0];
}
public boolean test2(final boolean a, final boolean b, final boolean[] c) {
System.out.println((b && a == (c[0] = !c[0])) || !c[0]);
return c[0];
}
public boolean test3(final boolean a, final boolean b, final boolean[] c) {
System.out.println((a && c[index()] == (c[index()] = b) && b) || !c[index()]);
return c[index()];
}
public boolean test4(final boolean a, final boolean b, final boolean[] c) {
System.out.println((b && a == (c[index()] = !c[index()])) || !c[index()]);
return c[index()];
}
public boolean test5(final boolean a, final boolean b, final boolean[] c) {
System.out.println((a && c[index() + K.i] == (c[index() + this.j] = b) && b) || !c[index() + this.j]);
return c[index() + K.i];
}
public boolean test6(final boolean a, final boolean b, final boolean[] c) {
System.out.println((b && a == (c[index() + K.i] = !c[index() + this.j])) || !c[index() + this.j]);
return c[index() + K.i];
}
}
@Test
public void testComplexIfElse() throws Throwable {
// Expects Procyon to reconstruct fixture A's nested if/else ladder; the string
// literal below is the expected decompiled output and must stay byte-identical.
verifyOutput(
A.class,
defaultSettings(),
"private static class A {\n" +
" public boolean test(final List<Object> list, final Set<Object> set) {\n" +
" if (list == null) {\n" +
" if (set == null) {\n" +
" System.out.println(\"a\");\n" +
" }\n" +
" else {\n" +
" System.out.println(\"b\");\n" +
" }\n" +
" }\n" +
" else if (set == null) {\n" +
" System.out.println(\"c\");\n" +
" }\n" +
" else if (list.isEmpty()) {\n" +
" if (set.isEmpty()) {\n" +
" System.out.println(\"d\");\n" +
" }\n" +
" else {\n" +
" System.out.println(\"e\");\n" +
" }\n" +
" }\n" +
" else if (set.size() < list.size()) {\n" +
" System.out.println(\"f\");\n" +
" }\n" +
" else {\n" +
" System.out.println(\"g\");\n" +
" }\n" +
" return true;\n" +
" }\n" +
"}\n"
);
}
@Test
public void testEmptyElseBlocks() throws Throwable {
// Expects fixture B's empty else branches to be dropped and the second chain's
// conditions merged into "set == null && list.isEmpty()".
verifyOutput(
B.class,
defaultSettings(),
"private static class B {\n" +
" public boolean test(final List<Object> list, final Set<Object> set) {\n" +
" if (list == null) {\n" +
" if (set == null) {\n" +
" System.out.println(\"B\");\n" +
" }\n" +
" }\n" +
" else if (set == null && list.isEmpty()) {\n" +
" System.out.println(\"E\");\n" +
" }\n" +
" return true;\n" +
" }\n" +
"}\n"
);
}
@Test
public void testShortCircuitEmbeddedAssignments() throws Throwable {
// Expects local-variable assignments embedded in short-circuit expressions
// (fixture C) to round-trip; expected string must stay byte-identical.
verifyOutput(
C.class,
defaultSettings(),
"private static class C {\n" +
" public boolean test1(final boolean a, final boolean b, boolean c) {\n" +
" System.out.println((b && a == (c = b) && b) || !c);\n" +
" return c;\n" +
" }\n" +
" public boolean test2(final boolean a, final boolean b, boolean c) {\n" +
" System.out.println((b && a == (c = b)) || !c);\n" +
" return c;\n" +
" }\n" +
" public boolean test3(final boolean a, final boolean b, boolean c) {\n" +
" System.out.println((b && a) || (c = b) || !c);\n" +
" return c;\n" +
" }\n" +
" public boolean test4(final boolean a, final boolean b, boolean c) {\n" +
" System.out.println((b && (c = a)) || !c);\n" +
" return c;\n" +
" }\n" +
" public boolean test5(final boolean a, final boolean b, boolean c) {\n" +
" System.out.println(b || (c = a) || !c);\n" +
" return c;\n" +
" }\n" +
" public boolean test6(final boolean a, final boolean b, boolean c) {\n" +
" System.out.println(b && (c = a));\n" +
" return c;\n" +
" }\n" +
" public boolean test7(final boolean a, final boolean b, boolean c) {\n" +
" System.out.println(b || (c = a));\n" +
" return c;\n" +
" }\n" +
" public boolean test8(final boolean a, final boolean b, boolean c) {\n" +
" System.out.println(b && a == (c = b) && b && c);\n" +
" return c;\n" +
" }\n" +
"}"
);
}
@Test
public void testTernaryWithTernaryCondition() throws Throwable {
// Expects fixture D's ternary-in-ternary-condition to keep its parenthesized shape.
verifyOutput(
D.class,
defaultSettings(),
"private static class D {\n" +
" public boolean test(final boolean a, final boolean b, final boolean c, final boolean d) {\n" +
" return (a ? b : c) ? d : (c ? b : a);\n" +
" }\n" +
"}\n"
);
}
@Test
public void testLogicalAndOrWithConditionals() throws Throwable {
// Expects fixture E's &&/|| expressions with conditional operands to round-trip.
verifyOutput(
E.class,
defaultSettings(),
"private static class E {\n" +
" public boolean test1(final boolean a, final boolean b, final boolean c) {\n" +
" System.out.println(a || (c ? a : b));\n" +
" return c;\n" +
" }\n" +
" public boolean test2(final boolean a, final boolean b, final boolean c) {\n" +
" System.out.println(a && (c ? a : b));\n" +
" return c;\n" +
" }\n" +
" public boolean test3(final boolean a, final boolean b, final boolean c) {\n" +
" System.out.println(!a || (c ? a : b));\n" +
" return c;\n" +
" }\n" +
" public boolean test4(final boolean a, final boolean b, final boolean c) {\n" +
" System.out.println(!a && (c ? a : b));\n" +
" return c;\n" +
" }\n" +
" public boolean test5(final boolean a, final boolean b, final boolean c) {\n" +
" System.out.println(a && (c ? (b ? a : c) : (b ? c : a)));\n" +
" return c;\n" +
" }\n" +
" public boolean test6(final boolean a, final boolean b, final boolean c) {\n" +
" System.out.println(a || (c ? (b ? a : c) : (b ? c : a)));\n" +
" return c;\n" +
" }\n" +
"}"
);
}
@Test
public void testShortCircuitEmbeddedInstanceFieldAssignments() throws Throwable {
// Expects instance-field assignments embedded in short-circuit expressions
// (fixture F) to round-trip.
verifyOutput(
F.class,
defaultSettings(),
"private static class F {\n" +
" private boolean c;\n" +
" public boolean test1(final boolean a, final boolean b) {\n" +
" System.out.println((b && a == (this.c = b) && b) || !this.c);\n" +
" return this.c;\n" +
" }\n" +
" public boolean test2(final boolean a, final boolean b) {\n" +
" System.out.println((b && a == (this.c = b)) || !this.c);\n" +
" return this.c;\n" +
" }\n" +
" public boolean test3(final boolean a, final boolean b) {\n" +
" System.out.println((b && a) || (this.c = b) || !this.c);\n" +
" return this.c;\n" +
" }\n" +
" public boolean test4(final boolean a, final boolean b) {\n" +
" System.out.println((b && (this.c = a)) || !this.c);\n" +
" return this.c;\n" +
" }\n" +
" public boolean test5(final boolean a, final boolean b) {\n" +
" System.out.println(b || (this.c = a) || !this.c);\n" +
" return this.c;\n" +
" }\n" +
" public boolean test6(final boolean a, final boolean b) {\n" +
" System.out.println(b && (this.c = a));\n" +
" return this.c;\n" +
" }\n" +
" public boolean test7(final boolean a, final boolean b) {\n" +
" System.out.println(b || (this.c = a));\n" +
" return this.c;\n" +
" }\n" +
" public boolean test8(final boolean a, final boolean b) {\n" +
" System.out.println(b && a == (this.c = b) && b && this.c);\n" +
" return this.c;\n" +
" }\n" +
"}\n"
);
}
@Test
public void testShortCircuitEmbeddedStaticFieldAssignments() throws Throwable {
// Expects static-field assignments embedded in short-circuit expressions
// (fixture G) to round-trip.
verifyOutput(
G.class,
defaultSettings(),
"private static class G {\n" +
" private static boolean c;\n" +
" public boolean test1(final boolean a, final boolean b) {\n" +
" System.out.println((b && a == (G.c = b) && b) || !G.c);\n" +
" return G.c;\n" +
" }\n" +
" public boolean test2(final boolean a, final boolean b) {\n" +
" System.out.println((b && a == (G.c = b)) || !G.c);\n" +
" return G.c;\n" +
" }\n" +
" public boolean test3(final boolean a, final boolean b) {\n" +
" System.out.println((b && a) || (G.c = b) || !G.c);\n" +
" return G.c;\n" +
" }\n" +
" public boolean test4(final boolean a, final boolean b) {\n" +
" System.out.println((b && (G.c = a)) || !G.c);\n" +
" return G.c;\n" +
" }\n" +
" public boolean test5(final boolean a, final boolean b) {\n" +
" System.out.println(b || (G.c = a) || !G.c);\n" +
" return G.c;\n" +
" }\n" +
" public boolean test6(final boolean a, final boolean b) {\n" +
" System.out.println(b && (G.c = a));\n" +
" return G.c;\n" +
" }\n" +
" public boolean test7(final boolean a, final boolean b) {\n" +
" System.out.println(b || (G.c = a));\n" +
" return G.c;\n" +
" }\n" +
" public boolean test8(final boolean a, final boolean b) {\n" +
" System.out.println(b && a == (G.c = b) && b && G.c);\n" +
" return G.c;\n" +
" }\n" +
"}\n"
);
}
@Test
public void testShortCircuitEmbeddedArrayAssignments() throws Throwable {
// Expects array-element assignments embedded in short-circuit expressions
// (fixture H) to round-trip.
verifyOutput(
H.class,
defaultSettings(),
"private static class H {\n" +
" public boolean test1(final boolean a, final boolean b, final boolean[] c) {\n" +
" System.out.println((b && a == (c[0] = b) && b) || !c[0]);\n" +
" return c[0];\n" +
" }\n" +
" public boolean test2(final boolean a, final boolean b, final boolean[] c) {\n" +
" System.out.println((b && a == (c[0] = b)) || !c[0]);\n" +
" return c[0];\n" +
" }\n" +
" public boolean test3(final boolean a, final boolean b, final boolean[] c) {\n" +
" System.out.println((b && a) || (c[0] = b) || !c[0]);\n" +
" return c[0];\n" +
" }\n" +
" public boolean test4(final boolean a, final boolean b, final boolean[] c) {\n" +
" System.out.println((b && (c[0] = a)) || !c[0]);\n" +
" return c[0];\n" +
" }\n" +
" public boolean test5(final boolean a, final boolean b, final boolean[] c) {\n" +
" System.out.println(b || (c[0] = a) || !c[0]);\n" +
" return c[0];\n" +
" }\n" +
" public boolean test6(final boolean a, final boolean b, final boolean[] c) {\n" +
" System.out.println(b && (c[0] = a));\n" +
" return c[0];\n" +
" }\n" +
" public boolean test7(final boolean a, final boolean b, final boolean[] c) {\n" +
" System.out.println(b || (c[0] = a));\n" +
" return c[0];\n" +
" }\n" +
" public boolean test8(final boolean a, final boolean b, final boolean[] c) {\n" +
" System.out.println(b && a == (c[0] = b) && b && c[0]);\n" +
" return c[0];\n" +
" }\n" +
"}\n"
);
}
@Test
public void testShortCircuitEmbeddedInstanceArrayAssignments() throws Throwable {
// Expects instance-array-element assignments embedded in short-circuit
// expressions (fixture I) to round-trip.
verifyOutput(
I.class,
defaultSettings(),
"private static class I {\n" +
" private boolean[] c;\n" +
" public boolean test1(final boolean a, final boolean b) {\n" +
" System.out.println((b && a == (this.c[0] = b) && b) || !this.c[0]);\n" +
" return this.c[0];\n" +
" }\n" +
" public boolean test2(final boolean a, final boolean b) {\n" +
" System.out.println((b && a == (this.c[0] = b)) || !this.c[0]);\n" +
" return this.c[0];\n" +
" }\n" +
" public boolean test3(final boolean a, final boolean b) {\n" +
" System.out.println((b && a) || (this.c[0] = b) || !this.c[0]);\n" +
" return this.c[0];\n" +
" }\n" +
" public boolean test4(final boolean a, final boolean b) {\n" +
" System.out.println((b && (this.c[0] = a)) || !this.c[0]);\n" +
" return this.c[0];\n" +
" }\n" +
" public boolean test5(final boolean a, final boolean b) {\n" +
" System.out.println(b || (this.c[0] = a) || !this.c[0]);\n" +
" return this.c[0];\n" +
" }\n" +
" public boolean test6(final boolean a, final boolean b) {\n" +
" System.out.println(b && (this.c[0] = a));\n" +
" return this.c[0];\n" +
" }\n" +
" public boolean test7(final boolean a, final boolean b) {\n" +
" System.out.println(b || (this.c[0] = a));\n" +
" return this.c[0];\n" +
" }\n" +
" public boolean test8(final boolean a, final boolean b) {\n" +
" System.out.println(b && a == (this.c[0] = b) && b && this.c[0]);\n" +
" return this.c[0];\n" +
" }\n" +
"}\n"
);
}
/**
 * Same scenario as the instance-field variant, but the embedded assignments target
 * a static-field array ({@code J.c[0] = ...}), exercising static field-access
 * reconstruction inside short-circuit chains.
 */
@Test
public void testShortCircuitEmbeddedStaticArrayAssignments() throws Throwable {
    verifyOutput(
        J.class,
        defaultSettings(),
        "private static class J {\n" +
        " private static boolean[] c;\n" +
        " public boolean test1(final boolean a, final boolean b) {\n" +
        " System.out.println((b && a == (J.c[0] = b) && b) || !J.c[0]);\n" +
        " return J.c[0];\n" +
        " }\n" +
        " public boolean test2(final boolean a, final boolean b) {\n" +
        " System.out.println((b && a == (J.c[0] = b)) || !J.c[0]);\n" +
        " return J.c[0];\n" +
        " }\n" +
        " public boolean test3(final boolean a, final boolean b) {\n" +
        " System.out.println((b && a) || (J.c[0] = b) || !J.c[0]);\n" +
        " return J.c[0];\n" +
        " }\n" +
        " public boolean test4(final boolean a, final boolean b) {\n" +
        " System.out.println((b && (J.c[0] = a)) || !J.c[0]);\n" +
        " return J.c[0];\n" +
        " }\n" +
        " public boolean test5(final boolean a, final boolean b) {\n" +
        " System.out.println(b || (J.c[0] = a) || !J.c[0]);\n" +
        " return J.c[0];\n" +
        " }\n" +
        " public boolean test6(final boolean a, final boolean b) {\n" +
        " System.out.println(b && (J.c[0] = a));\n" +
        " return J.c[0];\n" +
        " }\n" +
        " public boolean test7(final boolean a, final boolean b) {\n" +
        " System.out.println(b || (J.c[0] = a));\n" +
        " return J.c[0];\n" +
        " }\n" +
        " public boolean test8(final boolean a, final boolean b) {\n" +
        " System.out.println(b && a == (J.c[0] = b) && b && J.c[0]);\n" +
        " return J.c[0];\n" +
        " }\n" +
        "}\n"
    );
}
/**
 * Verifies decompilation of short-circuit expressions whose embedded array
 * assignments read the same element they write ({@code c[0] = !c[0]}) and whose
 * indices involve method calls and field reads ({@code index() + K.i}), so the
 * reconstructed source must keep every sub-expression's evaluation order intact.
 */
@Test
public void testShortCircuitEmbeddedSelfReferencingArrayAssignments() throws Throwable {
    verifyOutput(
        K.class,
        defaultSettings(),
        "private static class K {\n" +
        " private static int i;\n" +
        " private int j;\n" +
        " private static int index() {\n" +
        " return 0;\n" +
        " }\n" +
        " public boolean test1(final boolean a, final boolean b, final boolean[] c) {\n" +
        " System.out.println((a && c[0] == (c[0] = b) && b) || !c[0]);\n" +
        " return c[0];\n" +
        " }\n" +
        " public boolean test2(final boolean a, final boolean b, final boolean[] c) {\n" +
        " System.out.println((b && a == (c[0] = !c[0])) || !c[0]);\n" +
        " return c[0];\n" +
        " }\n" +
        " public boolean test3(final boolean a, final boolean b, final boolean[] c) {\n" +
        " System.out.println((a && c[index()] == (c[index()] = b) && b) || !c[index()]);\n" +
        " return c[index()];\n" +
        " }\n" +
        " public boolean test4(final boolean a, final boolean b, final boolean[] c) {\n" +
        " System.out.println((b && a == (c[index()] = !c[index()])) || !c[index()]);\n" +
        " return c[index()];\n" +
        " }\n" +
        " public boolean test5(final boolean a, final boolean b, final boolean[] c) {\n" +
        " System.out.println((a && c[index() + K.i] == (c[index() + this.j] = b) && b) || !c[index() + this.j]);\n" +
        " return c[index() + K.i];\n" +
        " }\n" +
        " public boolean test6(final boolean a, final boolean b, final boolean[] c) {\n" +
        " System.out.println((b && a == (c[index() + K.i] = !c[index() + this.j])) || !c[index() + this.j]);\n" +
        " return c[index() + K.i];\n" +
        " }\n" +
        "}\n"
    );
}
}
| epl-1.0 |
filipefilardi/Telegram | TMessagesProj/src/main/java/org/telegram/ui/GroupCreateFinalActivity.java | 28386 | /*
* This is the source code of Telegram for Android v. 3.x.x.
* It is licensed under GNU GPL v. 2 or later.
* You should have received a copy of the license in this archive (see LICENSE).
*
* Copyright Nikolai Kudashov, 2013-2017.
*/
package org.telegram.ui;
import android.animation.Animator;
import android.animation.AnimatorListenerAdapter;
import android.animation.AnimatorSet;
import android.animation.ObjectAnimator;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.graphics.Canvas;
import android.graphics.drawable.Drawable;
import android.os.Bundle;
import android.os.Vibrator;
import android.text.Editable;
import android.text.InputFilter;
import android.text.InputType;
import android.text.TextWatcher;
import android.util.TypedValue;
import android.view.Gravity;
import android.view.View;
import android.view.ViewGroup;
import android.view.inputmethod.EditorInfo;
import android.widget.FrameLayout;
import android.widget.LinearLayout;
import org.telegram.messenger.AndroidUtilities;
import org.telegram.messenger.ChatObject;
import org.telegram.messenger.LocaleController;
import org.telegram.messenger.MessagesStorage;
import org.telegram.messenger.support.widget.LinearLayoutManager;
import org.telegram.messenger.support.widget.RecyclerView;
import org.telegram.tgnet.ConnectionsManager;
import org.telegram.tgnet.TLRPC;
import org.telegram.messenger.FileLog;
import org.telegram.messenger.MessagesController;
import org.telegram.messenger.NotificationCenter;
import org.telegram.messenger.R;
import org.telegram.ui.ActionBar.ActionBarMenuItem;
import org.telegram.ui.ActionBar.AlertDialog;
import org.telegram.ui.ActionBar.Theme;
import org.telegram.ui.ActionBar.ThemeDescription;
import org.telegram.ui.Cells.GroupCreateSectionCell;
import org.telegram.ui.Cells.GroupCreateUserCell;
import org.telegram.ui.ActionBar.ActionBar;
import org.telegram.ui.ActionBar.ActionBarMenu;
import org.telegram.ui.Components.AvatarDrawable;
import org.telegram.ui.Components.AvatarUpdater;
import org.telegram.ui.Components.BackupImageView;
import org.telegram.ui.ActionBar.BaseFragment;
import org.telegram.ui.Components.ContextProgressView;
import org.telegram.ui.Components.EditTextBoldCursor;
import org.telegram.ui.Components.GroupCreateDividerItemDecoration;
import org.telegram.ui.Components.LayoutHelper;
import org.telegram.ui.Components.RecyclerListView;
import java.util.ArrayList;
import java.util.concurrent.Semaphore;
public class GroupCreateFinalActivity extends BaseFragment implements NotificationCenter.NotificationCenterDelegate, AvatarUpdater.AvatarUpdaterDelegate {
private GroupCreateAdapter adapter;
private RecyclerView listView;
private EditTextBoldCursor editText;
private BackupImageView avatarImage;
private AvatarDrawable avatarDrawable;
private ActionBarMenuItem doneItem;
private ContextProgressView progressView;
private AnimatorSet doneItemAnimation;
private FrameLayout editTextContainer;
private TLRPC.FileLocation avatar;
private TLRPC.InputFile uploadedAvatar;
private ArrayList<Integer> selectedContacts;
private boolean createAfterUpload;
private boolean donePressed;
private AvatarUpdater avatarUpdater = new AvatarUpdater();
private String nameToSet;
private int chatType = ChatObject.CHAT_TYPE_CHAT;
private int reqId;
private final static int done_button = 1;
/**
 * @param args fragment arguments; "chatType" selects the kind of chat to create
 *             (defaults to {@link ChatObject#CHAT_TYPE_CHAT}) and "result" carries
 *             the selected member ids (read in onFragmentCreate).
 */
public GroupCreateFinalActivity(Bundle args) {
    super(args);
    chatType = args.getInt("chatType", ChatObject.CHAT_TYPE_CHAT);
    avatarDrawable = new AvatarDrawable();
}
@SuppressWarnings("unchecked")
@Override
public boolean onFragmentCreate() {
    // Register for UI-refresh and chat-creation events for the fragment's lifetime
    // (removed again in onFragmentDestroy()).
    NotificationCenter.getInstance().addObserver(this, NotificationCenter.updateInterfaces);
    NotificationCenter.getInstance().addObserver(this, NotificationCenter.chatDidCreated);
    NotificationCenter.getInstance().addObserver(this, NotificationCenter.chatDidFailCreate);
    avatarUpdater.parentFragment = this;
    avatarUpdater.delegate = this;
    selectedContacts = getArguments().getIntegerArrayList("result");
    // Collect ids of selected users that are not yet in the in-memory cache.
    final ArrayList<Integer> usersToLoad = new ArrayList<>();
    for (int a = 0; a < selectedContacts.size(); a++) {
        Integer uid = selectedContacts.get(a);
        if (MessagesController.getInstance().getUser(uid) == null) {
            usersToLoad.add(uid);
        }
    }
    if (!usersToLoad.isEmpty()) {
        // Load the missing users from storage. This blocks the current thread until
        // the storage-queue runnable releases the semaphore.
        final Semaphore semaphore = new Semaphore(0);
        final ArrayList<TLRPC.User> users = new ArrayList<>();
        MessagesStorage.getInstance().getStorageQueue().postRunnable(new Runnable() {
            @Override
            public void run() {
                users.addAll(MessagesStorage.getInstance().getUsers(usersToLoad));
                semaphore.release();
            }
        });
        try {
            semaphore.acquire();
        } catch (Exception e) {
            FileLog.e(e);
        }
        if (usersToLoad.size() != users.size()) {
            // Not every selected user could be loaded; refuse to create the fragment.
            return false;
        }
        if (!users.isEmpty()) {
            for (TLRPC.User user : users) {
                MessagesController.getInstance().putUser(user, true);
            }
        } else {
            return false;
        }
    }
    return super.onFragmentCreate();
}
@Override
public void onFragmentDestroy() {
    super.onFragmentDestroy();
    // Unregister every observer added in onFragmentCreate().
    NotificationCenter.getInstance().removeObserver(this, NotificationCenter.updateInterfaces);
    NotificationCenter.getInstance().removeObserver(this, NotificationCenter.chatDidCreated);
    NotificationCenter.getInstance().removeObserver(this, NotificationCenter.chatDidFailCreate);
    avatarUpdater.clear();
    if (reqId != 0) {
        // A createChat request is still in flight; cancel it.
        ConnectionsManager.getInstance().cancelRequest(reqId, true);
    }
}
/** Refreshes the member list when the fragment becomes visible again. */
@Override
public void onResume() {
    super.onResume();
    GroupCreateAdapter currentAdapter = adapter;
    if (currentAdapter == null) {
        return;
    }
    currentAdapter.notifyDataSetChanged();
}
/**
 * Builds the fragment UI: an action bar with a "done" button, a header with the
 * avatar picker and the group-name field, and a RecyclerView listing the
 * selected members.
 */
@Override
public View createView(Context context) {
    actionBar.setBackButtonImage(R.drawable.ic_ab_back);
    actionBar.setAllowOverlayTitle(true);
    actionBar.setTitle(LocaleController.getString("NewGroup", R.string.NewGroup));
    actionBar.setActionBarMenuOnItemClick(new ActionBar.ActionBarMenuOnItemClick() {
        @Override
        public void onItemClick(int id) {
            if (id == -1) {
                // Back button.
                finishFragment();
            } else if (id == done_button) {
                if (donePressed) {
                    return;
                }
                if (editText.length() == 0) {
                    // Empty group name: vibrate and shake the field instead of proceeding.
                    Vibrator v = (Vibrator) getParentActivity().getSystemService(Context.VIBRATOR_SERVICE);
                    if (v != null) {
                        v.vibrate(200);
                    }
                    AndroidUtilities.shakeView(editText, 2, 0);
                    return;
                }
                donePressed = true;
                AndroidUtilities.hideKeyboard(editText);
                editText.setEnabled(false);
                if (avatarUpdater.uploadingAvatar != null) {
                    // Avatar upload still running: defer creation to didUploadedPhoto().
                    createAfterUpload = true;
                } else {
                    showEditDoneProgress(true);
                    reqId = MessagesController.getInstance().createChat(editText.getText().toString(), selectedContacts, null, chatType, GroupCreateFinalActivity.this);
                }
            }
        }
    });
    ActionBarMenu menu = actionBar.createMenu();
    doneItem = menu.addItemWithWidth(done_button, R.drawable.ic_done, AndroidUtilities.dp(56));
    // Spinner shown in place of the done icon while the create request is in flight.
    progressView = new ContextProgressView(context, 1);
    doneItem.addView(progressView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT));
    progressView.setVisibility(View.INVISIBLE);
    fragmentView = new LinearLayout(context) {
        @Override
        protected boolean drawChild(Canvas canvas, View child, long drawingTime) {
            boolean result = super.drawChild(canvas, child, drawingTime);
            if (child == listView) {
                // Draw the header shadow just below the name/avatar container.
                parentLayout.drawHeaderShadow(canvas, editTextContainer.getMeasuredHeight());
            }
            return result;
        }
    };
    LinearLayout linearLayout = (LinearLayout) fragmentView;
    linearLayout.setOrientation(LinearLayout.VERTICAL);
    editTextContainer = new FrameLayout(context);
    linearLayout.addView(editTextContainer, LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT));
    avatarImage = new BackupImageView(context);
    avatarImage.setRoundRadius(AndroidUtilities.dp(32));
    avatarDrawable.setInfo(5, null, null, chatType == ChatObject.CHAT_TYPE_BROADCAST);
    avatarImage.setImageDrawable(avatarDrawable);
    editTextContainer.addView(avatarImage, LayoutHelper.createFrame(64, 64, Gravity.TOP | (LocaleController.isRTL ? Gravity.RIGHT : Gravity.LEFT), LocaleController.isRTL ? 0 : 16, 16, LocaleController.isRTL ? 16 : 0, 16));
    avatarDrawable.setDrawPhoto(true);
    avatarImage.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View view) {
            if (getParentActivity() == null) {
                return;
            }
            // Camera / gallery picker; "delete" is only offered once an avatar is set.
            AlertDialog.Builder builder = new AlertDialog.Builder(getParentActivity());
            CharSequence[] items;
            if (avatar != null) {
                items = new CharSequence[]{LocaleController.getString("FromCamera", R.string.FromCamera), LocaleController.getString("FromGalley", R.string.FromGalley), LocaleController.getString("DeletePhoto", R.string.DeletePhoto)};
            } else {
                items = new CharSequence[]{LocaleController.getString("FromCamera", R.string.FromCamera), LocaleController.getString("FromGalley", R.string.FromGalley)};
            }
            builder.setItems(items, new DialogInterface.OnClickListener() {
                @Override
                public void onClick(DialogInterface dialogInterface, int i) {
                    if (i == 0) {
                        avatarUpdater.openCamera();
                    } else if (i == 1) {
                        avatarUpdater.openGallery();
                    } else if (i == 2) {
                        // Delete the chosen avatar and fall back to the letter placeholder.
                        avatar = null;
                        uploadedAvatar = null;
                        avatarImage.setImage(avatar, "50_50", avatarDrawable);
                    }
                }
            });
            showDialog(builder.create());
        }
    });
    editText = new EditTextBoldCursor(context);
    editText.setHint(chatType == ChatObject.CHAT_TYPE_CHAT ? LocaleController.getString("EnterGroupNamePlaceholder", R.string.EnterGroupNamePlaceholder) : LocaleController.getString("EnterListName", R.string.EnterListName));
    if (nameToSet != null) {
        // Name saved across process death (restoreSelfArgs ran before the view existed).
        editText.setText(nameToSet);
        nameToSet = null;
    }
    editText.setMaxLines(4);
    editText.setGravity(Gravity.CENTER_VERTICAL | (LocaleController.isRTL ? Gravity.RIGHT : Gravity.LEFT));
    editText.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 18);
    editText.setHintTextColor(Theme.getColor(Theme.key_windowBackgroundWhiteHintText));
    editText.setTextColor(Theme.getColor(Theme.key_windowBackgroundWhiteBlackText));
    editText.setBackgroundDrawable(Theme.createEditTextDrawable(context, false));
    editText.setImeOptions(EditorInfo.IME_FLAG_NO_EXTRACT_UI);
    editText.setInputType(InputType.TYPE_TEXT_FLAG_CAP_SENTENCES);
    editText.setPadding(0, 0, 0, AndroidUtilities.dp(8));
    // Group names are limited to 100 characters.
    InputFilter[] inputFilters = new InputFilter[1];
    inputFilters[0] = new InputFilter.LengthFilter(100);
    editText.setFilters(inputFilters);
    editText.setCursorColor(Theme.getColor(Theme.key_windowBackgroundWhiteBlackText));
    editText.setCursorSize(AndroidUtilities.dp(20));
    editText.setCursorWidth(1.5f);
    editTextContainer.addView(editText, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER_VERTICAL, LocaleController.isRTL ? 16 : 96, 0, LocaleController.isRTL ? 96 : 16, 0));
    editText.addTextChangedListener(new TextWatcher() {
        @Override
        public void beforeTextChanged(CharSequence s, int start, int count, int after) {
        }
        @Override
        public void onTextChanged(CharSequence s, int start, int before, int count) {
        }
        @Override
        public void afterTextChanged(Editable s) {
            // Mirror the typed name into the placeholder avatar.
            avatarDrawable.setInfo(5, editText.length() > 0 ? editText.getText().toString() : null, null, false);
            avatarImage.invalidate();
        }
    });
    LinearLayoutManager linearLayoutManager = new LinearLayoutManager(context, LinearLayoutManager.VERTICAL, false);
    listView = new RecyclerListView(context);
    listView.setAdapter(adapter = new GroupCreateAdapter(context));
    listView.setLayoutManager(linearLayoutManager);
    listView.setVerticalScrollBarEnabled(false);
    listView.setVerticalScrollbarPosition(LocaleController.isRTL ? View.SCROLLBAR_POSITION_LEFT : View.SCROLLBAR_POSITION_RIGHT);
    listView.addItemDecoration(new GroupCreateDividerItemDecoration());
    linearLayout.addView(listView, LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT));
    listView.setOnScrollListener(new RecyclerView.OnScrollListener() {
        @Override
        public void onScrollStateChanged(RecyclerView recyclerView, int newState) {
            // Hide the keyboard once the user starts scrolling the member list.
            if (newState == RecyclerView.SCROLL_STATE_DRAGGING) {
                AndroidUtilities.hideKeyboard(editText);
            }
        }
    });
    return fragmentView;
}
// AvatarUpdater.AvatarUpdaterDelegate: invoked when the avatar photo finished uploading.
@Override
public void didUploadedPhoto(final TLRPC.InputFile file, final TLRPC.PhotoSize small, final TLRPC.PhotoSize big) {
    AndroidUtilities.runOnUIThread(new Runnable() {
        @Override
        public void run() {
            uploadedAvatar = file;
            avatar = small.location;
            avatarImage.setImage(avatar, "50_50", avatarDrawable);
            if (createAfterUpload) {
                // The user pressed "done" while the upload was running; create the chat now.
                MessagesController.getInstance().createChat(editText.getText().toString(), selectedContacts, null, chatType, GroupCreateFinalActivity.this);
            }
        }
    });
}
// Forwards camera/gallery activity results to the avatar updater.
@Override
public void onActivityResultFragment(int requestCode, int resultCode, Intent data) {
    avatarUpdater.onActivityResult(requestCode, resultCode, data);
}
/**
 * Persists transient UI state (the pending camera-capture path and the typed
 * group name) so it can be restored via {@link #restoreSelfArgs(Bundle)}.
 */
@Override
public void saveSelfArgs(Bundle args) {
    if (avatarUpdater != null) {
        String picturePath = avatarUpdater.currentPicturePath;
        if (picturePath != null) {
            args.putString("path", picturePath);
        }
    }
    if (editText == null) {
        return;
    }
    String name = editText.getText().toString();
    if (name != null && !name.isEmpty()) {
        args.putString("nameTextView", name);
    }
}
/**
 * Restores the state saved by {@link #saveSelfArgs(Bundle)}: the in-progress
 * camera-capture path and the previously typed group name.
 */
@Override
public void restoreSelfArgs(Bundle args) {
    if (avatarUpdater != null) {
        avatarUpdater.currentPicturePath = args.getString("path");
    }
    String savedName = args.getString("nameTextView");
    if (savedName == null) {
        return;
    }
    if (editText != null) {
        editText.setText(savedName);
    } else {
        // View not created yet; createView() will apply the name.
        nameToSet = savedName;
    }
}
@Override
public void onTransitionAnimationEnd(boolean isOpen, boolean backward) {
    if (isOpen) {
        // Focus the name field and raise the keyboard once the open animation finished.
        editText.requestFocus();
        AndroidUtilities.showKeyboard(editText);
    }
}
// NotificationCenter.NotificationCenterDelegate: reacts to user-info updates and
// to the success/failure of the chat-creation request.
@Override
public void didReceivedNotification(int id, final Object... args) {
    if (id == NotificationCenter.updateInterfaces) {
        if (listView == null) {
            return;
        }
        int mask = (Integer) args[0];
        // Refresh visible member cells when an avatar, name or status changed.
        if ((mask & MessagesController.UPDATE_MASK_AVATAR) != 0 || (mask & MessagesController.UPDATE_MASK_NAME) != 0 || (mask & MessagesController.UPDATE_MASK_STATUS) != 0) {
            int count = listView.getChildCount();
            for (int a = 0; a < count; a++) {
                View child = listView.getChildAt(a);
                if (child instanceof GroupCreateUserCell) {
                    ((GroupCreateUserCell) child).update(mask);
                }
            }
        }
    } else if (id == NotificationCenter.chatDidFailCreate) {
        // Creation failed: hide the progress spinner and make the form editable again.
        reqId = 0;
        donePressed = false;
        showEditDoneProgress(false);
        if (editText != null) {
            editText.setEnabled(true);
        }
    } else if (id == NotificationCenter.chatDidCreated) {
        // Creation succeeded: open the new chat and apply the uploaded avatar, if any.
        reqId = 0;
        int chat_id = (Integer) args[0];
        NotificationCenter.getInstance().postNotificationName(NotificationCenter.closeChats);
        Bundle args2 = new Bundle();
        args2.putInt("chat_id", chat_id);
        presentFragment(new ChatActivity(args2), true);
        if (uploadedAvatar != null) {
            MessagesController.getInstance().changeChatAvatar(chat_id, uploadedAvatar);
        }
    }
}
/**
 * Cross-fades between the action-bar "done" icon and the progress spinner.
 *
 * @param show true to show the spinner and disable the done item (request in
 *             flight); false to restore the icon and re-enable the item.
 */
private void showEditDoneProgress(final boolean show) {
    if (doneItem == null) {
        return;
    }
    if (doneItemAnimation != null) {
        // Interrupt any animation that is still running.
        doneItemAnimation.cancel();
    }
    doneItemAnimation = new AnimatorSet();
    if (show) {
        progressView.setVisibility(View.VISIBLE);
        doneItem.setEnabled(false);
        doneItemAnimation.playTogether(
                ObjectAnimator.ofFloat(doneItem.getImageView(), "scaleX", 0.1f),
                ObjectAnimator.ofFloat(doneItem.getImageView(), "scaleY", 0.1f),
                ObjectAnimator.ofFloat(doneItem.getImageView(), "alpha", 0.0f),
                ObjectAnimator.ofFloat(progressView, "scaleX", 1.0f),
                ObjectAnimator.ofFloat(progressView, "scaleY", 1.0f),
                ObjectAnimator.ofFloat(progressView, "alpha", 1.0f));
    } else {
        doneItem.getImageView().setVisibility(View.VISIBLE);
        doneItem.setEnabled(true);
        doneItemAnimation.playTogether(
                ObjectAnimator.ofFloat(progressView, "scaleX", 0.1f),
                ObjectAnimator.ofFloat(progressView, "scaleY", 0.1f),
                ObjectAnimator.ofFloat(progressView, "alpha", 0.0f),
                ObjectAnimator.ofFloat(doneItem.getImageView(), "scaleX", 1.0f),
                ObjectAnimator.ofFloat(doneItem.getImageView(), "scaleY", 1.0f),
                ObjectAnimator.ofFloat(doneItem.getImageView(), "alpha", 1.0f));
    }
    doneItemAnimation.addListener(new AnimatorListenerAdapter() {
        @Override
        public void onAnimationEnd(Animator animation) {
            // Only hide the faded-out view if this animation was not superseded.
            if (doneItemAnimation != null && doneItemAnimation.equals(animation)) {
                if (!show) {
                    progressView.setVisibility(View.INVISIBLE);
                } else {
                    doneItem.getImageView().setVisibility(View.INVISIBLE);
                }
            }
        }
        @Override
        public void onAnimationCancel(Animator animation) {
            if (doneItemAnimation != null && doneItemAnimation.equals(animation)) {
                doneItemAnimation = null;
            }
        }
    });
    doneItemAnimation.setDuration(150);
    doneItemAnimation.start();
}
/**
 * Adapter for the member list shown below the name field. Row 0 is a "Members"
 * section header; every following row displays one selected user. Rows are not
 * clickable.
 */
public class GroupCreateAdapter extends RecyclerListView.SelectionAdapter {
    private Context context;

    public GroupCreateAdapter(Context ctx) {
        context = ctx;
    }

    @Override
    public int getItemCount() {
        // One header row plus one row per selected contact.
        return 1 + selectedContacts.size();
    }

    @Override
    public boolean isEnabled(RecyclerView.ViewHolder holder) {
        // The list is display-only.
        return false;
    }

    @Override
    public RecyclerView.ViewHolder onCreateViewHolder(ViewGroup parent, int viewType) {
        View view;
        switch (viewType) {
            case 0:
                view = new GroupCreateSectionCell(context);
                break;
            default:
                view = new GroupCreateUserCell(context, false);
                break;
        }
        return new RecyclerListView.Holder(view);
    }

    @Override
    public void onBindViewHolder(RecyclerView.ViewHolder holder, int position) {
        switch (holder.getItemViewType()) {
            case 0: {
                GroupCreateSectionCell cell = (GroupCreateSectionCell) holder.itemView;
                cell.setText(LocaleController.formatPluralString("Members", selectedContacts.size()));
                break;
            }
            default: {
                GroupCreateUserCell cell = (GroupCreateUserCell) holder.itemView;
                // position - 1 accounts for the header row at position 0.
                TLRPC.User user = MessagesController.getInstance().getUser(selectedContacts.get(position - 1));
                cell.setUser(user, null, null);
                break;
            }
        }
    }

    @Override
    public int getItemViewType(int position) {
        // 0 = section header, 1 = user cell.
        switch (position) {
            case 0:
                return 0;
            default:
                return 1;
        }
    }

    @Override
    public void onViewRecycled(RecyclerView.ViewHolder holder) {
        if (holder.getItemViewType() == 1) {
            ((GroupCreateUserCell) holder.itemView).recycle();
        }
    }
}
/**
 * Declares every themable element of this screen so the theming engine can
 * re-apply colors live. The delegate re-tints visible member cells and the
 * letter-avatar placeholder whenever an avatar background color changes.
 */
@Override
public ThemeDescription[] getThemeDescriptions() {
    // Fix: the local variable was named with a leading Cyrillic "с" (U+0441),
    // a homoglyph of the Latin "c"; renamed to a plain ASCII identifier.
    ThemeDescription.ThemeDescriptionDelegate cellDelegate = new ThemeDescription.ThemeDescriptionDelegate() {
        @Override
        public void didSetColor(int color) {
            int count = listView.getChildCount();
            for (int a = 0; a < count; a++) {
                View child = listView.getChildAt(a);
                if (child instanceof GroupCreateUserCell) {
                    ((GroupCreateUserCell) child).update(0);
                }
            }
            avatarDrawable.setInfo(5, editText.length() > 0 ? editText.getText().toString() : null, null, false);
            avatarImage.invalidate();
        }
    };
    return new ThemeDescription[]{
            new ThemeDescription(fragmentView, ThemeDescription.FLAG_BACKGROUND, null, null, null, null, Theme.key_windowBackgroundWhite),
            new ThemeDescription(actionBar, ThemeDescription.FLAG_BACKGROUND, null, null, null, null, Theme.key_actionBarDefault),
            new ThemeDescription(listView, ThemeDescription.FLAG_LISTGLOWCOLOR, null, null, null, null, Theme.key_actionBarDefault),
            new ThemeDescription(actionBar, ThemeDescription.FLAG_AB_ITEMSCOLOR, null, null, null, null, Theme.key_actionBarDefaultIcon),
            new ThemeDescription(actionBar, ThemeDescription.FLAG_AB_TITLECOLOR, null, null, null, null, Theme.key_actionBarDefaultTitle),
            new ThemeDescription(actionBar, ThemeDescription.FLAG_AB_SELECTORCOLOR, null, null, null, null, Theme.key_actionBarDefaultSelector),
            new ThemeDescription(listView, ThemeDescription.FLAG_SELECTOR, null, null, null, null, Theme.key_listSelector),
            new ThemeDescription(listView, ThemeDescription.FLAG_FASTSCROLL, null, null, null, null, Theme.key_fastScrollActive),
            new ThemeDescription(listView, ThemeDescription.FLAG_FASTSCROLL, null, null, null, null, Theme.key_fastScrollInactive),
            new ThemeDescription(listView, ThemeDescription.FLAG_FASTSCROLL, null, null, null, null, Theme.key_fastScrollText),
            new ThemeDescription(listView, 0, new Class[]{View.class}, Theme.dividerPaint, null, null, Theme.key_divider),
            new ThemeDescription(editText, ThemeDescription.FLAG_TEXTCOLOR, null, null, null, null, Theme.key_windowBackgroundWhiteBlackText),
            new ThemeDescription(editText, ThemeDescription.FLAG_HINTTEXTCOLOR, null, null, null, null, Theme.key_groupcreate_hintText),
            new ThemeDescription(editText, ThemeDescription.FLAG_CURSORCOLOR, null, null, null, null, Theme.key_groupcreate_cursor),
            new ThemeDescription(editText, ThemeDescription.FLAG_BACKGROUNDFILTER, null, null, null, null, Theme.key_windowBackgroundWhiteInputField),
            new ThemeDescription(editText, ThemeDescription.FLAG_BACKGROUNDFILTER | ThemeDescription.FLAG_DRAWABLESELECTEDSTATE, null, null, null, null, Theme.key_windowBackgroundWhiteInputFieldActivated),
            new ThemeDescription(listView, ThemeDescription.FLAG_CELLBACKGROUNDCOLOR, new Class[]{GroupCreateSectionCell.class}, null, null, null, Theme.key_graySection),
            new ThemeDescription(listView, 0, new Class[]{GroupCreateSectionCell.class}, new String[]{"drawable"}, null, null, null, Theme.key_groupcreate_sectionShadow),
            new ThemeDescription(listView, ThemeDescription.FLAG_TEXTCOLOR, new Class[]{GroupCreateSectionCell.class}, new String[]{"textView"}, null, null, null, Theme.key_groupcreate_sectionText),
            new ThemeDescription(listView, ThemeDescription.FLAG_TEXTCOLOR, new Class[]{GroupCreateUserCell.class}, new String[]{"textView"}, null, null, null, Theme.key_groupcreate_sectionText),
            new ThemeDescription(listView, ThemeDescription.FLAG_TEXTCOLOR | ThemeDescription.FLAG_CHECKTAG, new Class[]{GroupCreateUserCell.class}, new String[]{"statusTextView"}, null, null, null, Theme.key_groupcreate_onlineText),
            new ThemeDescription(listView, ThemeDescription.FLAG_TEXTCOLOR | ThemeDescription.FLAG_CHECKTAG, new Class[]{GroupCreateUserCell.class}, new String[]{"statusTextView"}, null, null, null, Theme.key_groupcreate_offlineText),
            new ThemeDescription(listView, 0, new Class[]{GroupCreateUserCell.class}, null, new Drawable[]{Theme.avatar_photoDrawable, Theme.avatar_broadcastDrawable, Theme.avatar_savedDrawable}, cellDelegate, Theme.key_avatar_text),
            new ThemeDescription(null, 0, null, null, null, cellDelegate, Theme.key_avatar_backgroundRed),
            new ThemeDescription(null, 0, null, null, null, cellDelegate, Theme.key_avatar_backgroundOrange),
            new ThemeDescription(null, 0, null, null, null, cellDelegate, Theme.key_avatar_backgroundViolet),
            new ThemeDescription(null, 0, null, null, null, cellDelegate, Theme.key_avatar_backgroundGreen),
            new ThemeDescription(null, 0, null, null, null, cellDelegate, Theme.key_avatar_backgroundCyan),
            new ThemeDescription(null, 0, null, null, null, cellDelegate, Theme.key_avatar_backgroundBlue),
            new ThemeDescription(null, 0, null, null, null, cellDelegate, Theme.key_avatar_backgroundPink),
            new ThemeDescription(progressView, 0, null, null, null, null, Theme.key_contextProgressInner2),
            new ThemeDescription(progressView, 0, null, null, null, null, Theme.key_contextProgressOuter2),
            new ThemeDescription(editText, ThemeDescription.FLAG_TEXTCOLOR, null, null, null, null, Theme.key_windowBackgroundWhiteBlackText),
            new ThemeDescription(editText, ThemeDescription.FLAG_HINTTEXTCOLOR, null, null, null, null, Theme.key_windowBackgroundWhiteHintText),
    };
}
}
| gpl-2.0 |
posbit/android | src/com/owncloud/android/files/managers/OCNotificationManager.java | 5935 | /* ownCloud Android client application
* Copyright (C) 2012 Bartek Przybylski
* Copyright (C) 2012-2013 ownCloud Inc.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License version 2,
* as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package com.owncloud.android.files.managers;
import java.util.HashMap;
import java.util.Map;
import com.owncloud.android.R;
import com.owncloud.android.utils.DisplayUtils;
import android.app.Notification;
import android.app.NotificationManager;
import android.content.Context;
import android.widget.RemoteViews;
/**
 * Helper around the system {@link NotificationManager} that tracks posted
 * notifications by id and supports plain text notifications as well as
 * progress-bar notifications built from a custom {@link RemoteViews} layout.
 * NOTE(review): state is not synchronized; presumably only used from the UI
 * thread — confirm.
 */
public class OCNotificationManager {

    enum NotificationType {
        NOTIFICATION_SIMPLE,
        NOTIFICATION_PROGRESS
    }

    /** Payload for a notification: text, subtitle, progress percent (-1 = none) and ongoing flag. */
    public static class NotificationData {
        private String mText, mSubtitle;
        private int mPercent;
        private boolean mOngoing;

        public NotificationData(String text, String subtitle, boolean ongoing) {
            this(text, subtitle, -1, ongoing);
        }

        public NotificationData(int percent, boolean ongoing) {
            this(null, null, percent, ongoing);
        }

        public NotificationData(String text, int percent, boolean ongoing) {
            this(text, null, percent, ongoing);
        }

        public NotificationData(String text, String subtitle, int percent, boolean ongoing) {
            mText = text;
            mPercent = percent;
            mSubtitle = subtitle;
            mOngoing = ongoing;
        }

        public String getText() { return mText; }
        public int getPercent() { return mPercent; }
        public String getSubtitle() { return mSubtitle; }
        public boolean getOngoing() { return mOngoing; }
    }

    static private OCNotificationManager mInstance = null;

    /** Pairs a Notification with the type it was created as, so updates know how to patch it. */
    private class NotificationTypePair {
        public Notification mNotification;
        public NotificationType mType;

        public NotificationTypePair(Notification n, NotificationType type) {
            mNotification = n;
            mType = type;
        }
    }

    private Context mContext;
    private Map<Integer, NotificationTypePair> mNotificationMap;
    private int mNotificationCounter;
    NotificationManager mNM;

    /** Lazily creates the process-wide singleton. */
    static OCNotificationManager getInstance(Context context) {
        if (mInstance == null)
            mInstance = new OCNotificationManager(context);
        return mInstance;
    }

    OCNotificationManager(Context context) {
        mContext = context;
        mNotificationMap = new HashMap<Integer, NotificationTypePair>();
        mNM = (NotificationManager) mContext.getSystemService(Context.NOTIFICATION_SERVICE);
        mNotificationCounter = 0;
    }

    /**
     * Builds a notification of the given type and registers it under a freshly
     * allocated id.
     *
     * @return the id to pass to {@link #updateNotification} /
     *         {@link #discardNotification}, or -1 for an unknown type.
     * NOTE(review): the notification is registered but never posted here via
     * {@code mNM.notify(...)}; it only becomes visible after a SIMPLE update —
     * confirm this is intended.
     */
    public int postNotification(NotificationType type, NotificationData data) {
        mNotificationCounter++;
        Notification notification = null;
        switch (type) {
            case NOTIFICATION_SIMPLE:
                notification = new Notification(DisplayUtils.getSeasonalIconId(), data.getText(), System.currentTimeMillis());
                break;
            case NOTIFICATION_PROGRESS:
                notification = new Notification();
                notification.contentView = new RemoteViews(mContext.getPackageName(), R.layout.progressbar_layout);
                notification.contentView.setTextViewText(R.id.status_text,
                        data.getText());
                // NOTE(review): an id resource (R.id.icon) is passed where a drawable
                // resource id is expected — verify against progressbar_layout.
                notification.contentView.setImageViewResource(R.id.status_icon,
                        R.id.icon);
                notification.contentView.setProgressBar(R.id.status_progress,
                        100,
                        data.getPercent(),
                        false);
                break;
            default:
                return -1;
        }
        if (data.getOngoing()) {
            // Was "notification.flags |= notification.flags | FLAG_ONGOING_EVENT":
            // the self-OR was redundant; simply set the ongoing bit.
            notification.flags |= Notification.FLAG_ONGOING_EVENT;
        }
        mNotificationMap.put(mNotificationCounter, new NotificationTypePair(notification, type));
        return mNotificationCounter;
    }

    /**
     * Updates a previously posted notification.
     *
     * @return false if the id is unknown or the stored type is not updatable.
     */
    public boolean updateNotification(int notification_id, NotificationData data) {
        if (!mNotificationMap.containsKey(notification_id)) {
            return false;
        }
        NotificationTypePair pair = mNotificationMap.get(notification_id);
        switch (pair.mType) {
            case NOTIFICATION_PROGRESS:
                // Fix: target the progress-bar view (R.id.status_progress) as in
                // postNotification(); this previously targeted R.id.status_text,
                // so the displayed progress never changed.
                pair.mNotification.contentView.setProgressBar(R.id.status_progress,
                        100,
                        data.getPercent(),
                        false);
                return true;
            case NOTIFICATION_SIMPLE:
                pair.mNotification = new Notification(DisplayUtils.getSeasonalIconId(),
                        data.getText(), System.currentTimeMillis());
                mNM.notify(notification_id, pair.mNotification);
                return true;
            default:
                return false;
        }
    }

    /** Cancels the system notification and forgets its id. */
    public void discardNotification(int notification_id) {
        mNM.cancel(notification_id);
        mNotificationMap.remove(notification_id);
    }
}
| gpl-2.0 |
thermatk/Telegram-FOSS | TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/extractor/ts/Ac3Extractor.java | 5269 | /*
* Copyright (C) 2016 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.telegram.messenger.exoplayer2.extractor.ts;
import org.telegram.messenger.exoplayer2.C;
import org.telegram.messenger.exoplayer2.audio.Ac3Util;
import org.telegram.messenger.exoplayer2.extractor.Extractor;
import org.telegram.messenger.exoplayer2.extractor.ExtractorInput;
import org.telegram.messenger.exoplayer2.extractor.ExtractorOutput;
import org.telegram.messenger.exoplayer2.extractor.ExtractorsFactory;
import org.telegram.messenger.exoplayer2.extractor.PositionHolder;
import org.telegram.messenger.exoplayer2.extractor.SeekMap;
import org.telegram.messenger.exoplayer2.extractor.ts.TsPayloadReader.TrackIdGenerator;
import org.telegram.messenger.exoplayer2.util.ParsableByteArray;
import org.telegram.messenger.exoplayer2.util.Util;
import java.io.IOException;
/**
* Facilitates the extraction of AC-3 samples from elementary audio files formatted as AC-3
* bitstreams.
*/
public final class Ac3Extractor implements Extractor {

  /**
   * Factory for {@link Ac3Extractor} instances.
   */
  public static final ExtractorsFactory FACTORY = new ExtractorsFactory() {

    @Override
    public Extractor[] createExtractors() {
      // Each call hands out a fresh, independent extractor instance.
      return new Extractor[] {new Ac3Extractor()};
    }

  };

  /**
   * The maximum number of bytes to search when sniffing, excluding ID3 information, before giving
   * up.
   */
  private static final int MAX_SNIFF_BYTES = 8 * 1024;
  // 16-bit sync word (0x0B77) that starts every AC-3 syncframe.
  private static final int AC3_SYNC_WORD = 0x0B77;
  // Upper bound on the size of a single AC-3 syncframe, in bytes; also sizes sampleData.
  private static final int MAX_SYNC_FRAME_SIZE = 2786;
  // Integer code for the ASCII characters "ID3", used to detect leading ID3 metadata.
  private static final int ID3_TAG = Util.getIntegerCodeForString("ID3");

  // Timestamp, in microseconds, assigned to the first extracted sample.
  private final long firstSampleTimestampUs;
  // Parses AC-3 frames out of the raw byte stream and emits samples.
  private final Ac3Reader reader;
  // Reusable scratch buffer that input data is copied into before the reader consumes it.
  private final ParsableByteArray sampleData;

  // Whether packetStarted(...) has already been signalled to the reader for this stream.
  private boolean startedPacket;

  /** Creates an extractor whose first sample timestamp is zero. */
  public Ac3Extractor() {
    this(0);
  }

  /**
   * @param firstSampleTimestampUs The timestamp, in microseconds, to assign to the first sample.
   */
  public Ac3Extractor(long firstSampleTimestampUs) {
    this.firstSampleTimestampUs = firstSampleTimestampUs;
    reader = new Ac3Reader();
    sampleData = new ParsableByteArray(MAX_SYNC_FRAME_SIZE);
  }

  /**
   * Returns whether the input looks like an AC-3 elementary stream: after skipping any leading
   * ID3 tags, four consecutive valid syncframes must be found within {@link #MAX_SNIFF_BYTES}.
   * Only peeks; the input's read position is left unchanged.
   */
  @Override
  public boolean sniff(ExtractorInput input) throws IOException, InterruptedException {
    // Skip any ID3 headers.
    ParsableByteArray scratch = new ParsableByteArray(10);
    int startPosition = 0;
    while (true) {
      input.peekFully(scratch.data, 0, 10);
      scratch.setPosition(0);
      if (scratch.readUnsignedInt24() != ID3_TAG) {
        break;
      }
      // After the 3-byte "ID3" tag, skip 3 bytes of version/flags; the next 4 bytes hold the
      // synchsafe-encoded tag length.
      scratch.skipBytes(3);
      int length = scratch.readSynchSafeInt();
      startPosition += 10 + length;
      input.advancePeekPosition(length);
    }
    input.resetPeekPosition();
    input.advancePeekPosition(startPosition);
    int headerPosition = startPosition;
    int validFramesCount = 0;
    while (true) {
      input.peekFully(scratch.data, 0, 5);
      scratch.setPosition(0);
      int syncBytes = scratch.readUnsignedShort();
      if (syncBytes != AC3_SYNC_WORD) {
        // Lost sync: restart the search one byte further into the stream.
        validFramesCount = 0;
        input.resetPeekPosition();
        if (++headerPosition - startPosition >= MAX_SNIFF_BYTES) {
          return false;
        }
        input.advancePeekPosition(headerPosition);
      } else {
        if (++validFramesCount >= 4) {
          return true;
        }
        // Advance to where the next syncframe should begin; 5 header bytes were already peeked.
        int frameSize = Ac3Util.parseAc3SyncframeSize(scratch.data);
        if (frameSize == C.LENGTH_UNSET) {
          return false;
        }
        input.advancePeekPosition(frameSize - 5);
      }
    }
  }

  /** Sets up a single AC-3 track; the stream is reported as unseekable with unknown duration. */
  @Override
  public void init(ExtractorOutput output) {
    reader.createTracks(output, new TrackIdGenerator(0, 1));
    output.endTracks();
    output.seekMap(new SeekMap.Unseekable(C.TIME_UNSET));
  }

  @Override
  public void seek(long position, long timeUs) {
    // Force a fresh packetStarted(...) signal on the next read and reset the frame parser.
    startedPacket = false;
    reader.seek();
  }

  @Override
  public void release() {
    // Do nothing.
  }

  /**
   * Reads up to {@link #MAX_SYNC_FRAME_SIZE} bytes from the input and feeds them to the reader.
   *
   * @return {@code RESULT_CONTINUE} if data was consumed, or {@code RESULT_END_OF_INPUT} when
   *     the input is exhausted.
   */
  @Override
  public int read(ExtractorInput input, PositionHolder seekPosition) throws IOException,
      InterruptedException {
    int bytesRead = input.read(sampleData.data, 0, MAX_SYNC_FRAME_SIZE);
    if (bytesRead == C.RESULT_END_OF_INPUT) {
      return RESULT_END_OF_INPUT;
    }

    // Feed whatever data we have to the reader, regardless of whether the read finished or not.
    sampleData.setPosition(0);
    sampleData.setLimit(bytesRead);

    if (!startedPacket) {
      // Pass data to the reader as though it's contained within a single infinitely long packet.
      reader.packetStarted(firstSampleTimestampUs, true);
      startedPacket = true;
    }
    // TODO: Make it possible for the reader to consume the dataSource directly, so that it becomes
    // unnecessary to copy the data through packetBuffer.
    reader.consume(sampleData);
    return RESULT_CONTINUE;
  }

}
| gpl-2.0 |
GunioRobot/ntnu-prosjekt1 | mysql-connector-java-3.0.17-ga/com/mysql/jdbc/WatchableWriter.java | 1559 | /*
Copyright (C) 2002-2004 MySQL AB
This program is free software; you can redistribute it and/or modify
it under the terms of version 2 of the GNU General Public License as
published by the Free Software Foundation.
There are special exceptions to the terms and conditions of the GPL
as it is applied to this software. View the full text of the
exception exception in file EXCEPTIONS-CONNECTOR-J in the directory of this
software distribution.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package com.mysql.jdbc;
import java.io.CharArrayWriter;
/**
 * A java.io.Writer that buffers unicode character data destined for Blobs
 * and Clobs, and hands the buffered data to a registered watcher when the
 * writer is closed.
 *
 * @author Mark Matthews
 */
class WatchableWriter extends CharArrayWriter {

    /** Listener to notify on close; may be null if none was registered. */
    private WriterWatcher watcher;

    /**
     * Registers the watcher that should be told when this writer closes.
     *
     * @param watcher the listener to notify
     */
    public void setWatcher(WriterWatcher watcher) {
        this.watcher = watcher;
    }

    /**
     * Closes the writer and pushes the buffered character data to the
     * watcher, if one was registered.
     *
     * @see java.io.Writer#close()
     */
    public void close() {
        super.close();

        WriterWatcher listener = this.watcher;
        // Hand the accumulated character data back to the owner.
        if (listener != null) {
            listener.writerClosed(this);
        }
    }
}
| gpl-2.0 |
bphinz/tigervnc | java/com/tigervnc/rdr/TLSInStream.java | 2795 | /* Copyright (C) 2002-2005 RealVNC Ltd. All Rights Reserved.
* Copyright (C) 2005 Martin Koegler
* Copyright (C) 2010 TigerVNC Team
* Copyright (C) 2011-2019 Brian P. Hinz
*
* This is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this software; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,
* USA.
*/
package com.tigervnc.rdr;
import java.nio.ByteBuffer;
import java.nio.channels.*;
import javax.net.ssl.*;
import com.tigervnc.network.*;
public class TLSInStream extends InStream {

  // Fallback buffer size; the constructor actually sizes the buffer from the SSL session.
  static final int defaultBufSize = 16384;

  public TLSInStream(InStream _in, SSLEngineManager _manager) {
    in = (FdInStream)_in;
    manager = _manager;
    offset = 0;
    // Size the buffer to the largest plaintext chunk the SSL session can deliver at once.
    SSLSession session = manager.getSession();
    bufSize = session.getApplicationBufferSize();
    b = new byte[bufSize];
    // NOTE(review): ptr, end and b appear to be inherited from InStream -- confirm.
    ptr = end = start = 0;
  }

  /** Absolute position in the stream of the next byte to be read. */
  public final int pos() {
    return offset + ptr - start;
  }

  // Timing/bandwidth queries are delegated to the underlying file-descriptor stream.
  public final void startTiming() {
    in.startTiming();
  }

  public final void stopTiming() {
    in.stopTiming();
  }

  public final long kbitsPerSecond() {
    return in.kbitsPerSecond();
  }

  public final long timeWaited() {
    return in.timeWaited();
  }

  /**
   * Refills the buffer so at least one item of itemSize bytes is available, then returns the
   * number of whole items buffered, capped at nItems. When wait is false and no data can be
   * read without blocking, returns 0.
   */
  protected final int overrun(int itemSize, int nItems, boolean wait) {
    if (itemSize > bufSize)
      // This must be the package's unchecked Exception type (java.lang.Exception would not
      // compile here without a throws clause).
      throw new Exception("TLSInStream overrun: max itemSize exceeded");

    // Compact the buffer: move any unread bytes to the front.
    if (end - ptr != 0)
      System.arraycopy(b, ptr, b, 0, end - ptr);

    offset += ptr - start;
    end -= ptr - start;
    ptr = start;

    // Keep pulling decrypted data until a whole item is buffered.
    while ((end - start) < itemSize) {
      int n = readTLS(b, end, start + bufSize - end, wait);
      if (!wait && n == 0)
        return 0;
      end += n;
    }

    // Report how many complete items are available, capped at the caller's request.
    int nAvail;
    nAvail = (end - ptr) / itemSize;
    if (nAvail < nItems)
      return nAvail;

    return nItems;
  }

  /**
   * Reads up to len bytes of decrypted application data into buf at bufPtr.
   * Throws TLSException when the SSL engine reports a negative count.
   * NOTE(review): an IOException here is only printed; n stays -1 and surfaces as a generic
   * TLSException below, dropping the original cause -- consider chaining it.
   */
  protected int readTLS(byte[] buf, int bufPtr, int len, boolean wait)
  {
    int n = -1;
    try {
      n = manager.read(ByteBuffer.wrap(buf, bufPtr, len), len);
    } catch (java.io.IOException e) {
      e.printStackTrace();
    }

    if (n < 0) throw new TLSException("readTLS", n);

    return n;
  }

  private SSLEngineManager manager;  // decrypts TLS records into application data
  private int offset;                // stream position corresponding to buffer index 'start'
  private int start;                 // index of the first valid byte in the buffer (always 0 here)
  private int bufSize;               // capacity of the read buffer
  private FdInStream in;             // underlying transport stream (used here for timing stats)
}
| gpl-2.0 |
murat8505/android-xbmcremote-1 | src/org/xbmc/android/widget/slidingtabs/SlidingTabActivity.java | 6251 | /*
* Copyright (C) 2005-2009 Team XBMC
* http://xbmc.org
*
* This Program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2, or (at your option)
* any later version.
*
* This Program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with XBMC Remote; see the file license. If not, write to
* the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.
* http://www.gnu.org/copyleft/gpl.html
*
*/
package org.xbmc.android.widget.slidingtabs;
import org.xbmc.android.remote.R;
import org.xbmc.android.remote.presentation.activity.HomeActivity;
import org.xbmc.android.util.KeyTracker;
import org.xbmc.android.util.OnLongPressBackKeyTracker;
import org.xbmc.android.util.KeyTracker.Stage;
import org.xbmc.api.type.ThumbSize;
import android.app.Activity;
import android.app.ActivityGroup;
import android.content.Intent;
import android.os.Bundle;
import android.os.Build.VERSION;
import android.view.Display;
import android.view.KeyEvent;
import android.view.View;
import android.widget.TabHost;
import android.widget.TabWidget;
import android.widget.TextView;
// Activity group that hosts a SlidingTabHost-based tab UI, persisting and
// restoring the selected tab and emulating BACK long-press on old devices.
public class SlidingTabActivity extends ActivityGroup {

	// Hosts the sliding tabs; looked up from the layout in onContentChanged().
	private SlidingTabHost mTabHost;
	// Tag of the tab to select by default, or null if an index is used instead.
	private String mDefaultTab = null;
	// Index of the tab to select by default, or -1 if a tag is used instead.
	private int mDefaultTabIndex = -1;

	// Emulates long-press handling of the BACK key on pre-API-5 devices; null otherwise.
	private KeyTracker mKeyTracker = null;

	public SlidingTabActivity() {
		// API levels below 5 have no native BACK long-press dispatch, so track it manually.
		if (Integer.parseInt(VERSION.SDK) < 5) {
			mKeyTracker = new KeyTracker(new OnLongPressBackKeyTracker() {

				@Override
				public void onLongPressBack(int keyCode, KeyEvent event,
						Stage stage, int duration) {
					onKeyLongPress(keyCode, event);
				}

				@Override
				public void onShortPressBack(int keyCode, KeyEvent event,
						Stage stage, int duration) {
					// A short press behaves like a normal BACK key event.
					callSuperOnKeyDown(keyCode, event);
				}
			});
		}
	}

	protected void onCreate(Bundle savedInstanceState) {
		super.onCreate(savedInstanceState);

		// Record the physical display size so thumbnails can be scaled appropriately.
		final Display display = getWindowManager().getDefaultDisplay();
		ThumbSize.setScreenSize(display.getWidth(), display.getHeight());
	}

	// Forwards a key event to the base Activity implementation (used by the key tracker).
	protected void callSuperOnKeyDown(int keyCode, KeyEvent event) {
		super.onKeyDown(keyCode, event);
	}

	/**
	 * Sets the default tab that is the first tab highlighted.
	 *
	 * @param tag
	 *            the name of the default tab
	 */
	public void setDefaultTab(String tag) {
		mDefaultTab = tag;
		mDefaultTabIndex = -1;
	}

	/**
	 * Sets the default tab that is the first tab highlighted.
	 *
	 * @param index
	 *            the index of the default tab
	 */
	public void setDefaultTab(int index) {
		mDefaultTab = null;
		mDefaultTabIndex = index;
	}

	// Restores the previously selected tab, falling back to the configured default.
	@Override
	protected void onRestoreInstanceState(Bundle state) {
		super.onRestoreInstanceState(state);
		ensureTabHost();
		String cur = state.getString("currentTab");
		if (cur != null) {
			mTabHost.setCurrentTabByTag(cur);
		}
		if (mTabHost.getCurrentTab() < 0) {
			if (mDefaultTab != null) {
				mTabHost.setCurrentTabByTag(mDefaultTab);
			} else if (mDefaultTabIndex >= 0) {
				mTabHost.setCurrentTab(mDefaultTabIndex);
			}
		}
	}

	@Override
	protected void onPostCreate(Bundle icicle) {
		super.onPostCreate(icicle);
		ensureTabHost();
		// Select the first tab if nothing else chose one during creation.
		if (mTabHost.getCurrentTab() == -1) {
			mTabHost.setCurrentTab(0);
		}
	}

	// Persists the currently selected tab so it can be restored later.
	@Override
	protected void onSaveInstanceState(Bundle outState) {
		super.onSaveInstanceState(outState);
		String currentTabTag = mTabHost.getCurrentTabTag();
		if (currentTabTag != null) {
			outState.putString("currentTab", currentTabTag);
		}
	}

	/**
	 * Updates the screen state (current list and other views) when the content
	 * changes.
	 *
	 * NOTE(review): the exception text mentions 'android.R.id.tabhost' but the
	 * lookup actually uses R.id.slidingtabhost -- the message looks stale.
	 *
	 *@see Activity#onContentChanged()
	 */
	@Override
	public void onContentChanged() {
		super.onContentChanged();
		mTabHost = (SlidingTabHost) findViewById(R.id.slidingtabhost);

		if (mTabHost == null) {
			throw new RuntimeException("Your content must have a TabHost whose id attribute is " + "'android.R.id.tabhost'");
		}
		mTabHost.setup(getLocalActivityManager());
	}

	// Lazily wires up mTabHost if onContentChanged() has not run yet.
	// NOTE(review): setContentView() expects a layout resource, but R.id.slidingtabhost
	// is a view id -- confirm this fallback path actually works as intended.
	private void ensureTabHost() {
		if (mTabHost == null) {
			this.setContentView(R.id.slidingtabhost);
		}
	}

	@Override
	protected void onChildTitleChanged(Activity childActivity, CharSequence title) {
		// Dorky implementation until we can have multiple activities running.
		if (getLocalActivityManager().getCurrentActivity() == childActivity) {
			View tabView = mTabHost.getCurrentTabView();
			if (tabView != null && tabView instanceof TextView) {
				((TextView) tabView).setText(title);
			}
		}
	}

	/**
	 * Returns the {@link TabHost} the activity is using to host its tabs.
	 *
	 * @return the {@link TabHost} the activity is using to host its tabs.
	 */
	public SlidingTabHost getTabHost() {
		ensureTabHost();
		return mTabHost;
	}

	/**
	 * Returns the {@link TabWidget} the activity is using to draw the actual
	 * tabs.
	 *
	 * @return the {@link TabWidget} the activity is using to draw the actual
	 *         tabs.
	 */
	public SlidingTabWidget getTabWidget() {
		return mTabHost.getTabWidget();
	}

	// Long-pressing BACK jumps straight to the home screen, clearing activities above it.
	public boolean onKeyLongPress(int keyCode, KeyEvent event) {
		Intent intent = new Intent(SlidingTabActivity.this, HomeActivity.class);
		intent.setFlags(intent.getFlags() | Intent.FLAG_ACTIVITY_CLEAR_TOP);
		startActivity(intent);
		return true;
	}

	// Give the key tracker (when present) first crack at the event, then default handling.
	@Override
	public boolean onKeyDown(int keyCode, KeyEvent event) {
		boolean handled = (mKeyTracker != null)?mKeyTracker.doKeyDown(keyCode, event):false;
		return handled || super.onKeyDown(keyCode, event);
	}

	@Override
	public boolean onKeyUp(int keyCode, KeyEvent event) {
		boolean handled = (mKeyTracker != null)?mKeyTracker.doKeyUp(keyCode, event):false;
		return handled || super.onKeyUp(keyCode, event);
	}
} | gpl-2.0 |
arronvera/MyBlog | weiboSDK/build/generated/source/r/androidTest/debug/com/sina/weibo/sdk/R.java | 3391 | /* AUTO-GENERATED FILE. DO NOT MODIFY.
*
* This class was automatically generated by the
* aapt tool from the resource data it found. It
* should not be modified by hand.
*/
package com.sina.weibo.sdk;
public final class R {
    // Color resource identifiers.
    public static final class color {
        public static final int com_sina_weibo_sdk_blue = 0x7f040000;
        public static final int com_sina_weibo_sdk_loginview_text_color = 0x7f040001;
    }
    // Dimension resource identifiers.
    public static final class dimen {
        public static final int activity_horizontal_margin = 0x7f050000;
        public static final int activity_vertical_margin = 0x7f050001;
        public static final int com_sina_weibo_sdk_loginview_compound_drawable_padding = 0x7f050002;
        public static final int com_sina_weibo_sdk_loginview_padding = 0x7f050003;
        public static final int com_sina_weibo_sdk_loginview_padding_bottom = 0x7f050004;
        public static final int com_sina_weibo_sdk_loginview_padding_left = 0x7f050005;
        public static final int com_sina_weibo_sdk_loginview_padding_right = 0x7f050006;
        public static final int com_sina_weibo_sdk_loginview_padding_top = 0x7f050007;
        public static final int com_sina_weibo_sdk_loginview_text_size = 0x7f050008;
    }
    // Drawable resource identifiers.
    public static final class drawable {
        public static final int com_sina_weibo_sdk_button_blue = 0x7f020000;
        public static final int com_sina_weibo_sdk_button_grey = 0x7f020001;
        public static final int com_sina_weibo_sdk_login_button_with_account_text = 0x7f020002;
        public static final int com_sina_weibo_sdk_login_button_with_frame_logo = 0x7f020003;
        public static final int com_sina_weibo_sdk_login_button_with_original_logo = 0x7f020004;
        public static final int ic_com_sina_weibo_sdk_button_blue_focused = 0x7f020005;
        public static final int ic_com_sina_weibo_sdk_button_blue_normal = 0x7f020006;
        public static final int ic_com_sina_weibo_sdk_button_blue_pressed = 0x7f020007;
        public static final int ic_com_sina_weibo_sdk_button_grey_focused = 0x7f020008;
        public static final int ic_com_sina_weibo_sdk_button_grey_normal = 0x7f020009;
        public static final int ic_com_sina_weibo_sdk_button_grey_pressed = 0x7f02000a;
        public static final int ic_com_sina_weibo_sdk_login_button_with_frame_logo_focused = 0x7f02000b;
        public static final int ic_com_sina_weibo_sdk_login_button_with_frame_logo_normal = 0x7f02000c;
        public static final int ic_com_sina_weibo_sdk_login_button_with_frame_logo_pressed = 0x7f02000d;
        public static final int ic_com_sina_weibo_sdk_login_with_account_text_focused = 0x7f02000e;
        public static final int ic_com_sina_weibo_sdk_login_with_account_text_normal = 0x7f02000f;
        public static final int ic_com_sina_weibo_sdk_login_with_account_text_pressed = 0x7f020010;
        public static final int ic_com_sina_weibo_sdk_login_with_text = 0x7f020011;
        public static final int ic_com_sina_weibo_sdk_logo = 0x7f020012;
    }
    // String resource identifiers.
    public static final class string {
        public static final int com_sina_weibo_sdk_login = 0x7f030000;
        public static final int com_sina_weibo_sdk_login_with_weibo_account = 0x7f030001;
        public static final int com_sina_weibo_sdk_logout = 0x7f030002;
    }
    // Style resource identifiers.
    public static final class style {
        public static final int AppBaseTheme = 0x7f060000;
        public static final int AppTheme = 0x7f060001;
        public static final int com_sina_weibo_sdk_loginview_default_style = 0x7f060002;
        public static final int com_sina_weibo_sdk_loginview_silver_style = 0x7f060003;
    }
}
| gpl-2.0 |
Taichi-SHINDO/jdk9-jdk | src/java.base/share/classes/com/sun/crypto/provider/TlsRsaPremasterSecretGenerator.java | 3222 | /*
* Copyright (c) 2005, 2014, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package com.sun.crypto.provider;
import java.security.*;
import java.security.spec.AlgorithmParameterSpec;
import javax.crypto.*;
import javax.crypto.spec.SecretKeySpec;
import sun.security.internal.spec.TlsRsaPremasterSecretParameterSpec;
/**
 * KeyGenerator implementation for the SSL/TLS RSA premaster secret.
 *
 * @author Andreas Sterbenz
 * @since 1.6
 */
public final class TlsRsaPremasterSecretGenerator extends KeyGeneratorSpi {

    private static final String MSG = "TlsRsaPremasterSecretGenerator must be "
        + "initialized using a TlsRsaPremasterSecretParameterSpec";

    // Parameters supplied via engineInit; remains null until initialized.
    @SuppressWarnings("deprecation")
    private TlsRsaPremasterSecretParameterSpec spec;
    // Randomness source supplied at init time; lazily defaulted in engineGenerateKey.
    private SecureRandom random;

    public TlsRsaPremasterSecretGenerator() {
    }

    /** Initialization without algorithm parameters is not supported. */
    protected void engineInit(SecureRandom random) {
        throw new InvalidParameterException(MSG);
    }

    /**
     * Initializes the generator with a TlsRsaPremasterSecretParameterSpec.
     *
     * @throws InvalidAlgorithmParameterException if params is of any other type
     */
    @SuppressWarnings("deprecation")
    protected void engineInit(AlgorithmParameterSpec params,
            SecureRandom random) throws InvalidAlgorithmParameterException {
        if (params instanceof TlsRsaPremasterSecretParameterSpec) {
            this.spec = (TlsRsaPremasterSecretParameterSpec) params;
            this.random = random;
        } else {
            throw new InvalidAlgorithmParameterException(MSG);
        }
    }

    /** Keysize-based initialization is not supported. */
    protected void engineInit(int keysize, SecureRandom random) {
        throw new InvalidParameterException(MSG);
    }

    // Only usable on the client side to generate the TLS RSA premaster secret.
    protected SecretKey engineGenerateKey() {
        if (spec == null) {
            throw new IllegalStateException(
                "TlsRsaPremasterSecretGenerator must be initialized");
        }
        if (random == null) {
            random = new SecureRandom();
        }

        // 48 random bytes, with the first two overwritten by the client version numbers.
        byte[] secret = new byte[48];
        random.nextBytes(secret);
        secret[0] = (byte) spec.getMajorVersion();
        secret[1] = (byte) spec.getMinorVersion();
        return new SecretKeySpec(secret, "TlsRsaPremasterSecret");
    }

}
| gpl-2.0 |
bovard/battlecode-server-2013-java18 | src/main/battlecode/server/proxy/Proxy.java | 3412 | package battlecode.server.proxy;
import battlecode.serial.MatchFooter;
import battlecode.serial.MatchHeader;
import battlecode.serial.RoundDelta;
import battlecode.serial.RoundStats;
import java.io.IOException;
import java.io.ObjectOutputStream;
import java.io.OutputStream;
/**
 * A sink for match data. Provides a means for writing various types of match
 * data to a recipient (typically a file or a TCP socket) and for managing the
 * connection to that recipient. Implementations typically need only supply an
 * OutputStream; the default behavior here is sufficient for most cases.
 */
public abstract class Proxy {

    /**
     * Object stream the match data is serialized to; stays null when the
     * subclass provides no output stream.
     */
    protected ObjectOutputStream output;

    /**
     * Supplies the raw stream that match data should be written to.
     *
     * @return the stream to write to; this method should never return null
     * @throws IOException if the stream instance could not be obtained
     */
    protected abstract OutputStream getOutputStream() throws IOException;

    /** Creates a proxy with no open connection. */
    public Proxy() {
        this.output = null;
    }

    /**
     * Prepares the connection for match data.
     *
     * @throws IOException if the connection cannot be opened
     */
    public void open() throws IOException {
        OutputStream raw = getOutputStream();
        if (raw == null) {
            return;
        }
        // Reuse an existing ObjectOutputStream rather than double-wrapping it.
        output = raw instanceof ObjectOutputStream
                ? (ObjectOutputStream) raw
                : new ObjectOutputStream(raw);
        output.flush();
    }

    /**
     * Flushes and closes the connection, if one was opened.
     *
     * @throws IOException if the connection cannot be closed
     */
    public void close() throws IOException {
        if (output == null) {
            return;
        }
        output.flush();
        output.close();
    }

    /**
     * Tries to serialize the given object to the recipient; a no-op when no
     * stream is open.
     *
     * @param o the object to write
     * @throws IOException if the object could not be written
     */
    public void writeObject(Object o) throws IOException {
        if (output == null) {
            return;
        }
        // Reset first so earlier writes are not shared via back-references.
        output.reset();
        output.writeObject(o);
    }

    /**
     * Writes the match header to the recipient.
     *
     * @param header the header to write
     * @throws IOException if the recipient could not be written to
     */
    public void writeHeader(MatchHeader header) throws IOException {
        writeObject(header);
    }

    /**
     * Writes one round delta to the recipient.
     *
     * @param round the round delta to write
     * @throws IOException if the recipient could not be written to
     */
    public void writeRound(RoundDelta round) throws IOException {
        writeObject(round);
    }

    /**
     * Writes the match footer to the recipient.
     *
     * @param footer the footer to write
     * @throws IOException if the recipient could not be written to
     */
    public void writeFooter(MatchFooter footer) throws IOException {
        writeObject(footer);
    }

    /**
     * Writes round statistics to the recipient.
     *
     * @param stats the stats to write
     * @throws IOException if the recipient could not be written to
     */
    public void writeStats(RoundStats stats) throws IOException {
        writeObject(stats);
    }

}
| gpl-3.0 |
s20121035/rk3288_android5.1_repo | external/apache-harmony/jdwp/src/test/java/org/apache/harmony/jpda/tests/jdwp/MultiSession/JDWPEventTestCase.java | 1250 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* @author Anton V. Karnachuk
*/
/**
* Created on 11.03.2005
*/
package org.apache.harmony.jpda.tests.jdwp.MultiSession;
import org.apache.harmony.jpda.tests.jdwp.share.JDWPSyncTestCase;
/**
* Internal class that extends functionality of JDWPSyncTestCase class.
*/
abstract class JDWPEventTestCase extends JDWPSyncTestCase {

    /**
     * Returns the fully qualified name of the debuggee class that event tests
     * in this package run against.
     */
    protected String getDebuggeeClassName() {
        return EventDebuggee.class.getName();
    }
}
| gpl-3.0 |
decuri/BLEChat | src/com/hardcopy/blechat/bluetooth/TransactionReceiver.java | 2419 | /*
* Copyright (C) 2014 Bluetooth Connection Template
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hardcopy.blechat.bluetooth;
import java.util.ArrayList;
import android.os.Handler;
/**
 * Template for parsing a byte stream received over the Bluetooth link and
 * extracting protocol objects from it. The protocol-specific parsing logic
 * is intentionally left as TODO stubs for the integrator to fill in.
 *
 * @author Administrator
 */
public class TransactionReceiver {
	private static final String TAG = "TransactionReceiver";

	// Parser state-machine constants for the stream protocol.
	// NOTE(review): currently unused placeholders -- wire them into
	// parseStream() when the protocol is implemented.
	private static final int PARSE_MODE_ERROR = 0;
	private static final int PARSE_MODE_WAIT_START_BYTE = 1;
	private static final int PARSE_MODE_WAIT_COMMAND = 2;
	private static final int PARSE_MODE_WAIT_DATA = 3;
	private static final int PARSE_MODE_WAIT_END_BYTE = 4;
	private static final int PARSE_MODE_COMPLETED = 101;

	// Handler intended for delivering parse results to the owner.
	// NOTE(review): stored but never used in this template.
	private Handler mHandler = null;

	public TransactionReceiver(Handler h) {
		mHandler = h;
		reset();
	}

	/**
	 * Reset transaction receiver.
	 * (Currently a no-op; clear any cached parser state here once implemented.)
	 */
	public void reset() {
	}

	/**
	 * Set bytes to parse.
	 * This method automatically calls parseStream().
	 * @param buffer received bytes
	 * @param count number of valid bytes in the buffer
	 */
	public void setByteArray(byte[] buffer, int count) {
		parseStream(buffer, count);
	}

	/**
	 * After parsing bytes received, transaction receiver makes object instance.
	 * This method returns parsed results.
	 * @return Object parsed object (always null until the template is implemented)
	 */
	public Object getObject() {
		// TODO: return what you want
		return null;
	}

	/**
	 * Caching received stream and parse byte array.
	 * The loop body is a stub; each byte should be fed through the
	 * PARSE_MODE_* state machine once the protocol is defined.
	 * @param buffer byte array to parse
	 * @param count byte array size
	 */
	public void parseStream(byte[] buffer, int count) {
		if(buffer != null && buffer.length > 0 && count > 0) {
			for(int i=0; i < buffer.length && i < count; i++) {
				// Parse received data
				// Protocol description -----------------------------------------------------------
				// Describe brief info about protocol
				// TODO: parse buffer
			}	// End of for loop
		}	// End of if()
	}	// End of parseStream()

}
| gpl-3.0 |
AnodeCathode/TFCraft | src/Common/com/bioxx/tfc/Render/Blocks/RenderGrill.java | 5928 | package com.bioxx.tfc.Render.Blocks;
import net.minecraft.block.Block;
import net.minecraft.client.renderer.RenderBlocks;
import net.minecraft.client.renderer.Tessellator;
import net.minecraft.world.IBlockAccess;
import cpw.mods.fml.client.registry.ISimpleBlockRenderingHandler;
import org.lwjgl.opengl.GL11;
import com.bioxx.tfc.Blocks.Devices.BlockGrill;
/**
 * Renders the grill block both in-world and in the inventory. The grill is
 * drawn as a thin horizontal slab; an earlier slatted 3D rendering path was
 * disabled and has been removed from the comments here.
 */
public class RenderGrill implements ISimpleBlockRenderingHandler
{
	/**
	 * Renders the grill in the world. The render bounds are set to a thin slab
	 * only while the grill is closed.
	 * NOTE(review): when the grill is open, renderStandardBlock is called with
	 * whatever bounds the renderer was last given -- confirm this is intended.
	 */
	@Override
	public boolean renderWorldBlock(IBlockAccess world, int i, int j, int k, Block block, int modelId, RenderBlocks renderer)
	{
		BlockGrill grill = (BlockGrill) block;
		int meta = world.getBlockMetadata(i, j, k);
		if (!grill.isGrillOpen(meta))
			renderer.setRenderBounds(0.0F, -0.05F, 0.0F, 1F, 0.0F, 1.0F);
		renderer.renderStandardBlock(block, i, j, k);
		return true;
	}

	// Applies the same texture UV rotation to all four side faces.
	public void rotate(RenderBlocks renderer, int i)
	{
		renderer.uvRotateEast = i;
		renderer.uvRotateWest = i;
		renderer.uvRotateNorth = i;
		renderer.uvRotateSouth = i;
	}

	/** Renders the grill as a thin slab centred vertically for the inventory/hand view. */
	@Override
	public void renderInventoryBlock(Block block, int metadata, int modelID, RenderBlocks renderer)
	{
		renderer.setRenderBounds(0.0F, 0.5F, 0.0F, 1F, 0.55F, 1.0F);
		renderInvBlock(block, metadata, renderer);
	}

	@Override
	public boolean shouldRender3DInInventory(int modelId)
	{
		return true;
	}

	@Override
	public int getRenderId()
	{
		return 0;
	}

	/**
	 * Draws all six faces of the block around the origin for inventory rendering,
	 * using the block's icon for each side index (0-5).
	 * NOTE(review): the normals set before the X/Z face calls look swapped relative
	 * to the face methods (e.g. a Z-facing normal paired with renderFaceXNeg) --
	 * confirm against vanilla RenderBlocks conventions before changing anything.
	 */
	public static void renderInvBlock(Block block, int m, RenderBlocks renderer)
	{
		Tessellator var14 = Tessellator.instance;
		// Centre the unit cube on the origin while drawing.
		GL11.glTranslatef(-0.5F, -0.5F, -0.5F);
		var14.startDrawingQuads();
		var14.setNormal(0.0F, -1.0F, 0.0F);
		renderer.renderFaceYNeg(block, 0.0D, 0.0D, 0.0D, block.getIcon(0, m));
		var14.draw();
		var14.startDrawingQuads();
		var14.setNormal(0.0F, 1.0F, 0.0F);
		renderer.renderFaceYPos(block, 0.0D, 0.0D, 0.0D, block.getIcon(1, m));
		var14.draw();
		var14.startDrawingQuads();
		var14.setNormal(0.0F, 0.0F, -1.0F);
		renderer.renderFaceXNeg(block, 0.0D, 0.0D, 0.0D, block.getIcon(2, m));
		var14.draw();
		var14.startDrawingQuads();
		var14.setNormal(0.0F, 0.0F, 1.0F);
		renderer.renderFaceXPos(block, 0.0D, 0.0D, 0.0D, block.getIcon(3, m));
		var14.draw();
		var14.startDrawingQuads();
		var14.setNormal(-1.0F, 0.0F, 0.0F);
		renderer.renderFaceZNeg(block, 0.0D, 0.0D, 0.0D, block.getIcon(4, m));
		var14.draw();
		var14.startDrawingQuads();
		var14.setNormal(1.0F, 0.0F, 0.0F);
		renderer.renderFaceZPos(block, 0.0D, 0.0D, 0.0D, block.getIcon(5, m));
		var14.draw();
		// Undo the translation so GL state is restored for the caller.
		GL11.glTranslatef(0.5F, 0.5F, 0.5F);
	}
}
| gpl-3.0 |