repo_name stringlengths 5 108 | path stringlengths 6 333 | size stringlengths 1 6 | content stringlengths 4 977k | license stringclasses 15 values |
|---|---|---|---|---|
JetBrains/jdk8u_jdk | src/share/classes/java/util/stream/LongPipeline.java | 22394 | /*
* Copyright (c) 2013, 2017, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package java.util.stream;
import java.util.LongSummaryStatistics;
import java.util.Objects;
import java.util.OptionalDouble;
import java.util.OptionalLong;
import java.util.PrimitiveIterator;
import java.util.Spliterator;
import java.util.Spliterators;
import java.util.function.BiConsumer;
import java.util.function.BinaryOperator;
import java.util.function.IntFunction;
import java.util.function.LongBinaryOperator;
import java.util.function.LongConsumer;
import java.util.function.LongFunction;
import java.util.function.LongPredicate;
import java.util.function.LongToDoubleFunction;
import java.util.function.LongToIntFunction;
import java.util.function.LongUnaryOperator;
import java.util.function.ObjLongConsumer;
import java.util.function.Supplier;
/**
* Abstract base class for an intermediate pipeline stage or pipeline source
* stage implementing whose elements are of type {@code long}.
*
* @param <E_IN> type of elements in the upstream source
* @since 1.8
*/
abstract class LongPipeline<E_IN>
extends AbstractPipeline<E_IN, Long, LongStream>
implements LongStream {
/**
* Constructor for the head of a stream pipeline.
*
* @param source {@code Supplier<Spliterator>} describing the stream source
* @param sourceFlags the source flags for the stream source, described in
* {@link StreamOpFlag}
* @param parallel {@code true} if the pipeline is parallel
*/
LongPipeline(Supplier<? extends Spliterator<Long>> source,
int sourceFlags, boolean parallel) {
super(source, sourceFlags, parallel);
}
/**
* Constructor for the head of a stream pipeline.
*
* @param source {@code Spliterator} describing the stream source
* @param sourceFlags the source flags for the stream source, described in
* {@link StreamOpFlag}
* @param parallel {@code true} if the pipeline is parallel
*/
LongPipeline(Spliterator<Long> source,
int sourceFlags, boolean parallel) {
super(source, sourceFlags, parallel);
}
/**
* Constructor for appending an intermediate operation onto an existing pipeline.
*
* @param upstream the upstream element source.
* @param opFlags the operation flags
*/
LongPipeline(AbstractPipeline<?, E_IN, ?> upstream, int opFlags) {
super(upstream, opFlags);
}
/**
* Adapt a {@code Sink<Long> to an {@code LongConsumer}, ideally simply
* by casting.
*/
private static LongConsumer adapt(Sink<Long> sink) {
if (sink instanceof LongConsumer) {
return (LongConsumer) sink;
} else {
if (Tripwire.ENABLED)
Tripwire.trip(AbstractPipeline.class,
"using LongStream.adapt(Sink<Long> s)");
return sink::accept;
}
}
/**
* Adapt a {@code Spliterator<Long>} to a {@code Spliterator.OfLong}.
*
* @implNote
* The implementation attempts to cast to a Spliterator.OfLong, and throws
* an exception if this cast is not possible.
*/
private static Spliterator.OfLong adapt(Spliterator<Long> s) {
if (s instanceof Spliterator.OfLong) {
return (Spliterator.OfLong) s;
} else {
if (Tripwire.ENABLED)
Tripwire.trip(AbstractPipeline.class,
"using LongStream.adapt(Spliterator<Long> s)");
throw new UnsupportedOperationException("LongStream.adapt(Spliterator<Long> s)");
}
}
// Shape-specific methods
@Override
final StreamShape getOutputShape() {
return StreamShape.LONG_VALUE;
}
@Override
final <P_IN> Node<Long> evaluateToNode(PipelineHelper<Long> helper,
Spliterator<P_IN> spliterator,
boolean flattenTree,
IntFunction<Long[]> generator) {
return Nodes.collectLong(helper, spliterator, flattenTree);
}
@Override
final <P_IN> Spliterator<Long> wrap(PipelineHelper<Long> ph,
Supplier<Spliterator<P_IN>> supplier,
boolean isParallel) {
return new StreamSpliterators.LongWrappingSpliterator<>(ph, supplier, isParallel);
}
@Override
@SuppressWarnings("unchecked")
final Spliterator.OfLong lazySpliterator(Supplier<? extends Spliterator<Long>> supplier) {
return new StreamSpliterators.DelegatingSpliterator.OfLong((Supplier<Spliterator.OfLong>) supplier);
}
@Override
final void forEachWithCancel(Spliterator<Long> spliterator, Sink<Long> sink) {
Spliterator.OfLong spl = adapt(spliterator);
LongConsumer adaptedSink = adapt(sink);
do { } while (!sink.cancellationRequested() && spl.tryAdvance(adaptedSink));
}
@Override
final Node.Builder<Long> makeNodeBuilder(long exactSizeIfKnown, IntFunction<Long[]> generator) {
return Nodes.longBuilder(exactSizeIfKnown);
}
// LongStream
@Override
public final PrimitiveIterator.OfLong iterator() {
return Spliterators.iterator(spliterator());
}
@Override
public final Spliterator.OfLong spliterator() {
return adapt(super.spliterator());
}
// Stateless intermediate ops from LongStream
@Override
public final DoubleStream asDoubleStream() {
return new DoublePipeline.StatelessOp<Long>(this, StreamShape.LONG_VALUE,
StreamOpFlag.NOT_SORTED | StreamOpFlag.NOT_DISTINCT) {
@Override
Sink<Long> opWrapSink(int flags, Sink<Double> sink) {
return new Sink.ChainedLong<Double>(sink) {
@Override
public void accept(long t) {
downstream.accept((double) t);
}
};
}
};
}
@Override
public final Stream<Long> boxed() {
return mapToObj(Long::valueOf);
}
@Override
public final LongStream map(LongUnaryOperator mapper) {
Objects.requireNonNull(mapper);
return new StatelessOp<Long>(this, StreamShape.LONG_VALUE,
StreamOpFlag.NOT_SORTED | StreamOpFlag.NOT_DISTINCT) {
@Override
Sink<Long> opWrapSink(int flags, Sink<Long> sink) {
return new Sink.ChainedLong<Long>(sink) {
@Override
public void accept(long t) {
downstream.accept(mapper.applyAsLong(t));
}
};
}
};
}
@Override
public final <U> Stream<U> mapToObj(LongFunction<? extends U> mapper) {
Objects.requireNonNull(mapper);
return new ReferencePipeline.StatelessOp<Long, U>(this, StreamShape.LONG_VALUE,
StreamOpFlag.NOT_SORTED | StreamOpFlag.NOT_DISTINCT) {
@Override
Sink<Long> opWrapSink(int flags, Sink<U> sink) {
return new Sink.ChainedLong<U>(sink) {
@Override
public void accept(long t) {
downstream.accept(mapper.apply(t));
}
};
}
};
}
@Override
public final IntStream mapToInt(LongToIntFunction mapper) {
Objects.requireNonNull(mapper);
return new IntPipeline.StatelessOp<Long>(this, StreamShape.LONG_VALUE,
StreamOpFlag.NOT_SORTED | StreamOpFlag.NOT_DISTINCT) {
@Override
Sink<Long> opWrapSink(int flags, Sink<Integer> sink) {
return new Sink.ChainedLong<Integer>(sink) {
@Override
public void accept(long t) {
downstream.accept(mapper.applyAsInt(t));
}
};
}
};
}
@Override
public final DoubleStream mapToDouble(LongToDoubleFunction mapper) {
Objects.requireNonNull(mapper);
return new DoublePipeline.StatelessOp<Long>(this, StreamShape.LONG_VALUE,
StreamOpFlag.NOT_SORTED | StreamOpFlag.NOT_DISTINCT) {
@Override
Sink<Long> opWrapSink(int flags, Sink<Double> sink) {
return new Sink.ChainedLong<Double>(sink) {
@Override
public void accept(long t) {
downstream.accept(mapper.applyAsDouble(t));
}
};
}
};
}
@Override
public final LongStream flatMap(LongFunction<? extends LongStream> mapper) {
Objects.requireNonNull(mapper);
return new StatelessOp<Long>(this, StreamShape.LONG_VALUE,
StreamOpFlag.NOT_SORTED | StreamOpFlag.NOT_DISTINCT | StreamOpFlag.NOT_SIZED) {
@Override
Sink<Long> opWrapSink(int flags, Sink<Long> sink) {
return new Sink.ChainedLong<Long>(sink) {
// true if cancellationRequested() has been called
boolean cancellationRequestedCalled;
// cache the consumer to avoid creation on every accepted element
LongConsumer downstreamAsLong = downstream::accept;
@Override
public void begin(long size) {
downstream.begin(-1);
}
@Override
public void accept(long t) {
try (LongStream result = mapper.apply(t)) {
if (result != null) {
if (!cancellationRequestedCalled) {
result.sequential().forEach(downstreamAsLong);
}
else {
Spliterator.OfLong s = result.sequential().spliterator();
do { } while (!downstream.cancellationRequested() && s.tryAdvance(downstreamAsLong));
}
}
}
}
@Override
public boolean cancellationRequested() {
// If this method is called then an operation within the stream
// pipeline is short-circuiting (see AbstractPipeline.copyInto).
// Note that we cannot differentiate between an upstream or
// downstream operation
cancellationRequestedCalled = true;
return downstream.cancellationRequested();
}
};
}
};
}
@Override
public LongStream unordered() {
if (!isOrdered())
return this;
return new StatelessOp<Long>(this, StreamShape.LONG_VALUE, StreamOpFlag.NOT_ORDERED) {
@Override
Sink<Long> opWrapSink(int flags, Sink<Long> sink) {
return sink;
}
};
}
@Override
public final LongStream filter(LongPredicate predicate) {
Objects.requireNonNull(predicate);
return new StatelessOp<Long>(this, StreamShape.LONG_VALUE,
StreamOpFlag.NOT_SIZED) {
@Override
Sink<Long> opWrapSink(int flags, Sink<Long> sink) {
return new Sink.ChainedLong<Long>(sink) {
@Override
public void begin(long size) {
downstream.begin(-1);
}
@Override
public void accept(long t) {
if (predicate.test(t))
downstream.accept(t);
}
};
}
};
}
@Override
public final LongStream peek(LongConsumer action) {
Objects.requireNonNull(action);
return new StatelessOp<Long>(this, StreamShape.LONG_VALUE,
0) {
@Override
Sink<Long> opWrapSink(int flags, Sink<Long> sink) {
return new Sink.ChainedLong<Long>(sink) {
@Override
public void accept(long t) {
action.accept(t);
downstream.accept(t);
}
};
}
};
}
// Stateful intermediate ops from LongStream
@Override
public final LongStream limit(long maxSize) {
if (maxSize < 0)
throw new IllegalArgumentException(Long.toString(maxSize));
return SliceOps.makeLong(this, 0, maxSize);
}
@Override
public final LongStream skip(long n) {
if (n < 0)
throw new IllegalArgumentException(Long.toString(n));
if (n == 0)
return this;
else
return SliceOps.makeLong(this, n, -1);
}
@Override
public final LongStream sorted() {
return SortedOps.makeLong(this);
}
@Override
public final LongStream distinct() {
// While functional and quick to implement, this approach is not very efficient.
// An efficient version requires a long-specific map/set implementation.
return boxed().distinct().mapToLong(i -> (long) i);
}
// Terminal ops from LongStream
@Override
public void forEach(LongConsumer action) {
evaluate(ForEachOps.makeLong(action, false));
}
@Override
public void forEachOrdered(LongConsumer action) {
evaluate(ForEachOps.makeLong(action, true));
}
@Override
public final long sum() {
// use better algorithm to compensate for intermediate overflow?
return reduce(0, Long::sum);
}
@Override
public final OptionalLong min() {
return reduce(Math::min);
}
@Override
public final OptionalLong max() {
return reduce(Math::max);
}
@Override
public final OptionalDouble average() {
long[] avg = collect(() -> new long[2],
(ll, i) -> {
ll[0]++;
ll[1] += i;
},
(ll, rr) -> {
ll[0] += rr[0];
ll[1] += rr[1];
});
return avg[0] > 0
? OptionalDouble.of((double) avg[1] / avg[0])
: OptionalDouble.empty();
}
@Override
public final long count() {
return map(e -> 1L).sum();
}
@Override
public final LongSummaryStatistics summaryStatistics() {
return collect(LongSummaryStatistics::new, LongSummaryStatistics::accept,
LongSummaryStatistics::combine);
}
@Override
public final long reduce(long identity, LongBinaryOperator op) {
return evaluate(ReduceOps.makeLong(identity, op));
}
@Override
public final OptionalLong reduce(LongBinaryOperator op) {
return evaluate(ReduceOps.makeLong(op));
}
@Override
public final <R> R collect(Supplier<R> supplier,
ObjLongConsumer<R> accumulator,
BiConsumer<R, R> combiner) {
Objects.requireNonNull(combiner);
BinaryOperator<R> operator = (left, right) -> {
combiner.accept(left, right);
return left;
};
return evaluate(ReduceOps.makeLong(supplier, accumulator, operator));
}
@Override
public final boolean anyMatch(LongPredicate predicate) {
return evaluate(MatchOps.makeLong(predicate, MatchOps.MatchKind.ANY));
}
@Override
public final boolean allMatch(LongPredicate predicate) {
return evaluate(MatchOps.makeLong(predicate, MatchOps.MatchKind.ALL));
}
@Override
public final boolean noneMatch(LongPredicate predicate) {
return evaluate(MatchOps.makeLong(predicate, MatchOps.MatchKind.NONE));
}
@Override
public final OptionalLong findFirst() {
return evaluate(FindOps.makeLong(true));
}
@Override
public final OptionalLong findAny() {
return evaluate(FindOps.makeLong(false));
}
@Override
public final long[] toArray() {
return Nodes.flattenLong((Node.OfLong) evaluateToArrayNode(Long[]::new))
.asPrimitiveArray();
}
//
/**
* Source stage of a LongPipeline.
*
* @param <E_IN> type of elements in the upstream source
* @since 1.8
*/
static class Head<E_IN> extends LongPipeline<E_IN> {
/**
* Constructor for the source stage of a LongStream.
*
* @param source {@code Supplier<Spliterator>} describing the stream
* source
* @param sourceFlags the source flags for the stream source, described
* in {@link StreamOpFlag}
* @param parallel {@code true} if the pipeline is parallel
*/
Head(Supplier<? extends Spliterator<Long>> source,
int sourceFlags, boolean parallel) {
super(source, sourceFlags, parallel);
}
/**
* Constructor for the source stage of a LongStream.
*
* @param source {@code Spliterator} describing the stream source
* @param sourceFlags the source flags for the stream source, described
* in {@link StreamOpFlag}
* @param parallel {@code true} if the pipeline is parallel
*/
Head(Spliterator<Long> source,
int sourceFlags, boolean parallel) {
super(source, sourceFlags, parallel);
}
@Override
final boolean opIsStateful() {
throw new UnsupportedOperationException();
}
@Override
final Sink<E_IN> opWrapSink(int flags, Sink<Long> sink) {
throw new UnsupportedOperationException();
}
// Optimized sequential terminal operations for the head of the pipeline
@Override
public void forEach(LongConsumer action) {
if (!isParallel()) {
adapt(sourceStageSpliterator()).forEachRemaining(action);
} else {
super.forEach(action);
}
}
@Override
public void forEachOrdered(LongConsumer action) {
if (!isParallel()) {
adapt(sourceStageSpliterator()).forEachRemaining(action);
} else {
super.forEachOrdered(action);
}
}
}
/** Base class for a stateless intermediate stage of a LongStream.
*
* @param <E_IN> type of elements in the upstream source
* @since 1.8
*/
abstract static class StatelessOp<E_IN> extends LongPipeline<E_IN> {
/**
* Construct a new LongStream by appending a stateless intermediate
* operation to an existing stream.
* @param upstream The upstream pipeline stage
* @param inputShape The stream shape for the upstream pipeline stage
* @param opFlags Operation flags for the new stage
*/
StatelessOp(AbstractPipeline<?, E_IN, ?> upstream,
StreamShape inputShape,
int opFlags) {
super(upstream, opFlags);
assert upstream.getOutputShape() == inputShape;
}
@Override
final boolean opIsStateful() {
return false;
}
}
/**
* Base class for a stateful intermediate stage of a LongStream.
*
* @param <E_IN> type of elements in the upstream source
* @since 1.8
*/
abstract static class StatefulOp<E_IN> extends LongPipeline<E_IN> {
/**
* Construct a new LongStream by appending a stateful intermediate
* operation to an existing stream.
* @param upstream The upstream pipeline stage
* @param inputShape The stream shape for the upstream pipeline stage
* @param opFlags Operation flags for the new stage
*/
StatefulOp(AbstractPipeline<?, E_IN, ?> upstream,
StreamShape inputShape,
int opFlags) {
super(upstream, opFlags);
assert upstream.getOutputShape() == inputShape;
}
@Override
final boolean opIsStateful() {
return true;
}
@Override
abstract <P_IN> Node<Long> opEvaluateParallel(PipelineHelper<Long> helper,
Spliterator<P_IN> spliterator,
IntFunction<Long[]> generator);
}
}
| gpl-2.0 |
justinwm/astor | examples/evo_suite_test/math_70_spooned/evosuite/evosuite-tests/org/apache/commons/math/analysis/solvers/BisectionSolver_ESTest.java | 15353 | package org.apache.commons.math.analysis.solvers;
@org.junit.runner.RunWith(value = org.evosuite.runtime.EvoRunner.class)
@org.evosuite.runtime.EvoRunnerParameters(mockJVMNonDeterminism = true, resetStaticState = true, separateClassLoader = true, useVFS = true, useVNET = true)
public class BisectionSolver_ESTest extends org.apache.commons.math.analysis.solvers.BisectionSolver_ESTest_scaffolding {
@org.junit.Test(timeout = 4000)
public void test00() throws java.lang.Throwable {
org.apache.commons.math.analysis.Expm1Function expm1Function0 = new org.apache.commons.math.analysis.Expm1Function();
org.apache.commons.math.analysis.UnivariateRealFunction univariateRealFunction0 = expm1Function0.derivative();
org.apache.commons.math.analysis.solvers.BisectionSolver bisectionSolver0 = new org.apache.commons.math.analysis.solvers.BisectionSolver(univariateRealFunction0);
bisectionSolver0.setAbsoluteAccuracy(0.0);
double double0 = bisectionSolver0.solve(0.0, 1203.227358259, 1.0);
org.junit.Assert.assertEquals(52, bisectionSolver0.getIterationCount());
org.junit.Assert.assertEquals(1203.227358259, double0, 0.01);
}
@org.junit.Test(timeout = 4000)
public void test01() throws java.lang.Throwable {
org.apache.commons.math.analysis.Expm1Function expm1Function0 = new org.apache.commons.math.analysis.Expm1Function();
org.apache.commons.math.analysis.solvers.BisectionSolver bisectionSolver0 = new org.apache.commons.math.analysis.solvers.BisectionSolver(((org.apache.commons.math.analysis.UnivariateRealFunction)(expm1Function0)));
double double0 = bisectionSolver0.solve(((org.apache.commons.math.analysis.UnivariateRealFunction)(expm1Function0)), 0.0, 2.117582368135751E-36, 0.0);
org.junit.Assert.assertEquals(5.293955920339377E-37, double0, 0.01);
org.junit.Assert.assertEquals(0, bisectionSolver0.getIterationCount());
}
@org.junit.Test(timeout = 4000)
public void test02() throws java.lang.Throwable {
org.apache.commons.math.analysis.QuinticFunction quinticFunction0 = new org.apache.commons.math.analysis.QuinticFunction();
org.apache.commons.math.analysis.solvers.BisectionSolver bisectionSolver0 = new org.apache.commons.math.analysis.solvers.BisectionSolver(((org.apache.commons.math.analysis.UnivariateRealFunction)(quinticFunction0)));
double double0 = bisectionSolver0.solve(((org.apache.commons.math.analysis.UnivariateRealFunction)(quinticFunction0)), -4688.869543201, -1.0, -1.0);
org.junit.Assert.assertEquals(32, bisectionSolver0.getIterationCount());
org.junit.Assert.assertEquals(-1.0000002728699209, double0, 0.01);
}
@org.junit.Test(timeout = 4000)
public void test03() throws java.lang.Throwable {
org.apache.commons.math.analysis.QuinticFunction quinticFunction0 = new org.apache.commons.math.analysis.QuinticFunction();
org.apache.commons.math.analysis.Expm1Function expm1Function0 = new org.apache.commons.math.analysis.Expm1Function();
org.apache.commons.math.analysis.solvers.BisectionSolver bisectionSolver0 = new org.apache.commons.math.analysis.solvers.BisectionSolver(((org.apache.commons.math.analysis.UnivariateRealFunction)(expm1Function0)));
double double0 = bisectionSolver0.solve(((org.apache.commons.math.analysis.UnivariateRealFunction)(quinticFunction0)), 0.0, 1.0E-15);
org.junit.Assert.assertEquals(2.5E-16, double0, 0.01);
org.junit.Assert.assertEquals(0, bisectionSolver0.getIterationCount());
}
@org.junit.Test(timeout = 4000)
public void test04() throws java.lang.Throwable {
org.apache.commons.math.analysis.Expm1Function expm1Function0 = new org.apache.commons.math.analysis.Expm1Function();
org.apache.commons.math.analysis.UnivariateRealFunction univariateRealFunction0 = expm1Function0.derivative();
org.apache.commons.math.analysis.solvers.BisectionSolver bisectionSolver0 = new org.apache.commons.math.analysis.solvers.BisectionSolver(univariateRealFunction0);
double double0 = bisectionSolver0.solve(-2982.31880945, 1.0, 1.0);
org.junit.Assert.assertEquals(31, bisectionSolver0.getIterationCount());
org.junit.Assert.assertEquals(-2982.318809102696, double0, 0.01);
}
@org.junit.Test(timeout = 4000)
public void test05() throws java.lang.Throwable {
org.apache.commons.math.analysis.Expm1Function expm1Function0 = new org.apache.commons.math.analysis.Expm1Function();
org.apache.commons.math.analysis.solvers.BisectionSolver bisectionSolver0 = new org.apache.commons.math.analysis.solvers.BisectionSolver(((org.apache.commons.math.analysis.UnivariateRealFunction)(expm1Function0)));
double double0 = bisectionSolver0.solve(0.0, 1.0E-12);
org.junit.Assert.assertEquals(0, bisectionSolver0.getIterationCount());
org.junit.Assert.assertEquals(2.5E-13, double0, 0.01);
}
@org.junit.Test(timeout = 4000)
public void test06() throws java.lang.Throwable {
org.apache.commons.math.analysis.SinFunction sinFunction0 = new org.apache.commons.math.analysis.SinFunction();
org.apache.commons.math.analysis.solvers.BisectionSolver bisectionSolver0 = new org.apache.commons.math.analysis.solvers.BisectionSolver(((org.apache.commons.math.analysis.UnivariateRealFunction)(sinFunction0)));
double double0 = bisectionSolver0.solve(-2016.254123630258, 0.0);
org.junit.Assert.assertEquals(30, bisectionSolver0.getIterationCount());
org.junit.Assert.assertEquals(-1108.9822069248048, double0, 0.01);
}
@org.junit.Test(timeout = 4000)
public void test07() throws java.lang.Throwable {
org.apache.commons.math.analysis.solvers.BisectionSolver bisectionSolver0 = new org.apache.commons.math.analysis.solvers.BisectionSolver();
try {
bisectionSolver0.solve(((org.apache.commons.math.analysis.UnivariateRealFunction)(null)), 0.0, 0.0, 0.0);
org.junit.Assert.fail("Expecting exception: IllegalArgumentException");
} catch (java.lang.IllegalArgumentException e) {
org.evosuite.runtime.EvoAssertions.assertThrownBy("org.apache.commons.math.MathRuntimeException", e);
}
}
@org.junit.Test(timeout = 4000)
public void test08() throws java.lang.Throwable {
org.apache.commons.math.analysis.solvers.BisectionSolver bisectionSolver0 = new org.apache.commons.math.analysis.solvers.BisectionSolver();
org.apache.commons.math.analysis.QuinticFunction quinticFunction0 = new org.apache.commons.math.analysis.QuinticFunction();
bisectionSolver0.setMaximalIterationCount(-927);
try {
bisectionSolver0.solve(((org.apache.commons.math.analysis.UnivariateRealFunction)(quinticFunction0)), 0.3503984076660156, 557.0, 1.0);
org.junit.Assert.fail("Expecting exception: Exception");
} catch (java.lang.Exception e) {
org.evosuite.runtime.EvoAssertions.assertThrownBy("org.apache.commons.math.analysis.solvers.BisectionSolver", e);
}
}
@org.junit.Test(timeout = 4000)
public void test09() throws java.lang.Throwable {
org.apache.commons.math.analysis.solvers.BisectionSolver bisectionSolver0 = new org.apache.commons.math.analysis.solvers.BisectionSolver();
org.apache.commons.math.analysis.QuinticFunction quinticFunction0 = new org.apache.commons.math.analysis.QuinticFunction();
try {
bisectionSolver0.solve(((org.apache.commons.math.analysis.UnivariateRealFunction)(quinticFunction0)), 1.0, 1.0);
org.junit.Assert.fail("Expecting exception: IllegalArgumentException");
} catch (java.lang.IllegalArgumentException e) {
org.evosuite.runtime.EvoAssertions.assertThrownBy("org.apache.commons.math.MathRuntimeException", e);
}
}
@org.junit.Test(timeout = 4000)
public void test10() throws java.lang.Throwable {
org.apache.commons.math.analysis.solvers.BisectionSolver bisectionSolver0 = new org.apache.commons.math.analysis.solvers.BisectionSolver();
try {
bisectionSolver0.solve(((org.apache.commons.math.analysis.UnivariateRealFunction)(null)), -1567.121909636, 0.0);
org.junit.Assert.fail("Expecting exception: NullPointerException");
} catch (java.lang.NullPointerException e) {
org.evosuite.runtime.EvoAssertions.assertThrownBy("org.apache.commons.math.analysis.solvers.BisectionSolver", e);
}
}
@org.junit.Test(timeout = 4000)
public void test11() throws java.lang.Throwable {
org.apache.commons.math.analysis.solvers.BisectionSolver bisectionSolver0 = new org.apache.commons.math.analysis.solvers.BisectionSolver();
try {
bisectionSolver0.solve(-875.52475907, -875.52475907, -3772.9171773759085);
org.junit.Assert.fail("Expecting exception: IllegalArgumentException");
} catch (java.lang.IllegalArgumentException e) {
org.evosuite.runtime.EvoAssertions.assertThrownBy("org.apache.commons.math.MathRuntimeException", e);
}
}
@org.junit.Test(timeout = 4000)
public void test12() throws java.lang.Throwable {
org.apache.commons.math.analysis.Expm1Function expm1Function0 = new org.apache.commons.math.analysis.Expm1Function();
org.apache.commons.math.analysis.MonitoredFunction monitoredFunction0 = new org.apache.commons.math.analysis.MonitoredFunction(((org.apache.commons.math.analysis.UnivariateRealFunction)(expm1Function0)));
org.apache.commons.math.analysis.solvers.BisectionSolver bisectionSolver0 = new org.apache.commons.math.analysis.solvers.BisectionSolver(((org.apache.commons.math.analysis.UnivariateRealFunction)(monitoredFunction0)));
bisectionSolver0.setAbsoluteAccuracy(-289.8);
try {
bisectionSolver0.solve(-289.8, 0.2605447570801971, -289.8);
org.junit.Assert.fail("Expecting exception: Exception");
} catch (java.lang.Exception e) {
org.evosuite.runtime.EvoAssertions.assertThrownBy("org.apache.commons.math.analysis.solvers.BisectionSolver", e);
}
}
@org.junit.Test(timeout = 4000)
public void test13() throws java.lang.Throwable {
org.apache.commons.math.analysis.solvers.BisectionSolver bisectionSolver0 = new org.apache.commons.math.analysis.solvers.BisectionSolver();
try {
bisectionSolver0.solve(0.5, 1553.24562058039, 0.5);
org.junit.Assert.fail("Expecting exception: NullPointerException");
} catch (java.lang.NullPointerException e) {
org.evosuite.runtime.EvoAssertions.assertThrownBy("org.apache.commons.math.analysis.solvers.BisectionSolver", e);
}
}
@org.junit.Test(timeout = 4000)
public void test14() throws java.lang.Throwable {
org.apache.commons.math.analysis.Expm1Function expm1Function0 = new org.apache.commons.math.analysis.Expm1Function();
org.apache.commons.math.analysis.solvers.BisectionSolver bisectionSolver0 = new org.apache.commons.math.analysis.solvers.BisectionSolver(((org.apache.commons.math.analysis.UnivariateRealFunction)(expm1Function0)));
bisectionSolver0.setAbsoluteAccuracy(-1604.6787);
try {
bisectionSolver0.solve(-1604.6787, 0.0);
org.junit.Assert.fail("Expecting exception: Exception");
} catch (java.lang.Exception e) {
org.evosuite.runtime.EvoAssertions.assertThrownBy("org.apache.commons.math.analysis.solvers.BisectionSolver", e);
}
}
@org.junit.Test(timeout = 4000)
public void test15() throws java.lang.Throwable {
org.apache.commons.math.analysis.solvers.BisectionSolver bisectionSolver0 = new org.apache.commons.math.analysis.solvers.BisectionSolver();
try {
bisectionSolver0.solve(-3907.8956384, 439.713944048);
org.junit.Assert.fail("Expecting exception: NullPointerException");
} catch (java.lang.NullPointerException e) {
org.evosuite.runtime.EvoAssertions.assertThrownBy("org.apache.commons.math.analysis.solvers.BisectionSolver", e);
}
}
@org.junit.Test(timeout = 4000)
public void test16() throws java.lang.Throwable {
org.apache.commons.math.analysis.solvers.BisectionSolver bisectionSolver0 = null;
try {
bisectionSolver0 = new org.apache.commons.math.analysis.solvers.BisectionSolver(((org.apache.commons.math.analysis.UnivariateRealFunction)(null)));
org.junit.Assert.fail("Expecting exception: IllegalArgumentException");
} catch (java.lang.IllegalArgumentException e) {
org.evosuite.runtime.EvoAssertions.assertThrownBy("org.apache.commons.math.MathRuntimeException", e);
}
}
@org.junit.Test(timeout = 4000)
public void test17() throws java.lang.Throwable {
org.apache.commons.math.analysis.Expm1Function expm1Function0 = new org.apache.commons.math.analysis.Expm1Function();
org.apache.commons.math.analysis.MonitoredFunction monitoredFunction0 = new org.apache.commons.math.analysis.MonitoredFunction(((org.apache.commons.math.analysis.UnivariateRealFunction)(expm1Function0)));
org.apache.commons.math.analysis.solvers.BisectionSolver bisectionSolver0 = new org.apache.commons.math.analysis.solvers.BisectionSolver(((org.apache.commons.math.analysis.UnivariateRealFunction)(monitoredFunction0)));
double double0 = bisectionSolver0.solve(((org.apache.commons.math.analysis.UnivariateRealFunction)(monitoredFunction0)), -2213.303500073061, 882.947831285651);
org.junit.Assert.assertEquals(31, bisectionSolver0.getIterationCount());
org.junit.Assert.assertEquals(-2.659401939745394E-7, double0, 0.01);
}
@org.junit.Test(timeout = 4000)
public void test18() throws java.lang.Throwable {
org.apache.commons.math.analysis.SinFunction sinFunction0 = new org.apache.commons.math.analysis.SinFunction();
org.apache.commons.math.analysis.solvers.BisectionSolver bisectionSolver0 = new org.apache.commons.math.analysis.solvers.BisectionSolver(((org.apache.commons.math.analysis.UnivariateRealFunction)(sinFunction0)));
bisectionSolver0.setMaximalIterationCount(8);
try {
bisectionSolver0.solve(((org.apache.commons.math.analysis.UnivariateRealFunction)(sinFunction0)), -4469.7843254918, 0.0);
org.junit.Assert.fail("Expecting exception: Exception");
} catch (java.lang.Exception e) {
org.evosuite.runtime.EvoAssertions.assertThrownBy("org.apache.commons.math.analysis.solvers.BisectionSolver", e);
}
}
@org.junit.Test(timeout = 4000)
public void test19() throws java.lang.Throwable {
org.apache.commons.math.analysis.Expm1Function expm1Function0 = new org.apache.commons.math.analysis.Expm1Function();
org.apache.commons.math.analysis.MonitoredFunction monitoredFunction0 = new org.apache.commons.math.analysis.MonitoredFunction(((org.apache.commons.math.analysis.UnivariateRealFunction)(expm1Function0)));
org.apache.commons.math.analysis.solvers.BisectionSolver bisectionSolver0 = new org.apache.commons.math.analysis.solvers.BisectionSolver(((org.apache.commons.math.analysis.UnivariateRealFunction)(monitoredFunction0)));
try {
bisectionSolver0.solve(548.91, -2.659401939745394E-7);
org.junit.Assert.fail("Expecting exception: IllegalArgumentException");
} catch (java.lang.IllegalArgumentException e) {
org.evosuite.runtime.EvoAssertions.assertThrownBy("org.apache.commons.math.MathRuntimeException", e);
}
}
@org.junit.Test(timeout = 4000)
public void test20() throws java.lang.Throwable {
org.apache.commons.math.analysis.solvers.BisectionSolver bisectionSolver0 = new org.apache.commons.math.analysis.solvers.BisectionSolver();
org.apache.commons.math.analysis.Expm1Function expm1Function0 = new org.apache.commons.math.analysis.Expm1Function();
try {
bisectionSolver0.solve(((org.apache.commons.math.analysis.UnivariateRealFunction)(expm1Function0)), 1.0E-15, 6.8147913021677, 6.8147913021677);
org.junit.Assert.fail("Expecting exception: NullPointerException");
} catch (java.lang.NullPointerException e) {
org.evosuite.runtime.EvoAssertions.assertThrownBy("org.apache.commons.math.analysis.solvers.BisectionSolver", e);
}
}
}
| gpl-2.0 |
b-cuts/esper | esper/src/main/java/com/espertech/esper/epl/agg/service/AggSvcGroupAllMixedAccessFactory.java | 2480 | /**************************************************************************************
* Copyright (C) 2006-2015 EsperTech Inc. All rights reserved. *
* http://www.espertech.com/esper *
* http://www.espertech.com *
* ---------------------------------------------------------------------------------- *
* The software in this package is published under the terms of the GPL license *
* a copy of which has been included with this distribution in the license.txt file. *
**************************************************************************************/
package com.espertech.esper.epl.agg.service;
import com.espertech.esper.core.context.util.AgentInstanceContext;
import com.espertech.esper.epl.agg.access.AggregationAccessorSlotPair;
import com.espertech.esper.epl.agg.access.AggregationState;
import com.espertech.esper.epl.agg.aggregator.AggregationMethod;
import com.espertech.esper.epl.core.MethodResolutionService;
import com.espertech.esper.epl.expression.core.ExprEvaluator;
/**
* Implementation for handling aggregation without any grouping (no group-by).
*/
public class AggSvcGroupAllMixedAccessFactory extends AggregationServiceFactoryBase
{
protected final AggregationAccessorSlotPair[] accessors;
protected final AggregationStateFactory[] accessAggregations;
protected final boolean isJoin;
public AggSvcGroupAllMixedAccessFactory(ExprEvaluator evaluators[], AggregationMethodFactory aggregators[], Object groupKeyBinding, AggregationAccessorSlotPair[] accessors, AggregationStateFactory[] accessAggregations, boolean join) {
super(evaluators, aggregators, groupKeyBinding);
this.accessors = accessors;
this.accessAggregations = accessAggregations;
isJoin = join;
}
public AggregationService makeService(AgentInstanceContext agentInstanceContext, MethodResolutionService methodResolutionService) {
AggregationState[] states = methodResolutionService.newAccesses(agentInstanceContext.getAgentInstanceId(), isJoin, accessAggregations);
AggregationMethod[] aggregatorsAgentInstance = methodResolutionService.newAggregators(super.aggregators, agentInstanceContext.getAgentInstanceId());
return new AggSvcGroupAllMixedAccessImpl(evaluators, aggregatorsAgentInstance, accessors, states, aggregators, accessAggregations);
}
} | gpl-2.0 |
idandan/AcDisplay | project/app/src/main/java/com/achep/acdisplay/services/BathService.java | 12440 | /*
* Copyright (C) 2014 AChep@xda <artemchep@gmail.com>
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
* MA 02110-1301, USA.
*/
package com.achep.acdisplay.services;
import android.app.Notification;
import android.app.NotificationManager;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.os.IBinder;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.v4.content.LocalBroadcastManager;
import android.text.TextUtils;
import android.util.Log;
import com.achep.acdisplay.App;
import com.achep.acdisplay.R;
import com.achep.acdisplay.notifications.NotificationHelper;
import com.achep.acdisplay.ui.activities.MainActivity;
import com.achep.base.AppHeap;
import com.achep.base.interfaces.IOnLowMemory;
import com.achep.base.services.BaseService;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import static com.achep.base.Build.DEBUG;
/**
* Created by achep on 26.05.14.
*
* @author Artem Chepurnoy
*/
public class BathService extends BaseService {
private static final String TAG = "BathService";
private static final String ACTION_ADD_SERVICE = TAG + ":add_service";
private static final String ACTION_REMOVE_SERVICE = TAG + ":remove_service";
private static final String EXTRA_SERVICE_CLASS = "class";
public static void startService(Context context, Class<? extends ChildService> clazz) {
synchronized (monitor) {
if (sRunning) {
Intent intent = new Intent(ACTION_ADD_SERVICE);
intent.putExtra(EXTRA_SERVICE_CLASS, clazz);
LocalBroadcastManager.getInstance(context).sendBroadcast(intent);
} else if (!sServiceMap.containsKey(clazz)) {
ChildService instance;
try {
instance = clazz.newInstance();
} catch (InstantiationException | IllegalAccessException e) {
throw new RuntimeException(e);
}
sServiceMap.put(clazz, instance);
if (!sCreated) context.startService(new Intent(context, BathService.class));
}
}
}
public static void stopService(Context context, Class<? extends ChildService> clazz) {
synchronized (monitor) {
if (sRunning) {
Intent intent = new Intent(ACTION_REMOVE_SERVICE);
intent.putExtra(EXTRA_SERVICE_CLASS, clazz);
LocalBroadcastManager.getInstance(context).sendBroadcast(intent);
} else {
sServiceMap.remove(clazz);
}
}
}
private static final Map<Class, ChildService> sServiceMap = new ConcurrentHashMap<>(2);
private static final Object monitor = new Object();
private static boolean sCreated;
private static boolean sRunning;
private LocalBroadcastManager mLocalBroadcastManager;
private NotificationManager mNotificationManager;
private String mLanguage;
private final Map<Class, ChildService> mMap = new HashMap<>(2);
private final BroadcastReceiver mReceiver = new BroadcastReceiver() {
@Override
public void onReceive(Context context, Intent intent) {
final String action = intent.getAction();
switch (action) {
// Received from a local broadcast receiver.
case ACTION_ADD_SERVICE:
case ACTION_REMOVE_SERVICE:
synchronized (monitor) {
Class clazz = (Class) intent.getSerializableExtra(EXTRA_SERVICE_CLASS);
boolean addition = ACTION_ADD_SERVICE.equals(action);
boolean exists = mMap.containsKey(clazz);
if (addition == exists) return;
if (addition) { // Addition
ChildService child;
try {
// Adding child to host service.
child = (ChildService) clazz.newInstance();
} catch (InstantiationException | IllegalAccessException e) {
throw new RuntimeException(e); // Should never happen
}
child.setContext(BathService.this);
child.onCreate();
mMap.put(clazz, child);
updateNotification();
} else { // Removal
ChildService child = mMap.remove(clazz);
child.onDestroy();
child.setContext(null);
if (mMap.isEmpty()) {
stopMySelf();
} else updateNotification();
}
}
break;
// Received from a system broadcast receiver.
case Intent.ACTION_CONFIGURATION_CHANGED:
String lang = getResources().getConfiguration().locale.getLanguage();
if (!TextUtils.equals(mLanguage, lang)) {
mLanguage = lang;
updateNotification();
}
break;
}
}
};
@Override
public void onCreate() {
super.onCreate();
mNotificationManager = (NotificationManager) getSystemService(NOTIFICATION_SERVICE);
mLanguage = getResources().getConfiguration().locale.getLanguage();
// Listen for the config changes to update notification just
// once locale has changed.
IntentFilter intentFilter = new IntentFilter();
intentFilter.addAction(Intent.ACTION_CONFIGURATION_CHANGED);
registerReceiver(mReceiver, intentFilter);
synchronized (monitor) {
sCreated = true;
sRunning = true;
// Register for add / remove service events.
intentFilter = new IntentFilter();
intentFilter.addAction(ACTION_ADD_SERVICE);
intentFilter.addAction(ACTION_REMOVE_SERVICE);
mLocalBroadcastManager = LocalBroadcastManager.getInstance(this);
mLocalBroadcastManager.registerReceiver(mReceiver, intentFilter);
if (sServiceMap.isEmpty()) {
stopMySelf();
} else {
// Init all children
Set<Map.Entry<Class, ChildService>> set = sServiceMap.entrySet();
for (Map.Entry<Class, ChildService> entry : set) {
ChildService child = entry.getValue();
child.setContext(this);
child.onCreate();
mMap.put(entry.getKey(), child);
}
sServiceMap.clear();
startForeground(App.ID_NOTIFY_BATH, buildNotification());
}
}
}
@Override
public void onLowMemory() {
super.onLowMemory();
synchronized (monitor) {
for (ChildService child : mMap.values()) {
child.onLowMemory();
}
}
}
@Override
public void onDestroy() {
super.onDestroy();
synchronized (monitor) {
sCreated = false;
sRunning = false;
mLocalBroadcastManager.unregisterReceiver(mReceiver);
// Kill all children.
for (ChildService child : mMap.values()) child.onDestroy();
mMap.clear();
// TODO: Should I add children back to pending map and then
// restart the service?
if (!sServiceMap.isEmpty()) startService(new Intent(this, getClass()));
}
unregisterReceiver(mReceiver);
// Make sure that notification does not exists.
mNotificationManager.cancel(App.ID_NOTIFY_BATH);
// Leaks canary
AppHeap.getRefWatcher().watch(this);
}
private void stopMySelf() {
sRunning = false;
stopSelf();
}
private void updateNotification() {
mNotificationManager.notify(App.ID_NOTIFY_BATH, buildNotification());
}
/**
* Builds fresh notification with all {@link ChildService children services}'s
* {@link com.achep.acdisplay.services.BathService.ChildService#getLabel() labels} in.
*/
@NonNull
private Notification buildNotification() {
boolean empty = true;
StringBuilder sb = new StringBuilder();
String divider = getString(R.string.settings_multi_list_divider);
for (ChildService child : mMap.values()) {
String label = child.getLabel();
if (TextUtils.isEmpty(label)) {
if (DEBUG) {
label = "[" + child.getClass().getSimpleName() + "]";
} else continue;
}
if (!empty) {
sb.append(divider);
}
sb.append(label);
empty = false;
}
// Format a message text.
String contentText = sb.toString();
if (contentText.length() > 0 && !mLanguage.contains("de")) {
contentText = contentText.charAt(0) + contentText.substring(1).toLowerCase();
}
// Get notification intent.
Intent intent = null;
for (ChildService child : mMap.values())
if (!TextUtils.isEmpty(child.getLabel())) {
if (intent == null) {
intent = child.getSettingsIntent();
} else {
intent = null;
break;
}
}
if (intent == null) {
intent = new Intent(this, MainActivity.class);
}
return NotificationHelper.buildNotification(this, App.ID_NOTIFY_BATH, contentText, intent);
}
@Override
public IBinder onBind(Intent intent) {
return null;
}
//-- CHILD SERVICE --------------------------------------------------------
/**
* Base for fake foreground service hosted in {@link com.achep.acdisplay.services.BathService}.
* Call {@link BathService#startService(android.content.Context, Class)} to start this service,
* and {@link BathService#stopService(android.content.Context, Class)} to stop.
*
* @author Artem Chepurnoy
*/
public abstract static class ChildService implements IOnLowMemory {
private Context mContext;
public ChildService() {
if (DEBUG) {
Log.d(TAG, "Creating " + getClass().getSimpleName() + " service...");
}
}
final void setContext(Context context) {
mContext = context;
}
/**
* Called when fake-service is attached to main one.
*
* @see android.app.Service#onCreate()
*/
public abstract void onCreate();
/**
* Called when fake-service is detached from main one.
*
* @see android.app.Service#onDestroy()
*/
public abstract void onDestroy();
/**
* {@inheritDoc}
*/
@Override
public void onLowMemory() { /* placeholder */ }
/**
* @return The human-readable label of this service.
*/
@Nullable
public String getLabel() {
return null;
}
@Nullable
public Intent getSettingsIntent() {
return null;
}
public final Context getContext() {
return mContext;
}
}
}
| gpl-2.0 |
md-5/jdk10 | src/jdk.internal.vm.ci/share/classes/jdk.vm.ci.hotspot/src/jdk/vm/ci/hotspot/HotSpotCodeCacheProvider.java | 7742 | /*
* Copyright (c) 2013, 2019, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package jdk.vm.ci.hotspot;
import java.util.Map;
import java.util.Objects;
import jdk.vm.ci.code.BailoutException;
import jdk.vm.ci.code.BytecodeFrame;
import jdk.vm.ci.code.CodeCacheProvider;
import jdk.vm.ci.code.CompiledCode;
import jdk.vm.ci.code.InstalledCode;
import jdk.vm.ci.code.RegisterConfig;
import jdk.vm.ci.code.TargetDescription;
import jdk.vm.ci.code.site.Call;
import jdk.vm.ci.code.site.Mark;
import jdk.vm.ci.meta.ResolvedJavaMethod;
import jdk.vm.ci.meta.SpeculationLog;
/**
* HotSpot implementation of {@link CodeCacheProvider}.
*/
public class HotSpotCodeCacheProvider implements CodeCacheProvider {
protected final HotSpotJVMCIRuntime runtime;
private final HotSpotVMConfig config;
protected final TargetDescription target;
protected final RegisterConfig regConfig;
public HotSpotCodeCacheProvider(HotSpotJVMCIRuntime runtime, TargetDescription target, RegisterConfig regConfig) {
this.runtime = runtime;
this.config = runtime.getConfig();
this.target = target;
this.regConfig = regConfig;
}
@Override
public String getMarkName(Mark mark) {
int markId = (int) mark.id;
HotSpotVMConfigStore store = runtime.getConfigStore();
for (Map.Entry<String, Long> e : store.getConstants().entrySet()) {
String name = e.getKey();
if (name.startsWith("MARKID_") && e.getValue() == markId) {
return name;
}
}
return CodeCacheProvider.super.getMarkName(mark);
}
/**
* Decodes a call target to a mnemonic if possible.
*/
@Override
public String getTargetName(Call call) {
if (call.target instanceof HotSpotForeignCallTarget) {
long address = ((HotSpotForeignCallTarget) call.target).address;
HotSpotVMConfigStore store = runtime.getConfigStore();
for (Map.Entry<String, VMField> e : store.getFields().entrySet()) {
VMField field = e.getValue();
if (field.isStatic() && field.value != null && field.value instanceof Long && ((Long) field.value) == address) {
return e.getValue() + ":0x" + Long.toHexString(address);
}
}
}
return CodeCacheProvider.super.getTargetName(call);
}
@Override
public RegisterConfig getRegisterConfig() {
return regConfig;
}
@Override
public int getMinimumOutgoingSize() {
return config.runtimeCallStackSize;
}
private InstalledCode logOrDump(InstalledCode installedCode, CompiledCode compiledCode) {
runtime.notifyInstall(this, installedCode, compiledCode);
return installedCode;
}
@Override
public InstalledCode installCode(ResolvedJavaMethod method, CompiledCode compiledCode, InstalledCode installedCode, SpeculationLog log, boolean isDefault) {
InstalledCode resultInstalledCode;
if (installedCode != null) {
throw new IllegalArgumentException("InstalledCode argument must be null");
}
HotSpotCompiledCode hsCompiledCode = (HotSpotCompiledCode) compiledCode;
String name = hsCompiledCode.getName();
HotSpotCompiledNmethod hsCompiledNmethod = null;
if (method == null) {
// Must be a stub
resultInstalledCode = new HotSpotRuntimeStub(name);
} else {
hsCompiledNmethod = (HotSpotCompiledNmethod) hsCompiledCode;
HotSpotResolvedJavaMethodImpl hsMethod = (HotSpotResolvedJavaMethodImpl) method;
resultInstalledCode = new HotSpotNmethod(hsMethod, name, isDefault, hsCompiledNmethod.id);
}
HotSpotSpeculationLog speculationLog = null;
if (log != null) {
if (log.hasSpeculations()) {
speculationLog = (HotSpotSpeculationLog) log;
}
}
byte[] speculations;
long failedSpeculationsAddress;
if (speculationLog != null) {
speculations = speculationLog.getFlattenedSpeculations(true);
failedSpeculationsAddress = speculationLog.getFailedSpeculationsAddress();
} else {
speculations = new byte[0];
failedSpeculationsAddress = 0L;
}
int result = runtime.getCompilerToVM().installCode(target, (HotSpotCompiledCode) compiledCode, resultInstalledCode, failedSpeculationsAddress, speculations);
if (result != config.codeInstallResultOk) {
String resultDesc = config.getCodeInstallResultDescription(result);
if (hsCompiledNmethod != null) {
String msg = hsCompiledNmethod.getInstallationFailureMessage();
if (msg != null) {
msg = String.format("Code installation failed: %s%n%s", resultDesc, msg);
} else {
msg = String.format("Code installation failed: %s", resultDesc);
}
throw new BailoutException(result != config.codeInstallResultDependenciesFailed, msg);
} else {
throw new BailoutException("Error installing %s: %s", ((HotSpotCompiledCode) compiledCode).getName(), resultDesc);
}
}
return logOrDump(resultInstalledCode, compiledCode);
}
@Override
public void invalidateInstalledCode(InstalledCode installedCode) {
if (installedCode instanceof HotSpotNmethod) {
runtime.getCompilerToVM().invalidateHotSpotNmethod((HotSpotNmethod) installedCode);
} else {
throw new IllegalArgumentException("Cannot invalidate a " + Objects.requireNonNull(installedCode).getClass().getName());
}
}
@Override
public TargetDescription getTarget() {
return target;
}
public String disassemble(InstalledCode code) {
if (code.isValid()) {
return runtime.getCompilerToVM().disassembleCodeBlob(code);
}
return null;
}
@Override
public SpeculationLog createSpeculationLog() {
return new HotSpotSpeculationLog();
}
@Override
public long getMaxCallTargetOffset(long address) {
return runtime.getCompilerToVM().getMaxCallTargetOffset(address);
}
@Override
public boolean shouldDebugNonSafepoints() {
return runtime.getCompilerToVM().shouldDebugNonSafepoints();
}
public int interpreterFrameSize(BytecodeFrame pos) {
return runtime.getCompilerToVM().interpreterFrameSize(pos);
}
/**
* Resets all compilation statistics.
*/
public void resetCompilationStatistics() {
runtime.getCompilerToVM().resetCompilationStatistics();
}
}
| gpl-2.0 |
RangerRick/opennms | opennms-services/src/test/java/org/opennms/netmgt/scheduler/ScheduleTest.java | 6338 | /*******************************************************************************
* This file is part of OpenNMS(R).
*
* Copyright (C) 2006-2012 The OpenNMS Group, Inc.
* OpenNMS(R) is Copyright (C) 1999-2012 The OpenNMS Group, Inc.
*
* OpenNMS(R) is a registered trademark of The OpenNMS Group, Inc.
*
* OpenNMS(R) is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published
* by the Free Software Foundation, either version 3 of the License,
* or (at your option) any later version.
*
* OpenNMS(R) is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with OpenNMS(R). If not, see:
* http://www.gnu.org/licenses/
*
* For more information contact:
* OpenNMS(R) Licensing <license@opennms.org>
* http://www.opennms.org/
* http://www.opennms.com/
*******************************************************************************/
package org.opennms.netmgt.scheduler;
import junit.framework.TestCase;
import org.opennms.core.test.MockLogAppender;
import org.opennms.netmgt.poller.mock.MockInterval;
import org.opennms.netmgt.poller.mock.MockScheduler;
/**
* Represents a ScheduleTest
*
* @author brozow
*/
public class ScheduleTest extends TestCase {
private MockSchedulable m_schedulable;
private MockInterval m_interval;
private MockScheduler m_scheduler;
private Schedule m_sched;
public static void main(String[] args) {
junit.textui.TestRunner.run(ScheduleTest.class);
}
class MockSchedulable implements ReadyRunnable {
private volatile int runCount = 0;
private volatile boolean m_callingAdjustSchedule;
public boolean isReady() {
return true;
}
public void run() {
runCount++;
if (isCallingAdjustSchedule())
m_sched.adjustSchedule();
}
public int getRunCount() {
return runCount;
}
public void setCallingAdjustSchedule(boolean callingAdjustSchedule) {
m_callingAdjustSchedule = callingAdjustSchedule;
}
public boolean isCallingAdjustSchedule() {
return m_callingAdjustSchedule;
}
}
/*
* @see TestCase#setUp()
*/
protected void setUp() throws Exception {
super.setUp();
MockLogAppender.setupLogging();
m_schedulable = new MockSchedulable();
m_scheduler = new MockScheduler();
m_interval = new MockInterval(m_scheduler, 1000L);
m_sched = new Schedule(m_schedulable, m_interval, m_scheduler);
}
/*
* @see TestCase#tearDown()
*/
protected void tearDown() throws Exception {
MockLogAppender.assertNoWarningsOrGreater();
super.tearDown();
}
public void testSchedule() {
m_sched.schedule();
assertRunAndScheduled(0, 0, 0, 1);
m_scheduler.next();
assertRunAndScheduled(0, 1000, 1, 1);
m_scheduler.next();
assertRunAndScheduled(1000, 1000, 2, 1);
}
public void testAdjustSchedule() {
m_sched.schedule();
assertRunAndScheduled(0, 0, 0, 1);
m_scheduler.next();
m_interval.setInterval(900);
m_sched.adjustSchedule();
assertRunAndScheduled(0, 900, 1, 2);
m_scheduler.next();
assertRunAndScheduled(900, 900, 2, 2);
// jump to the expired entry
m_scheduler.next();
// note that we don't increase the run count
assertRunAndScheduled(1000, 800, 2, 1);
m_scheduler.next();
assertRunAndScheduled(1800, 900, 3, 1);
m_scheduler.next();
assertRunAndScheduled(2700, 900, 4, 1);
m_interval.setInterval(1000);
m_sched.adjustSchedule();
// jump to the expired entry
m_scheduler.next();
assertRunAndScheduled(3600, 100, 4, 1);
m_scheduler.next();
assertRunAndScheduled(3700, 1000, 5, 1);
}
public void testUnschedule() {
m_sched.schedule();
assertRunAndScheduled(0, 0, 0, 1);
m_scheduler.next();
assertRunAndScheduled(0, 1000, 1, 1);
m_scheduler.next();
assertRunAndScheduled(1000, 1000, 2, 1);
m_sched.unschedule();
// jump to the expired entry
m_scheduler.next();
assertRunAndScheduled(2000, -1, 2, 0);
}
public void testTemporarilySuspend() {
m_interval.addSuspension(1500, 2500);
m_sched.schedule();
assertRunAndScheduled(0, 0, 0, 1);
m_scheduler.next();
assertRunAndScheduled(0, 1000, 1, 1);
m_scheduler.next();
assertRunAndScheduled(1000, 1000, 2, 1);
// this is the suspended entry
m_scheduler.next();
// assert that the entry has not run
assertRunAndScheduled(2000, 1000, 2, 1);
m_scheduler.next();
assertRunAndScheduled(3000, 1000, 3, 1);
}
public void testAdjustScheduleWithinRun() {
m_schedulable.setCallingAdjustSchedule(true);
m_sched.schedule();
assertRunAndScheduled(0, 0, 0, 1);
m_scheduler.next();
assertRunAndScheduled(0, 1000, 1, 1);
m_scheduler.next();
assertRunAndScheduled(1000, 1000, 2, 1);
}
private void assertRunAndScheduled(long currentTime, long interval, int count, int entryCount) {
assertEquals(count, m_schedulable.getRunCount());
assertEquals(currentTime, m_scheduler.getCurrentTime());
assertEquals(entryCount, m_scheduler.getEntryCount());
if (entryCount > 0)
assertNotNull(m_scheduler.getEntries().get(Long.valueOf(currentTime+interval)));
}
}
| gpl-2.0 |
hgl888/AcDisplay | project/app/src/main/java/com/achep/acdisplay/receiver/ReceiverPublic.java | 2743 | /*
* Copyright (C) 2014 AChep@xda <artemchep@gmail.com>
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
* MA 02110-1301, USA.
*/
package com.achep.acdisplay.receiver;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.util.Log;
import com.achep.acdisplay.App;
import com.achep.acdisplay.Config;
import com.achep.acdisplay.R;
import com.achep.base.utils.ToastUtils;
/**
* Created by Artem on 11.03.14.
*/
public class ReceiverPublic extends BroadcastReceiver {
private static final String TAG = "PublicReceiver";
@Override
public void onReceive(Context context, Intent intent) {
Config config = Config.getInstance();
String action = intent.getAction();
switch (action) {
case App.ACTION_ENABLE:
Log.i(TAG, "Enabling AcDisplay by intent. " + intent);
setAcDisplayEnabled(context, config, true);
break;
case App.ACTION_DISABLE:
Log.i(TAG, "Disabling AcDisplay by intent. " + intent);
setAcDisplayEnabled(context, config, false);
break;
case App.ACTION_TOGGLE:
Log.i(TAG, "Toggling AcDisplay by intent. " + intent);
setAcDisplayEnabled(context, config, !config.isEnabled());
break;
}
}
/**
* Tries to {@link com.achep.acdisplay.Config#setEnabled(android.content.Context, boolean, com.achep.acdisplay.Config.OnConfigChangedListener) enable / disable }
* AcDisplay and shows toast message about the result.
*
* @param enable {@code true} to enable AcDisplay, {@code false} to disable.
*/
private void setAcDisplayEnabled(Context context, Config config, boolean enable) {
enable &= App.getAccessManager().getMasterPermissions().isActive();
config.setEnabled(context, enable, null);
ToastUtils.showLong(context, enable
? R.string.remote_enable_acdisplay
: R.string.remote_disable_acdisplay);
}
} | gpl-2.0 |
ekummerfeld/GdistanceP | tetrad-lib/src/main/java/edu/cmu/tetrad/search/IndTestTimeSeries.java | 67773 | ///////////////////////////////////////////////////////////////////////////////
// For information as to what this class does, see the Javadoc, below. //
// Copyright (C) 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, //
// 2007, 2008, 2009, 2010, 2014, 2015 by Peter Spirtes, Richard Scheines, Joseph //
// Ramsey, and Clark Glymour. //
// //
// This program is free software; you can redistribute it and/or modify //
// it under the terms of the GNU General Public License as published by //
// the Free Software Foundation; either version 2 of the License, or //
// (at your option) any later version. //
// //
// This program is distributed in the hope that it will be useful, //
// but WITHOUT ANY WARRANTY; without even the implied warranty of //
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the //
// GNU General Public License for more details. //
// //
// You should have received a copy of the GNU General Public License //
// along with this program; if not, write to the Free Software //
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA //
///////////////////////////////////////////////////////////////////////////////
package edu.cmu.tetrad.search;
import edu.cmu.tetrad.data.DataSet;
import edu.cmu.tetrad.data.ICovarianceMatrix;
import edu.cmu.tetrad.graph.Node;
import edu.cmu.tetrad.util.IndexedMatrix;
import edu.cmu.tetrad.util.MatrixUtils;
import edu.cmu.tetrad.util.ProbUtils;
import edu.cmu.tetrad.util.TetradMatrix;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
/**
* <p>Checks independence facts for time series data. The method is described in Alessio Moneta, "Graphical Models for
* Structural Vector Autoregressions."</p>
*
* @author Joseph Ramsey
*/
public final class IndTestTimeSeries implements IndependenceTest {
/**
* The vars of the correlation matrix, in order.
*/
private final List<Node> vars;
/**
* Input time series data, n times x k vars.
*/
private final TetradMatrix data;
/**
* The number of time points for which data is available.
*/
private final int numTimeSteps;
/**
* The number of vars.
*/
private final int numVars;
/**
* The significance level of the independence tests.
*/
private double alpha;
/**
* The number of indices k for which the k-th row of the data is regressed onto rows k - 1, k - 2, ..., k -
* numLags().
*/
private int numReps;
/**
* <p>One plus the the number of previous rows in the data that each examined row is regressed onto.</p>
*/
private int numLags;
/**
* Stored Sigma_u; this only needs to be calculated once but gets reused a lot.
*/
private transient double[][] sigmaU;
/**
* Stored omega; this only needs to be calculated once.
*/
private transient double[][] omega;
/**
* An indexed version of sigmaU; used repeatedly to calculate basic tau gradients.
*/
private transient IndexedMatrix indexedCorr;
/**
* True if the stationary algorithm is to be used; false if the non- stationary algorithm is to be used.
*
* @serial
*/
private boolean stationary;
/**
* @serial
*/
private double chiSquare;
//=============================CONSRUCTORS============================//
/**
* Constructs a new independence test based on Moneta, "Graphical Models for Structural Vector Autoregressions."
*
* @param data Row major matrix of data for each of the variables in vars for each time step. Dimensions
* numTimeSteps x numVars. Time steps are assumed to be in increasing order, so that time(data[i][]) <
* time(data[j][]) for i < j.
* @param vars The variables over which the data is (repeatedly) measured. The number of variables must equal the
* number of columns in the data-- that is, vars.size() == data[i].length for each i.
*/
public IndTestTimeSeries(TetradMatrix data, List<Node> vars) {
if (data == null) {
throw new NullPointerException("Data must not be null.");
}
if (vars == null) {
throw new NullPointerException(
"Variables must not be a null list.");
}
for (int i = 0; i < vars.size(); i++) {
if (vars.get(i) == null) {
throw new NullPointerException(
"Variable at index " + i + " must not be null.");
}
}
this.data = data;
this.numTimeSteps = this.data.rows();
this.numVars = this.data.columns();
this.vars = Collections.unmodifiableList(vars);
this.setNumLags(1);
this.setAlpha(0.05);
}
//===========================PUBLIC METHODS============================//
/**
* Required by IndependenceTest.
*/
public IndependenceTest indTestSubset(List vars) {
throw new UnsupportedOperationException();
}
/**
* Determines whether variable x is independent of variable data given a list of up to four conditioning vars z;
* beyond four conditioning variables, false is always returned.
*
* @param x the one variable being compared.
* @param y the second variable being compared.
* @param z the list of conditioning vars.
* @return true iff x _||_ data | z.
*/
public boolean isIndependent(Node x, Node y, List<Node> z) {
if (z == null) {
throw new NullPointerException();
}
for (Node node : z) {
if (node == null) {
throw new NullPointerException();
}
}
int[] indices = createIndexArray(z, x, y);
return isIndependent(indices);
}
public boolean isIndependent(Node x, Node y, Node... z) {
List<Node> zList = Arrays.asList(z);
return isIndependent(x, y, zList);
}
public boolean isDependent(Node x, Node y, List<Node> z) {
return !isIndependent(x, y, z);
}
public boolean isDependent(Node x, Node y, Node... z) {
List<Node> zList = Arrays.asList(z);
return isDependent(x, y, zList);
}
/**
* @return true iff according to the test var(indices[0]) _||_ var(indices[1] | var(indices[2], ...,
* var(indices[indices.length - 1]).
*/
public boolean isIndependent(int[] indices) {
// Tests whether the given index array is legal.
setIndices(indices);
// We only test independence up to 4 conditioning vars.
if (indices.length > 6) {
return false;
}
// int i = 1;
// testPrint("xPrime" + i, xPrime(i));
// System.out.println("---->" + getNumReps());
// Calculate chi square value.
double temp = Math.pow(tau(), 2.0);
double numerator = getNumReps() * temp;
// System.out.println("Numerator = " + numerator);
double[][] gradTau = gradTau();
double[][] gradTauPrime = MatrixUtils.transpose(gradTau);
double[][] prod1 = MatrixUtils.product(gradTauPrime, omega());
double[][] prod2 = MatrixUtils.product(prod1, gradTau);
assert (MatrixUtils.hasDimensions(prod2, 1, 1));
double denominator = prod2[0][0];
System.out.println("ratio w/o T = " + temp / denominator);
// System.out.println("Denominator = " + denominator);
double chiSquare = numerator / denominator;
this.chiSquare = chiSquare;
// System.out.println("chi square = " + chiSquare);
// Compare chi square value to cutoff.
double pValue = 1.0 - ProbUtils.chisqCdf(chiSquare, 1);
return pValue > this.alpha;
}
//
// private void testPrint(String message, double[][] arr) {
// System.out.println(message);
// System.out.println(MatrixUtils.toString(arr,
// new DecimalFormat("0.00E00")));
// }
//
// private void testPrint(String message, double[] arr) {
// System.out.println(message);
// System.out.println(
// ArrUtils.toString(arr, new DecimalFormat("0.00E00")));
// }
/**
* @return the list of variable varNames.
*/
public List<String> getVariableNames() {
List<Node> variables = getVariables();
List<String> variableNames = new ArrayList<>();
for (Node variable : variables) {
variableNames.add(variable.getName());
}
return variableNames;
}
public boolean determines(List<Node> z, Node x1) {
throw new UnsupportedOperationException(
"This independence test does not " +
"test whether Z determines X for list Z of variable and variable X.");
}
    /**
     * @return the significance level of this test.
     */
    public double getAlpha() {
        return alpha;
    }
public Node getVariable(String name) {
for (int i = 0; i < getVariables().size(); i++) {
Node variable = getVariables().get(i);
if (variable.getName().equals(name)) {
return variable;
}
}
return null;
}
    /**
     * @return the list of vars.
     */
    // NOTE(review): an earlier doc claimed this list is unmodifiable, but the field is
    // returned directly here; whether it is truly unmodifiable depends on how this.vars
    // was constructed -- confirm before relying on that.
    public List<Node> getVariables() {
        return this.vars;
    }
/**
* Sets the significance level for statistical tests. By default, this is 0.05.
*/
public void setAlpha(double alpha) {
if (alpha < 0.0 || alpha > 1.0) {
throw new IllegalArgumentException(
"Alpha must be in [0.0, 1.0]: " + alpha);
}
this.alpha = alpha;
}
    /**
     * The number of indices k for which the k-th row of the data is regressed onto rows k - 1, k - 2, ..., k -
     * numLags().
     *
     * @return the number of repetitions T used by the test statistic.
     */
    public int getNumReps() {
        return numReps;
    }
    /**
     * @return the number of lags.
     */
    public int getNumLags() {
        return numLags;
    }
/**
* Sets the number of lags; the number of reps is 1 - numLags.
*/
public void setNumLags(int numLags) {
if (numLags < 1 || numLags > getNumTimeSteps() - 1) {
throw new IllegalArgumentException("numLags must be in [1, " +
"numTimePoints - 1]: " + numLags);
}
this.numLags = numLags;
this.numReps = getNumTimeSteps() - numLags;
reset();
}
/**
* Sets the number of lags. Note that the number of lags plus the lag size must be <= the number of times.
*/
public void setDataView(int numReps, int numLags) {
if (numLags < 1) {
throw new IllegalArgumentException("numLags must be > 0.");
}
if (numLags + numReps > getNumTimeSteps()) {
throw new IllegalArgumentException(
"NumLags + numReps must be " + "<= numTimeSteps.");
}
this.numLags = numLags;
this.numReps = numReps;
reset();
}
    /**
     * @return the number of time steps in the data.
     */
    public int getNumTimeSteps() {
        return numTimeSteps;
    }
    /**
     * True if the stationary algorithm is to be used; false if the non- stationary algorithm is to be used.
     */
    public boolean isStationary() {
        return this.stationary;
    }
    /**
     * True if the stationary algorithm is to be used; false if the non- stationary algorithm is to be used.
     * Note: unlike the other setters, this does not reset cached matrices; sigmaU() only
     * consults this flag while its cache is empty.
     */
    public void setStationary(boolean stationary) {
        this.stationary = stationary;
    }
    /**
     * Needed for the IndependenceTest interface. Probably not meaningful here.
     *
     * @return Double.NaN, always; the p-value computed inside isIndependent(int[]) is not stored.
     */
    public double getPValue() {
        return Double.NaN;
    }
    //==========================PRIVATE METHODS============================//

    /**
     * Clears all cached intermediate matrices so they are recomputed on next use.
     */
    private void reset() {
        this.sigmaU = null;
        this.omega = null;
        this.indexedCorr = null;
    }
private IndexedMatrix indexedCorr() {
if (this.indexedCorr == null) {
this.indexedCorr = new IndexedMatrix(sigmaU());
}
return this.indexedCorr;
}
    /**
     * Stores the indices of the variables under test; validation is delegated to IndexedMatrix.
     */
    private void setIndices(int[] indices) {
        // The indices are stored in the IndexedMatrix. (No need to duplicate.)
        indexedCorr().setIndices(indices);
    }
    /**
     * @return the indices of the variables under test, as last passed to setIndices.
     */
    private int[] getIndices() {
        // The indices are stored in the IndexedMatrix. (No need to duplicate.)
        return indexedCorr().getIndices();
    }
private int[] createIndexArray(List<Node> z, Node x, Node y) {
int[] indices = new int[z.size() + 2];
indices[0] = getVariables().indexOf(x);
indices[1] = getVariables().indexOf(y);
for (int i = 0; i < z.size(); i++) {
indices[i + 2] = getVariables().indexOf(z.get(i));
}
for (int index : indices) {
if (index < 0) {
throw new IllegalArgumentException("Some variable was no in " +
"the constructed list of vars.");
}
}
return indices;
}
    /**
     * @param tIndex an int in the range [numReps - numTimeSteps + 1, numReps].
     * @return the row of the data indexed so that the last row has index numReps and the first row has index numReps -
     * numTimeSteps + 1.
     */
    private double[][] yPrime(int tIndex) {
        // Shift tIndex so that tIndex == numReps maps to the last row of the data.
        int transformedIndex = getNumTimeSteps() - getNumReps() + tIndex - 1;
        // Returns a 1 x (number of columns) row matrix.
        return data.getPart(transformedIndex, 0, 1, data.columns()).toArray();
    }
/**
* Constructs the x(t) vector.
*/
private double[][] xPrime(int t) {
double[][] x = new double[1][getNumLags() * numVars];
for (int i = 0; i < getNumLags(); i++) {
double[][] yPrime = yPrime(t - i - 1);
System.arraycopy(yPrime[0], 0, x[0], i * numVars, numVars);
}
return x;
}
private double[][] piPrime() {
double[][] ma = MatrixUtils.zeros(numVars, numVars * numLags);
for (int t = 1; t <= getNumReps(); t++) {
double[][] summand = MatrixUtils.product(y(t), xPrime(t));
ma = MatrixUtils.sum(ma, summand);
}
double[][] mb = MatrixUtils.zeros(numVars * numLags, numVars * numLags);
for (int t = 1; t <= getNumReps(); t++) {
double[][] summand = MatrixUtils.product(x(t), xPrime(t));
mb = MatrixUtils.sum(mb, summand);
}
double[][] mbinv = MatrixUtils.inverse(mb);
// double[][] a = MatrixUtils.outerProduct(mb, mbinv);
// testPrint("a", a);
double[][] prod = MatrixUtils.product(ma, mbinv);
assert MatrixUtils.hasDimensions(prod, numVars, numVars * numLags);
// testPrint("piprime", prod);
return prod;
}
    /**
     * @return y(t), the column-vector form of the row y'(t).
     */
    private double[][] y(int t) {
        return MatrixUtils.transpose(yPrime(t));
    }
    /**
     * @return x(t), the column-vector form of the row x'(t).
     */
    private double[][] x(int t) {
        return MatrixUtils.transpose(xPrime(t));
    }
    /**
     * @return the residual u(t) = y(t) - Pi' x(t) for the given estimated coefficient matrix.
     */
    private double[][] u(double[][] piPrime, int t) {
        return MatrixUtils.subtract(y(t), MatrixUtils.product(piPrime, x(t)));
    }
/**
* @return Sigma_u.
*/
private double[][] sigmaU() {
if (this.sigmaU == null) {
return isStationary() ? sigmaUStationary() : sigmaUNonStationary();
}
return this.sigmaU;
}
private double[][] sigmaUStationary() {
// Precalculate to avoid inverting the same huge matrix ad nauseum.
double[][] piPrime = piPrime();
double[][] sum = MatrixUtils.zeros(numVars, numVars);
for (int t = 1; t <= getNumReps(); t++) {
double[][] u = u(piPrime, t);
double[][] uPrime = MatrixUtils.transpose(u);
double[][] product = MatrixUtils.product(u, uPrime);
sum = MatrixUtils.sum(sum, product);
}
return MatrixUtils.scalarProduct(1.0 / getNumReps(), sum);
}
private double[][] dkPlus() {
double[][] dk = MatrixUtils.vechToVecLeft(numVars);
double[][] dkPrime = MatrixUtils.transpose(dk);
double[][] ma = MatrixUtils.product(dkPrime, dk);
double[][] mainv = MatrixUtils.inverse(ma);
return MatrixUtils.product(mainv, dkPrime);
}
private double[][] omega() {
if (this.omega == null) {
double[][] dkPlus = dkPlus();
double[][] dkPlusPrime = MatrixUtils.transpose(dkPlus);
double[][] prod1 = MatrixUtils.directProduct(sigmaU(), sigmaU());
double[][] prod2 = MatrixUtils.scalarProduct(2.0, dkPlus);
double[][] prod3 = MatrixUtils.product(prod2, prod1);
this.omega = MatrixUtils.product(prod3, dkPlusPrime);
}
return this.omega;
}
    // The following methods are for calculating nonstationary SigmaU.

    /**
     * Computes Sigma_u for the non-stationary case.
     * NOTE(review): this is currently the same computation as sigmaUStationary();
     * presumably a distinct non-stationary estimator was intended -- see the
     * commented-out deltaY/deltaX/m/r/s2 helpers below.
     */
    private double[][] sigmaUNonStationary() {
        // Precalculate to avoid inverting the same huge matrix ad nauseum.
        double[][] piPrime = piPrime();
        double[][] sum = MatrixUtils.zeros(numVars, numVars);
        for (int t = 1; t <= getNumReps(); t++) {
            double[][] u = u(piPrime, t);
            double[][] uPrime = MatrixUtils.transpose(u);
            double[][] product = MatrixUtils.product(u, uPrime);
            sum = MatrixUtils.sum(sum, product);
        }
        return MatrixUtils.scalarProduct(1.0 / getNumReps(), sum);
    }
// private double[][] deltaY(int t) {
// return MatrixUtils.subtract(y(t), y(t - 1));
// }
// private double[][] deltaY() {
// double[][] deltaY = new double[numVars][numReps];
// for (int i = 0; i < numReps; i++) {
// double[][] col = deltaY(i + 1);
// MatrixUtils.pasteCol(col, 0, deltaY, i);
// }
// return deltaY;
// }
// private double[][] deltaX(int t) {
// double[] deltaXVec = new double[numVars * (numLags - 1) + 1];
// deltaXVec[0] = 1;
//
// for (int i = 0; i < getNumLags() - 1; i++) {
// double[][] deltaY = deltaY(t - i);
// System.arraycopy(deltaY, 0, deltaXVec, i * numVars, numVars);
// }
//
// return MatrixUtils.asCol(deltaXVec);
//
// }
// private double[][] deltaX() {
// double[][] deltaX = new double[numVars * (numLags - 1)][numReps];
//
// for (int i = 0; i < numLags; i++) {
// MatrixUtils.pasteCol(deltaX(i), 0, deltaX, i);
// }
//
// return deltaX;
// }
// private double[][] yP() {
// double[][] deltaY = new double[numVars][numReps];
// for (int i = 0; i < numReps; i++) {
// double[][] col = y(i + 1 - numLags);
// MatrixUtils.pasteCol(col, 0, deltaY, i);
// }
// return deltaY;
// }
// private double[][] m() {
// double[][] iT = MatrixUtils.identity(numLags);
// double[][] deltaX = deltaX();
// double[][] deltaXPrime = MatrixUtils.transpose(deltaX);
// double[][] prod1 = MatrixUtils.product(deltaX, deltaXPrime);
// double[][] prod2 = MatrixUtils.product(deltaXPrime, prod1);
// double[][] prod3 = MatrixUtils.product(prod2, deltaX);
// return MatrixUtils.subtract(iT, prod3);
// }
// private double[][] r(int i) {
// switch (i) {
// case 0:
// return MatrixUtils.product(deltaY(), m());
// case 1:
// return MatrixUtils.product(yP(), m());
// default:
// throw new IllegalArgumentException("Index must be 0 or 1.");
// }
// }
// private double[][] s2(int i, int j) {
// double[][] m = MatrixUtils.outerProduct(r(i), r(j));
// return MatrixUtils.scalarProduct(1.0 / numLags, m);
// }
    /**
     * @return the chi square cutoff value for the given degrees of freedom and significance level.
     */
    public double chiSquareCutoff() {
        double d = 0.0;
        // Successive refinement: scan upward in steps of 10^mantissa until the chi square
        // CDF (1 df) first exceeds 1 - alpha, back off one step, then repeat with a step
        // ten times finer, down to 10^-15.
        for (int mantissa = 0; mantissa >= -15; mantissa--) {
            double increment = Math.pow(10, mantissa);
            // The 1000.0 bound is a safety cap in case the CDF threshold is never crossed.
            while (d < 1000.0) {
                d += increment;
                if (ProbUtils.chisqCdf(d, 1.0) > 1.0 - getAlpha()) {
                    d -= increment;
                    break;
                }
            }
        }
        return d;
    }
    /**
     * Calculates tau itself (not its gradient) for the current indices array, dispatching to the
     * hardcoded formula for the number of conditioning vars (up to 4).
     *
     * @throws IllegalStateException if there are more than four conditioning variables.
     */
    private double tau() {
        int numCondVars = getIndices().length - 2;
        switch (numCondVars) {
            case 0:
                return tau0();
            case 1:
                return tau1();
            case 2:
                return tau2();
            case 3:
                return tau3();
            case 4:
                return tau4();
            default:
                throw new IllegalStateException("Only taus for up to " +
                        "four conditioning variables were hardcoded: " +
                        numCondVars);
        }
    }
    /**
     * @return tau for indArr = [1 2] (one-indexed). From Mathematica.
     */
    private double tau0() {
        // Unconditional case: tau is simply the covariance entry s(1, 2).
        return s(1, 2);
    }
    /**
     * @return tau for indArr = [1 2 3] (one-indexed). From Mathematica.
     */
    private double tau1() {
        // Machine-generated polynomial in the entries s(i, j); do not hand-simplify.
        return -s(1, 3) * s(2, 3) + s(1, 2) * s(3, 3);
    }
    /**
     * @return tau for indArr = [1 2 3 4] (one-indexed). From Mathematica.
     */
    private double tau2() {
        // Machine-generated polynomial in the entries s(i, j); do not hand-simplify.
        return -s(1, 4) * s(2, 4) * s(3, 3) + s(1, 4) * s(2, 3) * s(3, 4) +
                s(1, 3) * s(2, 4) * s(3, 4) - s(1, 2) * s(3, 4) * s(3, 4) -
                s(1, 3) * s(2, 3) * s(4, 4) + s(1, 2) * s(3, 3) * s(4, 4);
    }
    /**
     * @return tau for indArr = [1 2 3 4 5] (one-indexed). From Mathematica.
     */
    private double tau3() {
        // Machine-generated polynomial in the entries s(i, j); do not hand-simplify.
        return s(1, 5) * s(2, 5) * s(3, 4) * s(3, 4) -
                s(1, 5) * s(2, 4) * s(3, 4) * s(3, 5) -
                s(1, 4) * s(2, 5) * s(3, 4) * s(3, 5) +
                s(1, 4) * s(2, 4) * s(3, 5) * s(3, 5) -
                s(1, 5) * s(2, 5) * s(3, 3) * s(4, 4) +
                s(1, 5) * s(2, 3) * s(3, 5) * s(4, 4) +
                s(1, 3) * s(2, 5) * s(3, 5) * s(4, 4) -
                s(1, 2) * s(3, 5) * s(3, 5) * s(4, 4) +
                s(1, 5) * s(2, 4) * s(3, 3) * s(4, 5) +
                s(1, 4) * s(2, 5) * s(3, 3) * s(4, 5) -
                s(1, 5) * s(2, 3) * s(3, 4) * s(4, 5) -
                s(1, 3) * s(2, 5) * s(3, 4) * s(4, 5) -
                s(1, 4) * s(2, 3) * s(3, 5) * s(4, 5) -
                s(1, 3) * s(2, 4) * s(3, 5) * s(4, 5) +
                2 * s(1, 2) * s(3, 4) * s(3, 5) * s(4, 5) +
                s(1, 3) * s(2, 3) * s(4, 5) * s(4, 5) -
                s(1, 2) * s(3, 3) * s(4, 5) * s(4, 5) -
                s(1, 4) * s(2, 4) * s(3, 3) * s(5, 5) +
                s(1, 4) * s(2, 3) * s(3, 4) * s(5, 5) +
                s(1, 3) * s(2, 4) * s(3, 4) * s(5, 5) -
                s(1, 2) * s(3, 4) * s(3, 4) * s(5, 5) -
                s(1, 3) * s(2, 3) * s(4, 4) * s(5, 5) +
                s(1, 2) * s(3, 3) * s(4, 4) * s(5, 5);
    }
    /**
     * @return tau for indArr = [1 2 3 4 5 6] (one-indexed). From Mathematica.
     */
    private double tau4() {
        // Machine-generated polynomial in the entries s(i, j); do not hand-simplify.
        return s(1, 6) * s(2, 6) * s(3, 5) * s(3, 5) * s(4, 4) -
                s(1, 6) * s(2, 5) * s(3, 5) * s(3, 6) * s(4, 4) -
                s(1, 5) * s(2, 6) * s(3, 5) * s(3, 6) * s(4, 4) +
                s(1, 5) * s(2, 5) * s(3, 6) * s(3, 6) * s(4, 4) -
                2 * s(1, 6) * s(2, 6) * s(3, 4) * s(3, 5) * s(4, 5) +
                s(1, 6) * s(2, 5) * s(3, 4) * s(3, 6) * s(4, 5) +
                s(1, 5) * s(2, 6) * s(3, 4) * s(3, 6) * s(4, 5) +
                s(1, 6) * s(2, 4) * s(3, 5) * s(3, 6) * s(4, 5) +
                s(1, 4) * s(2, 6) * s(3, 5) * s(3, 6) * s(4, 5) -
                s(1, 5) * s(2, 4) * s(3, 6) * s(3, 6) * s(4, 5) -
                s(1, 4) * s(2, 5) * s(3, 6) * s(3, 6) * s(4, 5) +
                s(1, 6) * s(2, 6) * s(3, 3) * s(4, 5) * s(4, 5) -
                s(1, 6) * s(2, 3) * s(3, 6) * s(4, 5) * s(4, 5) -
                s(1, 3) * s(2, 6) * s(3, 6) * s(4, 5) * s(4, 5) +
                s(1, 2) * s(3, 6) * s(3, 6) * s(4, 5) * s(4, 5) +
                s(1, 6) * s(2, 5) * s(3, 4) * s(3, 5) * s(4, 6) +
                s(1, 5) * s(2, 6) * s(3, 4) * s(3, 5) * s(4, 6) -
                s(1, 6) * s(2, 4) * s(3, 5) * s(3, 5) * s(4, 6) -
                s(1, 4) * s(2, 6) * s(3, 5) * s(3, 5) * s(4, 6) -
                2 * s(1, 5) * s(2, 5) * s(3, 4) * s(3, 6) * s(4, 6) +
                s(1, 5) * s(2, 4) * s(3, 5) * s(3, 6) * s(4, 6) +
                s(1, 4) * s(2, 5) * s(3, 5) * s(3, 6) * s(4, 6) -
                s(1, 6) * s(2, 5) * s(3, 3) * s(4, 5) * s(4, 6) -
                s(1, 5) * s(2, 6) * s(3, 3) * s(4, 5) * s(4, 6) +
                s(1, 6) * s(2, 3) * s(3, 5) * s(4, 5) * s(4, 6) +
                s(1, 3) * s(2, 6) * s(3, 5) * s(4, 5) * s(4, 6) +
                s(1, 5) * s(2, 3) * s(3, 6) * s(4, 5) * s(4, 6) +
                s(1, 3) * s(2, 5) * s(3, 6) * s(4, 5) * s(4, 6) -
                2 * s(1, 2) * s(3, 5) * s(3, 6) * s(4, 5) * s(4, 6) +
                s(1, 5) * s(2, 5) * s(3, 3) * s(4, 6) * s(4, 6) -
                s(1, 5) * s(2, 3) * s(3, 5) * s(4, 6) * s(4, 6) -
                s(1, 3) * s(2, 5) * s(3, 5) * s(4, 6) * s(4, 6) +
                s(1, 2) * s(3, 5) * s(3, 5) * s(4, 6) * s(4, 6) +
                s(1, 6) * s(2, 6) * s(3, 4) * s(3, 4) * s(5, 5) -
                s(1, 6) * s(2, 4) * s(3, 4) * s(3, 6) * s(5, 5) -
                s(1, 4) * s(2, 6) * s(3, 4) * s(3, 6) * s(5, 5) +
                s(1, 4) * s(2, 4) * s(3, 6) * s(3, 6) * s(5, 5) -
                s(1, 6) * s(2, 6) * s(3, 3) * s(4, 4) * s(5, 5) +
                s(1, 6) * s(2, 3) * s(3, 6) * s(4, 4) * s(5, 5) +
                s(1, 3) * s(2, 6) * s(3, 6) * s(4, 4) * s(5, 5) -
                s(1, 2) * s(3, 6) * s(3, 6) * s(4, 4) * s(5, 5) +
                s(1, 6) * s(2, 4) * s(3, 3) * s(4, 6) * s(5, 5) +
                s(1, 4) * s(2, 6) * s(3, 3) * s(4, 6) * s(5, 5) -
                s(1, 6) * s(2, 3) * s(3, 4) * s(4, 6) * s(5, 5) -
                s(1, 3) * s(2, 6) * s(3, 4) * s(4, 6) * s(5, 5) -
                s(1, 4) * s(2, 3) * s(3, 6) * s(4, 6) * s(5, 5) -
                s(1, 3) * s(2, 4) * s(3, 6) * s(4, 6) * s(5, 5) +
                2 * s(1, 2) * s(3, 4) * s(3, 6) * s(4, 6) * s(5, 5) +
                s(1, 3) * s(2, 3) * s(4, 6) * s(4, 6) * s(5, 5) -
                s(1, 2) * s(3, 3) * s(4, 6) * s(4, 6) * s(5, 5) -
                s(1, 6) * s(2, 5) * s(3, 4) * s(3, 4) * s(5, 6) -
                s(1, 5) * s(2, 6) * s(3, 4) * s(3, 4) * s(5, 6) +
                s(1, 6) * s(2, 4) * s(3, 4) * s(3, 5) * s(5, 6) +
                s(1, 4) * s(2, 6) * s(3, 4) * s(3, 5) * s(5, 6) +
                s(1, 5) * s(2, 4) * s(3, 4) * s(3, 6) * s(5, 6) +
                s(1, 4) * s(2, 5) * s(3, 4) * s(3, 6) * s(5, 6) -
                2 * s(1, 4) * s(2, 4) * s(3, 5) * s(3, 6) * s(5, 6) +
                s(1, 6) * s(2, 5) * s(3, 3) * s(4, 4) * s(5, 6) +
                s(1, 5) * s(2, 6) * s(3, 3) * s(4, 4) * s(5, 6) -
                s(1, 6) * s(2, 3) * s(3, 5) * s(4, 4) * s(5, 6) -
                s(1, 3) * s(2, 6) * s(3, 5) * s(4, 4) * s(5, 6) -
                s(1, 5) * s(2, 3) * s(3, 6) * s(4, 4) * s(5, 6) -
                s(1, 3) * s(2, 5) * s(3, 6) * s(4, 4) * s(5, 6) +
                2 * s(1, 2) * s(3, 5) * s(3, 6) * s(4, 4) * s(5, 6) -
                s(1, 6) * s(2, 4) * s(3, 3) * s(4, 5) * s(5, 6) -
                s(1, 4) * s(2, 6) * s(3, 3) * s(4, 5) * s(5, 6) +
                s(1, 6) * s(2, 3) * s(3, 4) * s(4, 5) * s(5, 6) +
                s(1, 3) * s(2, 6) * s(3, 4) * s(4, 5) * s(5, 6) +
                s(1, 4) * s(2, 3) * s(3, 6) * s(4, 5) * s(5, 6) +
                s(1, 3) * s(2, 4) * s(3, 6) * s(4, 5) * s(5, 6) -
                2 * s(1, 2) * s(3, 4) * s(3, 6) * s(4, 5) * s(5, 6) -
                s(1, 5) * s(2, 4) * s(3, 3) * s(4, 6) * s(5, 6) -
                s(1, 4) * s(2, 5) * s(3, 3) * s(4, 6) * s(5, 6) +
                s(1, 5) * s(2, 3) * s(3, 4) * s(4, 6) * s(5, 6) +
                s(1, 3) * s(2, 5) * s(3, 4) * s(4, 6) * s(5, 6) +
                s(1, 4) * s(2, 3) * s(3, 5) * s(4, 6) * s(5, 6) +
                s(1, 3) * s(2, 4) * s(3, 5) * s(4, 6) * s(5, 6) -
                2 * s(1, 2) * s(3, 4) * s(3, 5) * s(4, 6) * s(5, 6) -
                2 * s(1, 3) * s(2, 3) * s(4, 5) * s(4, 6) * s(5, 6) +
                2 * s(1, 2) * s(3, 3) * s(4, 5) * s(4, 6) * s(5, 6) +
                s(1, 4) * s(2, 4) * s(3, 3) * s(5, 6) * s(5, 6) -
                s(1, 4) * s(2, 3) * s(3, 4) * s(5, 6) * s(5, 6) -
                s(1, 3) * s(2, 4) * s(3, 4) * s(5, 6) * s(5, 6) +
                s(1, 2) * s(3, 4) * s(3, 4) * s(5, 6) * s(5, 6) +
                s(1, 3) * s(2, 3) * s(4, 4) * s(5, 6) * s(5, 6) -
                s(1, 2) * s(3, 3) * s(4, 4) * s(5, 6) * s(5, 6) +
                s(1, 5) * s(2, 5) * s(3, 4) * s(3, 4) * s(6, 6) -
                s(1, 5) * s(2, 4) * s(3, 4) * s(3, 5) * s(6, 6) -
                s(1, 4) * s(2, 5) * s(3, 4) * s(3, 5) * s(6, 6) +
                s(1, 4) * s(2, 4) * s(3, 5) * s(3, 5) * s(6, 6) -
                s(1, 5) * s(2, 5) * s(3, 3) * s(4, 4) * s(6, 6) +
                s(1, 5) * s(2, 3) * s(3, 5) * s(4, 4) * s(6, 6) +
                s(1, 3) * s(2, 5) * s(3, 5) * s(4, 4) * s(6, 6) -
                s(1, 2) * s(3, 5) * s(3, 5) * s(4, 4) * s(6, 6) +
                s(1, 5) * s(2, 4) * s(3, 3) * s(4, 5) * s(6, 6) +
                s(1, 4) * s(2, 5) * s(3, 3) * s(4, 5) * s(6, 6) -
                s(1, 5) * s(2, 3) * s(3, 4) * s(4, 5) * s(6, 6) -
                s(1, 3) * s(2, 5) * s(3, 4) * s(4, 5) * s(6, 6) -
                s(1, 4) * s(2, 3) * s(3, 5) * s(4, 5) * s(6, 6) -
                s(1, 3) * s(2, 4) * s(3, 5) * s(4, 5) * s(6, 6) +
                2 * s(1, 2) * s(3, 4) * s(3, 5) * s(4, 5) * s(6, 6) +
                s(1, 3) * s(2, 3) * s(4, 5) * s(4, 5) * s(6, 6) -
                s(1, 2) * s(3, 3) * s(4, 5) * s(4, 5) * s(6, 6) -
                s(1, 4) * s(2, 4) * s(3, 3) * s(5, 5) * s(6, 6) +
                s(1, 4) * s(2, 3) * s(3, 4) * s(5, 5) * s(6, 6) +
                s(1, 3) * s(2, 4) * s(3, 4) * s(5, 5) * s(6, 6) -
                s(1, 2) * s(3, 4) * s(3, 4) * s(5, 5) * s(6, 6) -
                s(1, 3) * s(2, 3) * s(4, 4) * s(5, 5) * s(6, 6) +
                s(1, 2) * s(3, 3) * s(4, 4) * s(5, 5) * s(6, 6);
    }
    /**
     * Calculates the gradient of tau for the given indices array (for up to 4 conditioning vars),
     * dispatching to the hardcoded basic gradient and expanding it via convertGradTau.
     *
     * @throws IllegalStateException if there are more than four conditioning variables.
     */
    private double[][] gradTau() {
        int numCondVars = getIndices().length - 2;
        switch (numCondVars) {
            case 0:
                return convertGradTau(gradTau0());
            case 1:
                return convertGradTau(gradTau1());
            case 2:
                return convertGradTau(gradTau2());
            case 3:
                return convertGradTau(gradTau3());
            case 4:
                return convertGradTau(gradTau4());
            default:
                throw new IllegalStateException("Only gradients for up to " +
                        "four conditioning variables were hardcoded: " +
                        numCondVars);
        }
    }
/**
* Takes a gradTau for a basic case and transforms it into a gradTau for the case at hand.
*/
private double[][] convertGradTau(double[] basicGradTau) {
double[][] m = MatrixUtils.invVech(basicGradTau);
double[][] m2 = new double[numVars][numVars];
for (int i = 0; i < m.length; i++) {
for (int j = 0; j < m.length; j++) {
m2[getIndices()[i]][getIndices()[j]] = m[i][j];
}
}
return MatrixUtils.vech(m2);
}
    /**
     * @return grad(tau) for numTimeSteps = 2, indArr = [1 2] (one-indexed). From Mathematica. </p> Examples of the
     * gradients of the tau functions were calculated using the following Mathematica script (in this case comparing v1
     * and v4 given v2 and v3). These were then modified using Xemacs into Java formulas. (In case anyone needs to do it
     * again.)</p>
     * <pre>
     * numVars = 6;
     * indexSub = {1, 4, 2, 3};
     * corrAllVars = Table[s[Min[i, j], Max[i, j]], {i, numVars}, {j,
     * numVars}];
     * subLen = Length[indexSub];
     * sub = Table[corrAllVars[[indexSub[[i]], indexSub[[j]]]], {i, subLen},
     * {j,
     * subLen}];
     * tau = -Numerator[Inverse[sub][[1, 2]]];
     * vechLen = numVars(numVars + 1)/2;
     * vechCorrAllVars = Table[0, {vechLen}];
     * numVars = 0;
     * For[i = 1, i <= numVars, i++,
     * For[j = i, j <= numVars, j++,
     * numVars = numVars + 1;
     * vechCorrAllVars[[numVars]] = corrAllVars[[i, j]]
     * ]
     * ]
     * grad = Table[D[tau, vechCorrAllVars[[i]]], {i, vechLen}]
     * </pre>
     */
    private double[] gradTau0() {
        // d(tau0)/d(vech entries): tau0 = s(1, 2), so only the (1,2) slot is 1.
        return new double[]{0, 1, 0};
    }
    /**
     * @return grad(tau) for numTimeSteps = 3, indArr = [1 2 3] (one-indexed). From Mathematica.
     */
    private double[] gradTau1() {
        // Machine-generated derivatives of tau1 w.r.t. the vech entries; do not hand-edit.
        return new double[]{0, s(3, 3), -s(2, 3), 0, -s(1, 3), s(1, 2)};
    }
    /**
     * @return grad(tau) for numTimeSteps = 4, indArr = [1 2 3 4] (one-indexed). From Mathematica.
     */
    private double[] gradTau2() {
        // Machine-generated derivatives of tau2 w.r.t. the vech entries; do not hand-edit.
        return new double[]{0, -s(3, 4) * s(3, 4) + s(3, 3) * s(4, 4),
                s(2, 4) * s(3, 4) - s(2, 3) * s(4, 4),
                -s(2, 4) * s(3, 3) + s(2, 3) * s(3, 4), 0,
                s(1, 4) * s(3, 4) - s(1, 3) * s(4, 4),
                -s(1, 4) * s(3, 3) + s(1, 3) * s(3, 4),
                -s(1, 4) * s(2, 4) + s(1, 2) * s(4, 4),
                s(1, 4) * s(2, 3) + s(1, 3) * s(2, 4) - 2 * s(1, 2) * s(3, 4),
                -s(1, 3) * s(2, 3) + s(1, 2) * s(3, 3)};
    }
    /**
     * @return grad(tau) for numTimeSteps = 5, indArr = [1 2 3 4 5] (one-indexed). From Mathematica.
     */
    private double[] gradTau3() {
        // Machine-generated derivatives of tau3 w.r.t. the 15 vech entries; do not hand-edit.
        return new double[]{0,
                -s(3, 5) * s(3, 5) * s(4, 4) + 2 * s(3, 4) * s(3, 5) * s(4, 5) -
                        s(3, 3) * s(4, 5) * s(4, 5) - s(3, 4) * s(3, 4) * s(5, 5) +
                        s(3, 3) * s(4, 4) * s(5, 5),
                s(2, 5) * s(3, 5) * s(4, 4) - s(2, 5) * s(3, 4) * s(4, 5) -
                        s(2, 4) * s(3, 5) * s(4, 5) + s(2, 3) * s(4, 5) * s(4, 5) +
                        s(2, 4) * s(3, 4) * s(5, 5) - s(2, 3) * s(4, 4) * s(5, 5),
                -s(2, 5) * s(3, 4) * s(3, 5) + s(2, 4) * s(3, 5) * s(3, 5) +
                        s(2, 5) * s(3, 3) * s(4, 5) - s(2, 3) * s(3, 5) * s(4, 5) -
                        s(2, 4) * s(3, 3) * s(5, 5) + s(2, 3) * s(3, 4) * s(5, 5),
                s(2, 5) * s(3, 4) * s(3, 4) - s(2, 4) * s(3, 4) * s(3, 5) -
                        s(2, 5) * s(3, 3) * s(4, 4) + s(2, 3) * s(3, 5) * s(4, 4) +
                        s(2, 4) * s(3, 3) * s(4, 5) - s(2, 3) * s(3, 4) * s(4, 5),
                0,
                s(1, 5) * s(3, 5) * s(4, 4) - s(1, 5) * s(3, 4) * s(4, 5) -
                        s(1, 4) * s(3, 5) * s(4, 5) + s(1, 3) * s(4, 5) * s(4, 5) +
                        s(1, 4) * s(3, 4) * s(5, 5) - s(1, 3) * s(4, 4) * s(5, 5),
                -s(1, 5) * s(3, 4) * s(3, 5) + s(1, 4) * s(3, 5) * s(3, 5) +
                        s(1, 5) * s(3, 3) * s(4, 5) - s(1, 3) * s(3, 5) * s(4, 5) -
                        s(1, 4) * s(3, 3) * s(5, 5) + s(1, 3) * s(3, 4) * s(5, 5),
                s(1, 5) * s(3, 4) * s(3, 4) - s(1, 4) * s(3, 4) * s(3, 5) -
                        s(1, 5) * s(3, 3) * s(4, 4) + s(1, 3) * s(3, 5) * s(4, 4) +
                        s(1, 4) * s(3, 3) * s(4, 5) - s(1, 3) * s(3, 4) * s(4, 5),
                -s(1, 5) * s(2, 5) * s(4, 4) + s(1, 5) * s(2, 4) * s(4, 5) +
                        s(1, 4) * s(2, 5) * s(4, 5) - s(1, 2) * s(4, 5) * s(4, 5) -
                        s(1, 4) * s(2, 4) * s(5, 5) + s(1, 2) * s(4, 4) * s(5, 5),
                2 * s(1, 5) * s(2, 5) * s(3, 4) - s(1, 5) * s(2, 4) * s(3, 5) -
                        s(1, 4) * s(2, 5) * s(3, 5) - s(1, 5) * s(2, 3) * s(4, 5) -
                        s(1, 3) * s(2, 5) * s(4, 5) + 2 * s(1, 2) * s(3, 5) * s(4, 5) +
                        s(1, 4) * s(2, 3) * s(5, 5) + s(1, 3) * s(2, 4) * s(5, 5) -
                        2 * s(1, 2) * s(3, 4) * s(5, 5),
                -s(1, 5) * s(2, 4) * s(3, 4) -
                        s(1, 4) * s(2, 5) * s(3, 4) + 2 * s(1, 4) * s(2, 4) * s(3, 5) +
                        s(1, 5) * s(2, 3) * s(4, 4) + s(1, 3) * s(2, 5) * s(4, 4) -
                        2 * s(1, 2) * s(3, 5) * s(4, 4) - s(1, 4) * s(2, 3) * s(4, 5) -
                        s(1, 3) * s(2, 4) * s(4, 5) + 2 * s(1, 2) * s(3, 4) * s(4, 5),
                -s(1, 5) * s(2, 5) * s(3, 3) + s(1, 5) * s(2, 3) * s(3, 5) +
                        s(1, 3) * s(2, 5) * s(3, 5) -
                        s(1, 2) * s(3, 5) * s(3, 5) -
                        s(1, 3) * s(2, 3) * s(5, 5) +
                        s(1, 2) * s(3, 3) * s(5, 5),
                s(1, 5) * s(2, 4) * s(3, 3) + s(1, 4) * s(2, 5) * s(3, 3) -
                        s(1, 5) * s(2, 3) * s(3, 4) - s(1, 3) * s(2, 5) * s(3, 4) -
                        s(1, 4) * s(2, 3) * s(3, 5) - s(1, 3) * s(2, 4) * s(3, 5) +
                        2 * s(1, 2) * s(3, 4) * s(3, 5) +
                        2 * s(1, 3) * s(2, 3) * s(4, 5) -
                        2 * s(1, 2) * s(3, 3) * s(4, 5),
                -s(1, 4) * s(2, 4) * s(3, 3) +
                        s(1, 4) * s(2, 3) * s(3, 4) + s(1, 3) * s(2, 4) * s(3, 4) -
                        s(1, 2) * s(3, 4) * s(3, 4) - s(1, 3) * s(2, 3) * s(4, 4) +
                        s(1, 2) * s(3, 3) * s(4, 4)};
    }
/**
* @return grad(tau) for numTimeSteps = 6, indArr = [1 2 3 4 5 6] (one-indexed). From Mathematica.
*/
private double[] gradTau4() {
return new double[]{0, s(3, 6) * s(3, 6) * s(4, 5) * s(4, 5) -
2 * s(3, 5) * s(3, 6) * s(4, 5) * s(4, 6) +
s(3, 5) * s(3, 5) * s(4, 6) * s(4, 6) -
s(3, 6) * s(3, 6) * s(4, 4) * s(5, 5) +
2 * s(3, 4) * s(3, 6) * s(4, 6) * s(5, 5) -
s(3, 3) * s(4, 6) * s(4, 6) * s(5, 5) +
2 * s(3, 5) * s(3, 6) * s(4, 4) * s(5, 6) -
2 * s(3, 4) * s(3, 6) * s(4, 5) * s(5, 6) -
2 * s(3, 4) * s(3, 5) * s(4, 6) * s(5, 6) +
2 * s(3, 3) * s(4, 5) * s(4, 6) * s(5, 6) +
s(3, 4) * s(3, 4) * s(5, 6) * s(5, 6) -
s(3, 3) * s(4, 4) * s(5, 6) * s(5, 6) -
s(3, 5) * s(3, 5) * s(4, 4) * s(6, 6) +
2 * s(3, 4) * s(3, 5) * s(4, 5) * s(6, 6) -
s(3, 3) * s(4, 5) * s(4, 5) * s(6, 6) -
s(3, 4) * s(3, 4) * s(5, 5) * s(6, 6) +
s(3, 3) * s(4, 4) * s(5, 5) * s(6, 6), -s(2, 6) * s(3, 6) *
s(4, 5) * s(4, 5) + s(2, 6) * s(3, 5) * s(4, 5) * s(4, 6) +
s(2, 5) * s(3, 6) * s(4, 5) * s(4, 6) -
s(2, 5) * s(3, 5) * s(4, 6) * s(4, 6) +
s(2, 6) * s(3, 6) * s(4, 4) * s(5, 5) -
s(2, 6) * s(3, 4) * s(4, 6) * s(5, 5) -
s(2, 4) * s(3, 6) * s(4, 6) * s(5, 5) +
s(2, 3) * s(4, 6) * s(4, 6) * s(5, 5) -
s(2, 6) * s(3, 5) * s(4, 4) * s(5, 6) -
s(2, 5) * s(3, 6) * s(4, 4) * s(5, 6) +
s(2, 6) * s(3, 4) * s(4, 5) * s(5, 6) +
s(2, 4) * s(3, 6) * s(4, 5) * s(5, 6) +
s(2, 5) * s(3, 4) * s(4, 6) * s(5, 6) +
s(2, 4) * s(3, 5) * s(4, 6) * s(5, 6) -
2 * s(2, 3) * s(4, 5) * s(4, 6) * s(5, 6) -
s(2, 4) * s(3, 4) * s(5, 6) * s(5, 6) +
s(2, 3) * s(4, 4) * s(5, 6) * s(5, 6) +
s(2, 5) * s(3, 5) * s(4, 4) * s(6, 6) -
s(2, 5) * s(3, 4) * s(4, 5) * s(6, 6) -
s(2, 4) * s(3, 5) * s(4, 5) * s(6, 6) +
s(2, 3) * s(4, 5) * s(4, 5) * s(6, 6) +
s(2, 4) * s(3, 4) * s(5, 5) * s(6, 6) -
s(2, 3) * s(4, 4) * s(5, 5) * s(6, 6), s(2, 6) * s(3, 5) *
s(3, 6) * s(4, 5) - s(2, 5) * s(3, 6) * s(3, 6) * s(4, 5) -
s(2, 6) * s(3, 5) * s(3, 5) * s(4, 6) +
s(2, 5) * s(3, 5) * s(3, 6) * s(4, 6) -
s(2, 6) * s(3, 4) * s(3, 6) * s(5, 5) +
s(2, 4) * s(3, 6) * s(3, 6) * s(5, 5) +
s(2, 6) * s(3, 3) * s(4, 6) * s(5, 5) -
s(2, 3) * s(3, 6) * s(4, 6) * s(5, 5) +
s(2, 6) * s(3, 4) * s(3, 5) * s(5, 6) +
s(2, 5) * s(3, 4) * s(3, 6) * s(5, 6) -
2 * s(2, 4) * s(3, 5) * s(3, 6) * s(5, 6) -
s(2, 6) * s(3, 3) * s(4, 5) * s(5, 6) +
s(2, 3) * s(3, 6) * s(4, 5) * s(5, 6) -
s(2, 5) * s(3, 3) * s(4, 6) * s(5, 6) +
s(2, 3) * s(3, 5) * s(4, 6) * s(5, 6) +
s(2, 4) * s(3, 3) * s(5, 6) * s(5, 6) -
s(2, 3) * s(3, 4) * s(5, 6) * s(5, 6) -
s(2, 5) * s(3, 4) * s(3, 5) * s(6, 6) +
s(2, 4) * s(3, 5) * s(3, 5) * s(6, 6) +
s(2, 5) * s(3, 3) * s(4, 5) * s(6, 6) -
s(2, 3) * s(3, 5) * s(4, 5) * s(6, 6) -
s(2, 4) * s(3, 3) * s(5, 5) * s(6, 6) +
s(2, 3) * s(3, 4) * s(5, 5) * s(6, 6), -s(2, 6) * s(3, 5) *
s(3, 6) * s(4, 4) + s(2, 5) * s(3, 6) * s(3, 6) * s(4, 4) +
s(2, 6) * s(3, 4) * s(3, 6) * s(4, 5) -
s(2, 4) * s(3, 6) * s(3, 6) * s(4, 5) +
s(2, 6) * s(3, 4) * s(3, 5) * s(4, 6) -
2 * s(2, 5) * s(3, 4) * s(3, 6) * s(4, 6) +
s(2, 4) * s(3, 5) * s(3, 6) * s(4, 6) -
s(2, 6) * s(3, 3) * s(4, 5) * s(4, 6) +
s(2, 3) * s(3, 6) * s(4, 5) * s(4, 6) +
s(2, 5) * s(3, 3) * s(4, 6) * s(4, 6) -
s(2, 3) * s(3, 5) * s(4, 6) * s(4, 6) -
s(2, 6) * s(3, 4) * s(3, 4) * s(5, 6) +
s(2, 4) * s(3, 4) * s(3, 6) * s(5, 6) +
s(2, 6) * s(3, 3) * s(4, 4) * s(5, 6) -
s(2, 3) * s(3, 6) * s(4, 4) * s(5, 6) -
s(2, 4) * s(3, 3) * s(4, 6) * s(5, 6) +
s(2, 3) * s(3, 4) * s(4, 6) * s(5, 6) +
s(2, 5) * s(3, 4) * s(3, 4) * s(6, 6) -
s(2, 4) * s(3, 4) * s(3, 5) * s(6, 6) -
s(2, 5) * s(3, 3) * s(4, 4) * s(6, 6) +
s(2, 3) * s(3, 5) * s(4, 4) * s(6, 6) +
s(2, 4) * s(3, 3) * s(4, 5) * s(6, 6) -
s(2, 3) * s(3, 4) * s(4, 5) * s(6, 6), s(2, 6) * s(3, 5) *
s(3, 5) * s(4, 4) - s(2, 5) * s(3, 5) * s(3, 6) * s(4, 4) -
2 * s(2, 6) * s(3, 4) * s(3, 5) * s(4, 5) +
s(2, 5) * s(3, 4) * s(3, 6) * s(4, 5) +
s(2, 4) * s(3, 5) * s(3, 6) * s(4, 5) +
s(2, 6) * s(3, 3) * s(4, 5) * s(4, 5) -
s(2, 3) * s(3, 6) * s(4, 5) * s(4, 5) +
s(2, 5) * s(3, 4) * s(3, 5) * s(4, 6) -
s(2, 4) * s(3, 5) * s(3, 5) * s(4, 6) -
s(2, 5) * s(3, 3) * s(4, 5) * s(4, 6) +
s(2, 3) * s(3, 5) * s(4, 5) * s(4, 6) +
s(2, 6) * s(3, 4) * s(3, 4) * s(5, 5) -
s(2, 4) * s(3, 4) * s(3, 6) * s(5, 5) -
s(2, 6) * s(3, 3) * s(4, 4) * s(5, 5) +
s(2, 3) * s(3, 6) * s(4, 4) * s(5, 5) +
s(2, 4) * s(3, 3) * s(4, 6) * s(5, 5) -
s(2, 3) * s(3, 4) * s(4, 6) * s(5, 5) -
s(2, 5) * s(3, 4) * s(3, 4) * s(5, 6) +
s(2, 4) * s(3, 4) * s(3, 5) * s(5, 6) +
s(2, 5) * s(3, 3) * s(4, 4) * s(5, 6) -
s(2, 3) * s(3, 5) * s(4, 4) * s(5, 6) -
s(2, 4) * s(3, 3) * s(4, 5) * s(5, 6) +
s(2, 3) * s(3, 4) * s(4, 5) * s(5, 6), 0, -s(1, 6) * s(3, 6) *
s(4, 5) * s(4, 5) + s(1, 6) * s(3, 5) * s(4, 5) * s(4, 6) +
s(1, 5) * s(3, 6) * s(4, 5) * s(4, 6) -
s(1, 5) * s(3, 5) * s(4, 6) * s(4, 6) +
s(1, 6) * s(3, 6) * s(4, 4) * s(5, 5) -
s(1, 6) * s(3, 4) * s(4, 6) * s(5, 5) -
s(1, 4) * s(3, 6) * s(4, 6) * s(5, 5) +
s(1, 3) * s(4, 6) * s(4, 6) * s(5, 5) -
s(1, 6) * s(3, 5) * s(4, 4) * s(5, 6) -
s(1, 5) * s(3, 6) * s(4, 4) * s(5, 6) +
s(1, 6) * s(3, 4) * s(4, 5) * s(5, 6) +
s(1, 4) * s(3, 6) * s(4, 5) * s(5, 6) +
s(1, 5) * s(3, 4) * s(4, 6) * s(5, 6) +
s(1, 4) * s(3, 5) * s(4, 6) * s(5, 6) -
2 * s(1, 3) * s(4, 5) * s(4, 6) * s(5, 6) -
s(1, 4) * s(3, 4) * s(5, 6) * s(5, 6) +
s(1, 3) * s(4, 4) * s(5, 6) * s(5, 6) +
s(1, 5) * s(3, 5) * s(4, 4) * s(6, 6) -
s(1, 5) * s(3, 4) * s(4, 5) * s(6, 6) -
s(1, 4) * s(3, 5) * s(4, 5) * s(6, 6) +
s(1, 3) * s(4, 5) * s(4, 5) * s(6, 6) +
s(1, 4) * s(3, 4) * s(5, 5) * s(6, 6) -
s(1, 3) * s(4, 4) * s(5, 5) * s(6, 6), s(1, 6) * s(3, 5) *
s(3, 6) * s(4, 5) - s(1, 5) * s(3, 6) * s(3, 6) * s(4, 5) -
s(1, 6) * s(3, 5) * s(3, 5) * s(4, 6) +
s(1, 5) * s(3, 5) * s(3, 6) * s(4, 6) -
s(1, 6) * s(3, 4) * s(3, 6) * s(5, 5) +
s(1, 4) * s(3, 6) * s(3, 6) * s(5, 5) +
s(1, 6) * s(3, 3) * s(4, 6) * s(5, 5) -
s(1, 3) * s(3, 6) * s(4, 6) * s(5, 5) +
s(1, 6) * s(3, 4) * s(3, 5) * s(5, 6) +
s(1, 5) * s(3, 4) * s(3, 6) * s(5, 6) -
2 * s(1, 4) * s(3, 5) * s(3, 6) * s(5, 6) -
s(1, 6) * s(3, 3) * s(4, 5) * s(5, 6) +
s(1, 3) * s(3, 6) * s(4, 5) * s(5, 6) -
s(1, 5) * s(3, 3) * s(4, 6) * s(5, 6) +
s(1, 3) * s(3, 5) * s(4, 6) * s(5, 6) +
s(1, 4) * s(3, 3) * s(5, 6) * s(5, 6) -
s(1, 3) * s(3, 4) * s(5, 6) * s(5, 6) -
s(1, 5) * s(3, 4) * s(3, 5) * s(6, 6) +
s(1, 4) * s(3, 5) * s(3, 5) * s(6, 6) +
s(1, 5) * s(3, 3) * s(4, 5) * s(6, 6) -
s(1, 3) * s(3, 5) * s(4, 5) * s(6, 6) -
s(1, 4) * s(3, 3) * s(5, 5) * s(6, 6) +
s(1, 3) * s(3, 4) * s(5, 5) * s(6, 6), -s(1, 6) * s(3, 5) *
s(3, 6) * s(4, 4) + s(1, 5) * s(3, 6) * s(3, 6) * s(4, 4) +
s(1, 6) * s(3, 4) * s(3, 6) * s(4, 5) -
s(1, 4) * s(3, 6) * s(3, 6) * s(4, 5) +
s(1, 6) * s(3, 4) * s(3, 5) * s(4, 6) -
2 * s(1, 5) * s(3, 4) * s(3, 6) * s(4, 6) +
s(1, 4) * s(3, 5) * s(3, 6) * s(4, 6) -
s(1, 6) * s(3, 3) * s(4, 5) * s(4, 6) +
s(1, 3) * s(3, 6) * s(4, 5) * s(4, 6) +
s(1, 5) * s(3, 3) * s(4, 6) * s(4, 6) -
s(1, 3) * s(3, 5) * s(4, 6) * s(4, 6) -
s(1, 6) * s(3, 4) * s(3, 4) * s(5, 6) +
s(1, 4) * s(3, 4) * s(3, 6) * s(5, 6) +
s(1, 6) * s(3, 3) * s(4, 4) * s(5, 6) -
s(1, 3) * s(3, 6) * s(4, 4) * s(5, 6) -
s(1, 4) * s(3, 3) * s(4, 6) * s(5, 6) +
s(1, 3) * s(3, 4) * s(4, 6) * s(5, 6) +
s(1, 5) * s(3, 4) * s(3, 4) * s(6, 6) -
s(1, 4) * s(3, 4) * s(3, 5) * s(6, 6) -
s(1, 5) * s(3, 3) * s(4, 4) * s(6, 6) +
s(1, 3) * s(3, 5) * s(4, 4) * s(6, 6) +
s(1, 4) * s(3, 3) * s(4, 5) * s(6, 6) -
s(1, 3) * s(3, 4) * s(4, 5) * s(6, 6), s(1, 6) * s(3, 5) *
s(3, 5) * s(4, 4) - s(1, 5) * s(3, 5) * s(3, 6) * s(4, 4) -
2 * s(1, 6) * s(3, 4) * s(3, 5) * s(4, 5) +
s(1, 5) * s(3, 4) * s(3, 6) * s(4, 5) +
s(1, 4) * s(3, 5) * s(3, 6) * s(4, 5) +
s(1, 6) * s(3, 3) * s(4, 5) * s(4, 5) -
s(1, 3) * s(3, 6) * s(4, 5) * s(4, 5) +
s(1, 5) * s(3, 4) * s(3, 5) * s(4, 6) -
s(1, 4) * s(3, 5) * s(3, 5) * s(4, 6) -
s(1, 5) * s(3, 3) * s(4, 5) * s(4, 6) +
s(1, 3) * s(3, 5) * s(4, 5) * s(4, 6) +
s(1, 6) * s(3, 4) * s(3, 4) * s(5, 5) -
s(1, 4) * s(3, 4) * s(3, 6) * s(5, 5) -
s(1, 6) * s(3, 3) * s(4, 4) * s(5, 5) +
s(1, 3) * s(3, 6) * s(4, 4) * s(5, 5) +
s(1, 4) * s(3, 3) * s(4, 6) * s(5, 5) -
s(1, 3) * s(3, 4) * s(4, 6) * s(5, 5) -
s(1, 5) * s(3, 4) * s(3, 4) * s(5, 6) +
s(1, 4) * s(3, 4) * s(3, 5) * s(5, 6) +
s(1, 5) * s(3, 3) * s(4, 4) * s(5, 6) -
s(1, 3) * s(3, 5) * s(4, 4) * s(5, 6) -
s(1, 4) * s(3, 3) * s(4, 5) * s(5, 6) +
s(1, 3) * s(3, 4) * s(4, 5) * s(5, 6), s(1, 6) * s(2, 6) *
s(4, 5) * s(4, 5) - s(1, 6) * s(2, 5) * s(4, 5) * s(4, 6) -
s(1, 5) * s(2, 6) * s(4, 5) * s(4, 6) +
s(1, 5) * s(2, 5) * s(4, 6) * s(4, 6) -
s(1, 6) * s(2, 6) * s(4, 4) * s(5, 5) +
s(1, 6) * s(2, 4) * s(4, 6) * s(5, 5) +
s(1, 4) * s(2, 6) * s(4, 6) * s(5, 5) -
s(1, 2) * s(4, 6) * s(4, 6) * s(5, 5) +
s(1, 6) * s(2, 5) * s(4, 4) * s(5, 6) +
s(1, 5) * s(2, 6) * s(4, 4) * s(5, 6) -
s(1, 6) * s(2, 4) * s(4, 5) * s(5, 6) -
s(1, 4) * s(2, 6) * s(4, 5) * s(5, 6) -
s(1, 5) * s(2, 4) * s(4, 6) * s(5, 6) -
s(1, 4) * s(2, 5) * s(4, 6) * s(5, 6) +
2 * s(1, 2) * s(4, 5) * s(4, 6) * s(5, 6) +
s(1, 4) * s(2, 4) * s(5, 6) * s(5, 6) -
s(1, 2) * s(4, 4) * s(5, 6) * s(5, 6) -
s(1, 5) * s(2, 5) * s(4, 4) * s(6, 6) +
s(1, 5) * s(2, 4) * s(4, 5) * s(6, 6) +
s(1, 4) * s(2, 5) * s(4, 5) * s(6, 6) -
s(1, 2) * s(4, 5) * s(4, 5) * s(6, 6) -
s(1, 4) * s(2, 4) * s(5, 5) * s(6, 6) +
s(1, 2) * s(4, 4) * s(5, 5) * s(6, 6), -2 * s(1, 6) * s(2, 6) *
s(3, 5) * s(4, 5) + s(1, 6) * s(2, 5) * s(3, 6) * s(4, 5) +
s(1, 5) * s(2, 6) * s(3, 6) * s(4, 5) +
s(1, 6) * s(2, 5) * s(3, 5) * s(4, 6) +
s(1, 5) * s(2, 6) * s(3, 5) * s(4, 6) -
2 * s(1, 5) * s(2, 5) * s(3, 6) * s(4, 6) +
2 * s(1, 6) * s(2, 6) * s(3, 4) * s(5, 5) -
s(1, 6) * s(2, 4) * s(3, 6) * s(5, 5) -
s(1, 4) * s(2, 6) * s(3, 6) * s(5, 5) -
s(1, 6) * s(2, 3) * s(4, 6) * s(5, 5) -
s(1, 3) * s(2, 6) * s(4, 6) * s(5, 5) +
2 * s(1, 2) * s(3, 6) * s(4, 6) * s(5, 5) -
2 * s(1, 6) * s(2, 5) * s(3, 4) * s(5, 6) -
2 * s(1, 5) * s(2, 6) * s(3, 4) * s(5, 6) +
s(1, 6) * s(2, 4) * s(3, 5) * s(5, 6) +
s(1, 4) * s(2, 6) * s(3, 5) * s(5, 6) +
s(1, 5) * s(2, 4) * s(3, 6) * s(5, 6) +
s(1, 4) * s(2, 5) * s(3, 6) * s(5, 6) +
s(1, 6) * s(2, 3) * s(4, 5) * s(5, 6) +
s(1, 3) * s(2, 6) * s(4, 5) * s(5, 6) -
2 * s(1, 2) * s(3, 6) * s(4, 5) * s(5, 6) +
s(1, 5) * s(2, 3) * s(4, 6) * s(5, 6) +
s(1, 3) * s(2, 5) * s(4, 6) * s(5, 6) -
2 * s(1, 2) * s(3, 5) * s(4, 6) * s(5, 6) -
s(1, 4) * s(2, 3) * s(5, 6) * s(5, 6) -
s(1, 3) * s(2, 4) * s(5, 6) * s(5, 6) +
2 * s(1, 2) * s(3, 4) * s(5, 6) * s(5, 6) +
2 * s(1, 5) * s(2, 5) * s(3, 4) * s(6, 6) -
s(1, 5) * s(2, 4) * s(3, 5) * s(6, 6) -
s(1, 4) * s(2, 5) * s(3, 5) * s(6, 6) -
s(1, 5) * s(2, 3) * s(4, 5) * s(6, 6) -
s(1, 3) * s(2, 5) * s(4, 5) * s(6, 6) +
2 * s(1, 2) * s(3, 5) * s(4, 5) * s(6, 6) +
s(1, 4) * s(2, 3) * s(5, 5) * s(6, 6) +
s(1, 3) * s(2, 4) * s(5, 5) * s(6, 6) -
2 * s(1, 2) * s(3, 4) * s(5, 5) * s(6, 6), 2 * s(1, 6) *
s(2, 6) * s(3, 5) * s(4, 4) -
s(1, 6) * s(2, 5) * s(3, 6) * s(4, 4) -
s(1, 5) * s(2, 6) * s(3, 6) * s(4, 4) -
2 * s(1, 6) * s(2, 6) * s(3, 4) * s(4, 5) +
s(1, 6) * s(2, 4) * s(3, 6) * s(4, 5) +
s(1, 4) * s(2, 6) * s(3, 6) * s(4, 5) +
s(1, 6) * s(2, 5) * s(3, 4) * s(4, 6) +
s(1, 5) * s(2, 6) * s(3, 4) * s(4, 6) -
2 * s(1, 6) * s(2, 4) * s(3, 5) * s(4, 6) -
2 * s(1, 4) * s(2, 6) * s(3, 5) * s(4, 6) +
s(1, 5) * s(2, 4) * s(3, 6) * s(4, 6) +
s(1, 4) * s(2, 5) * s(3, 6) * s(4, 6) +
s(1, 6) * s(2, 3) * s(4, 5) * s(4, 6) +
s(1, 3) * s(2, 6) * s(4, 5) * s(4, 6) -
2 * s(1, 2) * s(3, 6) * s(4, 5) * s(4, 6) -
s(1, 5) * s(2, 3) * s(4, 6) * s(4, 6) -
s(1, 3) * s(2, 5) * s(4, 6) * s(4, 6) +
2 * s(1, 2) * s(3, 5) * s(4, 6) * s(4, 6) +
s(1, 6) * s(2, 4) * s(3, 4) * s(5, 6) +
s(1, 4) * s(2, 6) * s(3, 4) * s(5, 6) -
2 * s(1, 4) * s(2, 4) * s(3, 6) * s(5, 6) -
s(1, 6) * s(2, 3) * s(4, 4) * s(5, 6) -
s(1, 3) * s(2, 6) * s(4, 4) * s(5, 6) +
2 * s(1, 2) * s(3, 6) * s(4, 4) * s(5, 6) +
s(1, 4) * s(2, 3) * s(4, 6) * s(5, 6) +
s(1, 3) * s(2, 4) * s(4, 6) * s(5, 6) -
2 * s(1, 2) * s(3, 4) * s(4, 6) * s(5, 6) -
s(1, 5) * s(2, 4) * s(3, 4) * s(6, 6) -
s(1, 4) * s(2, 5) * s(3, 4) * s(6, 6) +
2 * s(1, 4) * s(2, 4) * s(3, 5) * s(6, 6) +
s(1, 5) * s(2, 3) * s(4, 4) * s(6, 6) +
s(1, 3) * s(2, 5) * s(4, 4) * s(6, 6) -
2 * s(1, 2) * s(3, 5) * s(4, 4) * s(6, 6) -
s(1, 4) * s(2, 3) * s(4, 5) * s(6, 6) -
s(1, 3) * s(2, 4) * s(4, 5) * s(6, 6) +
2 * s(1, 2) * s(3, 4) * s(4, 5) * s(6, 6), -s(1, 6) * s(2, 5) *
s(3, 5) * s(4, 4) - s(1, 5) * s(2, 6) * s(3, 5) * s(4, 4) +
2 * s(1, 5) * s(2, 5) * s(3, 6) * s(4, 4) +
s(1, 6) * s(2, 5) * s(3, 4) * s(4, 5) +
s(1, 5) * s(2, 6) * s(3, 4) * s(4, 5) +
s(1, 6) * s(2, 4) * s(3, 5) * s(4, 5) +
s(1, 4) * s(2, 6) * s(3, 5) * s(4, 5) -
2 * s(1, 5) * s(2, 4) * s(3, 6) * s(4, 5) -
2 * s(1, 4) * s(2, 5) * s(3, 6) * s(4, 5) -
s(1, 6) * s(2, 3) * s(4, 5) * s(4, 5) -
s(1, 3) * s(2, 6) * s(4, 5) * s(4, 5) +
2 * s(1, 2) * s(3, 6) * s(4, 5) * s(4, 5) -
2 * s(1, 5) * s(2, 5) * s(3, 4) * s(4, 6) +
s(1, 5) * s(2, 4) * s(3, 5) * s(4, 6) +
s(1, 4) * s(2, 5) * s(3, 5) * s(4, 6) +
s(1, 5) * s(2, 3) * s(4, 5) * s(4, 6) +
s(1, 3) * s(2, 5) * s(4, 5) * s(4, 6) -
2 * s(1, 2) * s(3, 5) * s(4, 5) * s(4, 6) -
s(1, 6) * s(2, 4) * s(3, 4) * s(5, 5) -
s(1, 4) * s(2, 6) * s(3, 4) * s(5, 5) +
2 * s(1, 4) * s(2, 4) * s(3, 6) * s(5, 5) +
s(1, 6) * s(2, 3) * s(4, 4) * s(5, 5) +
s(1, 3) * s(2, 6) * s(4, 4) * s(5, 5) -
2 * s(1, 2) * s(3, 6) * s(4, 4) * s(5, 5) -
s(1, 4) * s(2, 3) * s(4, 6) * s(5, 5) -
s(1, 3) * s(2, 4) * s(4, 6) * s(5, 5) +
2 * s(1, 2) * s(3, 4) * s(4, 6) * s(5, 5) +
s(1, 5) * s(2, 4) * s(3, 4) * s(5, 6) +
s(1, 4) * s(2, 5) * s(3, 4) * s(5, 6) -
2 * s(1, 4) * s(2, 4) * s(3, 5) * s(5, 6) -
s(1, 5) * s(2, 3) * s(4, 4) * s(5, 6) -
s(1, 3) * s(2, 5) * s(4, 4) * s(5, 6) +
2 * s(1, 2) * s(3, 5) * s(4, 4) * s(5, 6) +
s(1, 4) * s(2, 3) * s(4, 5) * s(5, 6) +
s(1, 3) * s(2, 4) * s(4, 5) * s(5, 6) -
2 * s(1, 2) * s(3, 4) * s(4, 5) * s(5, 6), s(1, 6) * s(2, 6) *
s(3, 5) * s(3, 5) - s(1, 6) * s(2, 5) * s(3, 5) * s(3, 6) -
s(1, 5) * s(2, 6) * s(3, 5) * s(3, 6) +
s(1, 5) * s(2, 5) * s(3, 6) * s(3, 6) -
s(1, 6) * s(2, 6) * s(3, 3) * s(5, 5) +
s(1, 6) * s(2, 3) * s(3, 6) * s(5, 5) +
s(1, 3) * s(2, 6) * s(3, 6) * s(5, 5) -
s(1, 2) * s(3, 6) * s(3, 6) * s(5, 5) +
s(1, 6) * s(2, 5) * s(3, 3) * s(5, 6) +
s(1, 5) * s(2, 6) * s(3, 3) * s(5, 6) -
s(1, 6) * s(2, 3) * s(3, 5) * s(5, 6) -
s(1, 3) * s(2, 6) * s(3, 5) * s(5, 6) -
s(1, 5) * s(2, 3) * s(3, 6) * s(5, 6) -
s(1, 3) * s(2, 5) * s(3, 6) * s(5, 6) +
2 * s(1, 2) * s(3, 5) * s(3, 6) * s(5, 6) +
s(1, 3) * s(2, 3) * s(5, 6) * s(5, 6) -
s(1, 2) * s(3, 3) * s(5, 6) * s(5, 6) -
s(1, 5) * s(2, 5) * s(3, 3) * s(6, 6) +
s(1, 5) * s(2, 3) * s(3, 5) * s(6, 6) +
s(1, 3) * s(2, 5) * s(3, 5) * s(6, 6) -
s(1, 2) * s(3, 5) * s(3, 5) * s(6, 6) -
s(1, 3) * s(2, 3) * s(5, 5) * s(6, 6) +
s(1, 2) * s(3, 3) * s(5, 5) * s(6, 6), -2 * s(1, 6) * s(2, 6) *
s(3, 4) * s(3, 5) + s(1, 6) * s(2, 5) * s(3, 4) * s(3, 6) +
s(1, 5) * s(2, 6) * s(3, 4) * s(3, 6) +
s(1, 6) * s(2, 4) * s(3, 5) * s(3, 6) +
s(1, 4) * s(2, 6) * s(3, 5) * s(3, 6) -
s(1, 5) * s(2, 4) * s(3, 6) * s(3, 6) -
s(1, 4) * s(2, 5) * s(3, 6) * s(3, 6) +
2 * s(1, 6) * s(2, 6) * s(3, 3) * s(4, 5) -
2 * s(1, 6) * s(2, 3) * s(3, 6) * s(4, 5) -
2 * s(1, 3) * s(2, 6) * s(3, 6) * s(4, 5) +
2 * s(1, 2) * s(3, 6) * s(3, 6) * s(4, 5) -
s(1, 6) * s(2, 5) * s(3, 3) * s(4, 6) -
s(1, 5) * s(2, 6) * s(3, 3) * s(4, 6) +
s(1, 6) * s(2, 3) * s(3, 5) * s(4, 6) +
s(1, 3) * s(2, 6) * s(3, 5) * s(4, 6) +
s(1, 5) * s(2, 3) * s(3, 6) * s(4, 6) +
s(1, 3) * s(2, 5) * s(3, 6) * s(4, 6) -
2 * s(1, 2) * s(3, 5) * s(3, 6) * s(4, 6) -
s(1, 6) * s(2, 4) * s(3, 3) * s(5, 6) -
s(1, 4) * s(2, 6) * s(3, 3) * s(5, 6) +
s(1, 6) * s(2, 3) * s(3, 4) * s(5, 6) +
s(1, 3) * s(2, 6) * s(3, 4) * s(5, 6) +
s(1, 4) * s(2, 3) * s(3, 6) * s(5, 6) +
s(1, 3) * s(2, 4) * s(3, 6) * s(5, 6) -
2 * s(1, 2) * s(3, 4) * s(3, 6) * s(5, 6) -
2 * s(1, 3) * s(2, 3) * s(4, 6) * s(5, 6) +
2 * s(1, 2) * s(3, 3) * s(4, 6) * s(5, 6) +
s(1, 5) * s(2, 4) * s(3, 3) * s(6, 6) +
s(1, 4) * s(2, 5) * s(3, 3) * s(6, 6) -
s(1, 5) * s(2, 3) * s(3, 4) * s(6, 6) -
s(1, 3) * s(2, 5) * s(3, 4) * s(6, 6) -
s(1, 4) * s(2, 3) * s(3, 5) * s(6, 6) -
s(1, 3) * s(2, 4) * s(3, 5) * s(6, 6) +
2 * s(1, 2) * s(3, 4) * s(3, 5) * s(6, 6) +
2 * s(1, 3) * s(2, 3) * s(4, 5) * s(6, 6) -
2 * s(1, 2) * s(3, 3) * s(4, 5) * s(6, 6), s(1, 6) * s(2, 5) *
s(3, 4) * s(3, 5) + s(1, 5) * s(2, 6) * s(3, 4) * s(3, 5) -
s(1, 6) * s(2, 4) * s(3, 5) * s(3, 5) -
s(1, 4) * s(2, 6) * s(3, 5) * s(3, 5) -
2 * s(1, 5) * s(2, 5) * s(3, 4) * s(3, 6) +
s(1, 5) * s(2, 4) * s(3, 5) * s(3, 6) +
s(1, 4) * s(2, 5) * s(3, 5) * s(3, 6) -
s(1, 6) * s(2, 5) * s(3, 3) * s(4, 5) -
s(1, 5) * s(2, 6) * s(3, 3) * s(4, 5) +
s(1, 6) * s(2, 3) * s(3, 5) * s(4, 5) +
s(1, 3) * s(2, 6) * s(3, 5) * s(4, 5) +
s(1, 5) * s(2, 3) * s(3, 6) * s(4, 5) +
s(1, 3) * s(2, 5) * s(3, 6) * s(4, 5) -
2 * s(1, 2) * s(3, 5) * s(3, 6) * s(4, 5) +
2 * s(1, 5) * s(2, 5) * s(3, 3) * s(4, 6) -
2 * s(1, 5) * s(2, 3) * s(3, 5) * s(4, 6) -
2 * s(1, 3) * s(2, 5) * s(3, 5) * s(4, 6) +
2 * s(1, 2) * s(3, 5) * s(3, 5) * s(4, 6) +
s(1, 6) * s(2, 4) * s(3, 3) * s(5, 5) +
s(1, 4) * s(2, 6) * s(3, 3) * s(5, 5) -
s(1, 6) * s(2, 3) * s(3, 4) * s(5, 5) -
s(1, 3) * s(2, 6) * s(3, 4) * s(5, 5) -
s(1, 4) * s(2, 3) * s(3, 6) * s(5, 5) -
s(1, 3) * s(2, 4) * s(3, 6) * s(5, 5) +
2 * s(1, 2) * s(3, 4) * s(3, 6) * s(5, 5) +
2 * s(1, 3) * s(2, 3) * s(4, 6) * s(5, 5) -
2 * s(1, 2) * s(3, 3) * s(4, 6) * s(5, 5) -
s(1, 5) * s(2, 4) * s(3, 3) * s(5, 6) -
s(1, 4) * s(2, 5) * s(3, 3) * s(5, 6) +
s(1, 5) * s(2, 3) * s(3, 4) * s(5, 6) +
s(1, 3) * s(2, 5) * s(3, 4) * s(5, 6) +
s(1, 4) * s(2, 3) * s(3, 5) * s(5, 6) +
s(1, 3) * s(2, 4) * s(3, 5) * s(5, 6) -
2 * s(1, 2) * s(3, 4) * s(3, 5) * s(5, 6) -
2 * s(1, 3) * s(2, 3) * s(4, 5) * s(5, 6) +
2 * s(1, 2) * s(3, 3) * s(4, 5) * s(5, 6), s(1, 6) * s(2, 6) *
s(3, 4) * s(3, 4) - s(1, 6) * s(2, 4) * s(3, 4) * s(3, 6) -
s(1, 4) * s(2, 6) * s(3, 4) * s(3, 6) +
s(1, 4) * s(2, 4) * s(3, 6) * s(3, 6) -
s(1, 6) * s(2, 6) * s(3, 3) * s(4, 4) +
s(1, 6) * s(2, 3) * s(3, 6) * s(4, 4) +
s(1, 3) * s(2, 6) * s(3, 6) * s(4, 4) -
s(1, 2) * s(3, 6) * s(3, 6) * s(4, 4) +
s(1, 6) * s(2, 4) * s(3, 3) * s(4, 6) +
s(1, 4) * s(2, 6) * s(3, 3) * s(4, 6) -
s(1, 6) * s(2, 3) * s(3, 4) * s(4, 6) -
s(1, 3) * s(2, 6) * s(3, 4) * s(4, 6) -
s(1, 4) * s(2, 3) * s(3, 6) * s(4, 6) -
s(1, 3) * s(2, 4) * s(3, 6) * s(4, 6) +
2 * s(1, 2) * s(3, 4) * s(3, 6) * s(4, 6) +
s(1, 3) * s(2, 3) * s(4, 6) * s(4, 6) -
s(1, 2) * s(3, 3) * s(4, 6) * s(4, 6) -
s(1, 4) * s(2, 4) * s(3, 3) * s(6, 6) +
s(1, 4) * s(2, 3) * s(3, 4) * s(6, 6) +
s(1, 3) * s(2, 4) * s(3, 4) * s(6, 6) -
s(1, 2) * s(3, 4) * s(3, 4) * s(6, 6) -
s(1, 3) * s(2, 3) * s(4, 4) * s(6, 6) +
s(1, 2) * s(3, 3) * s(4, 4) * s(6, 6), -s(1, 6) * s(2, 5) *
s(3, 4) * s(3, 4) - s(1, 5) * s(2, 6) * s(3, 4) * s(3, 4) +
s(1, 6) * s(2, 4) * s(3, 4) * s(3, 5) +
s(1, 4) * s(2, 6) * s(3, 4) * s(3, 5) +
s(1, 5) * s(2, 4) * s(3, 4) * s(3, 6) +
s(1, 4) * s(2, 5) * s(3, 4) * s(3, 6) -
2 * s(1, 4) * s(2, 4) * s(3, 5) * s(3, 6) +
s(1, 6) * s(2, 5) * s(3, 3) * s(4, 4) +
s(1, 5) * s(2, 6) * s(3, 3) * s(4, 4) -
s(1, 6) * s(2, 3) * s(3, 5) * s(4, 4) -
s(1, 3) * s(2, 6) * s(3, 5) * s(4, 4) -
s(1, 5) * s(2, 3) * s(3, 6) * s(4, 4) -
s(1, 3) * s(2, 5) * s(3, 6) * s(4, 4) +
2 * s(1, 2) * s(3, 5) * s(3, 6) * s(4, 4) -
s(1, 6) * s(2, 4) * s(3, 3) * s(4, 5) -
s(1, 4) * s(2, 6) * s(3, 3) * s(4, 5) +
s(1, 6) * s(2, 3) * s(3, 4) * s(4, 5) +
s(1, 3) * s(2, 6) * s(3, 4) * s(4, 5) +
s(1, 4) * s(2, 3) * s(3, 6) * s(4, 5) +
s(1, 3) * s(2, 4) * s(3, 6) * s(4, 5) -
2 * s(1, 2) * s(3, 4) * s(3, 6) * s(4, 5) -
s(1, 5) * s(2, 4) * s(3, 3) * s(4, 6) -
s(1, 4) * s(2, 5) * s(3, 3) * s(4, 6) +
s(1, 5) * s(2, 3) * s(3, 4) * s(4, 6) +
s(1, 3) * s(2, 5) * s(3, 4) * s(4, 6) +
s(1, 4) * s(2, 3) * s(3, 5) * s(4, 6) +
s(1, 3) * s(2, 4) * s(3, 5) * s(4, 6) -
2 * s(1, 2) * s(3, 4) * s(3, 5) * s(4, 6) -
2 * s(1, 3) * s(2, 3) * s(4, 5) * s(4, 6) +
2 * s(1, 2) * s(3, 3) * s(4, 5) * s(4, 6) +
2 * s(1, 4) * s(2, 4) * s(3, 3) * s(5, 6) -
2 * s(1, 4) * s(2, 3) * s(3, 4) * s(5, 6) -
2 * s(1, 3) * s(2, 4) * s(3, 4) * s(5, 6) +
2 * s(1, 2) * s(3, 4) * s(3, 4) * s(5, 6) +
2 * s(1, 3) * s(2, 3) * s(4, 4) * s(5, 6) -
2 * s(1, 2) * s(3, 3) * s(4, 4) * s(5, 6), s(1, 5) * s(2, 5) *
s(3, 4) * s(3, 4) - s(1, 5) * s(2, 4) * s(3, 4) * s(3, 5) -
s(1, 4) * s(2, 5) * s(3, 4) * s(3, 5) +
s(1, 4) * s(2, 4) * s(3, 5) * s(3, 5) -
s(1, 5) * s(2, 5) * s(3, 3) * s(4, 4) +
s(1, 5) * s(2, 3) * s(3, 5) * s(4, 4) +
s(1, 3) * s(2, 5) * s(3, 5) * s(4, 4) -
s(1, 2) * s(3, 5) * s(3, 5) * s(4, 4) +
s(1, 5) * s(2, 4) * s(3, 3) * s(4, 5) +
s(1, 4) * s(2, 5) * s(3, 3) * s(4, 5) -
s(1, 5) * s(2, 3) * s(3, 4) * s(4, 5) -
s(1, 3) * s(2, 5) * s(3, 4) * s(4, 5) -
s(1, 4) * s(2, 3) * s(3, 5) * s(4, 5) -
s(1, 3) * s(2, 4) * s(3, 5) * s(4, 5) +
2 * s(1, 2) * s(3, 4) * s(3, 5) * s(4, 5) +
s(1, 3) * s(2, 3) * s(4, 5) * s(4, 5) -
s(1, 2) * s(3, 3) * s(4, 5) * s(4, 5) -
s(1, 4) * s(2, 4) * s(3, 3) * s(5, 5) +
s(1, 4) * s(2, 3) * s(3, 4) * s(5, 5) +
s(1, 3) * s(2, 4) * s(3, 4) * s(5, 5) -
s(1, 2) * s(3, 4) * s(3, 4) * s(5, 5) -
s(1, 3) * s(2, 3) * s(4, 4) * s(5, 5) +
s(1, 2) * s(3, 3) * s(4, 4) * s(5, 5)};
}
/**
 * Looks up the sample correlation between two variables.
 *
 * @param i one-indexed index of the first variable.
 * @param j one-indexed index of the second variable.
 * @return the sample correlation of var(i) with var(j) (one-indexed).
 */
private double s(int i, int j) {
    // The correlation matrix is zero-indexed, so shift both indices down.
    final int row = i - 1;
    final int col = j - 1;
    return indexedCorr().getValue(row, col);
}
/**
 * @return the chi-square statistic stored for this test (the {@code chiSquare}
 *         field; computed elsewhere in this class).
 */
public double getChiSquare() {
return chiSquare;
}
/**
 * @return a short, fixed human-readable name for this test ("Time Series").
 */
public String toString() {
return "Time Series";
}
/**
 * Not supported by this implementation.
 *
 * @return always {@code null}.
 */
public DataSet getData() {
return null; // no backing DataSet is kept by this test
}
/**
 * Not supported by this implementation.
 *
 * @return always {@code null}.
 */
@Override
public ICovarianceMatrix getCov() {
return null;
}
/**
 * Not supported by this implementation.
 *
 * @return always {@code null}.
 */
@Override
public List<DataSet> getDataSets() {
return null;
}
/**
 * Not supported by this implementation.
 *
 * @return always 0.
 */
@Override
public int getSampleSize() {
return 0;
}
/**
 * Not supported by this implementation.
 *
 * @return always {@code null}.
 */
@Override
public List<TetradMatrix> getCovMatrices() {
return null;
}
/**
 * @return the score of this test, defined as its p-value (delegates to
 *         {@code getPValue()}).
 */
@Override
public double getScore() {
return getPValue();
}
}
| gpl-2.0 |
saces/fred | src/freenet/client/StandardOnionFECCodec.java | 4065 | /* This code is part of Freenet. It is distributed under the GNU General
* Public License, version 2 (or at your option any later version). See
* http://www.gnu.org/ for further details of the GPL. */
package freenet.client;
import com.onionnetworks.fec.FECCode;
import com.onionnetworks.fec.PureCode;
import freenet.support.LRUHashtable;
import freenet.support.LogThresholdCallback;
import freenet.support.Logger;
import freenet.support.Logger.LogLevel;
/**
* FECCodec implementation using the onion code.
*/
public class StandardOnionFECCodec extends FECCodec {
// REDFLAG: How big is one of these?
private static int MAX_CACHED_CODECS = 8;
static boolean noNative;
private static final LRUHashtable<MyKey, StandardOnionFECCodec> recentlyUsedCodecs = new LRUHashtable<MyKey, StandardOnionFECCodec>();
private static volatile boolean logMINOR;
static {
Logger.registerLogThresholdCallback(new LogThresholdCallback(){
@Override
public void shouldUpdate(){
logMINOR = Logger.shouldLog(LogLevel.MINOR, this);
}
});
}
private static class MyKey {
/** Number of input blocks */
int k;
/** Number of output blocks, including input blocks */
int n;
public MyKey(int k, int n) {
this.n = n;
this.k = k;
}
@Override
public boolean equals(Object o) {
if(o instanceof MyKey) {
MyKey key = (MyKey)o;
return (key.n == n) && (key.k == k);
} else return false;
}
@Override
public int hashCode() {
return (n << 16) + k;
}
}
public synchronized static FECCodec getInstance(int dataBlocks, int checkBlocks) {
if(checkBlocks == 0 || dataBlocks == 0)
throw new IllegalArgumentException("data blocks "+dataBlocks+" check blocks "+checkBlocks);
MyKey key = new MyKey(dataBlocks, checkBlocks + dataBlocks);
StandardOnionFECCodec codec = recentlyUsedCodecs.get(key);
if(codec != null) {
recentlyUsedCodecs.push(key, codec);
return codec;
}
codec = new StandardOnionFECCodec(dataBlocks, checkBlocks + dataBlocks);
recentlyUsedCodecs.push(key, codec);
while(recentlyUsedCodecs.size() > MAX_CACHED_CODECS) {
recentlyUsedCodecs.popKey();
}
return codec;
}
public StandardOnionFECCodec(int k, int n) {
super(k, n);
loadFEC();
}
@Override
protected void loadFEC() {
synchronized(this) {
if(fec != null) return;
}
FECCode fec2 = null;
if(k >= n) throw new IllegalArgumentException("n must be >k: n = "+n+" k = "+k);
if(k > 256 || n > 256) Logger.error(this, "Wierd FEC parameters? k = "+k+" n = "+n);
// native code segfaults if k < 256 and n > 256
// native code segfaults if n > k*2 i.e. if we have extra blocks beyond 100% redundancy
// FIXME: NATIVE FEC DISABLED PENDING FIXING THE SEGFAULT BUG (easily reproduced with check blocks > data blocks)
// AND A COMPETENT CODE REVIEW!!!
// SEGFAULT BUGS ARE USUALLY REMOTELY EXPLOITABLE!!!
// if((!noNative) && k <= 256 && n <= 256 && n <= k*2) {
// System.out.println("Creating native FEC: n="+n+" k="+k);
// System.out.flush();
// try {
// fec2 = new Native8Code(k,n);
// Logger.minor(this, "Loaded native FEC.");
//
// } catch (Throwable t) {
// if(!noNative) {
// System.err.println("Failed to load native FEC: "+t);
// t.printStackTrace();
// }
// Logger.error(this, "Failed to load native FEC: "+t+" (k="+k+" n="+n+ ')', t);
//
// if(t instanceof UnsatisfiedLinkError)
// noNative = true;
// }
// } // FIXME 16-bit native FEC???
if (fec2 != null){
synchronized(this) {
fec = fec2;
}
} else {
fec2 = new PureCode(k,n);
synchronized(this) {
fec = fec2;
}
}
// revert to below if above causes JVM crashes
// Worst performance, but decode crashes
// fec = new PureCode(k,n);
// Crashes are caused by bugs which cause to use 320/128 etc. - n > 256, k < 256.
}
@Override
public int countCheckBlocks() {
return n-k;
}
@Override
public String toString() {
return super.toString()+":n="+n+",k="+k;
}
@Override
public short getAlgorithm() {
return Metadata.SPLITFILE_ONION_STANDARD;
}
}
| gpl-2.0 |
geoserver/geofence | src/gui/core/plugin/userui/src/main/java/org/geoserver/geofence/gui/server/gwt/RulesManagerServiceImpl.java | 6867 | /* (c) 2014 Open Source Geospatial Foundation - all rights reserved
* This code is licensed under the GPL 2.0 license, available at the root
* application directory.
*/
package org.geoserver.geofence.gui.server.gwt;
import java.util.List;
import com.extjs.gxt.ui.client.data.PagingLoadResult;
import com.google.gwt.user.server.rpc.RemoteServiceServlet;
import org.geoserver.geofence.gui.client.ApplicationException;
import org.geoserver.geofence.gui.client.model.RuleModel;
import org.geoserver.geofence.gui.client.model.data.LayerAttribUI;
import org.geoserver.geofence.gui.client.model.data.LayerCustomProps;
import org.geoserver.geofence.gui.client.model.data.LayerDetailsInfo;
import org.geoserver.geofence.gui.client.model.data.LayerLimitsInfo;
import org.geoserver.geofence.gui.client.model.data.LayerStyle;
import org.geoserver.geofence.gui.client.service.RulesManagerRemoteService;
import org.geoserver.geofence.gui.server.service.IRulesManagerService;
import org.geoserver.geofence.gui.spring.ApplicationContextUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
// TODO: Auto-generated Javadoc
/**
* The Class RulesManagerServiceImpl.
*/
public class RulesManagerServiceImpl extends RemoteServiceServlet implements RulesManagerRemoteService
{
/** The Constant serialVersionUID. */
private static final long serialVersionUID = 5342510982782032063L;
/** The logger. */
private final Logger logger = LoggerFactory.getLogger(this.getClass());
/** The rules manager service. */
private IRulesManagerService rulesManagerService;
/**
* Instantiates a new rules manager service impl.
*/
public RulesManagerServiceImpl()
{
this.rulesManagerService = (IRulesManagerService) ApplicationContextUtil.getInstance().getBean("rulesManagerServiceGWT");
}
/*
* (non-Javadoc)
*
* @see
* org.geoserver.geofence.gui.client.service.ProfilesManagerRemoteService#getRules(com.extjs
* .gxt.ui.client.data.PagingLoadConfig)
*/
public PagingLoadResult<RuleModel> getRules(int offset, int limit, boolean full) throws ApplicationException
{
PagingLoadResult<RuleModel> ret = rulesManagerService.getRules(offset, limit, full);
return ret;
}
/*
* (non-Javadoc)
*
* @see
* org.geoserver.geofence.gui.client.service.RulesManagerRemoteService#saveAllRules(java.util
* .List)
*/
public void saveRule(RuleModel rule) throws ApplicationException
{
rulesManagerService.saveRule(rule);
}
/*
* (non-Javadoc)
*
* @see
* org.geoserver.geofence.gui.client.service.RulesManagerRemoteService#saveAllRules(java.util
* .List)
*/
public void deleteRule(RuleModel rule) throws ApplicationException
{
rulesManagerService.deleteRule(rule);
}
/*
* (non-Javadoc)
*
* @see
* org.geoserver.geofence.gui.client.service.RulesManagerRemoteService#saveAllRules(java.util
* .List)
*/
public void saveAllRules(List<RuleModel> rules) throws ApplicationException
{
rulesManagerService.saveAllRules(rules);
}
/*
* (non-Javadoc)
*
* @see
* org.geoserver.geofence.gui.client.service.RulesManagerRemoteService#getLayerCustomProps(
* com.extjs.gxt.ui.client.data.PagingLoadConfig, org.geoserver.geofence.gui.client.model.Rule)
*/
// public PagingLoadResult<LayerCustomProps> getLayerCustomProps(int offset, int limit, Rule rule)
// throws ApplicationException
// {
// return rulesManagerService.getLayerCustomProps(offset, limit, rule);
// }
/*
* (non-Javadoc)
*
* @see
* org.geoserver.geofence.gui.client.service.RulesManagerRemoteService#setDetailsProps(java
* .lang.Long, org.geoserver.geofence.gui.client.model.data.LayerCustomProps)
*/
public void setDetailsProps(Long ruleId, List<LayerCustomProps> customProps) throws ApplicationException
{
rulesManagerService.setDetailsProps(ruleId, customProps);
}
public void shift(long priorityStart, long offset) throws ApplicationException
{
rulesManagerService.shift(priorityStart, offset);
}
public void swap(long id1, long id2) throws ApplicationException
{
rulesManagerService.swap(id1, id2);
}
public void findRule(RuleModel rule) throws ApplicationException, Exception
{
rulesManagerService.findRule(rule);
}
/*
* (non-Javadoc)
*
* @see
* org.geoserver.geofence.gui.client.service.RulesManagerRemoteService#setLayerAttributes(java
* .lang.Long, java.util.List)
*/
public void setLayerAttributes(Long ruleId, List<LayerAttribUI> layerAttributes) throws ApplicationException
{
rulesManagerService.setLayerAttributes(ruleId, layerAttributes);
}
/*
* (non-Javadoc)
*
* @see
* org.geoserver.geofence.gui.client.service.RulesManagerRemoteService#saveLayerDetails(it.
* geosolutions.geofence.gui.client.model.data.LayerDetailsForm)
*/
public LayerDetailsInfo saveLayerDetailsInfo(LayerDetailsInfo layerDetailsForm, List<LayerStyle> layerStyles)
throws ApplicationException
{
return rulesManagerService.saveLayerDetailsInfo(layerDetailsForm, layerStyles);
}
/*
* (non-Javadoc)
*
* @see
* org.geoserver.geofence.gui.client.service.RulesManagerRemoteService#getLayerDetailsInfo(
* org.geoserver.geofence.gui.client.model.Rule)
*/
public LayerDetailsInfo getLayerDetailsInfo(RuleModel rule) throws ApplicationException
{
return rulesManagerService.getLayerDetailsInfo(rule);
}
/*
* (non-Javadoc)
*
* @see
* org.geoserver.geofence.gui.client.service.RulesManagerRemoteService#getLayerAttributes(com
* .extjs.gxt.ui.client.data.PagingLoadConfig, org.geoserver.geofence.gui.client.model.Rule)
*/
public List<LayerAttribUI> getLayerAttributes(RuleModel rule) throws ApplicationException
{
return rulesManagerService.getLayerAttributes(rule);
}
/* (non-Javadoc)
* @see org.geoserver.geofence.gui.client.service.RulesManagerRemoteService#saveLayerLimitsInfo(org.geoserver.geofence.gui.client.model.data.LayerLimitsInfo)
*/
public LayerLimitsInfo saveLayerLimitsInfo(LayerLimitsInfo layerLimitsForm) throws ApplicationException
{
return rulesManagerService.saveLayerLimitsInfo(layerLimitsForm);
}
/* (non-Javadoc)
* @see org.geoserver.geofence.gui.client.service.RulesManagerRemoteService#getLayerLimitsInfo(org.geoserver.geofence.gui.client.model.Rule)
*/
public LayerLimitsInfo getLayerLimitsInfo(RuleModel rule) throws ApplicationException
{
return rulesManagerService.getLayerLimitsInfo(rule);
}
}
| gpl-2.0 |
arodchen/MaxSim | graal/graal/com.oracle.graal.nodes/src/com/oracle/graal/nodes/virtual/AllocatedObjectNode.java | 2106 | /*
* Copyright (c) 2009, 2011, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package com.oracle.graal.nodes.virtual;
import com.oracle.graal.nodes.calc.*;
import com.oracle.graal.nodes.spi.*;
import com.oracle.graal.nodes.type.*;
/**
 * Selects one object from a {@link CommitAllocationNode}. The object is identified by its
 * {@link VirtualObjectNode}.
 *
 * The commit input is not set in the constructor; it is wired up later via
 * {@link #setCommit(CommitAllocationNode)}.
 */
public class AllocatedObjectNode extends FloatingNode implements Virtualizable {
// The virtual object this node selects out of the commit allocation.
@Input private VirtualObjectNode virtualObject;
// The allocation that materializes this node's object; initially null.
@Input private CommitAllocationNode commit;
/**
 * Creates a node whose stamp is the exact, non-null type of the given
 * virtual object.
 *
 * @param virtualObject the virtual object this node selects
 */
public AllocatedObjectNode(VirtualObjectNode virtualObject) {
super(StampFactory.exactNonNull(virtualObject.type()));
this.virtualObject = virtualObject;
}
public VirtualObjectNode getVirtualObject() {
return virtualObject;
}
public CommitAllocationNode getCommit() {
return commit;
}
/**
 * Replaces the commit allocation this node refers to.
 *
 * @param x the new commit allocation node
 */
public void setCommit(CommitAllocationNode x) {
// Keep the graph's usage bookkeeping consistent before storing the new input.
updateUsages(commit, x);
commit = x;
}
@Override
public void virtualize(VirtualizerTool tool) {
// During escape analysis this node stands for the virtual object itself.
tool.replaceWithVirtual(getVirtualObject());
}
}
| gpl-2.0 |
AntumDeluge/arianne-stendhal | src/games/stendhal/server/maps/quests/KanmararnSoldiers.java | 19127 | /* $Id$ */
/***************************************************************************
* (C) Copyright 2003-2010 - Stendhal *
***************************************************************************
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
package games.stendhal.server.maps.quests;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;
import org.apache.log4j.Logger;
import games.stendhal.common.parser.Sentence;
import games.stendhal.server.core.engine.SingletonRepository;
import games.stendhal.server.core.engine.StendhalRPZone;
import games.stendhal.server.core.events.TurnListener;
import games.stendhal.server.entity.Entity;
import games.stendhal.server.entity.item.Corpse;
import games.stendhal.server.entity.item.Item;
import games.stendhal.server.entity.npc.ChatAction;
import games.stendhal.server.entity.npc.ChatCondition;
import games.stendhal.server.entity.npc.ConversationPhrases;
import games.stendhal.server.entity.npc.ConversationStates;
import games.stendhal.server.entity.npc.EventRaiser;
import games.stendhal.server.entity.npc.SpeakerNPC;
import games.stendhal.server.entity.npc.action.DropInfostringItemAction;
import games.stendhal.server.entity.npc.action.EquipItemAction;
import games.stendhal.server.entity.npc.action.IncreaseKarmaAction;
import games.stendhal.server.entity.npc.action.IncreaseXPAction;
import games.stendhal.server.entity.npc.action.MultipleActions;
import games.stendhal.server.entity.npc.action.SetQuestAndModifyKarmaAction;
import games.stendhal.server.entity.npc.condition.AndCondition;
import games.stendhal.server.entity.npc.condition.GreetingMatchesNameCondition;
import games.stendhal.server.entity.npc.condition.NotCondition;
import games.stendhal.server.entity.npc.condition.OrCondition;
import games.stendhal.server.entity.npc.condition.PlayerHasInfostringItemWithHimCondition;
import games.stendhal.server.entity.npc.condition.PlayerOwnsItemIncludingBankCondition;
import games.stendhal.server.entity.npc.condition.QuestActiveCondition;
import games.stendhal.server.entity.npc.condition.QuestCompletedCondition;
import games.stendhal.server.entity.npc.condition.QuestInStateCondition;
import games.stendhal.server.entity.npc.condition.QuestNotCompletedCondition;
import games.stendhal.server.entity.npc.condition.QuestNotInStateCondition;
import games.stendhal.server.entity.npc.condition.QuestNotStartedCondition;
import games.stendhal.server.entity.player.Player;
import games.stendhal.server.maps.Region;
import marauroa.common.game.RPObject;
import marauroa.common.game.SlotIsFullException;
/**
* QUEST:
* <p>
* Soldiers in Kanmararn.
*
* NOTE:
* <p>
* It also starts a quest that needs NPC McPegleg that is created. It doesn't
* harm if that script is missing, just that the IOU cannot be delivered and
* hence the player can't get cash
*
* PARTICIPANTS:
* <li> Henry
* <li> Sergeant James
* <li> corpse of Tom
* <li> corpse of Charles
* <li> corpse of Peter
*
* STEPS:
* <li> optional: speak to Sergeant James to get the task to find the map
* <li> talk to Henry to get the task to find some proof that the other 3
* soldiers are dead.
* <li> collect the item in each of the corpses of the three other soldiers
* <li> bring them back to Henry to get the map - bring the map to Sergeant
* James
*
* REWARD:
* <p>
* from Henry:
* <li> you can keep the IOU paper (for quest MCPeglegIOU)
* <li> 2,500 XP
* <li> some karma (15)
* <p>
* from Sergeant James
* <li> mainio boots
* <li> some karma (15)
*
* REPETITIONS:
* <li> None.
*
* @see McPeglegIOU
*/
public class KanmararnSoldiers extends AbstractQuest {
// Logger used e.g. when refilling a quest corpse fails.
private static final Logger logger = Logger.getLogger(KanmararnSoldiers.class);
// Quest slot name; state values used in this file: "start", "map".
private static final String QUEST_SLOT = "soldier_henry";
/**
 * The maximum time (in seconds) until plundered corpses will be filled
 * again, so that other players can do the quest as well.
 */
private static final int CORPSE_REFILL_SECONDS = 60;
/* Soldier names used in quest */
private static final String SLD_HENRY = "Henry";
private static final String SLD_CHARLES = "Charles";
private static final String SLD_TOM = "Tom";
private static final String SLD_PETER = "Peter";
private static final String SRG_JAMES = "Sergeant James";
/**
 * @return the quest slot identifier used to track this quest's state.
 */
@Override
public String getSlotName() {
return QUEST_SLOT;
}
/**
 * Periodically ensures that a quest corpse still contains its quest item.
 *
 * After {@link #start()} the refiller re-schedules itself indefinitely;
 * whenever the expected item (matched by name, description and an
 * infostring equal to the corpse name) is missing, a fresh copy is created
 * and put back, so that other players can do the quest as well.
 *
 * @author daniel
 */
static class CorpseRefiller implements TurnListener {
    private final Corpse corpse;
    private final String itemName;
    private final String description;

    public CorpseRefiller(final Corpse corpse, final String itemName, final String description) {
        this.corpse = corpse;
        this.itemName = itemName;
        this.description = description;
    }

    /** Schedules the first refill check for the next turn. */
    public void start() {
        SingletonRepository.getTurnNotifier().notifyInTurns(1, this);
    }

    /**
     * Tells whether the given item is the quest item belonging to this
     * corpse: same name, same description, infostring naming the corpse.
     */
    private boolean equalsExpectedItem(final Item item) {
        return item.getName().equals(itemName)
                && item.getDescription().equals(description)
                && corpse.getName().equals(item.getInfoString());
    }

    @Override
    public void onTurnReached(final int currentTurn) {
        // Look for the quest item among the corpse contents. Note that
        // somebody might have put other stuff into the corpse.
        boolean questItemPresent = false;
        for (final RPObject content : corpse.getSlot("content")) {
            if ((content instanceof Item) && equalsExpectedItem((Item) content)) {
                questItemPresent = true;
            }
        }
        try {
            if (!questItemPresent) {
                // Recreate the item and put it back into the corpse.
                final Item replacement = SingletonRepository.getEntityManager().getItem(
                        itemName);
                replacement.setInfoString(corpse.getName());
                replacement.setDescription(description);
                corpse.add(replacement);
                corpse.notifyWorldAboutChanges();
            }
        } catch (final SlotIsFullException e) {
            // Deliberately ignored: don't refill the corpse until someone
            // removes the other items from it.
            logger.warn("Quest corpse is full: " + corpse.getName());
        }
        // Keep the checking cycle going.
        SingletonRepository.getTurnNotifier().notifyInSeconds(CORPSE_REFILL_SECONDS, this);
    }
}
/**
 * Matches while Henry's task is still open: the player either never
 * started the quest or the slot still holds the initial "start" state.
 */
static class HenryQuestNotCompletedCondition implements ChatCondition {
    @Override
    public boolean fire(final Player player, final Sentence sentence, final Entity npc) {
        if (!player.hasQuest(QUEST_SLOT)) {
            return true;
        }
        return "start".equals(player.getQuest(QUEST_SLOT));
    }
}
/**
 * Condition: matches once Henry's part of the quest has been finished — the
 * quest slot exists and has moved past its initial "start" state.
 */
static class HenryQuestCompletedCondition implements ChatCondition {
	@Override
	public boolean fire(final Player player, final Sentence sentence, final Entity npc) {
		if (!player.hasQuest(QUEST_SLOT)) {
			return false;
		}
		return !player.getQuest(QUEST_SLOT).equals("start");
	}
}
/**
 * ChatAction: hands the treasure map to the player and advances the quest
 * state to "map".
 */
static class GiveMapAction implements ChatAction {
	/** Whether the map should be bound to the receiving player. */
	private final boolean bind;

	/**
	 * @param bind true to bind the map to the player, false to hand out an unbound copy
	 */
	public GiveMapAction(boolean bind) {
		this.bind = bind;
	}

	@Override
	public void fire(final Player player, final Sentence sentence, final EventRaiser npc) {
		// NOTE(review): assumes "map" is always known to the entity manager;
		// getItem would return null otherwise — confirm against item config.
		final Item map = SingletonRepository.getEntityManager().getItem("map");
		// The info string marks which NPC handed the map out.
		map.setInfoString(npc.getName());
		map.setDescription("You see a hand drawn map, but no matter how you look at it, nothing on it looks familiar.");
		if (bind) {
			map.setBoundTo(player.getName());
		}
		player.equipOrPutOnGround(map);
		player.setQuest(QUEST_SLOT, "map");
	}
}
/**
 * We add text for NPC Henry who will get us on the quest.
 */
private void prepareCowardSoldier() {
	final SpeakerNPC henry = npcs.get(SLD_HENRY);

	// Offer the quest while it has neither been started nor reached the "map" state.
	henry.add(ConversationStates.ATTENDING,
			ConversationPhrases.QUEST_MESSAGES,
			new AndCondition(new QuestNotStartedCondition(QUEST_SLOT),
					new QuestNotInStateCondition(QUEST_SLOT, "map")),
			ConversationStates.QUEST_OFFERED,
			"Find my #group, Peter, Tom, and Charles, prove it and I will reward you. Will you do it?",
			null);

	// After Henry's part is done (quest completed or in "map" state), only mourn.
	henry.add(ConversationStates.ATTENDING,
			ConversationPhrases.QUEST_MESSAGES,
			new OrCondition(new QuestCompletedCondition(QUEST_SLOT),
					new QuestInStateCondition(QUEST_SLOT, "map")),
			ConversationStates.ATTENDING,
			"I'm so sad that most of my friends are dead.",
			null);

	// Player accepts the quest: set state "start" and grant a little karma.
	henry.add(ConversationStates.QUEST_OFFERED,
			ConversationPhrases.YES_MESSAGES, null,
			ConversationStates.ATTENDING,
			"Thank you! I'll be waiting for your return.",
			new SetQuestAndModifyKarmaAction(QUEST_SLOT, "start", 3));

	// player tries to ask for quest again after starting
	henry.add(
			ConversationStates.ATTENDING,
			ConversationPhrases.QUEST_MESSAGES,
			new AndCondition(
					new QuestActiveCondition(QUEST_SLOT),
					new QuestNotInStateCondition(QUEST_SLOT, "map")),
			ConversationStates.ATTENDING,
			"I have already asked you to find my friends Peter, Tom, and Charles.",
			null);

	// Background chatter while the quest offer is pending.
	henry.add(
			ConversationStates.QUEST_OFFERED,
			"group",
			null,
			ConversationStates.QUEST_OFFERED,
			"The General sent five of us to explore this area in search for #treasure. So, will you help me find them?",
			null);
	henry.add(
			ConversationStates.QUEST_OFFERED,
			"treasure",
			null,
			ConversationStates.QUEST_OFFERED,
			"A big treasure is rumored to be #somewhere in this dungeon. Will you help me find my group?",
			null);

	// Player rejects the quest: mark "rejected" and deduct karma.
	henry.add(ConversationStates.QUEST_OFFERED,
			ConversationPhrases.NO_MESSAGES, null,
			ConversationStates.ATTENDING,
			"OK. I understand. I'm scared of the #dwarves myself.",
			new SetQuestAndModifyKarmaAction(QUEST_SLOT, "rejected", -5.0));

	// Reward actions fired when the player brings proof of all three deaths.
	// The legs and armor are taken as proof; the note (IOU) stays with the player.
	final List<ChatAction> actions = new LinkedList<ChatAction>();
	actions.add(new IncreaseXPAction(2500));
	actions.add(new DropInfostringItemAction("leather legs", SLD_TOM));
	actions.add(new DropInfostringItemAction("scale armor", SLD_PETER));
	actions.add(new IncreaseKarmaAction(15.0));
	actions.add(new GiveMapAction(false));

	// Greeting while carrying all three proofs: hand out the reward and the map.
	henry.add(ConversationStates.IDLE,
			ConversationPhrases.GREETING_MESSAGES,
			new AndCondition(new GreetingMatchesNameCondition(henry.getName()),
					new QuestInStateCondition(QUEST_SLOT, "start"),
					new PlayerHasInfostringItemWithHimCondition("leather legs", SLD_TOM),
					new PlayerHasInfostringItemWithHimCondition("note", SLD_CHARLES),
					new PlayerHasInfostringItemWithHimCondition("scale armor", SLD_PETER)),
			ConversationStates.ATTENDING,
			"Oh my! Peter, Tom, and Charles are all dead? *cries*. Anyway, here is your reward. And keep the IOU.",
			new MultipleActions(actions));

	// Greeting while at least one proof is still missing.
	henry.add(ConversationStates.IDLE,
			ConversationPhrases.GREETING_MESSAGES,
			new AndCondition(new GreetingMatchesNameCondition(henry.getName()),
					new QuestInStateCondition(QUEST_SLOT, "start"),
					new NotCondition(
							new AndCondition(
									new PlayerHasInfostringItemWithHimCondition("leather legs", SLD_TOM),
									new PlayerHasInfostringItemWithHimCondition("note", SLD_CHARLES),
									new PlayerHasInfostringItemWithHimCondition("scale armor", SLD_PETER)))),
			ConversationStates.ATTENDING,
			"You didn't prove that you have found them all!",
			null);

	// Map-related small talk once the player already owns the map.
	henry.add(ConversationStates.ATTENDING, Arrays.asList("map", "group", "help"),
			new OrCondition(
					new QuestCompletedCondition(QUEST_SLOT),
					new AndCondition(new HenryQuestCompletedCondition(),
							new PlayerOwnsItemIncludingBankCondition("map"))),
			ConversationStates.ATTENDING,
			"I'm so sad that most of my friends are dead.", null);

	// Replace a lost map (bound this time) while the quest is not yet completed.
	henry.add(ConversationStates.ATTENDING, Arrays.asList("map"),
			new AndCondition(
					new QuestNotCompletedCondition(QUEST_SLOT),
					new HenryQuestCompletedCondition(),
					new NotCondition(new PlayerOwnsItemIncludingBankCondition("map"))),
			ConversationStates.ATTENDING,
			"Luckily I drew a copy of the map, but please don't lose this one.",
			new GiveMapAction(true));

	// No map before the friends have been found.
	henry.add(ConversationStates.ATTENDING, Arrays.asList("map"),
			new HenryQuestNotCompletedCondition(),
			ConversationStates.ATTENDING,
			"If you find my friends, I will give you the map.", null);
}
/**
 * add corpses of ex-NPCs.
 */
private void prepareCorpses() {
	final StendhalRPZone zone = SingletonRepository.getRPWorld().getZone("-6_kanmararn_city");
	// Tom died first, so his corpse is the most decayed (stage 4).
	createSoldierCorpse(zone, 5, 47, 4, SLD_TOM, "leather legs",
			"You see torn leather legs that are heavily covered with blood.");
	// Charles died second (stage 3).
	createSoldierCorpse(zone, 94, 5, 3, SLD_CHARLES, "note",
			"You read: \"IOU 250 money. (signed) McPegleg\"");
	// Peter died recently (stage 2).
	createSoldierCorpse(zone, 11, 63, 2, SLD_PETER, "scale armor",
			"You see a slightly rusty scale armor. It is heavily deformed by several strong hammer blows.");
}

/**
 * Creates one soldier corpse, adds it to the zone and attaches a refiller
 * that keeps the corpse stocked with its quest item.
 *
 * @param zone zone to place the corpse in
 * @param x x coordinate of the corpse
 * @param y y coordinate of the corpse
 * @param stage decay stage of the corpse (higher means died earlier)
 * @param name name of the dead soldier
 * @param itemName name of the quest item the corpse should hold
 * @param description description given to the quest item
 */
private void createSoldierCorpse(final StendhalRPZone zone, final int x, final int y,
		final int stage, final String name, final String itemName, final String description) {
	final Corpse corpse = new Corpse("youngsoldiernpc", x, y);
	corpse.setStage(stage);
	corpse.setName(name);
	corpse.setKiller("a Dwarven patrol");
	// Add our new Ex-NPC to the game world
	zone.add(corpse);
	// Automatically refill the corpse when the quest item is taken.
	final CorpseRefiller refiller = new CorpseRefiller(corpse, itemName, description);
	refiller.start();
}
/**
 * add James.
 */
private void prepareSergeant() {
	final SpeakerNPC james = npcs.get(SRG_JAMES);

	// quest related stuff
	james.addHelp("Think I need a little help myself. My #group got killed and #one of my men ran away. Too bad he had the #map.");
	james.addQuest("Find my fugitive soldier and bring him to me ... or at least the #map he's carrying.");
	james.addReply("group",
			"We were five, three of us died. You probably passed their corpses.");
	james.addReply(Arrays.asList("one", "henry"),
			"Yes, my youngest soldier. He ran away.");
	james.addReply("map",
			"The #treasure map that leads into the heart of the #dwarven #kingdom.");
	james.addReply("treasure",
			"A big treasure is rumored to be somewhere in this dungeon.");
	james.addReply(Arrays.asList("dwarf", "dwarves", "dwarven", "dwarven kingdom"),
			"They are strong enemies! We're in their #kingdom.");
	james.addReply(Arrays.asList("peter", "tom", "charles"),
			"He was a good soldier and fought bravely.");
	james.addReply(Arrays.asList("kingdom", "kanmararn"),
			"Kanmararn, the legendary kingdom of the #dwarves.");
	james.addReply("dreamscape",
			"There's a man east of town. He knows the way.");

	// Reward actions fired when the player delivers Henry's map:
	// XP, take the map, mark the quest done with karma, hand out the boots.
	final List<ChatAction> actions = new LinkedList<ChatAction>();
	actions.add(new IncreaseXPAction(5000));
	actions.add(new DropInfostringItemAction("map", SLD_HENRY));
	actions.add(new SetQuestAndModifyKarmaAction(QUEST_SLOT, "done", 15.0));
	actions.add(new EquipItemAction("mainio boots", 1, true));

	// Player mentions the map while actually carrying Henry's copy.
	james.add(ConversationStates.ATTENDING,
			Arrays.asList("map", "henry"),
			new AndCondition(new QuestInStateCondition(QUEST_SLOT, "map"),
					new PlayerHasInfostringItemWithHimCondition("map", SLD_HENRY)),
			ConversationStates.ATTENDING,
			"The map! Wonderful! Thank you. And here is your reward. I got these boots while on the #dreamscape.",
			new MultipleActions(actions));

	// Player mentions the map but does not have it with him.
	james.add(ConversationStates.ATTENDING,
			Arrays.asList("map", "henry"),
			new AndCondition(new QuestInStateCondition(QUEST_SLOT, "map"),
					new NotCondition(new PlayerHasInfostringItemWithHimCondition("map", SLD_HENRY))),
			ConversationStates.ATTENDING,
			"Well, where is the map?",
			null);

	// After completion, all related keywords get the same thanks.
	james.add(ConversationStates.ATTENDING, ConversationPhrases.QUEST_MESSAGES,
			new QuestCompletedCondition(QUEST_SLOT),
			ConversationStates.ATTENDING,
			"Thanks again for bringing me the map!", null);
	james.add(ConversationStates.ATTENDING, ConversationPhrases.HELP_MESSAGES,
			new QuestCompletedCondition(QUEST_SLOT),
			ConversationStates.ATTENDING,
			"Thanks again for bringing me the map!", null);
	james.add(ConversationStates.ATTENDING, Arrays.asList("map", "henry",
			"group", "one"),
			new QuestCompletedCondition(QUEST_SLOT),
			ConversationStates.ATTENDING,
			"Thanks again for bringing me the map!", null);
}
/**
 * Registers the quest: fills the quest journal info and wires up the NPCs
 * and corpses that make up the quest.
 */
@Override
public void addToWorld() {
	final String questDescription = "Some time ago, Sergeant James started with his crew of four brave soldiers on their adventure of finding a treasure in Kanmararn, the city of dwarves. They didn't return yet.";
	fillQuestInfo("Kanmararn Soldiers", questDescription, true);
	prepareCowardSoldier();
	prepareCorpses();
	prepareSergeant();
}
/**
 * Builds the player's quest journal entries, one line per reached milestone.
 *
 * @param player the player whose quest state is inspected
 * @return journal lines; a diagnostic list if the state is unrecognized
 */
@Override
public List<String> getHistory(final Player player) {
	final List<String> history = new ArrayList<String>();
	if (!player.hasQuest(QUEST_SLOT)) {
		return history;
	}
	final String state = player.getQuest(QUEST_SLOT);
	history.add("I met a scared soldier in Kanmararn City. He asked me to find his friends, Peter, Charles, and Tom.");
	if ("rejected".equals(state)) {
		history.add("I don't want to help Henry.");
		return history;
	}
	if ("start".equals(state)) {
		return history;
	}
	history.add("Sadly I only found corpses of Peter, Charles, and Tom. Henry was aghast. He gave me a map and an IOU, but didn't say what I should do with them now.");
	if ("map".equals(state)) {
		return history;
	}
	history.add("I met Sergeant James and gave him the treasure map. He gave me an excellent pair of mainio boots in return.");
	if (isCompleted(player)) {
		return history;
	}
	// Unexpected quest state: log it and return diagnostic output instead.
	final List<String> debug = new ArrayList<String>();
	debug.add("Quest state is: " + state);
	logger.error("History doesn't have a matching quest state for " + state);
	return debug;
}
/** @return the internal quest name */
@Override
public String getName() {
	return "KanmararnSoldiers";
}

/** @return recommended minimum player level for this quest */
@Override
public int getMinLevel() {
	return 40;
}

/** @return the NPC who hands out the quest */
@Override
public String getNPCName() {
	return SLD_HENRY;
}

/** @return the region the quest is located in */
@Override
public String getRegion() {
	return Region.SEMOS_DUNGEONS;
}
}
| gpl-2.0 |
Rochelimit/fayuandi | src/com/barcode/core/FinishListener.java | 1273 | /*
* Copyright (C) 2010 ZXing authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.barcode.core;
import android.app.Activity;
import android.content.DialogInterface;
/**
 * Simple listener used to exit the app in a few cases: finishes the given
 * activity whether the dialog is clicked or cancelled.
 *
 * @author Sean Owen
 */
public final class FinishListener implements DialogInterface.OnClickListener,
		DialogInterface.OnCancelListener {

	private final Activity activityToFinish;

	public FinishListener(Activity activityToFinish) {
		this.activityToFinish = activityToFinish;
	}

	@Override
	public void onCancel(DialogInterface dialogInterface) {
		exit();
	}

	@Override
	public void onClick(DialogInterface dialogInterface, int i) {
		exit();
	}

	// Shared exit path for both dialog callbacks.
	private void exit() {
		activityToFinish.finish();
	}
}
| gpl-2.0 |
lveci/nest | beam/beam-core/src/main/java/org/esa/beam/framework/datamodel/ProductNodeListener.java | 1665 | /*
* Copyright (C) 2010 Brockmann Consult GmbH (info@brockmann-consult.de)
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the Free
* Software Foundation; either version 3 of the License, or (at your option)
* any later version.
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
* more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, see http://www.gnu.org/licenses/
*/
package org.esa.beam.framework.datamodel;
/**
 * A listener which listens to internal data product changes.
 *
 * @author Norman Fomferra
 * @version $Revision$ $Date$
 */
public interface ProductNodeListener {

	/**
	 * Notified when a node changed.
	 *
	 * @param event the event describing the product node change
	 */
	void nodeChanged(ProductNodeEvent event);

	/**
	 * Notified when a node's data changed.
	 *
	 * @param event the event describing the product node data change
	 */
	void nodeDataChanged(ProductNodeEvent event);

	/**
	 * Notified when a node was added.
	 *
	 * @param event the event describing the added product node
	 */
	void nodeAdded(ProductNodeEvent event);

	/**
	 * Notified when a node was removed.
	 *
	 * @param event the event describing the removed product node
	 */
	void nodeRemoved(ProductNodeEvent event);
}
| gpl-3.0 |
schuellerf/umlet | umlet-swing/src/main/java/com/baselet/element/old/element/Relation.java | 69630 | package com.baselet.element.old.element;
import java.awt.Color;
import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.Polygon;
import java.awt.font.TextLayout;
import java.awt.geom.AffineTransform;
import java.awt.geom.Area;
import java.util.Collections;
import java.util.Set;
import java.util.Vector;
import com.baselet.control.HandlerElementMap;
import com.baselet.control.basics.Converter;
import com.baselet.control.basics.geom.Point;
import com.baselet.control.basics.geom.PointDouble;
import com.baselet.control.basics.geom.Rectangle;
import com.baselet.control.constants.Constants;
import com.baselet.control.enums.AlignHorizontal;
import com.baselet.control.enums.Direction;
import com.baselet.control.enums.LineType;
import com.baselet.control.util.Utils;
import com.baselet.diagram.draw.helper.ColorOwn;
import com.baselet.element.Selector;
import com.baselet.element.facet.common.LayerFacet;
import com.baselet.element.interfaces.GridElement;
import com.baselet.element.old.OldGridElement;
import com.baselet.element.old.relation.Arrow;
import com.baselet.element.old.relation.EmptyShape;
import com.baselet.element.old.relation.Multiplicity;
import com.baselet.element.old.relation.NoShape;
import com.baselet.element.old.relation.Port;
import com.baselet.element.old.relation.Qualifier;
import com.baselet.element.old.relation.Role;
import com.baselet.element.sticking.StickingPolygon;
import com.baselet.element.sticking.polygon.NoStickingPolygonGenerator;
@SuppressWarnings("serial")
public class Relation extends OldGridElement {
// Decorations at the two ends of the relation line, parsed from the
// panel attributes in setPanelAttributes (q=qualifier, m=multiplicity,
// r=role, p=port).
String beginQualifier;
String endQualifier;
String beginArrow;
String endArrow;
String beginMultiplicity;
String endMultiplicity;
String beginRole;
String endRole;
// Line style token, e.g. "-", ".", "-." — parsed from the "lt=" attribute.
String lineType;
// Direction marker for EER diagrams ("EER1".."EER3", "EER_SUBCLASS").
String eerRelDir;
// Free-text lines shown on the relation (lazily created; see getStrings()).
Vector<String> _strings;
// A.Mueller start
// Marks provide/require (client/server) interface endings.
String clientServer;
// A.Mueller end
// G.Mueller start
String beginPort;
String endPort;
String middleArrow;
String csdStartText; // Arrow-Text for composite structure diagram
String csdEndText;
// G.Mueller end
// Hit-test sizes (pixels at zoom 1.0) used for selecting points and lines.
private final float SELECTBOXSIZE = 12;
private final float SELECTCIRCLESIZE = 15;
/**
 * Returns the free-text lines of the relation, lazily creating the backing
 * vector on first access.
 */
private Vector<String> getStrings() {
	Vector<String> strings = _strings;
	if (strings == null) {
		strings = new Vector<String>();
		_strings = strings;
	}
	return strings;
}

/** Replaces the free-text lines (null resets to lazily-created empty). */
private void setStrings(Vector<String> v) {
	_strings = v;
}
/**
 * Returns the geometric middle of the poly-line: the middle point itself
 * when the point count is odd, otherwise the midpoint of the two central
 * points.
 */
private Point getCenterOfLine() {
	final int size = getLinePoints().size();
	if (size % 2 == 1) {
		// Odd count: the central point is returned directly (no copy).
		return getLinePoints().elementAt(size / 2);
	}
	final Point a = getLinePoints().elementAt(size / 2);
	final Point b = getLinePoints().elementAt(size / 2 - 1);
	final Point center = new Point();
	center.x = (a.x + b.x) / 2;
	center.y = (a.y + b.y) / 2;
	return center;
}
/**
 * Serializes the relation's line points as a delimiter-separated list of
 * alternating x and y coordinates.
 *
 * @return the serialized coordinate list
 */
@Override
public String getAdditionalAttributes() {
	Vector<String> tmp = new Vector<String>();
	// tmp.add(beginQualifier);
	// tmp.add(endQualifier);
	// tmp.add(beginArrow);
	// tmp.add(endArrow);
	// tmp.add(beginMultiplicity);
	// tmp.add(endMultiplicity);
	// tmp.add(beginRole);
	// tmp.add(endRole);
	// tmp.add(lineType);
	/* tmp.add(""+this.getX()); tmp.add(""+this.getY()); tmp.add(""+this.getWidth()); tmp.add(""+this.getHeight()); */
	for (int i = 0; i < getLinePoints().size(); i++) {
		Point p = getLinePoints().elementAt(i);
		String s1 = "" + p.x;
		String s2 = "" + p.y;
		tmp.add(s1);
		tmp.add(s2);
	}
	String ret = Utils.composeStrings(tmp, Constants.DELIMITER_ADDITIONAL_ATTRIBUTES);
	return ret;
}
/**
 * Restores the relation's line points from a delimiter-separated list of
 * alternating x and y coordinates (the inverse of getAdditionalAttributes).
 *
 * @param s serialized coordinate list
 */
@Override
public void setAdditionalAttributes(String s) {
	getLinePoints().clear();
	final Vector<String> parts = Utils.decomposeStringsIncludingEmptyStrings(s, Constants.DELIMITER_ADDITIONAL_ATTRIBUTES);
	// Coordinates are stored pairwise: x at even indices, y at odd ones.
	for (int i = 0; i < parts.size(); i += 2) {
		final int x = Integer.parseInt(parts.elementAt(i));
		final int y = Integer.parseInt(parts.elementAt(i + 1));
		getLinePoints().add(new Point(x, y));
	}
}
/**
 * Parses the relation's panel attributes. Recognized lines are key/value
 * pairs (q1=/q2= qualifiers, m1=/m2= multiplicities, r1=/r2= roles,
 * p1=/p2= ports) and the "lt=" line-type string, whose characters encode
 * arrow heads, EER markers, provide/require interfaces, delegation
 * connectors and the line style. Unrecognized lines become free text.
 * NOTE: the parsing of "lt=" is order-dependent — longer tokens must be
 * matched before their prefixes (e.g. ">>>" before ">>").
 *
 * @param state the raw panel attribute text
 */
@Override
public void setPanelAttributes(String state) {
	// Reset all parsed decorations to their defaults before re-parsing.
	beginQualifier = "";
	endQualifier = "";
	beginArrow = "";
	endArrow = "";
	beginMultiplicity = "";
	endMultiplicity = "";
	beginRole = "";
	endRole = "";
	lineType = "-";
	eerRelDir = "";
	// G.Mueller.Start
	middleArrow = "";
	beginPort = "";
	endPort = "";
	// G.Mueller. End
	setStrings(null);
	super.setPanelAttributes(state);
	Vector<String> tmp = Utils.decomposeStrings(state);
	for (int i = 0; i < tmp.size(); i++) {
		String s = tmp.elementAt(i);
		if (s.startsWith("q1=") & s.length() > 3) {
			beginQualifier = s.substring(3, s.length());
		}
		else if (s.startsWith("q2=") & s.length() > 3) {
			endQualifier = s.substring(3, s.length());
		}
		else if (s.startsWith("m1=") & s.length() > 3) {
			beginMultiplicity = s.substring(3, s.length());
		}
		else if (s.startsWith("m2=") & s.length() > 3) {
			endMultiplicity = s.substring(3, s.length());
		}
		else if (s.startsWith("r1=") & s.length() > 3) {
			beginRole = s.substring(3, s.length());
		}
		else if (s.startsWith("r2=") & s.length() > 3) {
			endRole = s.substring(3, s.length());
		}
		else if (s.startsWith("p1=") & s.length() > 3) {
			beginPort = s.substring(3, s.length());
		}
		else if (s.startsWith("p2=") & s.length() > 3) {
			endPort = s.substring(3, s.length());
		}
		else if (s.startsWith("lt=") & s.length() > 3) {
			csdStartText = "";
			csdEndText = "";
			// Composite structure diagram arrows "<[text]" / "[text]>":
			// extract the text and normalize it away so later checks see "<[]".
			if (s.indexOf("<[") >= 0) {
				beginArrow = "compStart";
				if (s.length() > 6) {
					csdStartText = getCSDText(s)[0];
					s = s.replace("<[" + csdStartText + "]", "<[]");
				}
			}
			if (s.indexOf("]>") >= 0) {
				endArrow = "compEnd";
				if (s.length() > 6) {
					csdEndText = getCSDText(s)[1];
					s = s.replace("[" + csdEndText + "]>", "[]>");
				}
			}
			if (s.indexOf("]<") >= 0) {
				beginArrow = beginArrow + "del";
			}
			if (s.indexOf(">[") >= 0) {
				endArrow = endArrow + "del";
			}
			// Arrow heads at the end of the line: longest token first.
			if (s.indexOf(">>>>>") >= 0) {
				endArrow = "<<<";
			}
			else if (s.indexOf(">>>>") >= 0) {
				endArrow = "X";
			}
			else if (s.indexOf(">>>") >= 0) {
				endArrow = "x";
			}
			else if (s.indexOf(">>") >= 0) {
				endArrow = "<<";
			}
			else if (s.indexOf("m>") >= 0) {
				endArrow = "crowsFoot";
			}
			else if (s.indexOf(">") >= 0) {
				if (endArrow.equals("")) {
					endArrow = "<";
				}
			}
			// Arrow heads at the beginning of the line: longest token first.
			if (s.indexOf("<<<<<") >= 0) {
				beginArrow = "<<<";
			}
			else if (s.indexOf("<<<<") >= 0) {
				beginArrow = "X";
			}
			else if (s.indexOf("<<<") >= 0) {
				beginArrow = "x";
			}
			else if (s.indexOf("<<") >= 0) {
				beginArrow = "<<";
			}
			else if (s.indexOf("<m") >= 0) {
				beginArrow = "crowsFoot";
			}
			else if (s.indexOf("<") >= 0) {
				if (beginArrow.equals("")) {
					beginArrow = "<";
				}
			}
			// EER markers override any arrow heads matched above.
			if (s.indexOf("<EER>") >= 0) {
				beginArrow = "";
				endArrow = "";
				eerRelDir = "EER1";
			}
			else if (s.indexOf("<EER") >= 0) {
				beginArrow = "";
				endArrow = "";
				eerRelDir = "EER2";
			}
			else if (s.indexOf("EER>") >= 0) {
				beginArrow = "";
				endArrow = "";
				eerRelDir = "EER3";
			}
			else if (s.indexOf("EER") >= 0) {
				beginArrow = "";
				endArrow = "";
				eerRelDir = "EER_SUBCLASS";
			}
			// A.Mueller Beginn
			// Provide/require (ball-and-socket) interface endings.
			clientServer = "";
			if (s.indexOf("(()") >= 0) {
				// beginArrow = ""; G.Mueller
				clientServer = "provideRequire";
			}
			else if (s.indexOf("())") >= 0) {
				// endArrow = ""; G.Mueller
				clientServer = "requireProvide";
			}
			if (s.indexOf("<(+)") >= 0) {
				beginArrow = "packageStart";
				clientServer = " ";
			}
			else if (s.indexOf("<()") >= 0) {
				clientServer = "start"; // used for setting the startpoint
				// nonstickable
				beginArrow = "require";
			}
			else if (s.indexOf("<(") >= 0) {
				clientServer = "start"; // used for setting the
				// startpoint
				// not stickable
				beginArrow = "provide";
			}
			else if (s.indexOf("<x") >= 0) {
				beginArrow = "n";
			}
			if (s.indexOf("(+)>") >= 0) {
				endArrow = "packageEnd";
				clientServer = " ";
			}
			else if (s.indexOf("()>") >= 0) {
				clientServer = "end"; // used for setting the endpoint
				// nonstickable
				endArrow = "require";
			}
			else if (s.indexOf(")>") >= 0) {
				clientServer = "end"; // used for setting the endpoint
				// nonstickable
				endArrow = "provide";
			}
			else if (s.indexOf("x>") >= 0) {
				endArrow = "n";
			}
			// A.Mueller End
			// Mueller G. End
			// Mueller G. Start
			// Delegation connectors (only when no provide/require matched).
			if (s.indexOf(">()") >= 0 && clientServer.equals("")) {
				middleArrow = "delegationArrowRight";
				if (endArrow.equals("<")) {
					endArrow = "";
				}
			}
			else if (s.indexOf("()<") >= 0 && clientServer.equals("")) {
				middleArrow = "delegationArrowLeft";
				if (beginArrow.equals("<")) {
					beginArrow = "";
				}
			}
			else if (s.indexOf("()") >= 0 && clientServer.equals("")) {
				middleArrow = "delegation";
			}
			else if (s.indexOf("(") >= 0 && clientServer.equals("")) {
				middleArrow = "delegationStart";
				lineType = "-.";
			}
			else if (s.indexOf(")") >= 0 && clientServer.equals("")) {
				middleArrow = "delegationEnd";
				lineType = ".-";
			}
			// G.Mueller: LineTyp check here:
			if (s.indexOf(".") >= 0 & s.indexOf("-") >= s.indexOf(".")) {
				lineType = ".-";
			}
			else if (s.indexOf("-") >= 0 & s.indexOf(".") >= s.indexOf("-")) {
				lineType = "-.";
			}
			else if (s.indexOf(LineType.DOTTED.getValue()) >= 0) {
				lineType = LineType.DOTTED.getValue();
			}
			else if (s.indexOf(LineType.DASHED.getValue()) >= 0) {
				lineType = LineType.DASHED.getValue();
			}
			else if (s.indexOf(LineType.SOLID.getValue()) >= 0) {
				lineType = LineType.SOLID.getValue();
			}
			else if (s.substring(3, s.length()).indexOf(LineType.DOUBLE.getValue()) >= 0) {
				lineType = LineType.DOUBLE.getValue();
			}
			else if (s.indexOf(LineType.DOUBLE_DOTTED.getValue()) >= 0) {
				lineType = LineType.DOUBLE_DOTTED.getValue();
			}
			else if (s.indexOf(LineType.DOUBLE_DASHED.getValue()) >= 0) {
				lineType = LineType.DOUBLE_DASHED.getValue();
			}
		}
		else {
			// Any line that is not a recognized attribute is free text.
			getStrings().add(s);
		}
	}
}
/**
 * Finds the segment of the poly-line that leaves the given area.
 * Created objects have no side effects; the only assumption is that not
 * every point lies inside the shape. At least 2 points must be provided.
 *
 * @param r area the line starts in
 * @param points the poly-line points (size must be at least 2)
 * @return a vector of exactly two points: the segment start (still inside)
 *         and end (first point outside); falls back to the last two points
 *         when no point leaves the area
 */
public static Vector<Point> getIntersectingLineSegment(Area r, Vector<Point> points) {
	final Vector<Point> segment = new Vector<Point>();
	for (int i = 1; i < points.size(); i++) {
		final Point candidate = points.elementAt(i);
		if (!r.contains(Converter.convert(candidate))) {
			// First point outside the area: the segment starts at its predecessor.
			segment.add(points.elementAt(i - 1));
			segment.add(candidate);
			return segment;
		}
	}
	// No point left the area: fall back to the last two points.
	segment.add(points.elementAt(points.size() - 2));
	segment.add(points.elementAt(points.size() - 1));
	return segment;
}
/**
 * Positions rMovable just outside of rFixed along the direction from
 * pStart to pEnd. First the rectangle's center is marched outward in whole
 * (pEnd - pStart) steps until it no longer intersects rFixed, then a fixed
 * 10-step bisection snugs it back against the boundary.
 * Side effect: rMovable's location is modified during the search.
 *
 * @param rFixed the area to stay outside of
 * @param rMovable the rectangle to place (its size is used for centering)
 * @param pStart start point of the direction vector
 * @param pEnd end point of the direction vector
 * @return the top-left location found for rMovable
 */
public static Point moveNextTo(Area rFixed, Rectangle rMovable, Point pStart, Point pEnd) {
	// These ints can simply be added to line
	// (offsets translating a center position to a top-left location).
	int centerDiffX = -rMovable.getWidth() / 2;
	int centerDiffY = -rMovable.getHeight() / 2;
	int vectorX = pEnd.x - pStart.x;
	int vectorY = pEnd.y - pStart.y;
	int startx = pStart.x;
	int starty = pStart.y;
	int endx = pEnd.x;
	int endy = pEnd.y;
	// March outward until the rectangle no longer intersects the fixed area.
	for (@SuppressWarnings("unused")
	int i = 0;; i++) {
		endx += vectorX;
		endy += vectorY;
		rMovable.setLocation(endx + centerDiffX, endy + centerDiffY);
		if (!rFixed.intersects(Converter.convert(rMovable))) {
			break;
		}
	}
	// Bisect 10 times between the last intersecting and the first free
	// position to move the rectangle as close to the boundary as possible.
	int newx = 0;
	int newy = 0;
	for (int i = 0; i < 10; i++) {
		newx = (endx + startx) / 2;
		newy = (endy + starty) / 2;
		rMovable.setLocation(newx + centerDiffX, newy + centerDiffY);
		if (rFixed.intersects(Converter.convert(rMovable))) {
			startx = newx;
			starty = newy;
		}
		else {
			endx = newx;
			endy = newy;
		}
	}
	Point ret = new Point(newx + centerDiffX, newy + centerDiffY);
	return ret;
}
/**
 * Lines up a chain of shapes (e.g. sequence-diagram decorations) along the
 * poly-line: the first shape is anchored at the first point via the given
 * hotspot, every following shape is pushed outside the union of all
 * previously placed shapes. Arrow shapes additionally get their arrow-head
 * end points computed from the direction of the intersecting line segment.
 * Side effect: duplicate consecutive points are removed from {@code points}
 * and the shapes' locations are modified.
 *
 * @param shapes shapes to place along the line, in order
 * @param points the poly-line points (duplicates are removed in place)
 * @param hotspotx x offset of the first shape's anchor point
 * @param hotspoty y offset of the first shape's anchor point
 * @return false if fewer than 2 distinct points remain, true otherwise
 */
public boolean lineUp(Vector<Rectangle> shapes, Vector<Point> points, int hotspotx, int hotspoty) {
	float zoom = HandlerElementMap.getHandlerForElement(this).getZoomFactor();
	// Remove point with the same coordinates
	for (int i = points.size() - 1; i > 0; i--) {
		Point p1 = points.elementAt(i);
		Point p2 = points.elementAt(i - 1);
		if (p1.x == p2.x & p1.y == p2.y) {
			points.removeElementAt(i);
		}
	}
	if (points.size() <= 1) {
		return false;
	}
	if (shapes.size() <= 1) {
		return true;
	}
	// Vector ret=new Vector();
	// Rectangle rFixed;
	Rectangle rMovable;
	// Accumulated union of all shapes placed so far.
	Area tmpArea = new Area();
	for (int i = 0; i < shapes.size() - 1; i++) {
		Rectangle r = shapes.elementAt(i);
		if (i == 0) { // The hotspot of the first element is set
			Point p = points.elementAt(0);
			r.setLocation(p.x - hotspotx, p.y - hotspoty);
		}
		Area a = new Area(Converter.convert(r));
		tmpArea.add(a);
		// rFixed=(Rectangle)shapes.elementAt(i);
		rMovable = shapes.elementAt(i + 1);
		/* if (i==0) { // The hotspot of the first element is set Point p=(Point)points.elementAt(0); rFixed.setLocation(p.x-hotspotx,p.y-hotspoty); } */
		// Find the segment leaving the already-placed area and push the
		// next shape outside along that direction.
		Vector<Point> tmp = getIntersectingLineSegment(tmpArea, points);
		Point startIntersectingLine = tmp.elementAt(0);
		Point endIntersectingLine = tmp.elementAt(1);
		Point res = moveNextTo(tmpArea, rMovable, startIntersectingLine, endIntersectingLine);
		// ret.add(res);
		if (rMovable instanceof Arrow) {
			Arrow arrow = (Arrow) rMovable;
			// diffA is the direction of the segment; diffB1/diffB2 are the
			// two perpendicular directions used for the arrow-head wings.
			Point diffA = new Point(-startIntersectingLine.x + endIntersectingLine.x, -startIntersectingLine.y + endIntersectingLine.y);
			Point diffB1 = new Point(diffA.y, -diffA.x);
			Point diffB2 = new Point(-diffB1.x, -diffB1.y);
			Point a1 = new Point(2 * diffA.x + diffB1.x, 2 * diffA.y + diffB1.y);
			Point a2 = new Point(2 * diffA.x + diffB2.x, 2 * diffA.y + diffB2.y);
			a1 = Utils.normalize(a1, (int) HandlerElementMap.getHandlerForElement(this).getFontHandler().getFontSize());
			a2 = Utils.normalize(a2, (int) HandlerElementMap.getHandlerForElement(this).getFontHandler().getFontSize());
			arrow.setArrowEndA(a1);
			arrow.setArrowEndB(a2);
			// A.Mueller start
			if (arrow.getString().equals("n")) {
				// this is pretty much the same as above, but it
				// was hard to work out what it does so here it
				// is repeated with better names and some
				// comments. I only made the original vector longer and
				// increased the pixelsize(fontsize)
				// to get the point further towards the center.
				Point start = startIntersectingLine;
				Point end = endIntersectingLine;
				// vectorA is the vector between the two points which is the
				// line between the points...
				Point vectorA = new Point(-start.x + end.x, -start.y + end.y);
				// vector down is a vector standing 90 degrees on the line,
				// vector up is the same in the opposite direction..
				Point vectorDown = new Point(vectorA.y, -vectorA.x);
				Point vectorUp = new Point(-vectorDown.x, -vectorDown.y);
				Point newA1 = new Point(4 * vectorA.x + vectorDown.x, 4 * vectorA.y + vectorDown.y);
				// NOTE(review): diffB2.y is numerically identical to
				// vectorUp.y here (diffA equals vectorA), but vectorUp.y was
				// probably intended — confirm before "simplifying".
				Point newA2 = new Point(4 * vectorA.x + vectorUp.x, 4 * vectorA.y + diffB2.y);
				// this calculates the proportion of the two dimensions of
				// the point compared to each other
				// (which means propX + propY = 1) and multiplies it with
				// the second parameter...
				newA1 = Utils.normalize(newA1, (int) HandlerElementMap.getHandlerForElement(this).getFontHandler().getFontSize() * 2);
				newA2 = Utils.normalize(newA2, (int) HandlerElementMap.getHandlerForElement(this).getFontHandler().getFontSize() * 2);
				arrow.setCrossEndA(newA1);
				arrow.setCrossEndB(newA2);
			}
			else if (arrow.getString().equals("require")) {
				// Axis-aligned bounding box for the require (socket) symbol,
				// oriented by the segment direction.
				int size = (int) (20 * zoom);
				Point start = startIntersectingLine;
				Point end = endIntersectingLine;
				Point upperLeft = new Point();
				Point bottomDown = new Point();
				if (start.getX() > end.getX()) {
					upperLeft = new Point(0, -size / 2);
					bottomDown = new Point(size, size / 2);
				}
				else if (start.getX() < end.getX()) {
					upperLeft = new Point(-size, -size / 2);
					bottomDown = new Point(0, size / 2);
				}
				else if (start.getX() == end.getX()) {
					if (start.getY() < end.getY()) {
						upperLeft = new Point(-size / 2, -size);
						bottomDown = new Point(size / 2, 0);
					}
					else if (start.getY() > end.getY()) {
						upperLeft = new Point(-size / 2, 0);
						bottomDown = new Point(size / 2, size);
					}
				}
				arrow.setCrossEndA(upperLeft);
				arrow.setCrossEndB(bottomDown);
			}
			else if (arrow.getString().equals("provide")) {
				// Bounding box and arc angles for the provide (half circle)
				// symbol, oriented by the segment direction.
				int size = (int) (30 * zoom);
				Point start = startIntersectingLine;
				Point end = endIntersectingLine;
				Point upperLeft = new Point();
				Point bottomDown = new Point();
				if (start.getX() > end.getX()) {
					upperLeft = new Point(0, -size / 2);
					bottomDown = new Point(size, size / 2);
					arrow.setArcStart(90);
					arrow.setArcEnd(180);
				}
				else if (start.getX() < end.getX()) {
					upperLeft = new Point(-size, -size / 2);
					bottomDown = new Point(0, size / 2);
					arrow.setArcStart(90);
					arrow.setArcEnd(-180);
				}
				else if (start.getX() == end.getX()) {
					if (start.getY() < end.getY()) {
						upperLeft = new Point(-size / 2, -size);
						bottomDown = new Point(size / 2, 0);
						arrow.setArcStart(0);
						arrow.setArcEnd(-180);
					}
					else if (start.getY() > end.getY()) {
						upperLeft = new Point(-size / 2, 0);
						bottomDown = new Point(size / 2, size);
						arrow.setArcStart(0);
						arrow.setArcEnd(180);
					}
				}
				arrow.setCrossEndA(upperLeft);
				arrow.setCrossEndB(bottomDown);
				// A.Mueller end
			}
		}
		// ATTENTION: this Rectangle will become the rFixed in the next loop
		rMovable.setLocation(res.x, res.y);
	}
	return true;
}
/**
 * A relation is only in range (= in the selection frame) if every relation
 * point is inside the selection frame.
 */
@Override
public boolean isInRange(Rectangle rect1) {
	for (final Point p : getLinePoints()) {
		// Translate the relation-local point into drawpanel coordinates.
		final Point absolutePoint = new Point(p.getX() + getRectangle().x, p.getY() + getRectangle().y);
		// A single point outside the frame disqualifies the whole relation.
		if (!rect1.contains(absolutePoint)) {
			return false;
		}
	}
	return true;
}
/**
 * Checks whether the point at the given index lies (approximately) on the
 * straight line between its two neighbor points.
 *
 * @param i index of the point to check
 * @return true if both neighbors exist and the detour over the point is
 *         less than 5px longer than the direct connection
 */
public boolean isOnLine(int i) {
	if (i - 1 < 0 || i + 1 >= getLinePoints().size()) {
		return false;
	}
	final Point prev = getLinePoints().elementAt(i - 1);
	final Point next = getLinePoints().elementAt(i + 1);
	final Point p = getLinePoints().elementAt(i);
	return p.distance(prev) + p.distance(next) < prev.distance(next) + 5;
}
/**
 * Determines where a new point at p should be inserted into the poly-line.
 *
 * @param p the candidate point
 * @return the index after the first segment p (approximately) lies on,
 *         or -1 if p is on no segment
 */
public int getWhereToInsert(Point p) {
	for (int i = 0; i < getLinePoints().size() - 1; i++) {
		final Point from = getLinePoints().elementAt(i);
		final Point to = getLinePoints().elementAt(i + 1);
		// 5px tolerance: the detour over p is barely longer than the segment.
		final boolean onSegment = p.distance(from) + p.distance(to) < from.distance(to) + 5;
		if (onSegment) {
			return i + 1;
		}
	}
	return -1;
}
/**
 * Returns the index of the line point within grab distance of p.
 *
 * @param p the probe position
 * @return index of the first matching point, or -1 if none is close enough
 */
public int getLinePoint(Point p) {
	final float zoom = HandlerElementMap.getHandlerForElement(this).getZoomFactor();
	final double grabDistance = SELECTCIRCLESIZE * zoom;
	for (int i = 0; i < getLinePoints().size(); i++) {
		if (p.distance(getLinePoints().elementAt(i)) < grabDistance) {
			return i;
		}
	}
	return -1;
}
/**
 * Returns a new vector containing the elements of v in reverse order.
 * The input vector is not modified.
 *
 * @param <T> element type
 * @param v the vector to reverse
 * @return a reversed copy of v
 */
private <T> Vector<T> flipVector(Vector<T> v) {
	// Collections.reverse on a copy replaces the manual backwards loop.
	final Vector<T> ret = new Vector<T>(v);
	Collections.reverse(ret);
	return ret;
}
/**
 * Hit test for the relation. Other relations that are currently selected
 * take priority: if the point also hits a selected sibling relation, this
 * relation reports a miss so the selected one keeps receiving the clicks.
 *
 * @param p the probe position in drawpanel coordinates
 * @return true if p hits this relation and no selected sibling relation
 */
@Override
public boolean contains(java.awt.Point p) {
	// other relations which are selected are prioritized
	for (GridElement other : HandlerElementMap.getHandlerForElement(this).getDrawPanel().getGridElements()) {
		Selector s = HandlerElementMap.getHandlerForElement(other).getDrawPanel().getSelector();
		if (other != this && other instanceof Relation && s.isSelected(other)) {
			int xDist = getRectangle().x - other.getRectangle().x;
			int yDist = getRectangle().y - other.getRectangle().y;
			Point modifiedP = new Point(p.x + xDist, p.y + yDist); // the point must be modified, because the other relation has other coordinates
			boolean containsHelper = ((Relation) other).calcContains(modifiedP);
			if (s.isSelected(other) && containsHelper) {
				return false;
			}
		}
	}
	return calcContains(Converter.convert(p));
}
/**
 * Core hit-test: returns true if the given point lies on one of the relation's
 * line points (within the zoom-scaled selection-circle radius) or within ~5px
 * of one of the polyline's segments.
 *
 * @param p point to test, in this relation's coordinate system
 */
private boolean calcContains(Point p) {
	float zoom = HandlerElementMap.getHandlerForElement(this).getZoomFactor();
	// First pass: hit directly on a line point (selection circle radius).
	for (int i = 0; i < getLinePoints().size(); i++) {
		Point x = getLinePoints().elementAt(i);
		if (p.distance(x) < SELECTCIRCLESIZE * zoom) {
			return true;
		}
	}
	// Second pass: hit near one of the segments between consecutive points.
	for (int i = 0; i < getLinePoints().size() - 1; i++) {
		Point x1 = getLinePoints().elementAt(i);
		Point x2 = getLinePoints().elementAt(i + 1);
		// Cheap coarse filter (triangle inequality, 5px tolerance): skip
		// segments the point is clearly not near before doing the projection math.
		if (p.distance(x1) + p.distance(x2) > x1.distance(x2) + 5) {
			continue;
		}
		// system origin translated to x1: p1 = segment vector, p2 = point vector
		double p1x = x2.getX() - x1.getX();
		double p1y = x2.getY() - x1.getY();
		double p2x = p.getX() - x1.getX();
		double p2y = p.getY() - x1.getY();
		// Scalar projection factor of p2 onto p1 (dot(p1,p2) / |p1|^2);
		// gives the closest point on the (infinite) line through x1-x2.
		double c = (p1x * p2x + p1y * p2y) / (p1x * p1x + p1y * p1y);
		// intersection point (foot of the perpendicular from p onto the line)
		double i1x = p1x * c;
		double i1y = p1y * c;
		// perpendicular distance from p to the projected point ("abstand")
		double ax = i1x - p2x;
		double ay = i1y - p2y;
		double a = Math.sqrt(ax * ax + ay * ay);
		if (a < 5) {
			return true;
		}
	}
	return false;
}
/**
 * Convenience overload: delegates the hit-test to {@link #contains(java.awt.Point)}.
 */
@Override
public boolean contains(int x, int y) {
	java.awt.Point awtPoint = new java.awt.Point(x, y);
	return contains(awtPoint);
}
// Backing store for this relation's line points; created lazily on first access.
private Vector<Point> _points;

/**
 * Returns the (mutable) vector of line points, creating an empty one on first
 * use so callers never see null.
 */
public Vector<Point> getLinePoints() {
	Vector<Point> points = _points;
	if (points == null) {
		points = new Vector<Point>();
		_points = points;
	}
	return points;
}
/**
 * Moves the line point at the given index and repaints the relation.
 *
 * @param index index into getLinePoints()
 * @param diffx horizontal delta
 * @param diffy vertical delta
 */
public void moveLinePoint(int index, int diffx, int diffy) {
	Point target = getLinePoints().elementAt(index);
	// NOTE(review): Point here is the project's own Point class; move()
	// presumably translates by (diffx, diffy) — confirm against its definition.
	target.move(diffx, diffy);
	repaint();
}
/**
 * Creates a visible copy of this relation carrying the same panel attributes,
 * additional attributes and rectangle, registered with this element's handler.
 */
@Override
public GridElement cloneFromMe() {
	Relation copy = new Relation();
	copy.setPanelAttributes(getPanelAttributes());
	copy.setAdditionalAttributes(getAdditionalAttributes());
	copy.setVisible(true);
	copy.setRectangle(getRectangle());
	// Register the clone with the same handler so it gets its listeners set up.
	HandlerElementMap.getHandlerForElement(this).setHandlerAndInitListeners(copy);
	return copy;
}
/**
 * Builds the square polygon used as the "move whole line" handle, centered on
 * the middle of the polyline (also used to test whether the mouse is over it).
 *
 * @return the handle polygon, or null if there are too few line points
 */
public Polygon getWholeLinePolygon() {
	float zoom = HandlerElementMap.getHandlerForElement(this).getZoomFactor();
	int pointCount = getLinePoints().size();
	Point center;
	if (pointCount % 2 == 0 && pointCount > 0) {
		// Even number of points: the middle of the line is well-defined.
		center = getCenterOfLine();
	}
	else if (pointCount > 2) {
		// Odd number of points: take the midpoint of the middle segment.
		Point segA = getLinePoints().elementAt(pointCount / 2);
		Point segB = getLinePoints().elementAt(pointCount / 2 + 1);
		center = new Point((segA.x + segB.x) / 2, (segA.y + segB.y) / 2);
	}
	else {
		return null;
	}
	// Half of the zoom-scaled selection box size on each side of the center.
	int half = (int) (SELECTBOXSIZE * zoom) / 2;
	Polygon handle = new Polygon();
	handle.addPoint(center.x - half, center.y - half);
	handle.addPoint(center.x + half, center.y - half);
	handle.addPoint(center.x + half, center.y + half);
	handle.addPoint(center.x - half, center.y + half);
	return handle;
}
/**
 * Returns true if (x, y) lies inside the "move whole line" handle polygon.
 * Returns false when no handle polygon exists (too few line points).
 */
public boolean isWholeLine(int x, int y) {
	Polygon p = getWholeLinePolygon();
	// Simplified from the nested if/return-true/return-false form: the null
	// guard and the containment test collapse into one boolean expression.
	return p != null && p.contains(x, y);
}
/* (non-Javadoc)
 * @see main.element.base.Entity#paintEntity(java.awt.Graphics) */
@Override
public void paintEntity(Graphics g) {
float zoom = HandlerElementMap.getHandlerForElement(this).getZoomFactor();
Graphics2D g2 = (Graphics2D) g;
g2.setFont(HandlerElementMap.getHandlerForElement(this).getFontHandler().getFont());
colorize(g2); // enable colors
g2.setColor(fgColor);
// Just to set anti-aliasing, even if no text
// operations occur
// g2.setColor(Color.MAGENTA);
// g2.drawLine(0,0,0,2);
// g2.drawLine(0,0,2,0);
// g2.drawLine(this.getWidth()-1-2,this.getHeight()-1, this.getWidth()-1,this.getHeight()-1);
// g2.drawLine(this.getWidth()-1,this.getHeight()-1-2, this.getWidth()-1,this.getHeight()-1);
// g2.setColor(activeColor);
Vector<Rectangle> startShapes = new Vector<Rectangle>();
Vector<Rectangle> endShapes = new Vector<Rectangle>();
startShapes.add(new NoShape());
endShapes.add(new NoShape());
if (beginQualifier != null && beginQualifier.length() > 0) {
TextLayout tl = new TextLayout(beginQualifier, HandlerElementMap.getHandlerForElement(this).getFontHandler().getFont(), g2.getFontRenderContext());
Qualifier q = new Qualifier(beginQualifier, 0, 0, (int) tl.getBounds().getWidth() + (int) HandlerElementMap.getHandlerForElement(this).getFontHandler().getFontSize() * 2, (int) tl.getBounds().getHeight() + (int) HandlerElementMap.getHandlerForElement(this).getFontHandler().getFontSize() / 2);
startShapes.add(q);
}
if (endQualifier != null && endQualifier.length() > 0) {
TextLayout tl = new TextLayout(endQualifier, HandlerElementMap.getHandlerForElement(this).getFontHandler().getFont(),
g2.getFontRenderContext());
Qualifier q = new Qualifier(endQualifier, 0, 0, (int) tl.getBounds().getWidth() + (int) HandlerElementMap.getHandlerForElement(this).getFontHandler().getFontSize() * 2, (int) tl.getBounds().getHeight() + (int) HandlerElementMap.getHandlerForElement(this).getFontHandler().getFontSize() / 2);
endShapes.add(q);
}
if (beginArrow != null && beginArrow.length() > 0) {
Arrow a = new Arrow(beginArrow);
startShapes.add(a);
}
if (endArrow != null && endArrow.length() > 0) {
Arrow a = new Arrow(endArrow);
endShapes.add(a);
}
if (beginMultiplicity != null && beginMultiplicity.length() > 0) {
EmptyShape e = new EmptyShape((int) HandlerElementMap.getHandlerForElement(this).getFontHandler().getFontSize());
startShapes.add(e);
TextLayout tl = new TextLayout(beginMultiplicity, HandlerElementMap.getHandlerForElement(this).getFontHandler().getFont(), g2.getFontRenderContext());
Multiplicity m = new Multiplicity(beginMultiplicity, 0, 0, (int) tl.getBounds().getWidth(),
(int) tl.getBounds().getHeight());
startShapes.add(m);
}
if (endMultiplicity != null && endMultiplicity.length() > 0) {
EmptyShape e = new EmptyShape((int) HandlerElementMap.getHandlerForElement(this).getFontHandler().getFontSize());
endShapes.add(e);
TextLayout tl = new TextLayout(endMultiplicity, HandlerElementMap.getHandlerForElement(this).getFontHandler().getFont(), g2.getFontRenderContext());
Multiplicity m = new Multiplicity(endMultiplicity, 0, 0, (int) tl.getBounds().getWidth(),
(int) tl.getBounds().getHeight());
endShapes.add(m);
}
if (beginRole != null && beginRole.length() > 0) {
EmptyShape e = new EmptyShape((int) HandlerElementMap.getHandlerForElement(this).getFontHandler().getFontSize());
startShapes.add(e);
// A.Mueller start
// calculating the width if we break lines...
int position = 0;
int lineBreaks = 0;
double broadestText = HandlerElementMap.getHandlerForElement(this).getFontHandler().getTextWidth(beginRole);
while (position != 1) {
int positionNew = beginRole.indexOf("\\\\", position);
if (position == 0 && positionNew != -1) {
broadestText = 0;
}
if (positionNew != -1) {
broadestText = Math.max(broadestText, HandlerElementMap.getHandlerForElement(this).getFontHandler()
.getTextWidth(beginRole.substring(position,
positionNew)));
if (beginRole.lastIndexOf("\\\\") + 2 != beginRole.length()) {
broadestText = Math.max(broadestText, HandlerElementMap.getHandlerForElement(this).getFontHandler().getTextWidth(beginRole.substring(beginRole.lastIndexOf("\\\\") + 2, beginRole.length())));
}
lineBreaks++;
}
position = positionNew + 2;
}
Role r = new Role(beginRole, 0, 0, (int) broadestText, lineBreaks * (int) HandlerElementMap.getHandlerForElement(this).getFontHandler().getFontSize() + (lineBreaks + 2) * (int) HandlerElementMap.getHandlerForElement(this).getFontHandler().getDistanceBetweenTexts());
// <OLDCODE>
/* TextLayout tl = new TextLayout(beginRole, this.getHandler().getFont(), Constants.getFRC(g2)); Role r = new Role(beginRole, 0, 0, (int) tl.getBounds().getWidth(), (int) tl.getBounds().getHeight()); */
// </OLDCODE>
// A.Mueller end
startShapes.add(r);
}
if (endRole != null && endRole.length() > 0) {
EmptyShape e = new EmptyShape((int) HandlerElementMap.getHandlerForElement(this).getFontHandler().getFontSize());
endShapes.add(e);
// A.Mueller start
// calculating the width if we break lines...
int position = 0;
int lineBreaks = 0;
double broadestText = HandlerElementMap.getHandlerForElement(this).getFontHandler().getTextWidth(endRole);
while (position != 1) {
int positionNew = endRole.indexOf("\\\\", position);
if (position == 0 && positionNew != -1) {
broadestText = 0;
}
if (positionNew != -1) {
broadestText = Math.max(broadestText, HandlerElementMap.getHandlerForElement(this).getFontHandler().getTextWidth(endRole.substring(position, positionNew)));
if (endRole.lastIndexOf("\\\\") + 2 != endRole.length()) {
broadestText = Math.max(broadestText,
HandlerElementMap.getHandlerForElement(this).getFontHandler().getTextWidth(endRole.substring(endRole.lastIndexOf("\\\\") + 2,
endRole.length())));
}
lineBreaks++;
}
position = positionNew + 2;
}
Role r = new Role(endRole, 0, 0, (int) broadestText, lineBreaks * (int) HandlerElementMap.getHandlerForElement(this).getFontHandler().getFontSize() + (lineBreaks + 2) * (int) HandlerElementMap.getHandlerForElement(this).getFontHandler().getDistanceBetweenTexts());
// <OLDCODE>
/* TextLayout tl = new TextLayout(endRole, this.getHandler().getFont(), Constants.getFRC(g2)); Role r = new Role(endRole, 0, 0, (int) tl.getBounds().getWidth(), (int) tl.getBounds().getHeight()); */
// </OLDCODE>
// A.Mueller end
endShapes.add(r);
}
// G.Mueller start
if (beginPort != null && beginPort.length() > 0) {
EmptyShape e = new EmptyShape((int) HandlerElementMap.getHandlerForElement(this).getFontHandler().getFontSize());
startShapes.add(e);
TextLayout tl = new TextLayout(beginPort, HandlerElementMap.getHandlerForElement(this).getFontHandler().getFont(), g2.getFontRenderContext());
Port p = new Port(beginPort, 0, 0, (int) tl.getBounds().getWidth(), (int) tl.getBounds().getHeight());
startShapes.add(p);
}
if (endPort != null && endPort.length() > 0) {
EmptyShape e = new EmptyShape((int) HandlerElementMap.getHandlerForElement(this).getFontHandler().getFontSize());
endShapes.add(e);
TextLayout tl = new TextLayout(endPort, HandlerElementMap.getHandlerForElement(this).getFontHandler().getFont(), g2.getFontRenderContext());
Port p = new Port(endPort, 0, 0, (int) tl.getBounds().getWidth(), (int) tl.getBounds().getHeight());
endShapes.add(p);
}
// G.Mueller end
// ******************************************************************
Vector<Point> startPoints = new Vector<Point>(getLinePoints());
Vector<Point> endPoints = flipVector(startPoints);
boolean a = lineUp(startShapes, startPoints, 0, 0);
boolean b = lineUp(endShapes, endPoints, 0, 0);
if (!a || !b) {
return;
}
// G.Mueller change begin
if (lineType.equals("-.")) {
g2.setStroke(Utils.getStroke(LineType.SOLID, 1));
}
else if (lineType.equals(".-")) {
g2.setStroke(Utils.getStroke(LineType.DASHED, 1));
}
else if (lineType.equals(LineType.SOLID.getValue())) {
g2.setStroke(Utils.getStroke(LineType.SOLID, 1));
}
else if (lineType.equals(LineType.DASHED.getValue())) {
g2.setStroke(Utils.getStroke(LineType.DASHED, 1));
}
else if (lineType.equals(LineType.DOTTED.getValue())) {
g2.setStroke(Utils.getStroke(LineType.DOTTED, 1));
}
else if (lineType.equals(LineType.DOUBLE.getValue())) {
g2.setStroke(Utils.getStroke(LineType.DOUBLE, 1));
}
else if (lineType.equals(LineType.DOUBLE_DASHED.getValue())) {
g2.setStroke(Utils.getStroke(LineType.DOUBLE_DASHED, 1));
}
else if (lineType.equals(LineType.DOUBLE_DOTTED.getValue())) {
g2.setStroke(Utils.getStroke(LineType.DOUBLE_DOTTED, 1));
}
for (int i = 0; i < getLinePoints().size() - 1; i++) {
if (i == Math.floor((getLinePoints().size() - 1) / 2.0)) {
Point p1 = getLinePoints().elementAt(i);
Point p2 = getLinePoints().elementAt(i + 1);
// G.Mueller start
Point pm = new Point(p1.x - (p1.x - p2.x) / 2, p1.y - (p1.y - p2.y) / 2);
g2.drawLine(p1.x, p1.y, pm.x, pm.y);
if (lineType.equals("-.")) {
g2.setStroke(Utils.getStroke(LineType.DASHED, 1));
}
if (lineType.equals(".-")) {
g2.setStroke(Utils.getStroke(LineType.SOLID, 1));
}
g2.drawLine(pm.x, pm.y, p2.x, p2.y);
// g2.drawLine(p1.x, p1.y, p2.x, p2.y);
// G. Mueller end
// ##########################################################################################
// ##########################################################################################
if (eerRelDir.indexOf("EER_SUBCLASS") >= 0) {
Point px1 = getLinePoints().elementAt(i);
Point px2 = getLinePoints().elementAt(i + 1);
Point mitte = new Point(px1.x - (px1.x - px2.x) / 2, px1.y - (px1.y - px2.y) / 2);
AffineTransform at = g2.getTransform();
AffineTransform at2 = (AffineTransform) at.clone();
int cx = mitte.x;
int cy = mitte.y;
double winkel = Utils.getAngle(px1.x, px1.y, px2.x,
px2.y);
at2.rotate(winkel, cx, cy);
g2.setTransform(at2);
g2.setColor(fgColor);
g2.setStroke(Utils.getStroke(LineType.SOLID, 2));
g2.drawArc(mitte.x, mitte.y - (int) (10 * zoom), (int) (20 * zoom), (int) (20 * zoom), 90, 180);
g2.setStroke(Utils.getStroke(LineType.SOLID, 1));
g2.setTransform(at);
}
else if (eerRelDir.indexOf("EER") >= 0) {
Point px1 = getLinePoints().elementAt(i);
Point px2 = getLinePoints().elementAt(i + 1);
Point mitte = new Point(px1.x - (px1.x - px2.x) / 2, px1.y - (px1.y - px2.y) / 2);
int recSize = (int) (20 * zoom);
Point r1 = new Point(mitte.x, mitte.y - recSize);
Point r2 = new Point(mitte.x + recSize, mitte.y);
Point r3 = new Point(mitte.x, mitte.y + recSize);
Point r4 = new Point(mitte.x - recSize, mitte.y);
Polygon po1 = new Polygon();
po1.addPoint(r1.x, r1.y);
po1.addPoint(r2.x, r2.y);
po1.addPoint(r3.x, r3.y);
Polygon po2 = new Polygon();
po2.addPoint(r1.x, r1.y);
po2.addPoint(r3.x, r3.y);
po2.addPoint(r4.x, r4.y);
AffineTransform at = g2.getTransform();
AffineTransform at2 = (AffineTransform) at.clone();
int cx = mitte.x;
int cy = mitte.y;
double winkel = Utils.getAngle(px1.x, px1.y, px2.x, px2.y);
at2.rotate(winkel, cx, cy);
g2.setTransform(at2);
if (eerRelDir.equals("EER1")) {
g2.setColor(fgColor);
g2.fillPolygon(po1);
g2.fillPolygon(po2);
}
else if (eerRelDir.equals("EER2")) {
g2.setColor(bgColor);
g2.fillPolygon(po2);
g2.setColor(fgColor);
g2.fillPolygon(po1);
}
else if (eerRelDir.equals("EER3")) {
g2.setColor(bgColor);
g2.fillPolygon(po1);
g2.setColor(fgColor);
g2.fillPolygon(po2);
}
g2.setColor(fgColor);
g2.draw(po1);
g2.draw(po2);
g2.setTransform(at);
}
// A.Mueller start
else if (clientServer != null && clientServer.indexOf("rovide") >= 0) {
Point px1 = getLinePoints().elementAt(i);
Point px2 = getLinePoints().elementAt(i + 1);
Point mitte = new Point(px1.x - (px1.x - px2.x) / 2, px1.y - (px1.y - px2.y) / 2);
AffineTransform at = g2.getTransform();
AffineTransform at2 = (AffineTransform) at.clone();
int cx = mitte.x;
int cy = mitte.y;
double winkel = Utils.getAngle(px1.x, px1.y, px2.x,
px2.y);
at2.rotate(winkel, cx, cy);
g2.setTransform(at2);
Point outerArc = new Point(mitte.x - (int) (15 * zoom), mitte.y - (int) (15 * zoom));
Point innerCircle = new Point();
g2.setColor(Color.white);
g2.fillOval(outerArc.x, outerArc.y, (int) (30 * zoom), (int) (30 * zoom));
g2.setColor(fgColor);
g2.setStroke(Utils.getStroke(LineType.SOLID, 1));
if (clientServer.equals("provideRequire")) {
g2.drawArc(outerArc.x, outerArc.y, (int) (30 * zoom), (int) (30 * zoom), 90, 180);
innerCircle = new Point(mitte.x - (int) (5 * zoom), mitte.y - (int) (10 * zoom));
}
else if (clientServer.equals("requireProvide")) {
g2.drawArc(outerArc.x, outerArc.y, (int) (30 * zoom), (int) (30 * zoom), 90, -180);
innerCircle = new Point(mitte.x - (int) (15 * zoom), mitte.y - (int) (10 * zoom));
}
g2.drawOval(innerCircle.x, innerCircle.y, (int) (20 * zoom), (int) (20 * zoom));
g2.setTransform(at);
}
// A.Mueller end
// G.Mueller start
else if (middleArrow.startsWith("delegation")) {
Point px1 = getLinePoints().elementAt(i);
Point px2 = getLinePoints().elementAt(i + 1);
Point mitte = new Point(px1.x - (px1.x - px2.x) / 2, px1.y - (px1.y - px2.y) / 2);
AffineTransform at = g2.getTransform();
AffineTransform at2 = (AffineTransform) at.clone();
int cx = mitte.x;
int cy = mitte.y;
double winkel = Utils.getAngle(px1.x, px1.y, px2.x, px2.y);
at2.rotate(winkel, cx, cy);
g2.setTransform(at2);
Point circle = new Point(mitte.x - (int) (15 * zoom), mitte.y - (int) (15 * zoom));
if (middleArrow.equals("delegation")) {
g2.setColor(Color.white);
g2.fillOval(circle.x + (int) (5 * zoom), circle.y + (int) (5 * zoom), (int) (20 * zoom), (int) (20 * zoom));
g2.setColor(fgColor);
g2.setStroke(Utils.getStroke(LineType.SOLID, 1));
g2.drawOval(circle.x + (int) (5 * zoom), circle.y + (int) (5 * zoom), (int) (20 * zoom), (int) (20 * zoom));
}
if (middleArrow.startsWith("delegationArrow")) {
g2.setStroke(Utils.getStroke(LineType.SOLID, 1));
if (middleArrow.equals("delegationArrowRight")) {
g2.drawLine(circle.x + (int) (5 * zoom), circle.y + (int) (15 * zoom), circle.x - (int) (5 * zoom), circle.y + (int) (9 * zoom));
g2.drawLine(circle.x + (int) (5 * zoom), circle.y + (int) (15 * zoom), circle.x - (int) (5 * zoom), circle.y + (int) (20 * zoom));
}
if (middleArrow.equals("delegationArrowLeft")) {
g2.drawLine(circle.x + (int) (25 * zoom), circle.y + (int) (15 * zoom), circle.x + (int) (35 * zoom), circle.y + (int) (9 * zoom));
g2.drawLine(circle.x + (int) (25 * zoom), circle.y + (int) (15 * zoom), circle.x + (int) (35 * zoom), circle.y + (int) (20 * zoom));
}
g2.setColor(Color.white);
g2.fillOval(circle.x + (int) (5 * zoom), circle.y + (int) (5 * zoom), (int) (20 * zoom), (int) (20 * zoom));
g2.setColor(fgColor);
g2.setStroke(Utils.getStroke(LineType.SOLID, 1));
g2.drawOval(circle.x + (int) (5 * zoom), circle.y + (int) (5 * zoom), (int) (20 * zoom), (int) (20 * zoom));
}
if (middleArrow.equals("delegationStart")) {
g2.setColor(Color.white);
g2.fillArc(circle.x, circle.y, (int) (30 * zoom), (int) (30 * zoom), 90, 180);
g2.setColor(fgColor);
g2.setStroke(Utils.getStroke(LineType.SOLID, 1));
g2.drawArc(circle.x, circle.y, (int) (30 * zoom), (int) (30 * zoom), 90, 180);
}
if (middleArrow.equals("delegationEnd")) {
g2.setColor(Color.white);
g2.fillArc(circle.x, circle.y, (int) (30 * zoom), (int) (30 * zoom), 90, -180);
g2.setColor(fgColor);
g2.setStroke(Utils.getStroke(LineType.SOLID, 1));
g2.drawArc(circle.x, circle.y, (int) (30 * zoom), (int) (30 * zoom), 90, -180);
}
g2.setTransform(at);
}
// G.Mueller end
// ##########################################################################################
// ##########################################################################################
if (lineType.equals("-.")) {
g2.setStroke(Utils.getStroke(LineType.DASHED, 1));
}
if (lineType.equals(".-")) {
g2.setStroke(Utils.getStroke(LineType.SOLID, 1));
}
}
else {
Point p1 = getLinePoints().elementAt(i);
Point p2 = getLinePoints().elementAt(i + 1);
g2.drawLine(p1.x, p1.y, p2.x, p2.y);
}
}
g2.setStroke(Utils.getStroke(LineType.SOLID, 1));
if (HandlerElementMap.getHandlerForElement(this).getDrawPanel().getSelector().isSelected(this)) {
for (int i = 0; i < getLinePoints().size(); i++) {
Point p = getLinePoints().elementAt(i);
int start = (int) (SELECTCIRCLESIZE / 15 * 10 * zoom);
int width = (int) (SELECTCIRCLESIZE / 15 * 20 * zoom);
g2.drawOval(p.x - start, p.y - start, width, width);
}
// DRAW Moveall Rect
Polygon poly = getWholeLinePolygon();
if (poly != null) {
g2.drawPolygon(poly);
}
}
Vector<Rectangle> tmp = new Vector<Rectangle>(startShapes);
tmp.addAll(endShapes);
for (int i = 0; i < tmp.size(); i++) {
Rectangle r = tmp.elementAt(i);
if (r instanceof Qualifier) {
Qualifier q = (Qualifier) r;
// begin B. Buckl
g.setColor(bgColor);
g.fillRect(r.getX(), r.getY(), r.getWidth(), r.getHeight());
g.setColor(fgColor);
// end
g.drawRect(r.getX(), r.getY(), r.getWidth(), r.getHeight());
HandlerElementMap.getHandlerForElement(this).getFontHandler().writeText(g2, q.getString(), r.getX() + (int) HandlerElementMap.getHandlerForElement(this).getFontHandler().getFontSize(), r.getY() + (int) HandlerElementMap.getHandlerForElement(this).getFontHandler().getFontSize(), AlignHorizontal.LEFT);
}
else if (r instanceof Arrow) {
Arrow arrow = (Arrow) r;
if (arrow.getString().equals("crowsFoot")) {
g2.drawLine(arrow.getX(), arrow.getY() + arrow.getArrowEndA().y,
arrow.getX() + arrow.getArrowEndA().x, arrow.getY());
g2.drawLine(arrow.getX(), arrow.getY() + arrow.getArrowEndB().y,
arrow.getX() + arrow.getArrowEndB().x, arrow.getY());
// A.Mueller Start
}
else if (!arrow.getString().equals("n") && !arrow.getString().equals("require") && !arrow.getString().equals("provide") && !arrow.getString().startsWith("package") && !arrow.getString().startsWith("comp")) {
// A.Mueller end
g2.drawLine(arrow.getX(), arrow.getY(),
arrow.getX() + arrow.getArrowEndA().x,
arrow.getY() + arrow.getArrowEndA().y);
g2.drawLine(arrow.getX(), arrow.getY(),
arrow.getX() + arrow.getArrowEndB().x,
arrow.getY() + arrow.getArrowEndB().y);
// A.Mueller start
}
// A.Mueller end
if (arrow.getString().equals("<<<")) { // LME
// filled arrow head
int[] ax = new int[3];
int[] ay = new int[3];
ax[0] = arrow.getX();
ax[1] = arrow.getX() + arrow.getArrowEndA().x;
ax[2] = arrow.getX() + arrow.getArrowEndB().x;
ay[0] = arrow.getY();
ay[1] = arrow.getY() + arrow.getArrowEndA().y;
ay[2] = arrow.getY() + arrow.getArrowEndB().y;
Polygon myPg = new Polygon(ax, ay, 3);
g2.fill(myPg);
g2.draw(myPg);
}
else if (arrow.getString().equals("<<")) {
// begin B. Buckl
int[] ax = new int[3];
int[] ay = new int[3];
ax[0] = arrow.getX();
ax[1] = arrow.getX() + arrow.getArrowEndA().x;
ax[2] = arrow.getX() + arrow.getArrowEndB().x;
ay[0] = arrow.getY();
ay[1] = arrow.getY() + arrow.getArrowEndA().y;
ay[2] = arrow.getY() + arrow.getArrowEndB().y;
Polygon myPg = new Polygon(ax, ay, 3);
g2.setColor(bgColor);
g2.fill(myPg);
g2.setColor(fgColor);
g2.draw(myPg);
// g2.drawLine((int)arrow.getX()+(int)arrow.getArrowEndA().x,
// (int)arrow.getY()+(int)arrow.getArrowEndA().y,
// (int)arrow.getX()+(int)arrow.getArrowEndB().x,
// (int)arrow.getY()+(int)arrow.getArrowEndB().y);
} // end B. Buckl
else if (arrow.getString().equals("x")) {
int[] ax = new int[4];
int[] ay = new int[4];
ax[0] = arrow.getX();
ay[0] = arrow.getY();
ax[1] = arrow.getX() + arrow.getArrowEndA().x;
ay[1] = arrow.getY() + arrow.getArrowEndA().y;
ax[3] = arrow.getX() + arrow.getArrowEndB().x;
ay[3] = arrow.getY() + arrow.getArrowEndB().y;
ax[2] = -arrow.getX() + ax[1] + ax[3];
ay[2] = -arrow.getY() + ay[1] + ay[3];
// begin B. Buckl
Polygon myPg = new Polygon(ax, ay, 4);
g2.setColor(bgColor);
g2.fill(myPg);
g2.setColor(fgColor);
g2.draw(myPg);
// end B. Buckl
}
else if (arrow.getString().equals("X")) {
int[] ax = new int[4];
int[] ay = new int[4];
ax[0] = arrow.getX();
ay[0] = arrow.getY();
ax[1] = arrow.getX() + arrow.getArrowEndA().x;
ay[1] = arrow.getY() + arrow.getArrowEndA().y;
ax[3] = arrow.getX() + arrow.getArrowEndB().x;
ay[3] = arrow.getY() + arrow.getArrowEndB().y;
ax[2] = -arrow.getX() + ax[1] + ax[3];
ay[2] = -arrow.getY() + ay[1] + ay[3];
g2.fill(new Polygon(ax, ay, 4));
}
// A.Mueller Begin
else if (arrow.getString().equals("n")) {
Point a1 = arrow.getCrossEndA();
Point a2 = arrow.getCrossEndB();
g2.drawLine(arrow.getX() + arrow.getArrowEndA().x,
arrow.getY() + arrow.getArrowEndA().y,
arrow.getX() + a2.x,
arrow.getY() + a2.y);
g2.drawLine(arrow.getX() + arrow.getArrowEndB().x,
arrow.getY() + arrow.getArrowEndB().y,
arrow.getX() + a1.x,
arrow.getY() + a1.y);
}
else if (arrow.getString().equals("require")) {
int width = arrow.getCrossEndB().x - arrow.getCrossEndA().x;
int height = arrow.getCrossEndB().y - arrow.getCrossEndA().y;
g2.drawOval(arrow.getX() + arrow.getCrossEndA().x, arrow.getY() + arrow.getCrossEndA().y, width, height);
}
else if (arrow.getString().equals("provide")) {
int width = arrow.getCrossEndB().x - arrow.getCrossEndA().x;
int height = arrow.getCrossEndB().y - arrow.getCrossEndA().y;
g2.drawArc(arrow.getX() + arrow.getCrossEndA().x, arrow.getY() + arrow.getCrossEndA().y, width, height, arrow.getArcStart(), arrow.getArcEnd());
// A.Mueller End
// G.Mueller Start
}
else if (arrow.getString().startsWith("package")) {
Point px1;
Point px2;
if (arrow.getString().equals("packageStart")) {
px1 = getStartPoint();
px2 = getLinePoints().elementAt(1);
}
else {
px1 = getEndPoint();
px2 = getLinePoints().elementAt(
getLinePoints().size() - 2);
}
AffineTransform at = g2.getTransform();
AffineTransform at2 = (AffineTransform) at.clone();
int cx = px1.x;
int cy = px1.y;
double winkel = Utils.getAngle(px1.x, px1.y, px2.x, px2.y);
at2.rotate(winkel, cx, cy);
g2.setTransform(at2);
g2.setColor(bgColor);
g2.fillOval(px1.x, px1.y - (int) (10 * zoom), (int) (20 * zoom), (int) (20 * zoom));
g2.setColor(fgColor);
g2.drawOval(px1.x, px1.y - (int) (10 * zoom), (int) (20 * zoom), (int) (20 * zoom));
g2.drawLine(px1.x + (int) (10 * zoom), px1.y - (int) (5 * zoom), px1.x + (int) (10 * zoom), px1.y + (int) (5 * zoom));
g2.drawLine(px1.x + (int) (15 * zoom), px1.y, px1.x + (int) (5 * zoom), px1.y);
g2.setTransform(at);
// ***
// Wirrer G. Start
}
else if (arrow.getString().startsWith("fill_poly")) {
Point px1;
Point px2;
if (beginArrow.startsWith("fill_poly_start")) {
px1 = getStartPoint();
px2 = getLinePoints().elementAt(1);
AffineTransform at = g2.getTransform();
AffineTransform at2 = (AffineTransform) at.clone();
double winkel = Utils.getAngle(px1.x, px1.y, px2.x, px2.y);
at2.rotate(winkel, px1.x, px1.y);
g2.setTransform(at2);
int[] x_cord = { px1.x, px1.x + (int) (13 * zoom), px1.x + (int) (13 * zoom) };
int[] y_cord = { px1.y, px1.y - (int) (7 * zoom), px1.y + (int) (7 * zoom) };
Polygon x = new Polygon(x_cord, y_cord, 3);
g2.fillPolygon(x);
g2.setTransform(at);
}
if (endArrow.startsWith("fill_poly_end")) {
px1 = getEndPoint();
px2 = getLinePoints().elementAt(getLinePoints().size() - 2);
AffineTransform at = g2.getTransform();
AffineTransform at2 = (AffineTransform) at.clone();
double winkel = Utils.getAngle(px2.x, px2.y, px1.x, px1.y);
at2.rotate(winkel, px1.x, px1.y);
g2.setTransform(at2);
int[] x_cord = { px1.x, px1.x - (int) (13 * zoom), px1.x - (int) (13 * zoom) };
int[] y_cord = { px1.y, px1.y - (int) (7 * zoom), px1.y + (int) (7 * zoom) };
Polygon x = new Polygon(x_cord, y_cord, 3);
g2.fillPolygon(x);
g2.setTransform(at);
}
// Wirrer G. End
}
else if (arrow.getString().startsWith("comp")) {
Point px1;
Point px2;
int s;
int boxSize = (int) (20 * zoom);
// we use 5.9 and 6.9 instead of 6 and 7 to get bigger arrows if we zoom out but smaller if gridsize is 10
int arrowOneSize = (int) (5.9 * zoom);
int arrowTwoSize = (int) (6.9 * zoom);
int arrowTwoSeparator = (int) (2.4 * zoom);
int arrowThreeSize = (int) (6 * zoom);
int arrowThreeLength = (int) (12 * zoom);
// if (beginCSDArrow.equals("compStart")) {
if (beginArrow.startsWith("compStart")) {
HandlerElementMap.getHandlerForElement(this).getFontHandler().setFontSize((double) 10);
s = boxSize;
if (!csdStartText.equals("")) {
s = (int) HandlerElementMap.getHandlerForElement(this).getFontHandler().getTextWidth(csdStartText);
}
if (s < boxSize) {
s = boxSize;
}
px1 = getStartPoint();
px2 = getLinePoints().elementAt(1);
g2.setColor(bgColor);
g2.fillRect(px1.x - s / 2, px1.y - s / 2, s, s);
g2.setColor(fgColor);
g2.drawRect(px1.x - s / 2, px1.y - s / 2, s, s);
if (csdStartText.equals(">")) {
int[] tmpX = { px1.x - arrowOneSize, px1.x + arrowOneSize, px1.x - arrowOneSize };
int[] tmpY = { px1.y - arrowOneSize, px1.y, px1.y + arrowOneSize };
g2.fillPolygon(tmpX, tmpY, 3);
}
else if (csdStartText.equals("<")) {
int[] tmpX = { px1.x + arrowOneSize, px1.x - arrowOneSize, px1.x + arrowOneSize };
int[] tmpY = { px1.y - arrowOneSize, px1.y, px1.y + arrowOneSize };
g2.fillPolygon(tmpX, tmpY, 3);
}
else if (csdStartText.equals("v")) {
int[] tmpX = { px1.x - arrowOneSize, px1.x, px1.x + arrowOneSize };
int[] tmpY = { px1.y - arrowOneSize, px1.y + arrowOneSize, px1.y - arrowOneSize };
g2.fillPolygon(tmpX, tmpY, 3);
}
else if (csdStartText.equals("^")) {
int[] tmpX = { px1.x - arrowOneSize, px1.x, px1.x + arrowOneSize };
int[] tmpY = { px1.y + arrowOneSize, px1.y - arrowOneSize, px1.y + arrowOneSize };
g2.fillPolygon(tmpX, tmpY, 3);
}
else if (csdStartText.equals("=")) {
g2.drawLine(px1.x - arrowTwoSize, px1.y - arrowTwoSeparator, px1.x + arrowTwoSize, px1.y - arrowTwoSeparator);
g2.drawLine(px1.x + arrowTwoSize, px1.y - arrowTwoSeparator, px1.x + 1, px1.y - arrowTwoSize);
g2.drawLine(px1.x - arrowTwoSize, px1.y + arrowTwoSeparator, px1.x + arrowTwoSize, px1.y + arrowTwoSeparator);
g2.drawLine(px1.x - arrowTwoSize, px1.y + arrowTwoSeparator, px1.x - 1, px1.y + arrowTwoSize);
}
else {
if (!csdStartText.equals("")) {
HandlerElementMap.getHandlerForElement(this).getFontHandler().writeText(g2, csdStartText, px1.x, px1.y + (int) (6 * zoom), AlignHorizontal.CENTER);
}
}
if (beginArrow.equals("compStartdel")) {
AffineTransform at = g2.getTransform();
AffineTransform at2 = (AffineTransform) at.clone();
int cx = px1.x;
int cy = px1.y;
double winkel = Utils.getAngle(px1.x, px1.y, px2.x,
px2.y);
at2.rotate(winkel, cx, cy);
g2.setTransform(at2);
g2.drawLine((int) (px1.x + s / 2 + 2 * zoom), px1.y, px1.x + s / 2 + arrowThreeLength, px1.y - arrowThreeSize);
g2.drawLine((int) (px1.x + s / 2 + 2 * zoom), px1.y, px1.x + s / 2 + arrowThreeLength, px1.y + arrowThreeSize);
g2.setTransform(at);
}
HandlerElementMap.getHandlerForElement(this).getFontHandler().resetFontSize();
}
// if (endCSDArrow.equals("compEnd")) {
if (endArrow.startsWith("compEnd")) {
HandlerElementMap.getHandlerForElement(this).getFontHandler().setFontSize(10.0);
s = boxSize;
if (!csdEndText.equals("")) {
s = (int) HandlerElementMap.getHandlerForElement(this).getFontHandler().getTextWidth(csdEndText);
}
if (s < boxSize) {
s = boxSize;
}
px1 = getEndPoint();
px2 = getLinePoints().elementAt(
getLinePoints().size() - 2);
g2.setColor(bgColor);
g2.fillRect(px1.x - s / 2, px1.y - s / 2, s, s);
g2.setColor(fgColor);
g2.drawRect(px1.x - s / 2, px1.y - s / 2, s, s);
if (csdEndText.equals(">")) {
int[] tmpX = { px1.x - arrowOneSize, px1.x + arrowOneSize, px1.x - arrowOneSize };
int[] tmpY = { px1.y - arrowOneSize, px1.y, px1.y + arrowOneSize };
g2.fillPolygon(tmpX, tmpY, 3);
}
else if (csdEndText.equals("<")) {
int[] tmpX = { px1.x + arrowOneSize, px1.x - arrowOneSize, px1.x + arrowOneSize };
int[] tmpY = { px1.y - arrowOneSize, px1.y, px1.y + arrowOneSize };
g2.fillPolygon(tmpX, tmpY, 3);
}
else if (csdEndText.equals("v")) {
int[] tmpX = { px1.x - arrowOneSize, px1.x, px1.x + arrowOneSize };
int[] tmpY = { px1.y - arrowOneSize, px1.y + arrowOneSize, px1.y - arrowOneSize };
g2.fillPolygon(tmpX, tmpY, 3);
}
else if (csdEndText.equals("^")) {
int[] tmpX = { px1.x - arrowOneSize, px1.x, px1.x + arrowOneSize };
int[] tmpY = { px1.y + arrowOneSize, px1.y - arrowOneSize, px1.y + arrowOneSize };
g2.fillPolygon(tmpX, tmpY, 3);
}
else if (csdEndText.equals("=")) {
g2.drawLine(px1.x - arrowTwoSize, px1.y - arrowTwoSeparator, px1.x + arrowTwoSize, px1.y - arrowTwoSeparator);
g2.drawLine(px1.x + arrowTwoSize, px1.y - arrowTwoSeparator, px1.x + 1, px1.y - arrowTwoSize);
g2.drawLine(px1.x - arrowTwoSize, px1.y + arrowTwoSeparator, px1.x + arrowTwoSize, px1.y + arrowTwoSeparator);
g2.drawLine(px1.x - arrowTwoSize, px1.y + arrowTwoSeparator, px1.x - 1, px1.y + arrowTwoSize);
}
else {
if (!csdEndText.equals("")) {
HandlerElementMap.getHandlerForElement(this).getFontHandler().writeText(g2, csdEndText, px1.x, px1.y + (int) (6 * zoom), AlignHorizontal.CENTER);
}
}
if (endArrow.equals("compEnddel")) {
AffineTransform at = g2.getTransform();
AffineTransform at2 = (AffineTransform) at.clone();
int cx = px1.x;
int cy = px1.y;
double winkel = Utils.getAngle(px1.x, px1.y, px2.x,
px2.y);
at2.rotate(winkel, cx, cy);
g2.setTransform(at2);
g2.drawLine((int) (px1.x + s / 2 + 2 * zoom), px1.y, px1.x + s / 2 + arrowThreeLength, px1.y - arrowThreeSize);
g2.drawLine((int) (px1.x + s / 2 + 2 * zoom), px1.y, px1.x + s / 2 + arrowThreeLength, px1.y + arrowThreeSize);
g2.setTransform(at);
}
HandlerElementMap.getHandlerForElement(this).getFontHandler().resetFontSize();
}
}
// G.Mueller End
}
else if (r instanceof Multiplicity) {
Multiplicity m = (Multiplicity) r;
// g.drawRect((int)r.getX(), (int)r.getY(), (int)r.getWidth(),
// (int)r.getHeight());
HandlerElementMap.getHandlerForElement(this).getFontHandler().writeText(g2, m.getString(), r.getX(), r.getY() + (int) HandlerElementMap.getHandlerForElement(this).getFontHandler().getFontSize() + 2 * (int) HandlerElementMap.getHandlerForElement(this).getFontHandler().getDistanceBetweenTexts(), AlignHorizontal.LEFT); // B. Buckl
// added
// +2*this.getHandler().getDistTextToText()
}
else if (r instanceof Role) {
Role role = (Role) r;
String str = role.getString();
int position = 0;
int y = 4 * (int) HandlerElementMap.getHandlerForElement(this).getFontHandler().getDistanceBetweenTexts();
while (position != -1) {
position = str.indexOf("\\\\");
if (position != -1) {
String s = str.substring(0, position);
HandlerElementMap.getHandlerForElement(this).getFontHandler().writeText(g2, s, r.getX(), r.getY() + y, AlignHorizontal.LEFT);
y = y + (int) HandlerElementMap.getHandlerForElement(this).getFontHandler().getFontSize();
str = str.substring(position + 2, str.length());
}
else {
HandlerElementMap.getHandlerForElement(this).getFontHandler().writeText(g2, str, r.getX(), r.getY() + y, AlignHorizontal.LEFT);
}
}
// <OLDCODE>
/* this.getHandler().write(g2, role.getString(), (int) r.getX(), (int) r .getY() + this.getHandler().getFontHandler().getFontsize() + 2 this.getHandler().getDistTextToText(), false); // B. Buckl // added // +2*this.getHandler().getDistTextToText() */
// </OLDCODE>
// A.Mueller end
// G.Mueller Start
}
else if (r instanceof Port) {
Port p = (Port) r;
HandlerElementMap.getHandlerForElement(this).getFontHandler().writeText(g2, p.getString(), r.getX(), r.getY(), AlignHorizontal.LEFT);
}
// G.Mueller end
}
if (getStrings() != null) {
if (getStrings().size() > 0) {
Point start = getCenterOfLine();
int yPos = start.y - (int) HandlerElementMap.getHandlerForElement(this).getFontHandler().getDistanceBetweenTexts(); // B. Buckl
// added
// -this.getHandler().getDistTextToText()
int xPos = start.x;
for (int i = 0; i < getStrings().size(); i++) {
String s = getStrings().elementAt(i);
// A.Mueller Begin...
if (s.startsWith(">") || s.endsWith(">") || s.startsWith("<") || s.endsWith("<")) {
// starts or ends with an arrow, check if it is the only
// one..
if (s.indexOf(">") == s.lastIndexOf(">") && s.indexOf(">") != -1 || s.indexOf("<") == s.lastIndexOf("<") && s.indexOf("<") != -1) {
// decide where and what to draw...
int fontHeight = g2.getFontMetrics(
HandlerElementMap.getHandlerForElement(this).getFontHandler().getFont()).getHeight() - g2.getFontMetrics(HandlerElementMap.getHandlerForElement(this).getFontHandler().getFont()).getDescent() - g2.getFontMetrics(HandlerElementMap.getHandlerForElement(this).getFontHandler().getFont()).getLeading();
fontHeight = fontHeight / 3 * 2;
if (s.endsWith(">")) {
s = s.substring(0, s.length() - 1);
int fontWidth = (int) HandlerElementMap.getHandlerForElement(this).getFontHandler().getTextWidth(s);
xPos = xPos - (fontHeight + 4) / 2;
int startDrawX = xPos + fontWidth / 2 + 4;
Polygon temp = new Polygon();
temp.addPoint(startDrawX, yPos);
temp.addPoint(startDrawX, yPos - fontHeight);
temp.addPoint(startDrawX + fontHeight - 1, yPos - fontHeight / 2);
g2.fillPolygon(temp);
}
else if (s.endsWith("<")) {
s = s.substring(0, s.length() - 1);
int fontWidth = (int) HandlerElementMap.getHandlerForElement(this).getFontHandler().getTextWidth(s);
xPos = xPos - (fontHeight + 4) / 2;
int startDrawX = xPos + fontWidth / 2 + 4;
Polygon temp = new Polygon();
temp.addPoint(startDrawX + fontHeight - 1, yPos);
temp.addPoint(startDrawX + fontHeight - 1, yPos - fontHeight);
temp.addPoint(startDrawX, yPos - fontHeight / 2);
g2.fillPolygon(temp);
}
else if (s.startsWith(">")) {
s = s.substring(1, s.length());
int fontWidth = (int) HandlerElementMap.getHandlerForElement(this).getFontHandler().getTextWidth(s);
xPos = xPos + (fontHeight + 4) / 2;
int startDrawX = xPos - fontWidth / 2 - 4;
Polygon temp = new Polygon();
temp.addPoint(startDrawX - fontHeight + 1, yPos);
temp.addPoint(startDrawX - fontHeight + 1, yPos - fontHeight);
temp.addPoint(startDrawX, yPos - fontHeight / 2);
g2.fillPolygon(temp);
}
else if (s.startsWith("<")) {
s = s.substring(1, s.length());
int fontWidth = (int) HandlerElementMap.getHandlerForElement(this).getFontHandler().getTextWidth(s);
xPos = xPos + (fontHeight + 4) / 2;
int startDrawX = xPos - fontWidth / 2 - 4;
Polygon temp = new Polygon();
temp.addPoint(startDrawX, yPos);
temp.addPoint(startDrawX, yPos - fontHeight);
temp.addPoint(startDrawX - fontHeight + 1, yPos - fontHeight / 2);
g2.fillPolygon(temp);
}
}
}
// A.Mueller end...
HandlerElementMap.getHandlerForElement(this).getFontHandler().writeText(g2, s, xPos, yPos, AlignHorizontal.CENTER);
yPos += (int) HandlerElementMap.getHandlerForElement(this).getFontHandler().getFontSize();
yPos += HandlerElementMap.getHandlerForElement(this).getFontHandler().getDistanceBetweenTexts();
}
}
}
// The criticalPoints must be calculated to expand the relations size by the associated relation-text
Vector<Point> criticalPoints = new Vector<Point>();
for (int i = 1; i < startShapes.size(); i++) {
Rectangle r = startShapes.elementAt(i);
Point p1 = new Point(r.getX() - (int) (2 * zoom), r.getY() - (int) (2 * zoom));
Point p2 = new Point(r.getX() + r.getWidth() + (int) (2 * zoom),
r.getY() + r.getHeight() + (int) (2 * zoom));
criticalPoints.add(p1);
criticalPoints.add(p2);
}
for (int i = 1; i < endShapes.size(); i++) {
Rectangle r = endShapes.elementAt(i);
Point p1 = new Point(r.getX() - (int) (2 * zoom), r.getY() - (int) (2 * zoom));
Point p2 = new Point(r.getX() + r.getWidth() + (int) (2 * zoom),
r.getY() + r.getHeight() + (int) (2 * zoom));
criticalPoints.add(p1);
criticalPoints.add(p2);
}
if (getStrings() != null) {
if (getStrings().size() > 0) {
Point start = getCenterOfLine();
int yPos = start.y;
int xPos = start.x;
for (int i = 0; i < getStrings().size(); i++) {
String s = getStrings().elementAt(i);
int width = (int) HandlerElementMap.getHandlerForElement(this).getFontHandler().getTextWidth(s);
criticalPoints.add(new Point(xPos - width / 2 - (int) (20 * zoom), yPos - (int) HandlerElementMap.getHandlerForElement(this).getFontHandler().getFontSize() - (int) (20 * zoom)));
criticalPoints.add(new Point(xPos + width / 2 + (int) (20 * zoom), yPos + (int) (20 * zoom)));
yPos += (int) HandlerElementMap.getHandlerForElement(this).getFontHandler().getFontSize();
yPos += HandlerElementMap.getHandlerForElement(this).getFontHandler().getDistanceBetweenTexts();
}
}
}
/**
* Change Size of relation to fit the relations size
* Must be made to resize the relation automatically during draging an endpoint
*/
// (minx,miny) is the upper left end and (maxx,maxy) is the lower right end of the relation
int minx = Integer.MAX_VALUE;
int miny = Integer.MAX_VALUE;
int maxx = Integer.MIN_VALUE;
int maxy = Integer.MIN_VALUE;
for (int i = 0; i < getLinePoints().size(); i++) {
Point p = getLinePoints().elementAt(i);
minx = Math.min(minx, p.x);
miny = Math.min(miny, p.y);
maxx = Math.max(maxx, p.x);
maxy = Math.max(maxy, p.y);
// Subtract or add the SELECTCIRCLESIZE to avoid cutting the circles at the end of the relation
minx = (int) Math.min(minx, p.x - SELECTCIRCLESIZE * zoom);
miny = (int) Math.min(miny, p.y - SELECTCIRCLESIZE * zoom);
maxx = (int) Math.max(maxx, p.x + SELECTCIRCLESIZE * zoom);
maxy = (int) Math.max(maxy, p.y + SELECTCIRCLESIZE * zoom);
}
for (int i = 0; i < criticalPoints.size(); i++) {
Point p = criticalPoints.elementAt(i);
minx = Math.min(minx, p.x);
miny = Math.min(miny, p.y);
maxx = Math.max(maxx, p.x);
maxy = Math.max(maxy, p.y);
}
// BUGFIX ZOOM: We must consider the gridsize for the min and max value to avoid rounding errors
// Therefore we subtract or add the difference to the next possible value
int gridSize = HandlerElementMap.getHandlerForElement(this).getGridSize();
minx -= minx % gridSize;
miny -= miny % gridSize;
// Subtract gridSize another time to avoid a too small selection area for the relation-selection circle
minx -= gridSize;
miny -= gridSize;
maxx += maxx % gridSize;
maxy += maxy % gridSize;
if (maxx != 0 || maxy != 0) {
int diffx = maxx - getRectangle().width;
int diffy = maxy - getRectangle().height;
this.setSize(getRectangle().width + diffx, getRectangle().height + diffy);
}
if (minx != 0 | miny != 0) {
setLocationDifference(minx, miny);
this.setSize(getRectangle().width + -minx, getRectangle().height + -miny);
for (int i = 0; i < getLinePoints().size(); i++) {
Point p = getLinePoints().elementAt(i);
p.x += -minx;
p.y += -miny;
}
}
}
/** Returns the first point of the relation's poly-line (the start endpoint). */
private Point getStartPoint() {
    return getLinePoints().elementAt(0);
}
/** Returns the last point of the relation's poly-line (the end endpoint). */
private Point getEndPoint() {
    Vector<Point> points = getLinePoints();
    return points.elementAt(points.size() - 1);
}
/**
 * Returns the start endpoint translated from relation-local coordinates into
 * absolute drawing-area coordinates (offset by this element's rectangle).
 */
public PointDouble getAbsoluteCoorStart() {
    Point start = getStartPoint();
    return new PointDouble(getRectangle().x + start.x, getRectangle().y + start.y);
}
/**
 * Returns the end endpoint translated from relation-local coordinates into
 * absolute drawing-area coordinates (offset by this element's rectangle).
 */
public PointDouble getAbsoluteCoorEnd() {
    Point end = getEndPoint();
    return new PointDouble(getRectangle().x + end.x, getRectangle().y + end.y);
}
// G.Mueller start
/**
 * Parses the label text used by Composite Structure Diagram relations.
 * For inputs longer than 3 characters it extracts the segment between
 * "&lt;[" and the next "]" into slot 0, and the segment between the
 * following "[" and "]&gt;" into slot 1. Slots that cannot be parsed
 * (and slots 2 and 3, which are never filled here) stay a single space.
 *
 * @param str the raw relation label
 * @return a four-element array of extracted segments, defaulting to " "
 */
public String[] getCSDText(String str) { // for the Composite Structure Diagram Text
    String[] parts = { " ", " ", " ", " " };
    if (str.length() > 3) {
        int to = 0;
        // segment "<[ ... ]" -> slot 0; from points just past "<[" when found
        int from = str.indexOf("<[") + 2;
        if (from >= 2) {
            to = str.indexOf("]");
        }
        if (from >= 2 && to >= 0 && from < to) {
            parts[0] = str.substring(from, to);
        }
        // segment "[ ... ]>" after the first one -> slot 1
        // (a negative 'to' is treated as 0 by String.indexOf)
        from = str.indexOf("[", to) + 1;
        if (from >= 1) {
            to = str.indexOf("]>", to);
        }
        if (from >= 1 && to >= 0 && from < to) {
            parts[1] = str.substring(from, to);
        }
    }
    return parts;
}
// G.Mueller end
@Override
public StickingPolygon generateStickingBorder() { // LME
    // Relations never offer a sticking border of their own; delegate to the
    // no-op generator so other elements cannot stick to this one.
    return NoStickingPolygonGenerator.INSTANCE.generateStickingBorder(getRectangle());
}
@Override
public StickingPolygon generateStickingBorder(int x, int y, int width, int height) {
    // The explicit geometry arguments are deliberately ignored: the no-op
    // generator is always based on the element's own rectangle.
    return NoStickingPolygonGenerator.INSTANCE.generateStickingBorder(getRectangle());
}
/**
 * Reports whether every point of the relation's poly-line lies on the same
 * coordinate (i.e. the relation is degenerate / collapsed to a single spot).
 * An empty point list yields {@code true}.
 */
public boolean allPointsOnSamePos() {
    Point reference = null;
    for (Point p : getLinePoints()) {
        if (reference == null) {
            reference = p;
        }
        else if (reference.x != p.x || reference.y != p.y) {
            return false;
        }
    }
    return true;
}
@Override
protected Color getDefaultBackgroundColor() {
    // Relations are drawn on a white background by default.
    return Converter.convert(ColorOwn.WHITE);
}
@Override
public Set<Direction> getResizeArea(int x, int y) {
    // Relations resize themselves from their line points; they expose no
    // manual resize handles, so no direction is ever returned.
    return Collections.<Direction> emptySet();
}
@Override
public Integer getLayer() {
    // Use the relation-specific default layer unless overridden via the facet.
    return getLayerHelper(LayerFacet.DEFAULT_VALUE_RELATION);
}
}
| gpl-3.0 |
nvoron23/opensearchserver | src/main/java/com/jaeksoft/searchlib/web/DeleteServlet.java | 4812 | /**
* License Agreement for OpenSearchServer
*
* Copyright (C) 2008-2014 Emmanuel Keller / Jaeksoft
*
* http://www.open-search-server.com
*
* This file is part of OpenSearchServer.
*
* OpenSearchServer is free software: you can redistribute it and/or
* modify it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* OpenSearchServer is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with OpenSearchServer.
* If not, see <http://www.gnu.org/licenses/>.
**/
package com.jaeksoft.searchlib.web;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.security.NoSuchAlgorithmException;
import javax.xml.xpath.XPathExpressionException;
import org.apache.http.HttpException;
import com.jaeksoft.searchlib.Client;
import com.jaeksoft.searchlib.SearchLibException;
import com.jaeksoft.searchlib.function.expression.SyntaxError;
import com.jaeksoft.searchlib.query.ParseException;
import com.jaeksoft.searchlib.request.AbstractSearchRequest;
import com.jaeksoft.searchlib.request.SearchPatternRequest;
import com.jaeksoft.searchlib.schema.SchemaField;
import com.jaeksoft.searchlib.schema.SchemaFieldList;
import com.jaeksoft.searchlib.user.Role;
import com.jaeksoft.searchlib.user.User;
import com.jaeksoft.searchlib.util.XPathParser;
/**
 * Servlet handling document deletion, either by unique-field value ("uniq"
 * parameter) or by search query ("q" parameter). Also provides static client
 * helpers that call a remote instance of this servlet over HTTP.
 */
public class DeleteServlet extends AbstractServlet {

    private static final long serialVersionUID = -2663934578246659291L;

    /**
     * XPath locating the "Status" entry in the servlet's XML response.
     * NOTE: the original expression was missing the closing ']' of the
     * predicate, which made it malformed XPath.
     */
    private static final String STATUS_XPATH = "/response/entry[@key='Status']";

    /** Returns true when the parsed XML response reports status "OK". */
    private static boolean isStatusOk(XPathParser xpp)
            throws XPathExpressionException {
        return "OK".equals(xpp.getNodeString(STATUS_XPATH));
    }

    /**
     * Deletes the single document whose given field matches the value.
     *
     * @param client the index client
     * @param field the schema field to match; when null, the schema's
     *            unique field is used
     * @param value the field value identifying the document
     * @return the number of deleted documents
     * @throws SearchLibException if the field cannot be resolved
     */
    private int deleteUniqDoc(Client client, String field, String value)
            throws NoSuchAlgorithmException, IOException, URISyntaxException,
            SearchLibException, InstantiationException, IllegalAccessException,
            ClassNotFoundException, HttpException {
        SchemaFieldList schemaFieldList = client.getSchema().getFieldList();
        SchemaField schemaField = field != null ? schemaFieldList.get(field)
                : schemaFieldList.getUniqueField();
        if (schemaField == null)
            throw new SearchLibException("Field not found: " + field);
        return client.deleteDocument(schemaField.getName(), value);
    }

    /**
     * Deletes every document matching the given query string.
     *
     * @param client the index client
     * @param q the query pattern
     * @return the number of deleted documents
     */
    private int deleteByQuery(Client client, String q)
            throws SearchLibException, IOException, InstantiationException,
            IllegalAccessException, ClassNotFoundException, ParseException,
            SyntaxError, URISyntaxException, InterruptedException {
        AbstractSearchRequest request = new SearchPatternRequest(client);
        request.setQueryString(q);
        return client.deleteDocuments(request);
    }

    @Override
    protected void doRequest(ServletTransaction transaction)
            throws ServletException {
        try {
            String indexName = transaction.getIndexName();
            User user = transaction.getLoggedUser();
            // Deleting documents requires the INDEX_UPDATE role.
            if (user != null && !user.hasRole(indexName, Role.INDEX_UPDATE))
                throw new SearchLibException("Not permitted");
            Client client = transaction.getClient();
            String uniq = transaction.getParameterString("uniq");
            String q = transaction.getParameterString("q");
            Integer result = null;
            if (uniq != null)
                result = deleteUniqDoc(client, null, uniq);
            else if (q != null)
                result = deleteByQuery(client, q);
            // Fail with an explicit message instead of the NPE the original
            // code raised on result.toString() when no parameter was given.
            if (result == null)
                throw new SearchLibException(
                        "Missing parameter: either 'uniq' or 'q' is required");
            transaction.addXmlResponse("Status", "OK");
            transaction.addXmlResponse("Deleted", result.toString());
        } catch (Exception e) {
            throw new ServletException(e);
        }
    }

    /**
     * Remotely deletes the document identified by its unique-field value.
     *
     * @return true when the remote servlet reported status "OK"
     */
    public static boolean delete(URI uri, String indexName, String login,
            String apikey, String uniqueField, int secTimeOut)
            throws SearchLibException {
        try {
            XPathParser xpp = call(
                    secTimeOut,
                    buildUri(uri, "/delete", indexName, login, apikey, "uniq="
                            + uniqueField));
            return isStatusOk(xpp);
        } catch (IllegalStateException e) {
            throw new SearchLibException(e);
        } catch (URISyntaxException e) {
            throw new SearchLibException(e);
        } catch (XPathExpressionException e) {
            throw new SearchLibException(e);
        }
    }

    /**
     * Remotely deletes the document identified by its internal document id.
     *
     * @return true when the remote servlet reported status "OK"
     */
    public static boolean deleteDocument(URI uri, String indexName,
            String login, String apikey, int docId, int secTimeOut)
            throws SearchLibException {
        try {
            XPathParser xpp = call(
                    secTimeOut,
                    buildUri(uri, "/delete", indexName, login, apikey, "id="
                            + docId));
            return isStatusOk(xpp);
        } catch (SearchLibException e) {
            throw new SearchLibException(e);
        } catch (URISyntaxException e) {
            throw new SearchLibException(e);
        } catch (XPathExpressionException e) {
            throw new SearchLibException(e);
        }
    }
}
| gpl-3.0 |
nalimleinad/Battlegear2 | src/api/java/atomicstryker/dynamiclights/client/IDynamicLightSource.java | 1405 | package atomicstryker.dynamiclights.client;
import net.minecraft.entity.Entity;
/**
 *
 * @author AtomicStryker
 *
 * Usage of this interface:
 * Implement it to create your own Dynamic Light Sources for use with Dynamic Lights,
 * then register the instance via:
 *
 * DynamicLights.addLightSource(IDynamicLightSource yourLightObjectHere)
 *
 * When you want your light to turn off, simply call
 *
 * DynamicLights.removeLightSource(IDynamicLightSource yourLightObjectHere)
 *
 *
 * You could also just return a light level &lt; 1, but that would leave your light
 * in the aggressively iterated list - please do not do that; remove it instead.
 *
 */
public interface IDynamicLightSource
{

    /**
     * Entity the Dynamic Light Source is associated with.
     * The light will always be centered on this Entity and move with it.
     * Any Entity can only be associated with a single light!
     * If the Entity is dead (e.g. Entity.isDead() returns true), the light
     * will be removed as well.
     *
     * @return the Entity this light is attached to
     */
    public Entity getAttachmentEntity();

    /**
     * Values above 15 will not be considered, 15 is the Minecraft maximum light
     * level. Values below 1 are considered disabled.
     * Values can be changed on the fly.
     *
     * @return int value of the Minecraft light level at the Dynamic Light Source
     */
    public int getLightLevel();

}
| gpl-3.0 |
kevin-wayne/algs4 | src/main/java/edu/princeton/cs/algs4/TopologicalX.java | 11467 | /******************************************************************************
* Compilation: javac TopologicalX.java
* Execution: java TopologicalX V E F
* Dependencies: Queue.java Digraph.java
*
* Compute topological ordering of a DAG using queue-based algorithm.
* Runs in O(E + V) time.
*
******************************************************************************/
package edu.princeton.cs.algs4;
/**
* The {@code TopologicalX} class represents a data type for
* determining a topological order of a <em>directed acyclic graph</em> (DAG).
* A digraph has a topological order if and only if it is a DAG.
* The <em>hasOrder</em> operation determines whether the digraph has
* a topological order, and if so, the <em>order</em> operation
* returns one.
* <p>
* This implementation uses a nonrecursive, queue-based algorithm.
* The constructor takes Θ(<em>V</em> + <em>E</em>) time in the worst
* case, where <em>V</em> is the number of vertices and <em>E</em>
* is the number of edges.
* Each instance method takes Θ(1) time.
* It uses Θ(<em>V</em>) extra space (not including the digraph).
* <p>
* See {@link DirectedCycle}, {@link DirectedCycleX}, and
* {@link EdgeWeightedDirectedCycle} to compute a
* directed cycle if the digraph is not a DAG.
* See {@link Topological} for a recursive version that uses depth-first search.
* <p>
* For additional documentation,
* see <a href="https://algs4.cs.princeton.edu/42digraph">Section 4.2</a> of
* <i>Algorithms, 4th Edition</i> by Robert Sedgewick and Kevin Wayne.
*
* @author Robert Sedgewick
* @author Kevin Wayne
*/
public class TopologicalX {
    private Queue<Integer> order;     // vertices in topological order
    private int[] ranks;              // ranks[v] = order where vertex v appears in order

    /**
     * Determines whether the digraph {@code G} has a topological order and, if so,
     * finds such a topological order.
     * @param G the digraph
     */
    public TopologicalX(Digraph G) {

        // indegrees of remaining vertices
        int[] indegree = new int[G.V()];
        for (int v = 0; v < G.V(); v++) {
            indegree[v] = G.indegree(v);
        }

        // initialize
        ranks = new int[G.V()];
        order = new Queue<Integer>();
        int count = 0;

        // initialize queue to contain all vertices with indegree = 0
        Queue<Integer> queue = new Queue<Integer>();
        for (int v = 0; v < G.V(); v++)
            if (indegree[v] == 0) queue.enqueue(v);

        // Kahn's algorithm: repeatedly remove a source vertex and
        // decrement the indegree of its neighbors.
        while (!queue.isEmpty()) {
            int v = queue.dequeue();
            order.enqueue(v);
            ranks[v] = count++;
            for (int w : G.adj(v)) {
                indegree[w]--;
                if (indegree[w] == 0) queue.enqueue(w);
            }
        }

        // there is a directed cycle in subgraph of vertices with indegree >= 1.
        if (count != G.V()) {
            order = null;
        }

        assert check(G);
    }

    /**
     * Determines whether the edge-weighted digraph {@code G} has a
     * topological order and, if so, finds such a topological order.
     * @param G the digraph
     */
    public TopologicalX(EdgeWeightedDigraph G) {

        // indegrees of remaining vertices
        int[] indegree = new int[G.V()];
        for (int v = 0; v < G.V(); v++) {
            indegree[v] = G.indegree(v);
        }

        // initialize
        ranks = new int[G.V()];
        order = new Queue<Integer>();
        int count = 0;

        // initialize queue to contain all vertices with indegree = 0
        Queue<Integer> queue = new Queue<Integer>();
        for (int v = 0; v < G.V(); v++)
            if (indegree[v] == 0) queue.enqueue(v);

        // Kahn's algorithm on the edge-weighted digraph; edge weights are
        // irrelevant to the ordering, only the edge targets are used.
        while (!queue.isEmpty()) {
            int v = queue.dequeue();
            order.enqueue(v);
            ranks[v] = count++;
            for (DirectedEdge e : G.adj(v)) {
                int w = e.to();
                indegree[w]--;
                if (indegree[w] == 0) queue.enqueue(w);
            }
        }

        // there is a directed cycle in subgraph of vertices with indegree >= 1.
        if (count != G.V()) {
            order = null;
        }

        assert check(G);
    }

    /**
     * Returns a topological order if the digraph has a topological order,
     * and {@code null} otherwise.
     * @return a topological order of the vertices (as an iterable) if the
     *    digraph has a topological order (or equivalently, if the digraph is a DAG),
     *    and {@code null} otherwise
     */
    public Iterable<Integer> order() {
        return order;
    }

    /**
     * Does the digraph have a topological order?
     * @return {@code true} if the digraph has a topological order (or equivalently,
     *    if the digraph is a DAG), and {@code false} otherwise
     */
    public boolean hasOrder() {
        return order != null;
    }

    /**
     * Returns the rank of vertex {@code v} in the topological order;
     * -1 if the digraph is not a DAG
     *
     * @param v vertex
     * @return the position of vertex {@code v} in a topological order
     *    of the digraph; -1 if the digraph is not a DAG
     * @throws IllegalArgumentException unless {@code 0 <= v < V}
     */
    public int rank(int v) {
        validateVertex(v);
        if (hasOrder()) return ranks[v];
        else            return -1;
    }

    // certify that digraph is acyclic
    private boolean check(Digraph G) {

        // digraph is acyclic
        if (hasOrder()) {
            // check that ranks are a permutation of 0 to V-1
            boolean[] found = new boolean[G.V()];
            for (int i = 0; i < G.V(); i++) {
                found[rank(i)] = true;
            }
            for (int i = 0; i < G.V(); i++) {
                if (!found[i]) {
                    System.err.println("No vertex with rank " + i);
                    return false;
                }
            }

            // check that ranks provide a valid topological order
            for (int v = 0; v < G.V(); v++) {
                for (int w : G.adj(v)) {
                    if (rank(v) > rank(w)) {
                        System.err.printf("%d-%d: rank(%d) = %d, rank(%d) = %d\n",
                                          v, w, v, rank(v), w, rank(w));
                        return false;
                    }
                }
            }

            // check that order() is consistent with rank()
            int r = 0;
            for (int v : order()) {
                if (rank(v) != r) {
                    System.err.println("order() and rank() inconsistent");
                    return false;
                }
                r++;
            }
        }

        return true;
    }

    // certify that digraph is acyclic
    private boolean check(EdgeWeightedDigraph G) {

        // digraph is acyclic
        if (hasOrder()) {
            // check that ranks are a permutation of 0 to V-1
            boolean[] found = new boolean[G.V()];
            for (int i = 0; i < G.V(); i++) {
                found[rank(i)] = true;
            }
            for (int i = 0; i < G.V(); i++) {
                if (!found[i]) {
                    System.err.println("No vertex with rank " + i);
                    return false;
                }
            }

            // check that ranks provide a valid topological order
            for (int v = 0; v < G.V(); v++) {
                for (DirectedEdge e : G.adj(v)) {
                    int w = e.to();
                    if (rank(v) > rank(w)) {
                        System.err.printf("%d-%d: rank(%d) = %d, rank(%d) = %d\n",
                                          v, w, v, rank(v), w, rank(w));
                        return false;
                    }
                }
            }

            // check that order() is consistent with rank()
            int r = 0;
            for (int v : order()) {
                if (rank(v) != r) {
                    System.err.println("order() and rank() inconsistent");
                    return false;
                }
                r++;
            }
        }

        return true;
    }

    // throw an IllegalArgumentException unless {@code 0 <= v < V}
    private void validateVertex(int v) {
        int V = ranks.length;
        if (v < 0 || v >= V)
            throw new IllegalArgumentException("vertex " + v + " is not between 0 and " + (V-1));
    }

    /**
     * Unit tests the {@code TopologicalX} data type.
     *
     * @param args the command-line arguments
     */
    public static void main(String[] args) {

        // create random DAG with V vertices and E edges; then add F random edges
        int V = Integer.parseInt(args[0]);
        int E = Integer.parseInt(args[1]);
        int F = Integer.parseInt(args[2]);
        Digraph G1 = DigraphGenerator.dag(V, E);

        // corresponding edge-weighted digraph
        EdgeWeightedDigraph G2 = new EdgeWeightedDigraph(V);
        for (int v = 0; v < G1.V(); v++)
            for (int w : G1.adj(v))
                G2.addEdge(new DirectedEdge(v, w, 0.0));

        // add F extra edges
        for (int i = 0; i < F; i++) {
            int v = StdRandom.uniform(V);
            int w = StdRandom.uniform(V);
            G1.addEdge(v, w);
            G2.addEdge(new DirectedEdge(v, w, 0.0));
        }

        StdOut.println(G1);
        StdOut.println();
        StdOut.println(G2);

        // find a directed cycle
        TopologicalX topological1 = new TopologicalX(G1);
        if (!topological1.hasOrder()) {
            StdOut.println("Not a DAG");
        }

        // or give topological sort
        else {
            StdOut.print("Topological order: ");
            for (int v : topological1.order()) {
                StdOut.print(v + " ");
            }
            StdOut.println();
        }

        // find a directed cycle
        TopologicalX topological2 = new TopologicalX(G2);
        if (!topological2.hasOrder()) {
            StdOut.println("Not a DAG");
        }

        // or give topological sort
        else {
            StdOut.print("Topological order: ");
            for (int v : topological2.order()) {
                StdOut.print(v + " ");
            }
            StdOut.println();
        }
    }
}
/******************************************************************************
* Copyright 2002-2020, Robert Sedgewick and Kevin Wayne.
*
* This file is part of algs4.jar, which accompanies the textbook
*
* Algorithms, 4th edition by Robert Sedgewick and Kevin Wayne,
* Addison-Wesley Professional, 2011, ISBN 0-321-57351-X.
* http://algs4.cs.princeton.edu
*
*
* algs4.jar is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* algs4.jar is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with algs4.jar. If not, see http://www.gnu.org/licenses.
******************************************************************************/
| gpl-3.0 |
Rachel-Ding/weiciyuan-for-eclipse | src/org/qii/weiciyuan/support/database/dbUpgrade/Upgrade35to36.java | 483 | package org.qii.weiciyuan.support.database.dbUpgrade;
import org.qii.weiciyuan.support.database.DatabaseHelper;
import org.qii.weiciyuan.support.database.table.AtUsersTable;
import android.database.sqlite.SQLiteDatabase;
/**
* User: qii
* Date: 14-4-8
*/
public class Upgrade35to36 {
    /**
     * Migrates the database schema from version 35 to 36 by dropping and
     * re-creating the at-users table. Any rows in the old table are
     * discarded; presumably they can be re-fetched from the network —
     * TODO confirm against callers.
     */
    public static void upgrade(SQLiteDatabase db) {
        // Drop first so the CREATE below always starts from a clean slate.
        db.execSQL("DROP TABLE IF EXISTS " + AtUsersTable.TABLE_NAME);
        db.execSQL(DatabaseHelper.CREATE_ATUSERS_TABLE_SQL);
    }
}
| gpl-3.0 |
malin1993ml/h-store | third_party/cpp/berkeleydb/test/java/compat/src/com/sleepycat/bind/tuple/test/MarshalledObject.java | 3225 | /*-
* See the file LICENSE for redistribution information.
*
* Copyright (c) 2002, 2015 Oracle and/or its affiliates. All rights reserved.
*
*/
package com.sleepycat.bind.tuple.test;
import com.sleepycat.bind.tuple.MarshalledTupleEntry;
import com.sleepycat.bind.tuple.MarshalledTupleKeyEntity;
import com.sleepycat.bind.tuple.TupleInput;
import com.sleepycat.bind.tuple.TupleOutput;
/**
* @author Mark Hayes
*/
/**
 * Test entity that marshals itself into tuple entries. The primary key is
 * stored in the key entry; the data string and both secondary index keys are
 * stored in the data entry. Secondary keys are named "1" and "2"; an empty
 * index-key string means "no secondary key present".
 *
 * BUGFIX: the "2" branches of {@link #marshalSecondaryKey} and
 * {@link #nullifyForeignKey} previously tested {@code indexKey1} instead of
 * {@code indexKey2} (copy-paste error); they now test the key they operate on.
 */
public class MarshalledObject
    implements MarshalledTupleEntry, MarshalledTupleKeyEntity {

    private String data;        // payload stored in the data entry
    private String primaryKey;  // value stored in the key entry
    private String indexKey1;   // secondary key "1"; "" means absent
    private String indexKey2;   // secondary key "2"; "" means absent

    /** Required public no-arg constructor for binding instantiation. */
    public MarshalledObject() {
    }

    MarshalledObject(String data, String primaryKey,
                     String indexKey1, String indexKey2) {

        this.data = data;
        this.primaryKey = primaryKey;
        this.indexKey1 = indexKey1;
        this.indexKey2 = indexKey2;
    }

    String getData() {

        return data;
    }

    String getPrimaryKey() {

        return primaryKey;
    }

    String getIndexKey1() {

        return indexKey1;
    }

    String getIndexKey2() {

        return indexKey2;
    }

    /** Expected serialized length of the data entry (3 strings + terminators). */
    int expectedDataLength() {

        return data.length() + 1 +
               indexKey1.length() + 1 +
               indexKey2.length() + 1;
    }

    /** Expected serialized length of the key entry (1 string + terminator). */
    int expectedKeyLength() {

        return primaryKey.length() + 1;
    }

    public void marshalEntry(TupleOutput dataOutput) {

        dataOutput.writeString(data);
        dataOutput.writeString(indexKey1);
        dataOutput.writeString(indexKey2);
    }

    public void unmarshalEntry(TupleInput dataInput) {

        data = dataInput.readString();
        indexKey1 = dataInput.readString();
        indexKey2 = dataInput.readString();
    }

    public void marshalPrimaryKey(TupleOutput keyOutput) {

        keyOutput.writeString(primaryKey);
    }

    public void unmarshalPrimaryKey(TupleInput keyInput) {

        primaryKey = keyInput.readString();
    }

    /**
     * Writes the named secondary key when it is present.
     *
     * @return true if the key was written, false if it is absent (empty)
     * @throws IllegalArgumentException for an unknown key name
     */
    public boolean marshalSecondaryKey(String keyName, TupleOutput keyOutput) {

        if ("1".equals(keyName)) {
            if (indexKey1.length() > 0) {
                keyOutput.writeString(indexKey1);
                return true;
            } else {
                return false;
            }
        } else if ("2".equals(keyName)) {
            // Fixed: test indexKey2 (was indexKey1 by copy-paste).
            if (indexKey2.length() > 0) {
                keyOutput.writeString(indexKey2);
                return true;
            } else {
                return false;
            }
        } else {
            throw new IllegalArgumentException("Unknown keyName: " + keyName);
        }
    }

    /**
     * Clears the named secondary key (sets it to empty).
     *
     * @return true if the key was present and is now cleared, false otherwise
     * @throws IllegalArgumentException for an unknown key name
     */
    public boolean nullifyForeignKey(String keyName) {

        if ("1".equals(keyName)) {
            if (indexKey1.length() > 0) {
                indexKey1 = "";
                return true;
            } else {
                return false;
            }
        } else if ("2".equals(keyName)) {
            // Fixed: test indexKey2 (was indexKey1 by copy-paste).
            if (indexKey2.length() > 0) {
                indexKey2 = "";
                return true;
            } else {
                return false;
            }
        } else {
            throw new IllegalArgumentException("Unknown keyName: " + keyName);
        }
    }
}
| gpl-3.0 |
Severed-Infinity/technium | build/tmp/recompileMc/sources/net/minecraft/command/CommandNotFoundException.java | 389 | package net.minecraft.command;
/**
 * Exception thrown when a typed command name does not match any registered
 * command. The message is a translation key resolved client-side.
 */
public class CommandNotFoundException extends CommandException
{
    public CommandNotFoundException()
    {
        // Default to the generic "command not found" translation key.
        this("commands.generic.notFound");
    }

    public CommandNotFoundException(String message, Object... args)
    {
        // message is a translation key; args are its format substitutions.
        super(message, args);
    }

    // Returning 'this' without capturing the stack trace makes construction
    // cheap; the trace is never shown for this flow-control-style exception.
    // 'synchronized' matches the Throwable.fillInStackTrace signature.
    public synchronized Throwable fillInStackTrace()
    {
        return this;
    }
}
buehner/momo3-backend | src/main/java/de/terrestris/momo/converter/MomoLayerIdResolver.java | 712 | package de.terrestris.momo.converter;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import de.terrestris.momo.model.MomoLayer;
import de.terrestris.shogun2.converter.PersistentObjectIdResolver;
import de.terrestris.shogun2.dao.LayerDao;
import de.terrestris.shogun2.service.LayerService;
/**
*
* @author Nils Buehner
* @author Daniel Koch
*
*/
public class MomoLayerIdResolver<E extends MomoLayer, D extends LayerDao<E>, S extends LayerService<E, D>> extends
        PersistentObjectIdResolver<E, D, S> {

    /**
     * Injects the layer service used to resolve entity IDs. The
     * {@code @Qualifier} pins the injection to the "momoLayerService" bean,
     * since multiple LayerService beans may exist in the context.
     */
    @Override
    @Autowired
    @Qualifier("momoLayerService")
    public void setService(S service) {
        this.service = service;
    }
}
| gpl-3.0 |
i62lotor/geowe-core | src/main/java/org/geowe/client/local/layermanager/toolbar/RasterLayerToolBar.java | 2079 | /*
* #%L
* GeoWE Project
* %%
* Copyright (C) 2015 - 2016 GeoWE.org
* %%
* This file is part of GeoWE.org.
*
* GeoWE is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* GeoWE is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with GeoWE. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package org.geowe.client.local.layermanager.toolbar;
import javax.annotation.PostConstruct;
import javax.enterprise.context.ApplicationScoped;
import javax.enterprise.inject.New;
import javax.inject.Inject;
import org.geowe.client.local.layermanager.tool.DeleteLayerTool;
import org.geowe.client.local.layermanager.tool.InfoRasterTool;
import org.geowe.client.local.layermanager.tool.create.AddRasterLayerTool;
import com.google.gwt.user.client.ui.HorizontalPanel;
import com.sencha.gxt.core.client.resources.ThemeStyles;
import com.sencha.gxt.widget.core.client.ContentPanel;
@ApplicationScoped
public class RasterLayerToolBar extends ContentPanel {

	@Inject @New
	private DeleteLayerTool deleteLayerTool;

	@Inject @New
	private InfoRasterTool infoRasterTool;

	@Inject
	private AddRasterLayerTool addWMSLayerTool;

	// Panel that lays the tool widgets out side by side.
	private final HorizontalPanel toolPanel;

	public RasterLayerToolBar() {
		super();
		// The tool bar is a plain strip: no header, only a bottom border.
		setHeaderVisible(false);
		addStyleName(ThemeStyles.get().style().borderBottom());
		toolPanel = new HorizontalPanel();
		toolPanel.setSpacing(5);
	}

	/**
	 * Populates the bar once all tools have been injected. The add order
	 * (add raster layer, delete, info) determines the on-screen order.
	 */
	@PostConstruct
	private void initialize() {
		toolPanel.add(addWMSLayerTool);
		toolPanel.add(deleteLayerTool);
		toolPanel.add(infoRasterTool);
		setWidget(toolPanel);
	}

}
| gpl-3.0 |
MartyParty21/AwakenDreamsClient | mcp/temp/src/minecraft/net/minecraft/client/renderer/BlockRendererDispatcher.java | 5341 | package net.minecraft.client.renderer;
import net.minecraft.block.Block;
import net.minecraft.block.state.IBlockState;
import net.minecraft.client.renderer.BlockFluidRenderer;
import net.minecraft.client.renderer.BlockModelRenderer;
import net.minecraft.client.renderer.BlockModelShapes;
import net.minecraft.client.renderer.ChestRenderer;
import net.minecraft.client.renderer.Tessellator;
import net.minecraft.client.renderer.VertexBuffer;
import net.minecraft.client.renderer.block.model.IBakedModel;
import net.minecraft.client.renderer.block.model.SimpleBakedModel;
import net.minecraft.client.renderer.color.BlockColors;
import net.minecraft.client.renderer.texture.TextureAtlasSprite;
import net.minecraft.client.resources.IResourceManager;
import net.minecraft.client.resources.IResourceManagerReloadListener;
import net.minecraft.crash.CrashReport;
import net.minecraft.crash.CrashReportCategory;
import net.minecraft.util.EnumBlockRenderType;
import net.minecraft.util.ReportedException;
import net.minecraft.util.math.BlockPos;
import net.minecraft.world.IBlockAccess;
import net.minecraft.world.WorldType;
/**
 * Routes block rendering to the renderer that matches a block state's render
 * type: baked models, fluids, or the animated (chest-style) renderer.
 * NOTE(review): identifiers are obfuscated MCP srg names; the comments below
 * describe only behaviour visible in this class.
 */
public class BlockRendererDispatcher implements IResourceManagerReloadListener {
	// Block state -> baked model lookup.
	private final BlockModelShapes field_175028_a;
	// Renderer for MODEL render-type states.
	private final BlockModelRenderer field_175027_c;
	// Renderer used for ENTITYBLOCK_ANIMATED states.
	private final ChestRenderer field_175024_d = new ChestRenderer();
	// Renderer for LIQUID render-type states.
	private final BlockFluidRenderer field_175025_e;

	public BlockRendererDispatcher(BlockModelShapes p_i46577_1_, BlockColors p_i46577_2_) {
		this.field_175028_a = p_i46577_1_;
		this.field_175027_c = new BlockModelRenderer(p_i46577_2_);
		this.field_175025_e = new BlockFluidRenderer(p_i46577_2_);
	}

	/** @return the block-state-to-model lookup held by this dispatcher */
	public BlockModelShapes func_175023_a() {
		return this.field_175028_a;
	}

	/**
	 * Renders a block with every face textured by the given sprite; only
	 * MODEL states are drawn. Presumably used for overlay-style rendering
	 * (e.g. block damage) — TODO confirm against callers.
	 */
	public void func_175020_a(IBlockState p_175020_1_, BlockPos p_175020_2_, TextureAtlasSprite p_175020_3_, IBlockAccess p_175020_4_) {
		if(p_175020_1_.func_185901_i() == EnumBlockRenderType.MODEL) {
			// Resolve the state against the world, then rebuild the model with
			// all quads retextured to the given sprite before rendering.
			p_175020_1_ = p_175020_1_.func_185899_b(p_175020_4_, p_175020_2_);
			IBakedModel ibakedmodel = this.field_175028_a.func_178125_b(p_175020_1_);
			IBakedModel ibakedmodel1 = (new SimpleBakedModel.Builder(p_175020_1_, ibakedmodel, p_175020_3_, p_175020_2_)).func_177645_b();
			this.field_175027_c.func_178267_a(p_175020_4_, ibakedmodel1, p_175020_1_, p_175020_2_, Tessellator.func_178181_a().func_178180_c(), true);
		}
	}

	/**
	 * Tessellates one block state into the given vertex buffer.
	 *
	 * @return true if any geometry was emitted
	 * @throws ReportedException wrapping any failure in a crash report
	 */
	public boolean func_175018_a(IBlockState p_175018_1_, BlockPos p_175018_2_, IBlockAccess p_175018_3_, VertexBuffer p_175018_4_) {
		try {
			EnumBlockRenderType enumblockrendertype = p_175018_1_.func_185901_i();
			if(enumblockrendertype == EnumBlockRenderType.INVISIBLE) {
				return false;
			} else {
				if(p_175018_3_.func_175624_G() != WorldType.field_180272_g) {
					try {
						p_175018_1_ = p_175018_1_.func_185899_b(p_175018_3_, p_175018_2_);
					} catch (Exception var8) {
						// Best effort: fall back to the unresolved state.
						;
					}
				}
				switch(enumblockrendertype) {
				case MODEL:
					return this.field_175027_c.func_178267_a(p_175018_3_, this.func_184389_a(p_175018_1_), p_175018_1_, p_175018_2_, p_175018_4_, true);
				case ENTITYBLOCK_ANIMATED:
					// Animated blocks are not tessellated here.
					return false;
				case LIQUID:
					return this.field_175025_e.func_178270_a(p_175018_3_, p_175018_1_, p_175018_2_, p_175018_4_);
				default:
					return false;
				}
			}
		} catch (Throwable throwable) {
			// Attach the position, block and metadata to the crash report so
			// the failing block can be identified from the log.
			CrashReport crashreport = CrashReport.func_85055_a(throwable, "Tesselating block in world");
			CrashReportCategory crashreportcategory = crashreport.func_85058_a("Block being tesselated");
			CrashReportCategory.func_180523_a(crashreportcategory, p_175018_2_, p_175018_1_.func_177230_c(), p_175018_1_.func_177230_c().func_176201_c(p_175018_1_));
			throw new ReportedException(crashreport);
		}
	}

	/** @return the model renderer used for MODEL states */
	public BlockModelRenderer func_175019_b() {
		return this.field_175027_c;
	}

	/** @return the baked model registered for the given block state */
	public IBakedModel func_184389_a(IBlockState p_184389_1_) {
		return this.field_175028_a.func_178125_b(p_184389_1_);
	}

	/**
	 * Renders a block state at the given brightness without a world context
	 * — presumably inventory/held-item rendering, TODO confirm.
	 */
	public void func_175016_a(IBlockState p_175016_1_, float p_175016_2_) {
		EnumBlockRenderType enumblockrendertype = p_175016_1_.func_185901_i();
		if(enumblockrendertype != EnumBlockRenderType.INVISIBLE) {
			switch(enumblockrendertype) {
			case MODEL:
				IBakedModel ibakedmodel = this.func_184389_a(p_175016_1_);
				this.field_175027_c.func_178266_a(ibakedmodel, p_175016_1_, p_175016_2_, true);
				break;
			case ENTITYBLOCK_ANIMATED:
				this.field_175024_d.func_178175_a(p_175016_1_.func_177230_c(), p_175016_2_);
				// Falls through into the empty LIQUID case; harmless since no
				// statements follow.
			case LIQUID:
			}
		}
	}

	/**
	 * NOTE(review): effectively returns true only for ENTITYBLOCK_ANIMATED
	 * default states — the MODEL comparison is redundant (MODEL can never
	 * equal ENTITYBLOCK_ANIMATED) but is preserved byte-identically here.
	 */
	public boolean func_184388_a(Block p_184388_1_) {
		if(p_184388_1_ == null) {
			return false;
		} else {
			EnumBlockRenderType enumblockrendertype = p_184388_1_.func_176223_P().func_185901_i();
			return enumblockrendertype == EnumBlockRenderType.MODEL?false:enumblockrendertype == EnumBlockRenderType.ENTITYBLOCK_ANIMATED;
		}
	}

	/** Resource reload hook: refreshes the fluid renderer's state. */
	public void func_110549_a(IResourceManager p_110549_1_) {
		this.field_175025_e.func_178268_a();
	}
}
| gpl-3.0 |
jacekwasilewski/RankSys | RankSys-diversity/src/main/java/es/uam/eps/ir/ranksys/diversity/sales/metrics/GiniSimpsonIndex.java | 1493 | /*
* Copyright (C) 2015 Information Retrieval Group at Universidad Autónoma
* de Madrid, http://ir.ii.uam.es
*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*/
package es.uam.eps.ir.ranksys.diversity.sales.metrics;
import es.uam.eps.ir.ranksys.metrics.rank.NoDiscountModel;
import es.uam.eps.ir.ranksys.metrics.rel.NoRelevanceModel;
/**
* Gini-Simpson index sales diversity metric. It is actually a relevance and rank-unaware version of {@link EIUDC} with a proportional mapping.
*
* S. Vargas. Novelty and diversity evaluation and enhancement in Recommender Systems. PhD Thesis.
*
* @author Saúl Vargas (saul.vargas@uam.es)
* @author Pablo Castells (pablo.castells@uam.es)
*
* @param <U> type of the users
* @param <I> type of the items
*/
public class GiniSimpsonIndex<U, I> extends EIUDC<U, I> {

    /**
     * Constructor.
     *
     * @param cutoff maximum length of the recommendation lists that is evaluated
     */
    public GiniSimpsonIndex(int cutoff) {
        super(cutoff, new NoDiscountModel(), new NoRelevanceModel<>());
    }

    /**
     * Computes the metric over the recommendations added so far by linearly
     * rescaling the rank- and relevance-unaware EIUDC value (see class
     * javadoc for the underlying proportional mapping).
     *
     * @return result of the metric for the recommendations previously added
     */
    @Override
    public double evaluate() {
        final double eiudc = super.evaluate();
        return 1 + (eiudc - 1) / cutoff;
    }

}
| gpl-3.0 |
MadMarty/madsonic-5.5 | src/github/madmarty/madsonic/service/parser/JukeboxStatusParser.java | 1994 | /*
This file is part of Subsonic.
Subsonic is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Subsonic is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Subsonic. If not, see <http://www.gnu.org/licenses/>.
Copyright 2009 (C) Sindre Mehus
*/
package github.madmarty.madsonic.service.parser;
import java.io.Reader;
import org.xmlpull.v1.XmlPullParser;
import android.content.Context;
import github.madmarty.madsonic.domain.JukeboxStatus;
/**
* @author Sindre Mehus
*/
public class JukeboxStatusParser extends AbstractParser {
	// Delegates to AbstractParser; the Android context is presumably used by
	// the base class for error reporting — TODO confirm in AbstractParser.
	public JukeboxStatusParser(Context context) {
		super(context);
	}
public JukeboxStatus parse(Reader reader) throws Exception {
init(reader);
JukeboxStatus jukeboxStatus = new JukeboxStatus();
int eventType;
do {
eventType = nextParseEvent();
if (eventType == XmlPullParser.START_TAG) {
String name = getElementName();
if ("jukeboxPlaylist".equals(name) || "jukeboxStatus".equals(name)) {
jukeboxStatus.setPositionSeconds(getInteger("position"));
jukeboxStatus.setCurrentIndex(getInteger("currentIndex"));
jukeboxStatus.setPlaying(getBoolean("playing"));
jukeboxStatus.setGain(getFloat("gain"));
} else if ("error".equals(name)) {
handleError();
}
}
} while (eventType != XmlPullParser.END_DOCUMENT);
validate();
return jukeboxStatus;
}
} | gpl-3.0 |
metasfresh/metasfresh-webui | src/main/java/de/metas/ui/web/bankstatement_reconciliation/BankStatementLineRows.java | 3081 | package de.metas.ui.web.bankstatement_reconciliation;
import java.util.List;
import org.adempiere.util.lang.impl.TableRecordReferenceSet;
import org.compiere.model.I_C_BankStatementLine;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import de.metas.banking.BankStatementLineId;
import de.metas.ui.web.view.template.IRowsData;
import de.metas.ui.web.view.template.SynchronizedRowsIndexHolder;
import de.metas.ui.web.window.datatypes.DocumentId;
import de.metas.ui.web.window.datatypes.DocumentIdsSelection;
import lombok.Builder;
import lombok.NonNull;
/*
* #%L
* metasfresh-webui-api
* %%
* Copyright (C) 2020 metas GmbH
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as
* published by the Free Software Foundation, either version 2 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/gpl-2.0.html>.
* #L%
*/
public class BankStatementLineRows implements IRowsData<BankStatementLineRow>
{
	/** Narrowing helper: the view framework hands back the interface type. */
	public static BankStatementLineRows cast(final IRowsData<BankStatementLineRow> rowsData)
	{
		return (BankStatementLineRows)rowsData;
	}

	// Loads row data from the database.
	private final BankStatementLineAndPaymentsToReconcileRepository repository;
	// Thread-safe holder of the currently displayed rows.
	private final SynchronizedRowsIndexHolder<BankStatementLineRow> rowsHolder;

	@Builder
	private BankStatementLineRows(
			@NonNull final BankStatementLineAndPaymentsToReconcileRepository repository,
			@NonNull final List<BankStatementLineRow> rows)
	{
		this.repository = repository;
		this.rowsHolder = SynchronizedRowsIndexHolder.of(rows);
	}

	@Override
	public ImmutableMap<DocumentId, BankStatementLineRow> getDocumentId2TopLevelRows()
	{
		return rowsHolder.getDocumentId2TopLevelRows();
	}

	@Override
	public DocumentIdsSelection getDocumentIdsToInvalidate(@NonNull final TableRecordReferenceSet recordRefs)
	{
		// Map each referenced C_BankStatementLine record to its row document
		// id and keep only the ids the holder currently displays.
		return recordRefs
				.streamIds(I_C_BankStatementLine.Table_Name, BankStatementLineId::ofRepoId)
				.map(lineId -> BankStatementLineRow.convertBankStatementLineIdToDocumentId(lineId))
				.filter(rowsHolder.isRelevantForRefreshingByDocumentId())
				.collect(DocumentIdsSelection.toDocumentIdsSelection());
	}

	@Override
	public void invalidateAll()
	{
		invalidate(DocumentIdsSelection.ALL);
	}

	@Override
	public void invalidate(final DocumentIdsSelection rowIds)
	{
		// Reload the affected lines from the repository and splice the fresh
		// rows into the holder in one atomic compute step.
		final ImmutableSet<BankStatementLineId> lineIdsToReload = rowsHolder
				.getRecordIdsToRefresh(rowIds, BankStatementLineRow::convertDocumentIdToBankStatementLineId);
		final List<BankStatementLineRow> reloadedRows = repository.getBankStatementLineRowsByIds(lineIdsToReload);
		rowsHolder.compute(currentRows -> currentRows.replacingRows(rowIds, reloadedRows));
	}
}
| gpl-3.0 |
ylyang/mipa | src/net/sourceforge/mipa/eca/Listener.java | 1068 | /*
* MIPA - Middleware Infrastructure for Predicate detection in Asynchronous
* environments
*
* Copyright (C) 2009 the original author or authors.
*
* This program is free software: you can redistribute it and/or modify
* it under the term of the GNU General Public License as published by
* the Free Software Foundation, either version 3, or (at your option)
* any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package net.sourceforge.mipa.eca;
import java.io.Serializable;
/**
 * Callback notified by the ECA (event-condition-action) mechanism when a
 * monitored event produces a new value. NOTE(review): the original comment
 * called this "the action interface"; from this file alone it only receives
 * update notifications — confirm its role against the ECA dispatcher.
 *
 * @author Jianping Yu <jianp.yue@gmail.com>
 */
public interface Listener extends Serializable {
	// Invoked with the event's name and its new value, both as strings.
	public void update(String eventName, String value);
}
| gpl-3.0 |
amyvmiwei/miwei_temp | java/hypertable-common/src/main/java/org/hypertable/FsBroker/Lib/ResponseCallbackPositionRead.java | 1861 | /**
* Copyright (C) 2007-2015 Hypertable, Inc.
*
* This file is part of Hypertable.
*
* Hypertable is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 3
* of the License, or any later version.
*
* Hypertable is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
* 02110-1301, USA.
*/
package org.hypertable.FsBroker.Lib;
import org.hypertable.AsyncComm.Comm;
import org.hypertable.AsyncComm.CommBuf;
import org.hypertable.AsyncComm.CommHeader;
import org.hypertable.AsyncComm.Event;
import org.hypertable.AsyncComm.ResponseCallback;
import org.hypertable.Common.Error;
import org.hypertable.Common.Serialization;
/**
 * Response callback for an FsBroker positional-read (pread) request: sends
 * the read offset, byte count and data back to the requesting client.
 */
public class ResponseCallbackPositionRead extends ResponseCallback {

	ResponseCallbackPositionRead(Comm comm, Event event) {
		super(comm, event);
	}

	// Encoding version byte of the pread response payload.
	static final byte VERSION = 1;

	/**
	 * Builds and sends the response message. Layout, in order: 4-byte OK
	 * error code + 1-byte version (the "5" below), a vint32-encoded
	 * parameter length of 12, then the 8-byte offset and 4-byte read count
	 * (the "12"), with the data buffer attached as the payload.
	 *
	 * @param offset file offset the data was read from
	 * @param nread  number of bytes actually read
	 * @param data   buffer holding the bytes read
	 * @return result of sending the response on the comm layer
	 */
	public int response(long offset, int nread, byte [] data) {
		CommHeader header = new CommHeader();
		header.initialize_from_request_header(mEvent.header);
		CommBuf cbuf = new CommBuf(header,
			5 + Serialization.EncodedLengthVInt32(12) + 12,
			data, nread);
		cbuf.AppendInt(Error.OK);
		cbuf.AppendByte(VERSION);
		Serialization.EncodeVInt32(cbuf.data, 12);
		cbuf.AppendLong(offset);
		cbuf.AppendInt(nread);
		return mComm.SendResponse(mEvent.addr, cbuf);
	}
}
| gpl-3.0 |
apavlo/h-store | third_party/cpp/berkeleydb/examples/java/src/persist/DplDump.java | 4314 | /*-
* See the file LICENSE for redistribution information.
*
* Copyright (c) 2002, 2015 Oracle and/or its affiliates. All rights reserved.
*
* $Id$
*/
package persist;
import java.io.File;
import java.io.FileNotFoundException;
import com.sleepycat.db.DatabaseException;
import com.sleepycat.db.Environment;
import com.sleepycat.db.EnvironmentConfig;
import com.sleepycat.persist.EntityCursor;
import com.sleepycat.persist.EntityStore;
import com.sleepycat.persist.PrimaryIndex;
import com.sleepycat.persist.StoreConfig;
import com.sleepycat.persist.model.EntityMetadata;
import com.sleepycat.persist.model.EntityModel;
import com.sleepycat.persist.raw.RawObject;
import com.sleepycat.persist.raw.RawStore;
import com.sleepycat.persist.raw.RawType;
/**
* Dumps a store or all stores to standard output in raw XML format. This
 * sample is intended to be modified to dump in application-specific ways.
* @see #usage
*/
/**
 * Dumps one entity store to standard output in raw {@code RawObject} format,
 * or (with -meta) dumps the store's raw type metadata instead. Intended as a
 * sample to be adapted to application-specific dumping.
 * @see #usage
 */
public class DplDump {

    /** Environment home directory (-h). */
    private File envHome;

    /** Name of the entity store to dump (-s). */
    private String storeName;

    /** When true, dump raw type metadata instead of entity data (-meta). */
    private boolean dumpMetadata;

    private Environment env;

    public static void main(String[] args) {
        try {
            DplDump dump = new DplDump(args);
            dump.open();
            try {
                dump.dump();
            } finally {
                // BUGFIX: the environment was left open when dump() threw.
                dump.close();
            }
        } catch (Throwable e) {
            e.printStackTrace();
            System.exit(1);
        }
    }

    /**
     * Parses command-line arguments; prints usage and exits on any error.
     * A token following an option is taken as its value unless it starts
     * with '-'.
     */
    private DplDump(String[] args) {
        for (int i = 0; i < args.length; i += 1) {
            String name = args[i];
            String val = null;
            if (i < args.length - 1 && !args[i + 1].startsWith("-")) {
                i += 1;
                val = args[i];
            }
            if (name.equals("-h")) {
                if (val == null) {
                    usage("No value after -h");
                }
                envHome = new File(val);
            } else if (name.equals("-s")) {
                if (val == null) {
                    usage("No value after -s");
                }
                storeName = val;
            } else if (name.equals("-meta")) {
                dumpMetadata = true;
            } else {
                usage("Unknown arg: " + name);
            }
        }
        if (storeName == null) {
            usage("-s not specified");
        }
        if (envHome == null) {
            usage("-h not specified");
        }
    }

    /**
     * Prints an optional error message plus the usage text, then exits with
     * status 2. Never returns.
     */
    private void usage(String msg) {
        if (msg != null) {
            System.out.println(msg);
        }
        System.out.println
            ("usage:" +
             "\njava " + DplDump.class.getName() +
             "\n -h <envHome>" +
             "\n # Environment home directory" +
             "\n [-meta]" +
             "\n # Dump metadata; default: false" +
             "\n -s <storeName>" +
             "\n # Store to dump");
        System.exit(2);
    }

    /** Opens the environment with cache and locking enabled. */
    private void open()
        throws DatabaseException, FileNotFoundException {

        EnvironmentConfig envConfig = new EnvironmentConfig();
        envConfig.setInitializeCache(true);
        envConfig.setInitializeLocking(true);
        env = new Environment(envHome, envConfig);
    }

    /** Closes the environment. */
    private void close()
        throws DatabaseException {

        env.close();
    }

    /**
     * Opens the store read-only through the raw API and dumps either the
     * metadata or every entity of every known entity class to stdout.
     */
    private void dump()
        throws DatabaseException {

        StoreConfig storeConfig = new StoreConfig();
        storeConfig.setReadOnly(true);
        RawStore store = new RawStore(env, storeName, storeConfig);
        try {
            EntityModel model = store.getModel();
            for (String clsName : model.getKnownClasses()) {
                // Classes without entity metadata are not entities; skip.
                EntityMetadata meta = model.getEntityMetadata(clsName);
                if (meta == null) {
                    continue;
                }
                if (dumpMetadata) {
                    for (RawType type : model.getAllRawTypeVersions(clsName)) {
                        System.out.println(type);
                    }
                } else {
                    PrimaryIndex<Object,RawObject> index =
                        store.getPrimaryIndex(clsName);
                    EntityCursor<RawObject> entities = index.entities();
                    try {
                        for (RawObject entity : entities) {
                            System.out.println(entity);
                        }
                    } finally {
                        // BUGFIX: the cursor leaked when iteration threw.
                        entities.close();
                    }
                }
            }
        } finally {
            // BUGFIX: the store leaked when dumping failed part-way.
            store.close();
        }
    }
}
| gpl-3.0 |
MartyParty21/AwakenDreamsClient | mcp/src/minecraft/net/minecraft/client/gui/GuiOptionsRowList.java | 4802 | package net.minecraft.client.gui;
import com.google.common.collect.Lists;
import java.util.List;
import net.minecraft.client.Minecraft;
import net.minecraft.client.settings.GameSettings;
/**
 * A two-column list widget of option controls (buttons/sliders). Each visible
 * row holds up to two controls, one per column.
 */
public class GuiOptionsRowList extends GuiListExtended
{
    // One Row per pair of options, in construction order.
    private final List<GuiOptionsRowList.Row> options = Lists.<GuiOptionsRowList.Row>newArrayList();

    public GuiOptionsRowList(Minecraft mcIn, int p_i45015_2_, int p_i45015_3_, int p_i45015_4_, int p_i45015_5_, int p_i45015_6_, GameSettings.Options... p_i45015_7_)
    {
        super(mcIn, p_i45015_2_, p_i45015_3_, p_i45015_4_, p_i45015_5_, p_i45015_6_);
        this.centerListVertically = false;

        // Lay options out two per row: even indices in the left column, odd
        // indices 160px to the right. A trailing unpaired option leaves the
        // right-hand control null.
        for (int i = 0; i < p_i45015_7_.length; i += 2)
        {
            GameSettings.Options gamesettings$options = p_i45015_7_[i];
            GameSettings.Options gamesettings$options1 = i < p_i45015_7_.length - 1 ? p_i45015_7_[i + 1] : null;
            GuiButton guibutton = this.createButton(mcIn, p_i45015_2_ / 2 - 155, 0, gamesettings$options);
            GuiButton guibutton1 = this.createButton(mcIn, p_i45015_2_ / 2 - 155 + 160, 0, gamesettings$options1);
            this.options.add(new GuiOptionsRowList.Row(guibutton, guibutton1));
        }
    }

    /**
     * Creates the control for one option: a slider for float-valued options,
     * otherwise a button labelled with the option's current value. Returns
     * null when the option itself is null (unpaired last column).
     */
    private GuiButton createButton(Minecraft mcIn, int p_148182_2_, int p_148182_3_, GameSettings.Options options)
    {
        if (options == null)
        {
            return null;
        }
        else
        {
            // The option's ordinal doubles as the button id.
            int i = options.returnEnumOrdinal();
            return (GuiButton)(options.getEnumFloat() ? new GuiOptionSlider(i, p_148182_2_, p_148182_3_, options) : new GuiOptionButton(i, p_148182_2_, p_148182_3_, options, mcIn.gameSettings.getKeyBinding(options)));
        }
    }

    /**
     * Gets the IGuiListEntry object for the given index
     */
    public GuiOptionsRowList.Row getListEntry(int index)
    {
        return (GuiOptionsRowList.Row)this.options.get(index);
    }

    // Number of rows (option pairs), not number of options.
    protected int getSize()
    {
        return this.options.size();
    }

    /**
     * Gets the width of the list
     */
    public int getListWidth()
    {
        return 400;
    }

    protected int getScrollBarX()
    {
        return super.getScrollBarX() + 32;
    }

    /**
     * One list row holding up to two option controls; the right-hand control
     * (buttonB) may be null for an unpaired trailing option.
     */
    public static class Row implements GuiListExtended.IGuiListEntry
    {
        private final Minecraft client = Minecraft.getMinecraft();
        private final GuiButton buttonA;
        private final GuiButton buttonB;

        public Row(GuiButton buttonAIn, GuiButton buttonBIn)
        {
            this.buttonA = buttonAIn;
            this.buttonB = buttonBIn;
        }

        public void drawEntry(int slotIndex, int x, int y, int listWidth, int slotHeight, int mouseX, int mouseY, boolean isSelected)
        {
            // Rows are repositioned every frame: move each control to this
            // row's y before drawing it.
            if (this.buttonA != null)
            {
                this.buttonA.yPosition = y;
                this.buttonA.drawButton(this.client, mouseX, mouseY);
            }

            if (this.buttonB != null)
            {
                this.buttonB.yPosition = y;
                this.buttonB.drawButton(this.client, mouseX, mouseY);
            }
        }

        public boolean mousePressed(int slotIndex, int mouseX, int mouseY, int mouseEvent, int relativeX, int relativeY)
        {
            // NOTE(review): buttonA is not null-checked here, unlike in
            // drawEntry/mouseReleased. Only buttonB can be null (unpaired
            // trailing option), so this appears safe — confirm.
            if (this.buttonA.mousePressed(this.client, mouseX, mouseY))
            {
                // Clicking a plain option button cycles its value by 1 and
                // refreshes the label; sliders handle updates internally.
                if (this.buttonA instanceof GuiOptionButton)
                {
                    this.client.gameSettings.setOptionValue(((GuiOptionButton)this.buttonA).returnEnumOptions(), 1);
                    this.buttonA.displayString = this.client.gameSettings.getKeyBinding(GameSettings.Options.getEnumOptions(this.buttonA.id));
                }

                return true;
            }
            else if (this.buttonB != null && this.buttonB.mousePressed(this.client, mouseX, mouseY))
            {
                if (this.buttonB instanceof GuiOptionButton)
                {
                    this.client.gameSettings.setOptionValue(((GuiOptionButton)this.buttonB).returnEnumOptions(), 1);
                    this.buttonB.displayString = this.client.gameSettings.getKeyBinding(GameSettings.Options.getEnumOptions(this.buttonB.id));
                }

                return true;
            }
            else
            {
                return false;
            }
        }

        public void mouseReleased(int slotIndex, int x, int y, int mouseEvent, int relativeX, int relativeY)
        {
            if (this.buttonA != null)
            {
                this.buttonA.mouseReleased(x, y);
            }

            if (this.buttonB != null)
            {
                this.buttonB.mouseReleased(x, y);
            }
        }

        public void setSelected(int p_178011_1_, int p_178011_2_, int p_178011_3_)
        {
            // Option rows have no selection state.
        }
    }
}
| gpl-3.0 |
jgrivolla/dkpro-wsd-gpl | de.tudarmstadt.ukp.dkpro.wsd.supervised.ims-gpl/src/main/java/sg/edu/nus/comp/nlp/ims/implement/CTester.java | 15772 | /*******************************************************************************
* IMS (It Makes Sense) -- NUS WSD System
* Copyright (c) 2013 National University of Singapore.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
******************************************************************************/
package sg.edu.nus.comp.nlp.ims.implement;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.Reader;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Hashtable;
import java.util.StringTokenizer;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import sg.edu.nus.comp.nlp.ims.classifiers.CLibLinearEvaluator;
import sg.edu.nus.comp.nlp.ims.classifiers.IEvaluator;
import sg.edu.nus.comp.nlp.ims.corpus.*;
import sg.edu.nus.comp.nlp.ims.feature.*;
import sg.edu.nus.comp.nlp.ims.instance.*;
import sg.edu.nus.comp.nlp.ims.io.*;
import sg.edu.nus.comp.nlp.ims.lexelt.*;
import sg.edu.nus.comp.nlp.ims.util.*;
/**
* main interface of test.
*
* @author zhongzhi
*
*/
public class CTester {
	// Default instance extractor class name.
	protected static final String INSTANCEEXTRACTOR = CInstanceExtractor.class
			.getName();
	// Default feature extractor class name.
	protected static final String FEATUREEXTRACTOR = CFeatureExtractorCombination.class
			.getName();
	// Default corpus class name.
	protected static final String CORPUS = CLexicalCorpus.class.getName();
	// Evaluator used to score each lexelt (LibLinear by default).
	protected IEvaluator m_Evaluator = new CLibLinearEvaluator();
	// Writer used by write() to output the accumulated results.
	protected IResultWriter m_Writer = new CResultWriter();
	// Evaluation results, one entry per evaluated lexelt.
	protected ArrayList<Object> m_Results = new ArrayList<Object>();
	// Instance extractor class name (instantiated reflectively in test).
	protected String m_InstanceExtractorName = INSTANCEEXTRACTOR;
	// Feature extractor class name (instantiated reflectively in test).
	protected String m_FeatureExtractorName = FEATUREEXTRACTOR;
	// Corpus class name (instantiated reflectively in test).
	protected String m_CorpusName = CORPUS;
	// Token/lemma/POS delimiter; null means the corpus default is used.
	protected String m_Delimiter = null;
	// Whether the input corpus is already sentence-split.
	protected boolean m_Split = false;
	// Whether the input corpus is already tokenized.
	protected boolean m_Tokenized = false;
	// Whether lemmas are provided in the corpus.
	protected boolean m_Lemmatized = false;
	// Whether POS tags are provided in the corpus.
	protected boolean m_POSTagged = false;
/**
* test xml file
*
* @param p_XmlFile
* test file
* @throws Exception
* test exception
*/
public void test(String p_XmlFile) throws Exception {
Reader reader = new InputStreamReader(new FileInputStream(p_XmlFile));
this.test(reader);
reader.close();
}
/**
* test a xml file with given lexelt ids for each test instance
*
* @param p_XmlFile
* test file
* @param p_LexeltFile
* lexelt id of each instances
* @throws Exception
* test exception
*/
public void test(String p_XmlFile, String p_LexeltFile) throws Exception {
String line = null;
StringTokenizer tokenizer = null;
Hashtable<String, ArrayList<String>> instanceLexeltIDs = new Hashtable<String, ArrayList<String>>();
BufferedReader lexeltReader = new BufferedReader(new InputStreamReader(
new FileInputStream(p_LexeltFile)));
while ((line = lexeltReader.readLine()) != null) {
tokenizer = new StringTokenizer(line);
if (tokenizer.countTokens() < 2) {
lexeltReader.close();
}
String id = tokenizer.nextToken();
ArrayList<String> lexeltIDs = new ArrayList<String>();
while (tokenizer.hasMoreTokens()) {
lexeltIDs.add(tokenizer.nextToken());
}
instanceLexeltIDs.put(id, lexeltIDs);
}
lexeltReader.close();
Reader reader = new InputStreamReader(new FileInputStream(p_XmlFile));
this.test(reader, instanceLexeltIDs);
reader.close();
}
	/**
	 * Tests instances from an already-opened test file reader, with no
	 * pre-assigned lexelt ids per instance (each instance's own lexelt id
	 * is used).
	 *
	 * @param p_XmlReader
	 *            test file reader
	 * @throws Exception
	 *             test exception
	 */
	public void test(Reader p_XmlReader) throws Exception {
		this.test(p_XmlReader, null);
	}
	/**
	 * Core test routine: loads the corpus from the reader, extracts one
	 * instance per target word, groups the instances by lexelt id, and
	 * evaluates each lexelt, appending one result per lexelt (in sorted id
	 * order) to {@code m_Results}.
	 *
	 * If {@code p_InstanceLexeltIDs} is non-null, every instance id must be
	 * present in it and the instance is added under each mapped lexelt id;
	 * otherwise the instance's own lexelt id is used.
	 *
	 * @param p_XmlReader
	 *            test file reader
	 * @param p_InstanceLexeltIDs
	 *            instance id to lexelt ids mapping (may be null)
	 * @throws Exception
	 *             test exception
	 */
	public void test(Reader p_XmlReader, Hashtable<String, ArrayList<String>> p_InstanceLexeltIDs)
			throws Exception {
		// Instantiate the configured extractors and corpus reflectively from
		// their class names (configurable via the setters below).
		IInstanceExtractor instExtractor = (IInstanceExtractor) Class.forName(
				this.m_InstanceExtractorName).newInstance();
		IFeatureExtractor featExtractor = (IFeatureExtractor) Class.forName(
				this.m_FeatureExtractorName).newInstance();
		ACorpus corpus = (ACorpus) Class.forName(this.m_CorpusName)
				.newInstance();
		if (this.m_Delimiter != null) {
			corpus.setDelimiter(this.m_Delimiter);
		}
		// Propagate the preprocessing flags before loading the corpus.
		corpus.setSplit(this.m_Split);
		corpus.setTokenized(this.m_Tokenized);
		corpus.setPOSTagged(this.m_POSTagged);
		corpus.setLemmatized(this.m_Lemmatized);
		corpus.load(p_XmlReader);
		// Plain-corpus writers need the corpus itself to render output.
		if (this.m_Writer != null && CPlainCorpusResultWriter.class.isInstance(this.m_Writer)) {
			((CPlainCorpusResultWriter)this.m_Writer).setCorpus(corpus);
		}
		instExtractor.setCorpus(corpus);
		instExtractor.setFeatureExtractor(featExtractor);
		// Group instances by lexelt id.
		Hashtable<String, ILexelt> lexelts = new Hashtable<String, ILexelt>();
		while (instExtractor.hasNext()) {
			IInstance instance = instExtractor.next();
			String lexeltID = instance.getLexeltID();
			if (p_InstanceLexeltIDs != null) {
				if (p_InstanceLexeltIDs.containsKey(instance.getID())) {
					// An instance may be evaluated under several lexelts.
					ArrayList<String> ids = p_InstanceLexeltIDs.get(instance
							.getID());
					for (int i = 0; i < ids.size(); i++) {
						lexeltID = ids.get(i);
						if (!lexelts.containsKey(lexeltID)) {
							lexelts.put(lexeltID, new CLexelt(lexeltID));
						}
						lexelts.get(lexeltID).addInstance(instance);
					}
				} else {
					throw new Exception("instance \"" + instance.getID()
							+ "\" is not defined in lexelt file.");
				}
			} else {
				if (!lexelts.containsKey(lexeltID)) {
					lexelts.put(lexeltID, new CLexelt(lexeltID));
				}
				lexelts.get(lexeltID).addInstance(instance);
			}
		}
		// Evaluate lexelts in sorted id order; remove each one as it is
		// evaluated so its instances can be garbage collected.
		ArrayList<String> lexeltIDs = new ArrayList<String>();
		lexeltIDs.addAll(lexelts.keySet());
		Collections.sort(lexeltIDs);
		for (String lexeltID : lexeltIDs) {
			System.err.println(lexeltID);
			Object lexelt = lexelts.remove(lexeltID);
			this.m_Results.add(this.m_Evaluator.evaluate(lexelt));
		}
	}
	/**
	 * Returns the evaluation results accumulated by previous {@code test}
	 * calls, one entry per lexelt in sorted lexelt-id order.
	 *
	 * @return results
	 */
	public ArrayList<Object> getResults() {
		return this.m_Results;
	}

	/**
	 * Sets whether the input corpus is already sentence-split.
	 *
	 * @param p_Split whether split
	 */
	public void setSplit(boolean p_Split) {
		this.m_Split = p_Split;
	}

	/**
	 * Sets whether the sentences are already tokenized.
	 *
	 * @param p_Tokenized whether tokenized
	 */
	public void setTokenized(boolean p_Tokenized) {
		this.m_Tokenized = p_Tokenized;
	}

	/**
	 * Sets whether POS tags are provided in the corpus.
	 *
	 * @param p_POSTagged whether pos tagged
	 */
	public void setPOSTagged(boolean p_POSTagged) {
		this.m_POSTagged = p_POSTagged;
	}

	/**
	 * Sets whether lemmas are provided in the corpus.
	 *
	 * @param p_Lemmatized whether lemmatized
	 */
	public void setLemmatized(boolean p_Lemmatized) {
		this.m_Lemmatized = p_Lemmatized;
	}

	/**
	 * Sets the delimiter that separates tokens, lemmas and POS tags.
	 *
	 * @param p_Delimiter delimiter
	 */
	public void setDelimiter(String p_Delimiter) {
		this.m_Delimiter = p_Delimiter;
	}

	/**
	 * Sets the evaluator used to score each lexelt.
	 *
	 * @param p_Evaluator evaluator
	 */
	public void setEvaluator(IEvaluator p_Evaluator) {
		this.m_Evaluator = p_Evaluator;
	}

	/**
	 * Sets the writer used by {@link #write()} to output results.
	 *
	 * @param p_Writer writer
	 */
	public void setWriter(IResultWriter p_Writer) {
		this.m_Writer = p_Writer;
	}

	/**
	 * Sets the corpus class name (instantiated reflectively in test).
	 *
	 * @param p_Name corpus class name
	 */
	public void setCorpusClassName(String p_Name) {
		this.m_CorpusName = p_Name;
	}

	/**
	 * Sets the instance extractor class name (instantiated reflectively).
	 *
	 * @param p_Name instance extractor name
	 */
	public void setInstanceExtractorName(String p_Name) {
		this.m_InstanceExtractorName = p_Name;
	}

	/**
	 * Sets the feature extractor class name (instantiated reflectively).
	 *
	 * @param p_Name feature extractor name
	 */
	public void setFeatureExtractorName(String p_Name) {
		this.m_FeatureExtractorName = p_Name;
	}

	/**
	 * Writes the accumulated results with the configured writer.
	 *
	 * @throws IOException exception while writing
	 */
	public void write() throws IOException {
		this.m_Writer.write(this.m_Results);
	}

	/**
	 * Clears the accumulated results.
	 */
	public void clear() {
		this.m_Results.clear();
	}
/**
* @param p_Args
* arguments
*/
public static void main(String[] p_Args) {
try {
String generalOptions = "Usage: testPath modelDir statisticDir saveDir\n"
+ "\t-i class name of Instance Extractor(default sg.edu.nus.comp.nlp.ims.instance.CInstanceExtractor)\n"
+ "\t-f class name of Feature Extractor(default sg.edu.nus.comp.nlp.ims.feature.CFeatureExtractorCombination)\n"
+ "\t-c class name of Corpus(default sg.edu.nus.comp.nlp.ims.corpus.CLexicalCorpus)\n"
+ "\t-e class name of Evaluator(default sg.edu.nus.comp.nlp.ims.classifiers.CLibLinearEvaluator)\n"
+ "\t-r class name of Result Writer(default sg.edu.nus.comp.nlp.ims.io.CResultWriter)\n"
+ "\t-lexelt path of lexelt file\n"
+ "\t-is path of index.sense(option)\n"
+ "\t-prop path of prop.xml for JWNL\n"
+ "\t-split 1/0 whether the corpus is sentence splitted(default 0)\n"
+ "\t-ssm path of sentence splitter model\n"
+ "\t-token 1/0 whether the corpus is tokenized(default 0)\n"
+ "\t-pos 1/0 whether the pos tag is provided in corpus(default 0)\n"
+ "\t-ptm path POS tagger model\n"
+ "\t-dict path of dictionary for opennlp POS tagger(option)\n"
+ "\t-tagdict path of tagdict for POS tagger(option)\n"
+ "\t-lemma 1/0 whether the lemma is provided in the corpus(default 0)\n"
+ "\t-delimiter the delimiter to separate tokens, lemmas and POS tags (default \"/\")\n"
+ "\t-type type of testPath\n"
+ "\t\tdirectory: test all xml files under directory testPath\n"
+ "\t\tlist: test all files listed in file testPath\n"
+ "\t\tfile(default): test file testPath\n";
CArgumentManager argmgr = new CArgumentManager(p_Args);
if (argmgr.size() != 4) { // check arguments
throw new IllegalArgumentException(generalOptions);
}
CTester tester = new CTester();
String type = "file";
File testPath = new File(argmgr.get(0));
String modelDir = argmgr.get(1);
String statDir = argmgr.get(2);
String saveDir = argmgr.get(3);
String evaluatorName = CLibLinearEvaluator.class.getName();
String writerName = CResultWriter.class.getName();
String lexeltFile = null;
if (argmgr.has("lexelt")) {
lexeltFile = argmgr.get("lexelt");
}
if (argmgr.has("type")) {
type = argmgr.get("type");
}
// initial JWordNet
if (!argmgr.has("prop")) {
System.err.println("prop.xml file for JWNL has not been set.");
throw new IllegalArgumentException(generalOptions);
}
CJWNL.initial(new FileInputStream(argmgr.get("prop")));
// set sentence splitter
if (argmgr.has("split") && Integer.parseInt(argmgr.get("split")) == 1) {
tester.setSplit(true);
}
if (argmgr.has("ssm")) {
COpenNLPSentenceSplitter.setDefaultModel(argmgr.get("ssm"));
}
if (argmgr.has("token") && Integer.parseInt(argmgr.get("token")) == 1) {
tester.setTokenized(true);
}
// set pos tagger
if (argmgr.has("pos") && Integer.parseInt(argmgr.get("pos")) == 1) {
tester.setPOSTagged(true);
tester.setTokenized(true);
}
if (argmgr.has("ptm")) {
COpenNLPPOSTagger.setDefaultModel(argmgr.get("ptm"));
}
if (argmgr.has("dict")) {
COpenNLPPOSTagger.setDefaultDictionary(argmgr.get("dict"));
}
if (argmgr.has("tagdict")) {
COpenNLPPOSTagger.setDefaultPOSDictionary(argmgr.get("tagdict"));
}
if (argmgr.has("lemma") && Integer.parseInt(argmgr.get("lemma")) == 1) {
tester.setLemmatized(true);
tester.setTokenized(true);
}
if (argmgr.has("delimiter")) {
tester.setDelimiter(argmgr.get("delimiter"));
}
// set evaluator
if (argmgr.has("e")) {
evaluatorName = argmgr.get("e");
}
IEvaluator evaluator = (IEvaluator) Class.forName(evaluatorName)
.newInstance();
if (argmgr.has("l") && argmgr.has("permanent")) {
evaluator.setOptions(new String[]{"-m", modelDir, "-s", statDir, "-l", argmgr.get("l"), "-permanent", argmgr.get("permanent")});
} else if (argmgr.has("l")) {
evaluator.setOptions(new String[]{"-m", modelDir, "-s", statDir, "-l", argmgr.get("l")});
} else {
evaluator.setOptions(new String[]{"-m", modelDir, "-s", statDir});
}
evaluator.setOptions(new String[] { "-m", modelDir, "-s", statDir });
if (argmgr.has("is")) {
ISenseIndex senseIndex = new CWordNetSenseIndex(argmgr
.get("is"));
evaluator.setSenseIndex(senseIndex);
}
// set result writer
if (argmgr.has("r")) {
writerName = argmgr.get("r");
}
IResultWriter writer = (IResultWriter) Class.forName(writerName)
.newInstance();
writer.setOptions(new String[] { "-s", saveDir });
tester.setEvaluator(evaluator);
tester.setWriter(writer);
if (argmgr.has("i")) {
tester.setInstanceExtractorName(argmgr.get("i"));
}
if (argmgr.has("f")) {
tester.setFeatureExtractorName(argmgr.get("f"));
}
if (argmgr.has("c")) {
tester.setCorpusClassName(argmgr.get("c"));
}
Pattern xmlPattern = Pattern.compile("([^\\/]*)\\.xml$");
Matcher matcher = null;
ArrayList<File> testFiles = new ArrayList<File>();
if (type.equals("list")) { // in file
String line = null;
BufferedReader reader = new BufferedReader(new InputStreamReader(
new FileInputStream(testPath)));
while ((line = reader.readLine()) != null) {
testFiles.add(new File(line));
}
reader.close();
} else if (type.equals("directory")) {
if (!testPath.exists() || !testPath.isDirectory()) {
throw new Exception("Error: cannot not find test path "
+ testPath.getName() + "!\n");
}
File[] files = testPath.listFiles();
for (File file : files) {
matcher = xmlPattern.matcher(file.getAbsolutePath());
if (matcher.find()) {
testFiles.add(file);
}
}
} else {
testFiles.add(testPath);
}
for (File testFile : testFiles) {
System.err.println("testing " + testFile.getAbsolutePath());
if (lexeltFile != null) {
tester.test(testFile.getAbsolutePath(), lexeltFile);
} else {
tester.test(testFile.getAbsolutePath());
}
System.err.println("writing results");
tester.write();
tester.clear();
}
} catch (Exception e) {
e.printStackTrace();
}
}
}
| gpl-3.0 |
MilosKozak/AndroidAPS | app/src/main/java/info/nightscout/androidaps/plugins/pump/medtronic/defs/MedtronicCommandType.java | 17704 | package info.nightscout.androidaps.plugins.pump.medtronic.defs;
import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;
import info.nightscout.androidaps.R;
import info.nightscout.androidaps.plugins.pump.medtronic.comm.message.MessageBody;
import info.nightscout.androidaps.plugins.pump.medtronic.comm.message.PumpAckMessageBody;
import info.nightscout.androidaps.plugins.pump.medtronic.comm.message.UnknownMessageBody;
/**
 * Taken from GNU Gluco Control diabetes management software (ggc.sourceforge.net)
 * <p>
 * Description: Medtronic Commands (Pump and CGMS) for all 512 and later models (just 5xx)
 * <p>
 * Link to original/unmodified file:
 * https://sourceforge.net/p/ggc/code/HEAD/tree/trunk/ggc-plugins/ggc-plugins-base/src/
 * main/java/ggc/plugin/device/impl/minimed/enums/MinimedCommandType.java
 * <p>
 * A lot of stuff has been removed because it is not needed anymore (historical stuff from CareLink
 * and Carelink USB communication.
 * <p>
 * Author: Andy {andy@atech-software.com}
 */
public enum MedtronicCommandType implements Serializable // , MinimedCommandTypeInterface
{
    InvalidCommand(0, "Invalid Command", null, null), //

    // Pump Responses (9)
    CommandACK(0x06, "ACK - Acknowledge", MedtronicDeviceType.All, MinimedCommandParameterType.NoParameters), //
    CommandNAK(0x15, "NAK - Not Acknowledged", MedtronicDeviceType.All, MinimedCommandParameterType.NoParameters), //

    // All (8)
    PushAck(91, "Push ACK", MedtronicDeviceType.All, MinimedCommandParameterType.FixedParameters, getByteArray(2)), //
    PushEsc(91, "Push Esc", MedtronicDeviceType.All, MinimedCommandParameterType.FixedParameters, getByteArray(1)), //
    PushButton(0x5b, "Push Button", MedtronicDeviceType.All, MinimedCommandParameterType.NoParameters), // 91
    RFPowerOn(93, "RF Power On", MedtronicDeviceType.All, MinimedCommandParameterType.FixedParameters, getByteArray(
        1, 10)), //
    RFPowerOff(93, "RF Power Off", MedtronicDeviceType.All, MinimedCommandParameterType.FixedParameters, getByteArray(
        0, 0)), //
    // SetSuspend(77, "Set Suspend", MinimedTargetType.InitCommand, MedtronicDeviceType.All,
    // MinimedCommandParameterType.FixedParameters, getByteArray(1)), //
    // CancelSuspend(77, "Cancel Suspend", MinimedTargetType.InitCommand, MedtronicDeviceType.All,
    // MinimedCommandParameterType.FixedParameters, getByteArray(0)), //
    PumpState(131, "Pump State", MedtronicDeviceType.All, MinimedCommandParameterType.NoParameters), //
    ReadPumpErrorStatus(117, "Pump Error Status", MedtronicDeviceType.All, MinimedCommandParameterType.NoParameters), //

    // 511 (InitCommand = 2, Config 7, Data = 1(+3)
    // DetectBolus(75, "Detect Bolus", MedtronicDeviceType.Medtronic_511, MinimedCommandParameterType.FixedParameters, getByteArray(
    // 0, 0, 0)), //
    // RemoteControlIds(118, "Remote Control Ids", MinimedTargetType.PumpConfiguration_NA, MedtronicDeviceType.All,
    // MinimedCommandParameterType.NoParameters), //
    // FirmwareVersion(116, "Firmware Version", MinimedTargetType.InitCommand, MedtronicDeviceType.All,
    // MinimedCommandParameterType.NoParameters), //
    // PumpId(113, "Pump Id", MinimedTargetType.PumpConfiguration, MedtronicDeviceType.All,
    // MinimedCommandParameterType.NoParameters), // init
    SetRealTimeClock(0x40, "Set Pump Time", MedtronicDeviceType.All, MinimedCommandParameterType.NoParameters, //
        0), //
    GetRealTimeClock(112, "Get Pump Time", MedtronicDeviceType.All, MinimedCommandParameterType.NoParameters, //
        7, R.string.medtronic_cmd_desc_get_time), // 0x70
    GetBatteryStatus(0x72, "Get Battery Status", MedtronicDeviceType.All, MinimedCommandParameterType.NoParameters), //
    // GetBattery((byte) 0x72), //
    GetRemainingInsulin(0x73, "Read Remaining Insulin", MedtronicDeviceType.All, MinimedCommandParameterType.NoParameters, 2), // 115
    SetBolus(0x42, "Set Bolus", MedtronicDeviceType.All, MinimedCommandParameterType.NoParameters, //
        0, R.string.medtronic_cmd_desc_set_bolus), // 66

    // 512
    ReadTemporaryBasal(0x98, "Read Temporary Basal", MedtronicDeviceType.Medtronic_512andHigher, MinimedCommandParameterType.NoParameters, //
        5, R.string.medtronic_cmd_desc_get_tbr), // 152
    SetTemporaryBasal(76, "Set Temporay Basal", MedtronicDeviceType.Medtronic_512andHigher, MinimedCommandParameterType.NoParameters, //
        0, R.string.medtronic_cmd_desc_set_tbr),

    // 512 Config
    PumpModel(141, "Pump Model", MedtronicDeviceType.Medtronic_512andHigher, MinimedCommandParameterType.NoParameters, //
        5, R.string.medtronic_cmd_desc_get_model), // 0x8D
    // BGTargets_512(140, "BG Targets", MinimedTargetType.PumpConfiguration, MedtronicDeviceType.Medtronic_512_712,
    // MinimedCommandParameterType.NoParameters), //
    // BGUnits(137, "BG Units", MinimedTargetType.PumpConfiguration, MedtronicDeviceType.Medtronic_512andHigher,
    // MinimedCommandParameterType.NoParameters), //
    // Language(134, "Language", MinimedTargetType.PumpConfiguration, MedtronicDeviceType.Medtronic_512andHigher,
    // MinimedCommandParameterType.NoParameters), //
    Settings_512(145, "Configuration", MedtronicDeviceType.Medtronic_512_712, MinimedCommandParameterType.NoParameters, //
        64, 1, 18, R.string.medtronic_cmd_desc_get_settings), //
    // BGAlarmClocks(142, "BG Alarm Clocks", MinimedTargetType.PumpConfiguration,
    // MedtronicDeviceType.Medtronic_512andHigher, MinimedCommandParameterType.NoParameters), //
    // BGAlarmEnable(151, "BG Alarm Enable", MinimedTargetType.PumpConfiguration,
    // MedtronicDeviceType.Medtronic_512andHigher, MinimedCommandParameterType.NoParameters), //
    // BGReminderEnable(144, "BG Reminder Enable", MinimedTargetType.PumpConfiguration,
    // MedtronicDeviceType.Medtronic_512andHigher, MinimedCommandParameterType.NoParameters), //
    // ReadInsulinSensitivities(0x8b, "Read Insulin Sensitivities", MinimedTargetType.PumpConfiguration,
    // MedtronicDeviceType.Medtronic_512andHigher, MinimedCommandParameterType.NoParameters), // 139

    // 512 Data
    GetHistoryData(128, "Get History", MedtronicDeviceType.Medtronic_512andHigher, MinimedCommandParameterType.SubCommands, //
        1024, 16, 1024, R.string.medtronic_cmd_desc_get_history), // 0x80
    GetBasalProfileSTD(146, "Get Profile Standard", MedtronicDeviceType.Medtronic_512andHigher, MinimedCommandParameterType.NoParameters, //
        64, 3, 192, R.string.medtronic_cmd_desc_get_basal_profile), // 146
    GetBasalProfileA(147, "Get Profile A", MedtronicDeviceType.Medtronic_512andHigher, MinimedCommandParameterType.NoParameters, //
        64, 3, 192, R.string.medtronic_cmd_desc_get_basal_profile),
    GetBasalProfileB(148, "Get Profile B", MedtronicDeviceType.Medtronic_512andHigher, MinimedCommandParameterType.NoParameters, //
        64, 3, 192, R.string.medtronic_cmd_desc_get_basal_profile), // 148
    SetBasalProfileSTD(0x6f, "Set Profile Standard", MedtronicDeviceType.Medtronic_512andHigher, MinimedCommandParameterType.NoParameters, //
        64, 3, 192, R.string.medtronic_cmd_desc_set_basal_profile), // 111
    SetBasalProfileA(0x30, "Set Profile A", MedtronicDeviceType.Medtronic_512andHigher, MinimedCommandParameterType.NoParameters, //
        64, 3, 192, R.string.medtronic_cmd_desc_set_basal_profile), // 48
    SetBasalProfileB(0x31, "Set Profile B", MedtronicDeviceType.Medtronic_512andHigher, MinimedCommandParameterType.NoParameters, //
        64, 3, 192, R.string.medtronic_cmd_desc_set_basal_profile), // 49

    // 515
    PumpStatus(206, "Pump Status", MedtronicDeviceType.Medtronic_515andHigher, MinimedCommandParameterType.NoParameters), // PumpConfiguration
    Settings(192, "Configuration", MedtronicDeviceType.Medtronic_515andHigher, MinimedCommandParameterType.NoParameters, //
        64, 1, 21, R.string.medtronic_cmd_desc_get_settings), //

    // 522
    SensorSettings_522(153, "Sensor Configuration", MedtronicDeviceType.Medtronic_522andHigher, MinimedCommandParameterType.NoParameters), //
    GlucoseHistory(154, "Glucose History", MedtronicDeviceType.Medtronic_522andHigher, MinimedCommandParameterType.SubCommands, 1024, 32, 0, null), //

    // 523
    SensorSettings(207, "Sensor Configuration", MedtronicDeviceType.Medtronic_523andHigher, MinimedCommandParameterType.NoParameters), //

    // 553
    // 554

    // var MESSAGES = {
    // READ_TIME : 0x70,
    // READ_BATTERY_STATUS: 0x72,
    // READ_HISTORY : 0x80,
    // READ_CARB_RATIOS : 0x8A,
    // READ_INSULIN_SENSITIVITIES: 0x8B,
    // READ_MODEL : 0x8D,
    // READ_PROFILE_STD : 0x92,
    // READ_PROFILE_A : 0x93,
    // READ_PROFILE_B : 0x94,
    // READ_CBG_HISTORY: 0x9A,
    // READ_ISIG_HISTORY: 0x9B,
    // READ_CURRENT_PAGE : 0x9D,
    // READ_BG_TARGETS : 0x9F,
    // READ_SETTINGS : 0xC0, 192
    // READ_CURRENT_CBG_PAGE : 0xCD
    // };

    // Fake Commands
    CancelTBR(),

    ;

    // Lookup table mapping the raw command code byte to its enum constant; built once below.
    static Map<Byte, MedtronicCommandType> mapByCode;

    static {
        // RFPowerOn needs special radio handling: a long timeout, no retries and no record payload.
        MedtronicCommandType.RFPowerOn.maxAllowedTime = 17000;
        MedtronicCommandType.RFPowerOn.allowedRetries = 0;
        MedtronicCommandType.RFPowerOn.recordLength = 0;
        MedtronicCommandType.RFPowerOn.minimalBufferSizeToStartReading = 1;

        // NOTE: later constants with duplicate command codes overwrite earlier ones in this map
        // (e.g. PushAck/PushEsc both use 91, RFPowerOn/RFPowerOff both use 93).
        mapByCode = new HashMap<>();

        for (MedtronicCommandType medtronicCommandType : values()) {
            mapByCode.put(medtronicCommandType.getCommandCode(), medtronicCommandType);
        }
    }

    // Raw opcode byte sent to the pump for this command.
    public byte commandCode = 0;
    // Human-readable name of the command.
    public String commandDescription = "";
    // Fixed parameter bytes sent along with the command (null when the command takes none).
    public byte[] commandParameters = null;
    public int commandParametersCount = 0;
    // Maximum number of records expected in the response.
    public int maxRecords = 1;
    // Android string resource describing the command for the UI (null when none).
    private Integer resourceId;
    public int command_type = 0;
    // Number of times a failed command may be retried.
    public int allowedRetries = 2;
    // Maximum time (ms) allowed for the command to complete.
    public int maxAllowedTime = 2000;
    public MinimedCommandParameterType parameterType;
    // Minimum number of buffered bytes before response parsing starts (raised for sub-commands).
    public int minimalBufferSizeToStartReading = 14;
    // Expected response payload length (0 = unknown/variable).
    public int expectedLength = 0;
    //MinimedTargetType targetType;
    // Pump model family this command applies to.
    MedtronicDeviceType devices;
    private int recordLength = 64;

    MedtronicCommandType() {
        // this is for "fake" commands needed by AAPS MedtronicUITask
    }

    MedtronicCommandType(int code, String description, MedtronicDeviceType devices,
                         MinimedCommandParameterType parameterType, byte[] cmd_params) {
        this(code, description, devices, parameterType, 0, 1, 0, 0, 11, 0);
        this.commandParameters = cmd_params;
        this.commandParametersCount = cmd_params.length;
    }

    MedtronicCommandType(int code, String description, MedtronicDeviceType devices, //
                         MinimedCommandParameterType parameterType) {
        this(code, description, devices, parameterType, 64, 1, 0, null);
    }

    // NEW
    // NOTE(review): the commandType parameter is ignored here (0 is delegated instead) — confirm intent.
    MedtronicCommandType(int code, String description, MedtronicDeviceType devices,
                         MinimedCommandParameterType parameterType, int recordLength, int maxRecords, int commandType) {
        this(code, description, devices, parameterType, recordLength, maxRecords, 0, null);
    }

    // NEW
    MedtronicCommandType(int code, String description, MedtronicDeviceType devices, //
                         MinimedCommandParameterType parameterType, int expectedLength) {
        this(code, description, devices, parameterType, 64, 1, expectedLength, null);
    }

    // NEW
    MedtronicCommandType(int code, String description, MedtronicDeviceType devices, //
                         MinimedCommandParameterType parameterType, int expectedLength, int resourceId) {
        this(code, description, devices, parameterType, 64, 1, expectedLength, resourceId);
    }

    // NEW
    // Main constructor: all other non-deprecated constructors delegate here.
    MedtronicCommandType(int code, String description,
                         MedtronicDeviceType devices, //
                         MinimedCommandParameterType parameterType, int recordLength, int max_recs, int expectedLength,
                         Integer resourceId) {
        this.commandCode = (byte) code;
        this.commandDescription = description;
        this.devices = devices;
        this.recordLength = recordLength;
        this.maxRecords = max_recs;
        this.resourceId = resourceId;
        this.commandParametersCount = 0;
        this.allowedRetries = 2;
        this.parameterType = parameterType;
        this.expectedLength = expectedLength;

        // Sub-command responses arrive in larger frames, so wait for more data before parsing.
        if (this.parameterType == MinimedCommandParameterType.SubCommands) {
            this.minimalBufferSizeToStartReading = 200;
        }
    }

    @Deprecated
    MedtronicCommandType(int code, String description, MedtronicDeviceType devices, //
                         MinimedCommandParameterType parameterType, int recordLength, int max_recs, int addy, //
                         int addy_len, int cmd_type, int expectedLength) {
        this.commandCode = (byte) code;
        this.commandDescription = description;
        //this.targetType = targetType;
        this.devices = devices;
        this.recordLength = recordLength;
        this.maxRecords = max_recs;
        this.command_type = cmd_type;
        this.commandParametersCount = 0;
        this.allowedRetries = 2;
        this.parameterType = parameterType;
        this.expectedLength = expectedLength;

        if (this.parameterType == MinimedCommandParameterType.SubCommands) {
            this.minimalBufferSizeToStartReading = 200;
        }
    }

    // Builds a map keyed by the given device types; the values are always null
    // (the map is effectively used as a set).
    private static HashMap<MedtronicDeviceType, String> getDeviceTypesArray(MedtronicDeviceType... types) {
        HashMap<MedtronicDeviceType, String> hashMap = new HashMap<MedtronicDeviceType, String>();

        for (MedtronicDeviceType type : types) {
            hashMap.put(type, null);
        }

        return hashMap;
    }

    // Narrows each int value to a byte to build a command-parameter array.
    private static byte[] getByteArray(int... data) {
        byte[] array = new byte[data.length];

        for (int i = 0; i < data.length; i++) {
            array[i] = (byte) data[i];
        }

        return array;
    }

    private static int[] getIntArray(int... data) {
        return data;
    }

    /**
     * Resolves a command from its raw opcode byte.
     *
     * @param code raw command code
     * @return matching command, or {@link #InvalidCommand} when the code is unknown
     */
    public static MedtronicCommandType getByCode(byte code) {
        if (mapByCode.containsKey(code)) {
            return mapByCode.get(code);
        } else {
            return MedtronicCommandType.InvalidCommand;
        }
    }

    /**
     * Wraps raw response bytes in the message-body type appropriate for the command.
     *
     * @param messageType command the response belongs to
     * @param bodyData raw body bytes
     * @return {@link PumpAckMessageBody} for ACKs, otherwise {@link UnknownMessageBody}
     */
    public static MessageBody constructMessageBody(MedtronicCommandType messageType, byte[] bodyData) {
        switch (messageType) {
            case CommandACK:
                return new PumpAckMessageBody(bodyData);
            default:
                return new UnknownMessageBody(bodyData);
        }
    }

    /**
     * Picks the settings command matching the pump model (512/712 use an older variant).
     *
     * @param medtronicPumpModel pump model to query
     * @return {@link #Settings_512} for 512/712 devices, otherwise {@link #Settings}
     */
    public static MedtronicCommandType getSettings(MedtronicDeviceType medtronicPumpModel) {
        if (MedtronicDeviceType.isSameDevice(medtronicPumpModel, MedtronicDeviceType.Medtronic_512_712))
            return MedtronicCommandType.Settings_512;
        else
            return MedtronicCommandType.Settings;
    }

    /**
     * Get Full Command Description
     *
     * @return command description
     */
    public String getFullCommandDescription() {
        return "Command [name=" + this.name() + ", id=" + this.commandCode + ",description=" + this.commandDescription
            + "] ";
    }

    /**
     * Whether this command is expected to return any response data.
     *
     * @return true when maxRecords * recordLength is positive
     */
    public boolean canReturnData() {
        // NOTE(review): debug print left in production code — consider removing or routing through a logger.
        System.out.println("CanReturnData: ]id=" + this.name() + "max=" + this.maxRecords + "recLen=" + recordLength);
        return (this.maxRecords * this.recordLength) > 0;
    }

    public int getRecordLength() {
        return recordLength;
    }

    public int getMaxRecords() {
        return maxRecords;
    }

    public byte getCommandCode() {
        return commandCode;
    }

    // NOTE(review): derived from commandParameters, while the public field commandParametersCount
    // is set independently in the constructors — the two can disagree.
    public int getCommandParametersCount() {
        if (this.commandParameters == null) {
            return 0;
        } else {
            return this.commandParameters.length;
        }
    }

    public byte[] getCommandParameters() {
        return commandParameters;
    }

    public boolean hasCommandParameters() {
        return (getCommandParametersCount() > 0);
    }

    public String toString() {
        return name();
    }

    public String getCommandDescription() {
        return this.commandDescription;
    }

    public Integer getResourceId() {
        return resourceId;
    }

    // How a command's parameters are supplied on the wire.
    public enum MinimedCommandParameterType {
        NoParameters, //
        FixedParameters, //
        SubCommands //
    }

}
| agpl-3.0 |
tdefilip/opennms | opennms-config-api/src/main/java/org/opennms/netmgt/config/api/DatabaseSchemaConfig.java | 2413 | /*******************************************************************************
* This file is part of OpenNMS(R).
*
* Copyright (C) 2011-2014 The OpenNMS Group, Inc.
* OpenNMS(R) is Copyright (C) 1999-2014 The OpenNMS Group, Inc.
*
* OpenNMS(R) is a registered trademark of The OpenNMS Group, Inc.
*
* OpenNMS(R) is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published
* by the Free Software Foundation, either version 3 of the License,
* or (at your option) any later version.
*
* OpenNMS(R) is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with OpenNMS(R). If not, see:
* http://www.gnu.org/licenses/
*
* For more information contact:
* OpenNMS(R) Licensing <license@opennms.org>
* http://www.opennms.org/
* http://www.opennms.com/
*******************************************************************************/
package org.opennms.netmgt.config.api;
import java.util.List;
import org.opennms.netmgt.config.filter.DatabaseSchema;
import org.opennms.netmgt.config.filter.Table;
/**
 * Read-only access to the database schema defined in the filter configuration.
 *
 * @author <a href="ryan@mail1.opennms.com"> Ryan Lambeth </a>
 *
 */
public interface DatabaseSchemaConfig {

    /**
     * <p>getDatabaseSchema</p>
     *
     * @return the {@link DatabaseSchema} loaded from the filter configuration
     */
    DatabaseSchema getDatabaseSchema();

    /**
     * <p>getPrimaryTable</p>
     *
     * @return the schema's primary {@link Table}
     */
    Table getPrimaryTable();

    /**
     * <p>getTableByName</p>
     *
     * @param name name of the table to look up
     * @return the matching {@link Table} (presumably {@code null} when no table matches — confirm with the implementation)
     */
    Table getTableByName(final String name);

    /**
     * <p>findTableByVisibleColumn</p>
     *
     * @param colName name of a visible column to search for
     * @return the {@link Table} containing that visible column (presumably {@code null} when not found — confirm with the implementation)
     */
    Table findTableByVisibleColumn(final String colName);

    /**
     * <p>getTableCount</p>
     *
     * @return the number of tables defined in the schema
     */
    int getTableCount();

    /**
     * <p>getJoinTables</p>
     *
     * @param tables the tables to resolve
     * @return a List of table names involved in joining the given tables
     */
    List<String> getJoinTables(final List<Table> tables);

    /**
     * <p>constructJoinExprForTables</p>
     *
     * @param tables the tables to join
     * @return the SQL join expression for the given tables
     */
    String constructJoinExprForTables(final List<Table> tables);
}
| agpl-3.0 |
PaulLuchyn/libreplan | libreplan-business/src/test/java/org/libreplan/business/test/planner/daos/SubcontractorCommunicationDAOTest.java | 10102 | /*
* This file is part of LibrePlan
*
* Copyright (C) 2011 WirelessGalicia, S.L.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.libreplan.business.test.planner.daos;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.libreplan.business.BusinessGlobalNames.BUSINESS_SPRING_CONFIG_FILE;
import static org.libreplan.business.test.BusinessGlobalNames.BUSINESS_SPRING_CONFIG_TEST_FILE;
import java.util.Collections;
import java.util.Date;
import java.util.UUID;
import org.hibernate.SessionFactory;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.libreplan.business.calendars.daos.IBaseCalendarDAO;
import org.libreplan.business.calendars.entities.BaseCalendar;
import org.libreplan.business.common.exceptions.InstanceNotFoundException;
import org.libreplan.business.common.exceptions.ValidationException;
import org.libreplan.business.externalcompanies.daos.IExternalCompanyDAO;
import org.libreplan.business.externalcompanies.entities.CommunicationType;
import org.libreplan.business.externalcompanies.entities.ExternalCompany;
import org.libreplan.business.orders.daos.IOrderDAO;
import org.libreplan.business.orders.entities.HoursGroup;
import org.libreplan.business.orders.entities.Order;
import org.libreplan.business.orders.entities.OrderLine;
import org.libreplan.business.orders.entities.SchedulingDataForVersion;
import org.libreplan.business.orders.entities.TaskSource;
import org.libreplan.business.orders.entities.TaskSource.TaskSourceSynchronization;
import org.libreplan.business.planner.daos.ISubcontractedTaskDataDAO;
import org.libreplan.business.planner.daos.ISubcontractorCommunicationDAO;
import org.libreplan.business.planner.daos.ITaskElementDAO;
import org.libreplan.business.planner.daos.ITaskSourceDAO;
import org.libreplan.business.planner.entities.SubcontractedTaskData;
import org.libreplan.business.planner.entities.SubcontractorCommunication;
import org.libreplan.business.planner.entities.SubcontractorDeliverDate;
import org.libreplan.business.planner.entities.Task;
import org.libreplan.business.scenarios.IScenarioManager;
import org.libreplan.business.scenarios.bootstrap.IScenariosBootstrap;
import org.libreplan.business.scenarios.entities.OrderVersion;
import org.libreplan.business.test.calendars.entities.BaseCalendarTest;
import org.libreplan.business.test.externalcompanies.daos.ExternalCompanyDAOTest;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import org.springframework.transaction.annotation.Transactional;
/**
 * Tests for {@link SubcontractorCommunication}: DAO wiring, save, remove and
 * validation of the mandatory subcontracted-task-data association.
 *
 * @author Susana Montes Pedreira <smontes@wirelessgalicia.com>
 */
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(locations = { BUSINESS_SPRING_CONFIG_FILE, BUSINESS_SPRING_CONFIG_TEST_FILE })
public class SubcontractorCommunicationDAOTest {

    @Autowired
    ISubcontractorCommunicationDAO subcontractorCommunicationDAO;

    @Autowired
    ISubcontractedTaskDataDAO subcontractedTaskDataDAO;

    @Autowired
    IExternalCompanyDAO externalCompanyDAO;

    @Autowired
    private ITaskElementDAO taskElementDAO;

    @Autowired
    private IOrderDAO orderDAO;

    @Autowired
    private ITaskSourceDAO taskSourceDAO;

    @Autowired
    private SessionFactory sessionFactory;

    @Autowired
    private IScenarioManager scenarioManager;

    @Autowired
    private IBaseCalendarDAO calendarDAO;

    @Autowired
    private IScenariosBootstrap scenariosBootstrap;

    // Ensure the scenario bootstrap data exists before each test runs.
    @Before
    public void loadRequiredData() {
        scenariosBootstrap.loadRequiredData();
    }

    // Persists an external company flagged as subcontractor, then evicts it from the
    // Hibernate session so it behaves like an already-saved, detached entity.
    private ExternalCompany getSubcontractorExternalCompanySaved() {
        ExternalCompany externalCompany = ExternalCompanyDAOTest.createValidExternalCompany();
        externalCompany.setSubcontractor(true);

        externalCompanyDAO.save(externalCompany);
        externalCompanyDAO.flush();
        sessionFactory.getCurrentSession().evict(externalCompany);

        externalCompany.dontPoseAsTransientObjectAnymore();

        return externalCompany;
    }

    // Builds and persists an Order (with its own scheduling version and a basic
    // calendar) containing a single OrderLine, which is returned.
    private OrderLine createOrderLine() {
        OrderLine orderLine = OrderLine.create();
        orderLine.setName("bla");
        orderLine.setCode("code-" + UUID.randomUUID());

        HoursGroup hoursGroup = new HoursGroup();
        hoursGroup.setCode("hours-group-code-" + UUID.randomUUID());
        orderLine.addHoursGroup(hoursGroup);

        Order order = Order.create();
        OrderVersion orderVersion = ResourceAllocationDAOTest.setupVersionUsing(scenarioManager, order);
        order.setName("bla-" + UUID.randomUUID());
        order.setInitDate(new Date());
        order.setCode("code-" + UUID.randomUUID());
        order.useSchedulingDataFor(orderVersion);
        order.add(orderLine);

        // Add a basic calendar
        BaseCalendar basicCalendar = BaseCalendarTest.createBasicCalendar();
        calendarDAO.save(basicCalendar);
        order.setCalendar(basicCalendar);

        try {
            orderDAO.save(order);
            sessionFactory.getCurrentSession().flush();
        } catch (ValidationException e) {
            throw new RuntimeException(e);
        }

        return orderLine;
    }

    // Creates a schedulable Task backed by a fresh OrderLine and persisted TaskSource.
    private Task createValidTask() {
        HoursGroup associatedHoursGroup = new HoursGroup();
        associatedHoursGroup.setCode("hours-group-code-" + UUID.randomUUID());

        OrderLine orderLine = createOrderLine();
        orderLine.addHoursGroup(associatedHoursGroup);

        OrderVersion orderVersion = ResourceAllocationDAOTest.setupVersionUsing(scenarioManager, orderLine.getOrder());
        orderLine.useSchedulingDataFor(orderVersion);

        SchedulingDataForVersion schedulingDataForVersion = orderLine.getCurrentSchedulingDataForVersion();

        TaskSource taskSource =
                TaskSource.create(schedulingDataForVersion, Collections.singletonList(associatedHoursGroup));

        TaskSourceSynchronization mustAdd = TaskSource.mustAdd(taskSource);
        mustAdd.apply(TaskSource.persistTaskSources(taskSourceDAO));

        return (Task) taskSource.getTask();
    }

    // Creates and persists SubcontractedTaskData attached to a valid task, with one
    // required delivering date and a subcontractor external company.
    public SubcontractedTaskData createValidSubcontractedTaskData() {
        Task task = createValidTask();
        SubcontractedTaskData subcontractedTaskData = SubcontractedTaskData.create(task);
        subcontractedTaskData.addRequiredDeliveringDates(SubcontractorDeliverDate.create(new Date(),new Date(), null));
        subcontractedTaskData.setExternalCompany(getSubcontractorExternalCompanySaved());

        task.setSubcontractedTaskData(subcontractedTaskData);

        taskElementDAO.save(task);
        taskElementDAO.flush();
        sessionFactory.getCurrentSession().evict(task);

        sessionFactory.getCurrentSession().evict(subcontractedTaskData);

        subcontractedTaskDataDAO.save(subcontractedTaskData);

        return subcontractedTaskData;
    }

    // Builds a (not yet persisted) NEW_PROJECT communication for valid task data.
    public SubcontractorCommunication createValidSubcontractorCommunication(){
        SubcontractedTaskData subcontractedTaskData = createValidSubcontractedTaskData();
        Date communicationDate = new Date();
        return SubcontractorCommunication.create(
                subcontractedTaskData, CommunicationType.NEW_PROJECT, communicationDate, false);
    }

    // Sanity check: the DAO is injected by the Spring container.
    @Test
    @Transactional
    public void testSubcontractorCommunicationDAOInSpringContainer() {
        assertNotNull(subcontractorCommunicationDAO);
    }

    // Saving assigns an identifier.
    @Test
    @Transactional
    public void testSaveSubcontractorCommunication() {
        SubcontractorCommunication subcontractorCommunication = createValidSubcontractorCommunication();
        subcontractorCommunicationDAO.save(subcontractorCommunication);
        assertTrue(subcontractorCommunication.getId() != null);
    }

    // Removing a communication must not cascade-delete its SubcontractedTaskData.
    @Test
    @Transactional
    public void testRemoveSubcontractorCommunication() throws InstanceNotFoundException {
        SubcontractorCommunication subcontractorCommunication = createValidSubcontractorCommunication();
        subcontractorCommunicationDAO.save(subcontractorCommunication);
        assertTrue(subcontractorCommunication.getId() != null);

        Long idSubcontractedTaskData = subcontractorCommunication.getSubcontractedTaskData().getId();
        Long idCommunication = subcontractorCommunication.getId();

        subcontractorCommunicationDAO.remove(subcontractorCommunication.getId());

        try {
            subcontractorCommunicationDAO.findExistingEntity(idCommunication);
            fail("error");
        } catch(RuntimeException ignored) {
            // Ok
        }

        try {
            subcontractedTaskDataDAO.findExistingEntity(idSubcontractedTaskData);
        } catch(RuntimeException e) {
            fail("error");
        }
    }

    // Saving without the mandatory SubcontractedTaskData must fail validation.
    @Test
    @Transactional
    public void testSaveSubcontractorCommunicationWithoutSubcontractedTaskData() throws InstanceNotFoundException {
        SubcontractorCommunication subcontractorCommunication = createValidSubcontractorCommunication();
        subcontractorCommunication.setSubcontractedTaskData(null);

        try {
            subcontractorCommunicationDAO.save(subcontractorCommunication);
            fail("It should throw an exception");
        } catch (ValidationException ignored) {
            // Ok
        }
    }
}
OpenLMIS/open-lmis | modules/report/src/main/java/org/openlmis/report/mapper/lookup/EquipmentTypeReportMapper.java | 1435 | /*
* Electronic Logistics Management Information System (eLMIS) is a supply chain management system for health commodities in a developing country setting.
*
* Copyright (C) 2015 John Snow, Inc (JSI). This program was produced for the U.S. Agency for International Development (USAID). It was prepared under the USAID | DELIVER PROJECT, Task Order 4.
*
* This program is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.openlmis.report.mapper.lookup;
import org.apache.ibatis.annotations.Select;
import org.openlmis.report.model.dto.EquipmentType;
import org.springframework.stereotype.Repository;
import java.util.List;
/**
* MyBatis lookup mapper that reads equipment types for the reporting module.
*/
@Repository
public interface EquipmentTypeReportMapper {
/**
* Returns every row of the {@code equipment_types} table, ordered by name.
* Relies on MyBatis' default column-to-property mapping onto {@link EquipmentType}
* (NOTE(review): assumed - confirm no explicit result map is registered elsewhere).
*/
@Select("SELECT *" +
" FROM " +
" equipment_types order by name")
List<EquipmentType> getEquipmentTypeList();
}
| agpl-3.0 |
inspectIT/inspectIT | inspectit.server.diagnosis/src/test/java/rocks/inspectit/server/diagnosis/service/rules/testrules/RuleB.java | 480 | package rocks.inspectit.server.diagnosis.service.rules.testrules;
import rocks.inspectit.server.diagnosis.engine.rule.annotation.Action;
import rocks.inspectit.server.diagnosis.engine.rule.annotation.Rule;
import rocks.inspectit.server.diagnosis.engine.rule.annotation.TagValue;
/**
* Test rule for the diagnosis engine: consumes the value carried by tag "A"
* and produces tag "B" with the string's length.
*
* @author Alexander Wert
*
*/
@Rule(name = "RuleB")
public class RuleB {
// Injected by the rule engine from the tag of type "A".
// NOTE(review): assumed non-null by the time action() fires - confirm the engine
// guarantees this, otherwise input.length() throws a NullPointerException.
@TagValue(type = "A")
String input;
// Rule body: the length of the injected string becomes the value of tag "B".
@Action(resultTag = "B")
public int action() {
return input.length();
}
}
| agpl-3.0 |
roidelapluie/Gadgetbridge | app/src/main/java/nodomain/freeyourgadget/gadgetbridge/service/btle/actions/WriteAction.java | 2829 | /* Copyright (C) 2015-2017 Andreas Shimokawa, Carsten Pfeiffer, Daniele
Gobbetti, Uwe Hermann
This file is part of Gadgetbridge.
Gadgetbridge is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published
by the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Gadgetbridge is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>. */
package nodomain.freeyourgadget.gadgetbridge.service.btle.actions;
import android.bluetooth.BluetoothGatt;
import android.bluetooth.BluetoothGattCallback;
import android.bluetooth.BluetoothGattCharacteristic;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import nodomain.freeyourgadget.gadgetbridge.Logging;
import nodomain.freeyourgadget.gadgetbridge.service.btle.BtLEAction;
/**
* Invokes a write operation on a given GATT characteristic.
* The result status will be made available asynchronously through the
* {@link BluetoothGattCallback}
*/
public class WriteAction extends BtLEAction {
private static final Logger LOG = LoggerFactory.getLogger(WriteAction.class);
// Payload to transmit; fixed at construction time.
private final byte[] value;
/**
* @param characteristic the GATT characteristic to write to
* @param value the raw bytes to write
*/
public WriteAction(BluetoothGattCharacteristic characteristic, byte[] value) {
super(characteristic);
this.value = value;
}
/**
* Initiates the write if the characteristic advertises a write property.
*
* @return true if the write was handed to the GATT stack, false if the
* characteristic is not writable or the local value could not be set
*/
@Override
public boolean run(BluetoothGatt gatt) {
BluetoothGattCharacteristic characteristic = getCharacteristic();
int properties = characteristic.getProperties();
//TODO: expectsResult should return false if PROPERTY_WRITE_NO_RESPONSE is true, but this leads to timing issues
// Attempt the write only when either WRITE or WRITE_NO_RESPONSE is supported.
if ((properties & BluetoothGattCharacteristic.PROPERTY_WRITE) > 0 || ((properties & BluetoothGattCharacteristic.PROPERTY_WRITE_NO_RESPONSE) > 0)) {
return writeValue(gatt, characteristic, value);
}
return false;
}
// Sets the local characteristic value, then asks the GATT stack to transmit it.
// The transmission result arrives asynchronously via the BluetoothGattCallback.
protected boolean writeValue(BluetoothGatt gatt, BluetoothGattCharacteristic characteristic, byte[] value) {
if (LOG.isDebugEnabled()) {
LOG.debug("writing to characteristic: " + characteristic.getUuid() + ": " + Logging.formatBytes(value));
}
if (characteristic.setValue(value)) {
return gatt.writeCharacteristic(characteristic);
}
return false;
}
// Accessor for subclasses; the payload itself is not copied.
protected final byte[] getValue() {
return value;
}
@Override
public boolean expectsResult() {
return true;
}
}
| agpl-3.0 |
zoranh/SeavusJB3 | src/java/com/sapienter/jbilling/server/payment/db/PaymentInvoiceMapDAS.java | 2244 | /*
jBilling - The Enterprise Open Source Billing System
Copyright (C) 2003-2011 Enterprise jBilling Software Ltd. and Emiliano Conde
This file is part of jbilling.
jbilling is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
jbilling is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with jbilling. If not, see <http://www.gnu.org/licenses/>.
*/
package com.sapienter.jbilling.server.payment.db;
import com.sapienter.jbilling.server.invoice.db.InvoiceDAS;
import com.sapienter.jbilling.server.invoice.db.InvoiceDTO;
import com.sapienter.jbilling.server.util.db.AbstractDAS;
import org.hibernate.Criteria;
import org.hibernate.criterion.Restrictions;
import java.math.BigDecimal;
import java.util.Calendar;
import java.util.List;
/**
 * Data access service for {@link PaymentInvoiceMapDTO} rows, which link a payment
 * to the invoices it (partially) pays.
 *
 * @author abimael
 */
public class PaymentInvoiceMapDAS extends AbstractDAS<PaymentInvoiceMapDTO> {

    /**
     * Creates and persists a new payment-to-invoice link.
     *
     * @param invoice    the invoice being (partially) paid
     * @param payment    the payment applied to the invoice
     * @param realAmount the amount of the payment allocated to this invoice
     * @return the persisted map row
     */
    public PaymentInvoiceMapDTO create(InvoiceDTO invoice, PaymentDTO payment, BigDecimal realAmount) {
        PaymentInvoiceMapDTO map = new PaymentInvoiceMapDTO();
        map.setInvoiceEntity(invoice);
        map.setPayment(payment);
        map.setAmount(realAmount);
        map.setCreateDatetime(Calendar.getInstance().getTime());
        return save(map);
    }

    /**
     * Deletes every payment-to-invoice link referencing the given invoice.
     *
     * @param invoice the invoice whose links should be removed
     */
    public void deleteAllWithInvoice(InvoiceDTO invoice) {
        // Re-read the invoice so the criteria compares against a session-attached entity.
        InvoiceDTO inv = new InvoiceDAS().find(invoice.getId());
        Criteria criteria = getSession().createCriteria(PaymentInvoiceMapDTO.class);
        criteria.add(Restrictions.eq("invoiceEntity", inv));
        // Criteria.list() is a raw-typed API; the criteria class pins the element type.
        @SuppressWarnings("unchecked")
        List<PaymentInvoiceMapDTO> results = criteria.list();
        if (results != null) {
            // Iterating an empty list is a no-op, so no explicit isEmpty() guard is needed.
            for (PaymentInvoiceMapDTO paymentMap : results) {
                delete(paymentMap);
            }
        }
    }
}
| agpl-3.0 |
magnevan/Arithmic-Agents | src/FIPA/DateTimeHelper.java | 3558 | /*
* File: ./FIPA/DATETIMEHELPER.JAVA
* From: FIPA.IDL
* Date: Mon Sep 04 15:08:50 2000
* By: idltojava Java IDL 1.2 Nov 10 1997 13:52:11
*/
package FIPA;
/**
 * CORBA helper for marshalling {@code FIPA.DateTime} structs: seven consecutive
 * shorts (year, month, day, hour, minutes, seconds, milliseconds) followed by a
 * single type-designator char. {@link #write}, {@link #read} and the struct layout
 * built by {@link #type} must all agree on this order.
 */
public class DateTimeHelper {
    // It is useless to have instances of this class
    private DateTimeHelper() { }

    /** Marshals {@code that} onto {@code out}; field order mirrors {@link #read}. */
    public static void write(org.omg.CORBA.portable.OutputStream out, FIPA.DateTime that) {
        out.write_short(that.year);
        out.write_short(that.month);
        out.write_short(that.day);
        out.write_short(that.hour);
        out.write_short(that.minutes);
        out.write_short(that.seconds);
        out.write_short(that.milliseconds);
        out.write_char(that.typeDesignator);
    }

    /** Unmarshals a DateTime from {@code in}; field order mirrors {@link #write}. */
    public static FIPA.DateTime read(org.omg.CORBA.portable.InputStream in) {
        FIPA.DateTime that = new FIPA.DateTime();
        that.year = in.read_short();
        that.month = in.read_short();
        that.day = in.read_short();
        that.hour = in.read_short();
        that.minutes = in.read_short();
        that.seconds = in.read_short();
        that.milliseconds = in.read_short();
        that.typeDesignator = in.read_char();
        return that;
    }

    /** Extracts a DateTime from an {@code Any} by reading its input stream. */
    public static FIPA.DateTime extract(org.omg.CORBA.Any a) {
        org.omg.CORBA.portable.InputStream in = a.create_input_stream();
        return read(in);
    }

    /** Inserts {@code that} into an {@code Any}, tagging it with {@link #type}. */
    public static void insert(org.omg.CORBA.Any a, FIPA.DateTime that) {
        org.omg.CORBA.portable.OutputStream out = a.create_output_stream();
        write(out, that);
        a.read_value(out.create_input_stream(), type());
    }

    // Cached TypeCode; built lazily by type().
    private static org.omg.CORBA.TypeCode _tc;

    /**
     * Lazily builds (and caches) the TypeCode describing the DateTime struct.
     * The repetitive per-member construction of the original generated code is
     * replaced with a loop over the seven short-typed member names; an unused
     * local member count and a needless null-initialized array declaration
     * outside the conditional were removed.
     */
    synchronized public static org.omg.CORBA.TypeCode type() {
        if (_tc == null) {
            // Same order as write()/read(): seven shorts, then one char.
            String[] shortMembers = {
                "year", "month", "day", "hour", "minutes", "seconds", "milliseconds"
            };
            org.omg.CORBA.StructMember[] members =
                new org.omg.CORBA.StructMember[shortMembers.length + 1];
            org.omg.CORBA.TypeCode shortTc =
                org.omg.CORBA.ORB.init().get_primitive_tc(org.omg.CORBA.TCKind.tk_short);
            for (int i = 0; i < shortMembers.length; i++) {
                members[i] = new org.omg.CORBA.StructMember(shortMembers[i], shortTc, null);
            }
            members[shortMembers.length] = new org.omg.CORBA.StructMember(
                "typeDesignator",
                org.omg.CORBA.ORB.init().get_primitive_tc(org.omg.CORBA.TCKind.tk_char),
                null);
            _tc = org.omg.CORBA.ORB.init().create_struct_tc(id(), "DateTime", members);
        }
        return _tc;
    }

    /** Repository id of the IDL type this helper marshals. */
    public static String id() {
        return "IDL:FIPA/DateTime:1.0";
    }
}
| lgpl-2.1 |
JordanReiter/railo | railo-java/railo-core/src/railo/runtime/interpreter/ref/op/BigMulti.java | 508 | package railo.runtime.interpreter.ref.op;
import railo.runtime.PageContext;
import railo.runtime.exp.PageException;
import railo.runtime.interpreter.ref.Ref;
/**
* Multiplication operation: evaluates both operand references and returns their
* product rendered as a String. (The previous comment said "Minus operation",
* which was a copy/paste error - this class calls multiply().)
*/
public final class BigMulti extends Big {
/**
* constructor of the class
* @param left reference producing the left-hand operand
* @param right reference producing the right-hand operand
*/
public BigMulti(Ref left, Ref right) {
super(left,right);
}
@Override
public Object getValue(PageContext pc) throws PageException {
// Resolve both operands against the page context (presumably as BigDecimal via
// the Big base class's getLeft/getRight - confirm there) and render the product.
return getLeft(pc).multiply(getRight(pc)).toString();
}
}
| lgpl-2.1 |
it-tavis/opencms-core | src/org/opencms/synchronize/Messages.java | 6225 | /*
* This library is part of OpenCms -
* the Open Source Content Management System
*
* Copyright (c) Alkacon Software GmbH (http://www.alkacon.com)
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* For further information about Alkacon Software GmbH, please see the
* company website: http://www.alkacon.com
*
* For further information about OpenCms, please see the
* project website: http://www.opencms.org
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package org.opencms.synchronize;
import org.opencms.i18n.A_CmsMessageBundle;
import org.opencms.i18n.I_CmsMessageBundle;
/**
* Convenience class to access the localized messages of this OpenCms package.<p>
*
* Each String constant is a key that must have a matching entry in the resource
* bundle {@code org.opencms.synchronize.messages} (see {@code BUNDLE_NAME}).
* The numeric suffix of a key (e.g. {@code _1}) conventionally names the number
* of message arguments - NOTE(review): assumed from the key naming pattern.
*
* @since 6.0.0
*/
public final class Messages extends A_CmsMessageBundle {
/** Message constant for key in the resource bundle. */
public static final String ERR_CREATE_DIR_1 = "ERR_CREATE_DIR_1";
/** Message constant for key in the resource bundle. */
public static final String ERR_CREATE_FILE_1 = "ERR_CREATE_FILE_1";
/** Message constant for key in the resource bundle. */
public static final String ERR_EXISTENT_FILE_1 = "ERR_EXISTENT_FILE_1";
/** Message constant for key in the resource bundle. */
public static final String ERR_IMPORT_1 = "ERR_IMPORT_1";
/** Message constant for key in the resource bundle. */
public static final String ERR_INIT_SYNC_0 = "ERR_INIT_SYNC_0";
/** Message constant for key in the resource bundle. */
public static final String ERR_IO_WRITE_SYNCLIST_0 = "ERR_IO_WRITE_SYNCLIST_0";
/** Message constant for key in the resource bundle. */
public static final String ERR_NO_RFS_DESTINATION_0 = "ERR_NO_RFS_DESTINATION_0";
/** Message constant for key in the resource bundle. */
public static final String ERR_NO_VFS_SOURCE_0 = "ERR_NO_VFS_SOURCE_0";
/** Message constant for key in the resource bundle. */
public static final String ERR_READ_SYNC_LIST_0 = "ERR_READ_SYNC_LIST_0";
/** Message constant for key in the resource bundle. */
public static final String ERR_READING_FILE_1 = "ERR_READING_FILE_1";
/** Message constant for key in the resource bundle. */
public static final String ERR_RFS_DESTINATION_NO_WRITE_1 = "ERR_RFS_DESTINATION_NO_WRITE_1";
/** Message constant for key in the resource bundle. */
public static final String ERR_RFS_DESTINATION_NOT_THERE_1 = "ERR_RFS_DESTINATION_NOT_THERE_1";
/** Message constant for key in the resource bundle. */
public static final String ERR_WRITE_FILE_0 = "ERR_WRITE_FILE_0";
/** Message constant for key in the resource bundle. */
public static final String LOG_EXTERNAL_TRANSLATION_1 = "LOG_EXTERNAL_TRANSLATION_1";
/** Message constant for key in the resource bundle. */
public static final String LOG_SYNCHRONIZE_EXPORT_FAILED_1 = "LOG_SYNCHRONIZE_EXPORT_FAILED_1";
/** Message constant for key in the resource bundle. */
public static final String LOG_SYNCHRONIZE_UPDATE_FAILED_1 = "LOG_SYNCHRONIZE_UPDATE_FAILED_1";
/** Message constant for key in the resource bundle. */
public static final String RPT_DEL_FILE_0 = "RPT_DEL_FILE_0";
/** Message constant for key in the resource bundle. */
public static final String RPT_DEL_FOLDER_0 = "RPT_DEL_FOLDER_0";
/** Message constant for key in the resource bundle. */
public static final String RPT_DEL_FS_FILE_0 = "RPT_DEL_FS_FILE_0";
/** Message constant for key in the resource bundle. */
public static final String RPT_DEL_FS_FOLDER_0 = "RPT_DEL_FS_FOLDER_0";
/** Message constant for key in the resource bundle. */
public static final String RPT_EXCLUDING_0 = "RPT_EXCLUDING_0";
/** Message constant for key in the resource bundle. */
public static final String RPT_EXPORT_FILE_0 = "RPT_EXPORT_FILE_0";
/** Message constant for key in the resource bundle. */
public static final String RPT_EXPORT_FOLDER_0 = "RPT_EXPORT_FOLDER_0";
/** Message constant for key in the resource bundle. */
public static final String RPT_FROM_FS_TO_0 = "RPT_FROM_FS_TO_0";
/** Message constant for key in the resource bundle. */
public static final String RPT_IMPORT_FILE_0 = "RPT_IMPORT_FILE_0";
/** Message constant for key in the resource bundle. */
public static final String RPT_IMPORT_FOLDER_0 = "RPT_IMPORT_FOLDER_0";
/** Message constant for key in the resource bundle. */
public static final String RPT_SKIPPING_0 = "RPT_SKIPPING_0";
/** Message constant for key in the resource bundle. */
public static final String RPT_TO_FS_AS_0 = "RPT_TO_FS_AS_0";
/** Message constant for key in the resource bundle. */
public static final String RPT_UPDATE_FILE_0 = "RPT_UPDATE_FILE_0";
/** Name of the used resource bundle. */
private static final String BUNDLE_NAME = "org.opencms.synchronize.messages";
/** Static instance member (shared, stateless accessor). */
private static final I_CmsMessageBundle INSTANCE = new Messages();
/**
* Hides the public constructor for this utility class.<p>
*/
private Messages() {
// hide the constructor
}
/**
* Returns an instance of this localized message accessor.<p>
*
* @return an instance of this localized message accessor
*/
public static I_CmsMessageBundle get() {
return INSTANCE;
}
/**
* Returns the bundle name for this OpenCms package.<p>
*
* @return the bundle name for this OpenCms package
*/
public String getBundleName() {
return BUNDLE_NAME;
}
} | lgpl-2.1 |
windauer/exist | exist-core/src/main/java/org/exist/storage/dom/CreatePageLoggable.java | 2956 | /*
* eXist Open Source Native XML Database
* Copyright (C) 2001-2015 The eXist Project
*
* http://exist-db.org
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software Foundation
* Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
package org.exist.storage.dom;
import java.nio.ByteBuffer;
import org.exist.storage.DBBroker;
import org.exist.storage.NativeBroker;
import org.exist.storage.journal.AbstractLoggable;
import org.exist.storage.journal.LogException;
import org.exist.storage.txn.Txn;
/**
* Journal log entry recording the creation of a new page in the DOM file,
* together with its neighbour pages. Supports redo/undo during recovery.
*
* @author wolf
*/
public class CreatePageLoggable extends AbstractLoggable {
// Page number of the previous page (serialized as an int in write())
protected long prevPage;
// Page number of the newly created page
protected long newPage;
// Page number of the next page
protected long nextPage;
// TID recorded for the new page; -1 when not supplied (see the 3-arg constructor)
protected short nextTID;
// Set only by the recovery-time constructor; target of redo()/undo()
private DOMFile domDb = null;
public CreatePageLoggable(final Txn transaction, final long prevPage, final long newPage, final long nextPage) {
this(transaction, prevPage, newPage, nextPage, (short) -1);
}
public CreatePageLoggable(final Txn transaction, final long prevPage, final long newPage, final long nextPage, final short nextTID) {
super(DOMFile.LOG_CREATE_PAGE, transaction.getId());
this.prevPage = prevPage;
this.newPage = newPage;
this.nextPage = nextPage;
this.nextTID = nextTID;
}
// Recovery-time constructor: page fields are filled in later via read().
public CreatePageLoggable(final DBBroker broker, final long transactId) {
super(DOMFile.LOG_CREATE_PAGE, transactId);
this.domDb = broker == null ? null : ((NativeBroker) broker).getDOMFile();
}
// Serialization order must match read(): three ints, then one short.
@Override
public void write(final ByteBuffer out) {
out.putInt((int) prevPage);
out.putInt((int) newPage);
out.putInt((int) nextPage);
out.putShort(nextTID);
}
@Override
public void read(final ByteBuffer in) {
prevPage = in.getInt();
newPage = in.getInt();
nextPage = in.getInt();
nextTID = in.getShort();
}
// 3 ints (4 bytes each) + 1 short (2 bytes) = 14 bytes
@Override
public int getLogSize() {
return 14;
}
@Override
public void redo() throws LogException {
domDb.redoCreatePage(this);
}
@Override
public void undo() throws LogException {
domDb.undoCreatePage(this);
}
@Override
public String dump() {
return super.dump() + " - new page created: " + newPage + "; prev. page: " + prevPage + "; next page: " + nextPage;
}
}
| lgpl-2.1 |
dana-i2cat/opennaas | utils/old-cim/LogicalIdentity.java | 2489 | /**
* This file was auto-generated by mofcomp -j version 1.0.0 on Wed Jan 12
* 09:21:06 CET 2011.
*/
package org.opennaas.extensions.router.model;
import java.io.*;
/**
* This Class contains accessor and mutator methods for all properties defined in the CIM class LogicalIdentity as well as methods comparable to the
* invokeMethods defined for this class. This Class implements the LogicalIdentityBean Interface. The CIM class LogicalIdentity is described as
* follows:
*
* CIM_LogicalIdentity is an abstract and generic association, indicating that two ManagedElements represent different aspects of the same underlying
* entity. This relationship conveys what could be defined with multiple inheritance. In most scenarios, the Identity relationship is determined by
* the equivalence of Keys or some other identifying properties of the related Elements. This relationship is reasonable in several scenarios. For
* example, it could be used to represent that a LogicalDevice is both a 'bus' entity and a 'functional' entity. A Device could be both a USB (bus)
* and a Keyboard (functional) entity.
*/
public class LogicalIdentity extends Association implements Serializable {
/**
* Protected constructor: instances are created through {@link #link} rather
* than directly, following the Association pattern of this generated model.
*/
protected LogicalIdentity() {
};
/**
* Creates a LogicalIdentity association linking the two given ManagedElements,
* which represent different aspects of the same underlying entity.
*
* @param systemElement the element on the "system" side of the association
* @param sameElement the element representing the same underlying entity
* @return the newly created association
*/
public static LogicalIdentity link(ManagedElement
systemElement, ManagedElement sameElement) {
return (LogicalIdentity) Association.link(LogicalIdentity.class, systemElement, sameElement);
}// link
} // Class LogicalIdentity
| lgpl-3.0 |
hsestupin/galaxy | src/main/java/co/paralleluniverse/galaxy/netty/NettyUtils.java | 1092 | package co.paralleluniverse.galaxy.netty;
import org.jboss.netty.util.ThreadNameDeterminer;
import java.util.concurrent.ThreadPoolExecutor;
/**
 * Constants and helpers used for configuring Netty thread pools.
 *
 * @author s.stupin
 */
public class NettyUtils {
    /**
     * Copy of {@link org.jboss.netty.channel.socket.nio.SelectorUtil#DEFAULT_IO_THREADS}:
     * twice the number of available processors.
     */
    public static final int DEFAULT_IO_THREADS = Runtime.getRuntime().availableProcessors() * 2;

    /**
     * Copy of {@link org.jboss.netty.channel.socket.nio.NioClientSocketChannelFactory#DEFAULT_BOSS_COUNT}
     */
    public static final int DEFAULT_BOSS_COUNT = 1;

    /** Determiner that keeps thread names unchanged (rejects Netty's proposed rename). */
    public static final ThreadNameDeterminer KEEP_UNCHANGED_DETERMINER = new ThreadNameDeterminer() {
        @Override
        public String determineThreadName(String currentThreadName, String proposedThreadName) throws Exception {
            return currentThreadName;
        }
    };

    /** Utility class; not instantiable. */
    private NettyUtils() {
    }

    /**
     * Returns the worker count to use for the given executor: its maximum pool
     * size, capped at {@link #DEFAULT_IO_THREADS}.
     *
     * @param workerExecutor the executor backing the Netty workers
     * @return the number of worker threads to configure
     */
    public static int getWorkerCount(ThreadPoolExecutor workerExecutor) {
        return Math.min(workerExecutor.getMaximumPoolSize(), DEFAULT_IO_THREADS);
    }
}
| lgpl-3.0 |
Alfresco/community-edition | projects/remote-api/source/test-java/org/alfresco/rest/api/tests/TestCustomProperty.java | 40542 | /*
* #%L
* Alfresco Remote API
* %%
* Copyright (C) 2005 - 2016 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package org.alfresco.rest.api.tests;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import org.alfresco.repo.dictionary.Facetable;
import org.alfresco.repo.dictionary.IndexTokenisationMode;
import org.alfresco.rest.api.model.CustomAspect;
import org.alfresco.rest.api.model.CustomModel;
import org.alfresco.rest.api.model.CustomModelConstraint;
import org.alfresco.rest.api.model.CustomModelNamedValue;
import org.alfresco.rest.api.model.CustomModelProperty;
import org.alfresco.rest.api.model.CustomType;
import org.alfresco.rest.api.model.CustomModel.ModelStatus;
import org.alfresco.rest.api.tests.client.HttpResponse;
import org.alfresco.rest.api.tests.util.RestApiUtil;
import org.alfresco.service.cmr.dictionary.CustomModelService;
import org.alfresco.service.namespace.QName;
import org.alfresco.util.Pair;
import org.junit.Test;
/**
* Tests the REST API of the properties of the {@link CustomModelService}.
*
* @author Jamal Kaabi-Mofrad
*/
public class TestCustomProperty extends BaseCustomModelApiTest
{
/**
* End-to-end check of property creation through the CMM REST API:
* creates a draft model, then exercises adding properties to an aspect and to a
* type - including permission checks (403 for non-admin), payload validation
* (400 when the entity name is missing), successful updates (200), attribute
* round-tripping, inherited-property retrieval, and duplicate-name rejection (409).
*/
@Test
public void testCreateProperties() throws Exception
{
setRequestContext(customModelAdmin);
String modelName = "testModel" + System.currentTimeMillis();
Pair<String, String> namespacePair = getTestNamespaceUriPrefixPair();
// Create the model as a Model Administrator
createCustomModel(modelName, namespacePair, ModelStatus.DRAFT);
// --- Part 1: properties on an aspect ---
{
// Create aspect
String aspectName = "testAspect1" + System.currentTimeMillis();
CustomAspect aspect = createTypeAspect(CustomAspect.class, modelName, aspectName, null, null, null);
// Update the Aspect by adding property
CustomAspect payload = new CustomAspect();
String aspectPropName = "testAspect1Prop1" + System.currentTimeMillis();
CustomModelProperty aspectProp = new CustomModelProperty();
aspectProp.setName(aspectPropName);
aspectProp.setTitle("property title");
aspectProp.setMultiValued(true);
aspectProp.setIndexed(true);
aspectProp.setFacetable(Facetable.TRUE);
aspectProp.setIndexTokenisationMode(IndexTokenisationMode.BOTH);
List<CustomModelProperty> props = new ArrayList<>(1);
props.add(aspectProp);
payload.setProperties(props);
setRequestContext(nonAdminUserName);
// Try to update the aspect as a non Admin user
put("cmm/" + modelName + "/aspects", aspectName, RestApiUtil.toJsonAsString(payload), SELECT_PROPS_QS, 403);
setRequestContext(customModelAdmin);
// Try to update the aspect as a Model Administrator
put("cmm/" + modelName + "/aspects", aspectName, RestApiUtil.toJsonAsString(payload), SELECT_PROPS_QS, 400); // Type name is mandatory
// Add the mandatory aspect name to the payload
payload.setName(aspectName);
put("cmm/" + modelName + "/aspects", aspectName, RestApiUtil.toJsonAsString(payload), SELECT_PROPS_QS, 200);
// Retrieve the updated aspect
HttpResponse response = getSingle("cmm/" + modelName + "/aspects", aspect.getName(), 200);
CustomAspect returnedAspect = RestApiUtil.parseRestApiEntry(response.getJsonResponse(), CustomAspect.class);
// Check the aspect's added property round-tripped with the attributes set above
assertEquals(1, returnedAspect.getProperties().size());
CustomModelProperty customModelProperty = returnedAspect.getProperties().get(0);
assertEquals(aspectPropName, customModelProperty.getName());
assertEquals("property title", customModelProperty.getTitle());
assertEquals(namespacePair.getSecond() + QName.NAMESPACE_PREFIX + aspectPropName, customModelProperty.getPrefixedName());
assertEquals("Default data type is 'd:text'.", "d:text", customModelProperty.getDataType());
assertNull(customModelProperty.getDescription());
assertTrue(customModelProperty.isMultiValued());
assertFalse(customModelProperty.isMandatory());
assertFalse(customModelProperty.isMandatoryEnforced());
assertNull(customModelProperty.getDefaultValue());
assertTrue(customModelProperty.isIndexed());
assertEquals(Facetable.TRUE, customModelProperty.getFacetable());
assertEquals(IndexTokenisationMode.BOTH, customModelProperty.getIndexTokenisationMode());
// Test duplicate property name
aspectProp = new CustomModelProperty();
aspectProp.setName(aspectPropName); // Existing name
aspectProp.setTitle("new property title");
props = new ArrayList<>(1);
props.add(aspectProp);
payload.setProperties(props);
// Try to update the aspect as a Model Administrator
put("cmm/" + modelName + "/aspects", aspectName, RestApiUtil.toJsonAsString(payload), SELECT_PROPS_QS, 409); // property name already exists
}
// --- Part 2: properties on a type (with parent cm:content) ---
{
// Create type
String typeName = "testType1" + System.currentTimeMillis();
CustomType type = createTypeAspect(CustomType.class, modelName, typeName, "test type1 title", "test type1 Desc", "cm:content");
// Update the Type by adding property
CustomType payload = new CustomType();
String typePropName = "testType1Prop1" + System.currentTimeMillis();
CustomModelProperty typeProp = new CustomModelProperty();
typeProp.setName(typePropName);
typeProp.setTitle("property title");
typeProp.setDataType("d:int");
typeProp.setIndexed(false);
typeProp.setFacetable(Facetable.FALSE);
typeProp.setIndexTokenisationMode(IndexTokenisationMode.FALSE);
List<CustomModelProperty> props = new ArrayList<>(1);
props.add(typeProp);
payload.setProperties(props);
setRequestContext(nonAdminUserName);
// Try to update the type as a non Admin user
put("cmm/" + modelName + "/types", typeName, RestApiUtil.toJsonAsString(payload), SELECT_PROPS_QS, 403);
setRequestContext(customModelAdmin);
// Try to update the type as a Model Administrator
put("cmm/" + modelName + "/types", typeName, RestApiUtil.toJsonAsString(payload), SELECT_PROPS_QS, 400); // Type name is mandatory
// Add the mandatory type name to the payload
payload.setName(typeName);
put("cmm/" + modelName + "/types", typeName, RestApiUtil.toJsonAsString(payload), SELECT_PROPS_QS, 200);
// Retrieve the updated type
HttpResponse response = getSingle("cmm/" + modelName + "/types", type.getName(), 200);
CustomType returnedType = RestApiUtil.parseRestApiEntry(response.getJsonResponse(), CustomType.class);
// Check the type's added property
assertEquals(1, returnedType.getProperties().size());
CustomModelProperty customModelProperty = returnedType.getProperties().get(0);
assertEquals(typePropName, customModelProperty.getName());
assertEquals("property title", customModelProperty.getTitle());
assertEquals(namespacePair.getSecond() + QName.NAMESPACE_PREFIX + typePropName, customModelProperty.getPrefixedName());
assertEquals("d:int", customModelProperty.getDataType());
assertNull(customModelProperty.getDescription());
assertFalse(customModelProperty.isMultiValued());
assertFalse(customModelProperty.isMandatory());
assertFalse(customModelProperty.isMandatoryEnforced());
assertNull(customModelProperty.getDefaultValue());
assertFalse(customModelProperty.isIndexed());
assertEquals(Facetable.FALSE, customModelProperty.getFacetable());
assertEquals(IndexTokenisationMode.FALSE, customModelProperty.getIndexTokenisationMode());
// Retrieve the updated type with all the properties (include inherited)
response = getSingle("cmm/" + modelName + "/types", type.getName()+SELECT_ALL_PROPS, 200);
returnedType = RestApiUtil.parseRestApiEntry(response.getJsonResponse(), CustomType.class);
assertEquals(3, returnedType.getProperties().size());
// Check for the inherited properties (from the cm:content parent)
assertNotNull(getProperty(returnedType.getProperties(), "content")); // cm:content
assertNotNull(getProperty(returnedType.getProperties(), "name")); // cm:name
// Create another property and set all of its attributes
payload = new CustomType();
payload.setName(typeName);
String typePropName2 = "testType1Prop2" + System.currentTimeMillis();
typeProp = new CustomModelProperty();
typeProp.setName(typePropName2);
typeProp.setTitle("property2 title");
typeProp.setDescription("property2 desciption");
typeProp.setDataType("d:int");
typeProp.setDefaultValue("0");
typeProp.setMultiValued(false);
typeProp.setMandatory(true);
typeProp.setMandatoryEnforced(true);
props = new ArrayList<>(1);
props.add(typeProp);
payload.setProperties(props);
put("cmm/" + modelName + "/types", typeName, RestApiUtil.toJsonAsString(payload), SELECT_PROPS_QS, 200);
// Retrieve the updated type
response = getSingle("cmm/" + modelName + "/types", type.getName(), 200);
returnedType = RestApiUtil.parseRestApiEntry(response.getJsonResponse(), CustomType.class);
// Check the type's added property
assertEquals(2, returnedType.getProperties().size());
customModelProperty = getProperty(returnedType.getProperties(), typePropName2);
assertNotNull(customModelProperty);
assertEquals(typePropName2, customModelProperty.getName());
assertEquals("property2 title", customModelProperty.getTitle());
assertEquals(namespacePair.getSecond() + QName.NAMESPACE_PREFIX + typePropName2, customModelProperty.getPrefixedName());
assertEquals("d:int", customModelProperty.getDataType());
assertEquals("property2 desciption", customModelProperty.getDescription());
assertFalse(customModelProperty.isMultiValued());
assertTrue(customModelProperty.isMandatory());
assertTrue(customModelProperty.isMandatoryEnforced());
assertEquals("0", customModelProperty.getDefaultValue());
// Test duplicate property name
typeProp = new CustomModelProperty();
typeProp.setName(typePropName2); // Existing name
typeProp.setTitle("new property title");
typeProp.setDataType("d:text");
props = new ArrayList<>(1);
props.add(typeProp);
payload.setProperties(props);
put("cmm/" + modelName + "/types", typeName, RestApiUtil.toJsonAsString(payload), SELECT_PROPS_QS, 409); // property name already exists
}
}
/**
 * Deletes properties from an aspect and a type of an inactive (DRAFT) custom model via the
 * REST API. Covers validation failures (missing payload, missing entity name), authorisation
 * (a non-admin user gets 403) and that deleting an already-deleted property returns 404.
 */
@Test
public void testDeleteProperty() throws Exception
{
setRequestContext(customModelAdmin);
String modelName = "testModelDeleteProp" + System.currentTimeMillis();
Pair<String, String> namespacePair = getTestNamespaceUriPrefixPair();
// Create the model as a Model Administrator
createCustomModel(modelName, namespacePair, ModelStatus.DRAFT);
/*
* Create aspect and update it by adding two properties
*/
String aspectName = "testAspect1" + System.currentTimeMillis();
createTypeAspect(CustomAspect.class, modelName, aspectName, null, null, null);
// Update the Aspect by adding property - property one
CustomAspect aspectPayload = new CustomAspect();
aspectPayload.setName(aspectName);
String aspectPropNameOne = "testAspect1Prop1" + System.currentTimeMillis();
CustomModelProperty aspectPropOne = new CustomModelProperty();
aspectPropOne.setName(aspectPropNameOne);
aspectPropOne.setTitle("aspect property one title");
aspectPropOne.setMultiValued(true);
List<CustomModelProperty> props = new ArrayList<>(1);
props.add(aspectPropOne);
aspectPayload.setProperties(props);
// create property one
put("cmm/" + modelName + "/aspects", aspectName, RestApiUtil.toJsonAsString(aspectPayload), SELECT_PROPS_QS, 200);
// Update the Aspect by adding another property - property two
aspectPayload = new CustomAspect();
aspectPayload.setName(aspectName);
String aspectPropNameTwo = "testAspect1Prop2" + System.currentTimeMillis();
CustomModelProperty aspectPropTwo = new CustomModelProperty();
aspectPropTwo.setName(aspectPropNameTwo);
aspectPropTwo.setTitle("aspect property two title");
aspectPropTwo.setMandatory(true);
aspectPropTwo.setDataType("d:int");
aspectPropTwo.setDefaultValue("1");
props = new ArrayList<>(1);
props.add(aspectPropTwo);
aspectPayload.setProperties(props);
// create property two
put("cmm/" + modelName + "/aspects", aspectName, RestApiUtil.toJsonAsString(aspectPayload), SELECT_PROPS_QS, 200);
// Retrieve the updated aspect
HttpResponse response = getSingle("cmm/" + modelName + "/aspects", aspectName, 200);
CustomAspect returnedAspect = RestApiUtil.parseRestApiEntry(response.getJsonResponse(), CustomAspect.class);
// Check the aspect's added properties
assertEquals(2, returnedAspect.getProperties().size());
/*
* Create type and update it by adding two properties
*/
String typeName = "testType1" + System.currentTimeMillis();
createTypeAspect(CustomType.class, modelName, typeName, "test type1 title", null, "cm:content");
// Update the Type by adding property - property one
CustomType typePayload = new CustomType();
typePayload.setName(typeName);
String typePropNameOne = "testType1Prop1" + System.currentTimeMillis();
CustomModelProperty typePropOne = new CustomModelProperty();
typePropOne.setName(typePropNameOne);
typePropOne.setTitle("type property one title");
props = new ArrayList<>(1);
props.add(typePropOne);
typePayload.setProperties(props);
// create property one
put("cmm/" + modelName + "/types", typeName, RestApiUtil.toJsonAsString(typePayload), SELECT_PROPS_QS, 200);
// Update the Type by adding another property - property two
typePayload = new CustomType();
typePayload.setName(typeName);
// Create inline MINMAX constraint
CustomModelConstraint inlineMinMaxConstraint = new CustomModelConstraint();
inlineMinMaxConstraint.setType("MINMAX");
inlineMinMaxConstraint.setTitle("test MINMAX title");
// Create the MinMax constraint's parameters
List<CustomModelNamedValue> parameters = new ArrayList<>(2);
parameters.add(buildNamedValue("maxValue", "100.0"));
parameters.add(buildNamedValue("minValue", "0.0"));
// Add the parameters into the constraint
inlineMinMaxConstraint.setParameters(parameters);
String typePropNameTwo = "testType1Prop2" + System.currentTimeMillis();
CustomModelProperty typePropTwo = new CustomModelProperty();
typePropTwo.setName(typePropNameTwo);
typePropTwo.setTitle("type property two title");
typePropTwo.setDataType("d:int");
typePropTwo.setConstraints(Arrays.asList(inlineMinMaxConstraint)); // add the inline constraint
props = new ArrayList<>(1);
props.add(typePropTwo);
typePayload.setProperties(props);
// create property two
put("cmm/" + modelName + "/types", typeName, RestApiUtil.toJsonAsString(typePayload), SELECT_PROPS_QS, 200);
// Retrieve the updated type
response = getSingle("cmm/" + modelName + "/types", typeName, 200);
CustomType returnedType = RestApiUtil.parseRestApiEntry(response.getJsonResponse(), CustomType.class);
// Check the type's added properties
assertEquals(2, returnedType.getProperties().size());
// Delete aspect's property one - model is inactive
{
final String deletePropOneAspectQS = getPropDeleteUpdateQS(aspectPropNameOne, true);
// Try to delete propertyOne from aspect
put("cmm/" + modelName + "/aspects", aspectName, null, deletePropOneAspectQS, 400); // missing payload
CustomAspect deletePropAspectPayload = new CustomAspect();
put("cmm/" + modelName + "/aspects", aspectName, RestApiUtil.toJsonAsString(deletePropAspectPayload), deletePropOneAspectQS, 400); // missing aspect name
setRequestContext(nonAdminUserName);
deletePropAspectPayload.setName(aspectName);
put("cmm/" + modelName + "/aspects", aspectName, RestApiUtil.toJsonAsString(deletePropAspectPayload), deletePropOneAspectQS, 403); // unauthorised
setRequestContext(customModelAdmin);
// Delete as a Model Administrator
put("cmm/" + modelName + "/aspects", aspectName, RestApiUtil.toJsonAsString(deletePropAspectPayload), deletePropOneAspectQS, 200);
// Check the property has been deleted
response = getSingle("cmm/" + modelName + "/aspects", aspectName, 200);
returnedAspect = RestApiUtil.parseRestApiEntry(response.getJsonResponse(), CustomAspect.class);
assertEquals(1, returnedAspect.getProperties().size());
assertFalse("Property one should have been deleted.", aspectPropNameOne.equals(returnedAspect.getProperties().get(0).getName()));
// deleting the same property again must fail
put("cmm/" + modelName + "/aspects", aspectName, RestApiUtil.toJsonAsString(deletePropAspectPayload), deletePropOneAspectQS, 404); //Not found
}
// Delete type's property two - model is inactive
{
final String deletePropTwoTypeQS = getPropDeleteUpdateQS(typePropNameTwo, true);
// Try to delete propertyTwo from type
put("cmm/" + modelName + "/types", typeName, null, deletePropTwoTypeQS, 400); // missing payload
CustomType deletePropTypePayload = new CustomType();
put("cmm/" + modelName + "/types", typeName, RestApiUtil.toJsonAsString(deletePropTypePayload), deletePropTwoTypeQS,
400); // missing type name
setRequestContext(nonAdminUserName);
deletePropTypePayload.setName(typeName);
put("cmm/" + modelName + "/types", typeName, RestApiUtil.toJsonAsString(deletePropTypePayload), deletePropTwoTypeQS, 403); // unauthorised
setRequestContext(customModelAdmin);
// Delete as a Model Administrator
put("cmm/" + modelName + "/types", typeName, RestApiUtil.toJsonAsString(deletePropTypePayload), deletePropTwoTypeQS, 200);
// Check the property has been deleted
response = getSingle("cmm/" + modelName + "/types", typeName, 200);
returnedType = RestApiUtil.parseRestApiEntry(response.getJsonResponse(), CustomType.class);
assertEquals(1, returnedType.getProperties().size());
assertFalse("Property two should have been deleted.", typePropNameTwo.equals(returnedType.getProperties().get(0).getName()));
// deleting the same property again must fail
put("cmm/" + modelName + "/types", typeName, RestApiUtil.toJsonAsString(deletePropTypePayload), deletePropTwoTypeQS, 404); //Not found
}
// Note: at the time of writing, we can't delete a property of an active model, as ModelValidatorImpl.validateIndexedProperty depends on Solr
}
/**
 * Updates a property of an aspect (while the model is a DRAFT) and of a type (after the model
 * is ACTIVE). For the active model it verifies that data type, multi-valued, mandatory and
 * mandatory-enforced may not change (409), that constraint changes are validated against the
 * default value, and that an incompatible constraint (LENGTH on d:int) is rejected (400).
 */
@Test
public void testUpdateProperty() throws Exception
{
setRequestContext(customModelAdmin);
String modelName = "testModelUpdateProp" + System.currentTimeMillis();
Pair<String, String> namespacePair = getTestNamespaceUriPrefixPair();
// Create the model as a Model Administrator
createCustomModel(modelName, namespacePair, ModelStatus.DRAFT);
/*
* Create aspect and update it by adding a property
*/
String aspectName = "testAspect1" + System.currentTimeMillis();
createTypeAspect(CustomAspect.class, modelName, aspectName, null, null, null);
// Update the Aspect by adding property
CustomAspect aspectPayload = new CustomAspect();
aspectPayload.setName(aspectName);
String aspectPropName = "testAspect1Prop" + System.currentTimeMillis();
CustomModelProperty aspectProp = new CustomModelProperty();
aspectProp.setName(aspectPropName);
aspectProp.setTitle("aspect property title");
aspectProp.setMultiValued(true);
List<CustomModelProperty> props = new ArrayList<>(1);
props.add(aspectProp);
aspectPayload.setProperties(props);
// create property
put("cmm/" + modelName + "/aspects", aspectName, RestApiUtil.toJsonAsString(aspectPayload), SELECT_PROPS_QS, 200);
// Retrieve the updated aspect
HttpResponse response = getSingle("cmm/" + modelName + "/aspects", aspectName, 200);
CustomAspect returnedAspect = RestApiUtil.parseRestApiEntry(response.getJsonResponse(), CustomAspect.class);
// Check the aspect's added property
assertEquals(1, returnedAspect.getProperties().size());
/*
* Create type and update it by adding a property
*/
String typeName = "testType1" + System.currentTimeMillis();
createTypeAspect(CustomType.class, modelName, typeName, "test type1 title", null, "cm:content");
// Update the Type by adding property - property one
CustomType typePayload = new CustomType();
typePayload.setName(typeName);
// Create inline MINMAX constraint
CustomModelConstraint inlineMinMaxConstraint = new CustomModelConstraint();
inlineMinMaxConstraint.setType("MINMAX");
inlineMinMaxConstraint.setTitle("test MINMAX title");
// Create the MinMax constraint's parameters
List<CustomModelNamedValue> parameters = new ArrayList<>(2);
parameters.add(buildNamedValue("maxValue", "100.0"));
parameters.add(buildNamedValue("minValue", "0.0"));
// Add the parameters into the constraint
inlineMinMaxConstraint.setParameters(parameters);
String typePropName = "testType1Prop" + System.currentTimeMillis();
CustomModelProperty typeProp = new CustomModelProperty();
typeProp.setName(typePropName);
typeProp.setDataType("d:int");
typeProp.setTitle("type property title");
typeProp.setDefaultValue("0");
typeProp.setConstraints(Arrays.asList(inlineMinMaxConstraint)); // add the inline constraint
props = new ArrayList<>(1);
props.add(typeProp);
typePayload.setProperties(props);
// create property
put("cmm/" + modelName + "/types", typeName, RestApiUtil.toJsonAsString(typePayload), SELECT_PROPS_QS, 200);
// Retrieve the updated type
response = getSingle("cmm/" + modelName + "/types", typeName, 200);
CustomType returnedType = RestApiUtil.parseRestApiEntry(response.getJsonResponse(), CustomType.class);
// Check the type's added property
assertEquals(1, returnedType.getProperties().size());
// Update aspect's property - model is inactive
{
final String updatePropOneAspectQS = getPropDeleteUpdateQS(aspectPropName, false);
// Try to update property from aspect
put("cmm/" + modelName + "/aspects", aspectName, null, updatePropOneAspectQS, 400); // missing payload
CustomAspect updatePropAspectPayload = new CustomAspect();
CustomModelProperty propertyAspect = new CustomModelProperty();
propertyAspect.setTitle("new Title");
propertyAspect.setDescription("new Desc");
propertyAspect.setDataType("d:int"); // the original value was d:text
propertyAspect.setMultiValued(false); // the original value was true
propertyAspect.setMandatory(true); // the original value was false
propertyAspect.setDefaultValue("10");
List<CustomModelProperty> modifiedProp = new ArrayList<>(1);
modifiedProp.add(propertyAspect);
updatePropAspectPayload.setProperties(modifiedProp);
put("cmm/" + modelName + "/aspects", aspectName, RestApiUtil.toJsonAsString(updatePropAspectPayload), updatePropOneAspectQS, 400); // missing aspect name
// set a random name
updatePropAspectPayload.setName(aspectName + System.currentTimeMillis());
put("cmm/" + modelName + "/aspects", aspectName, RestApiUtil.toJsonAsString(updatePropAspectPayload), updatePropOneAspectQS, 404); // Aspect not found
// set the correct name
updatePropAspectPayload.setName(aspectName);
// the requested property name does not match the payload
put("cmm/" + modelName + "/aspects", aspectName, RestApiUtil.toJsonAsString(updatePropAspectPayload), updatePropOneAspectQS, 400);
// set the property name that matches the requested property
propertyAspect.setName(aspectPropName);
setRequestContext(nonAdminUserName);
put("cmm/" + modelName + "/aspects", aspectName, RestApiUtil.toJsonAsString(updatePropAspectPayload), updatePropOneAspectQS, 403); // unauthorised
setRequestContext(customModelAdmin);
// Update as a Model Administrator
put("cmm/" + modelName + "/aspects", aspectName, RestApiUtil.toJsonAsString(updatePropAspectPayload), updatePropOneAspectQS, 200);
// Check the property has been updated
response = getSingle("cmm/" + modelName + "/aspects", aspectName, 200);
returnedAspect = RestApiUtil.parseRestApiEntry(response.getJsonResponse(), CustomAspect.class);
assertEquals(1, returnedAspect.getProperties().size());
CustomModelProperty modifiedAspectProperty = returnedAspect.getProperties().get(0);
compareCustomModelProperties(propertyAspect, modifiedAspectProperty, "prefixedName", "indexTokenisationMode");
}
// Activate the model
CustomModel statusPayload = new CustomModel();
statusPayload.setStatus(ModelStatus.ACTIVE);
put("cmm", modelName, RestApiUtil.toJsonAsString(statusPayload), SELECT_STATUS_QS, 200);
// Update type's property - model is active
{
final String updatePropTwoTypeQS = getPropDeleteUpdateQS(typePropName, false);
CustomType updatePropTypePayload = new CustomType();
updatePropTypePayload.setName(typeName);
CustomModelProperty propertyType = new CustomModelProperty();
propertyType.setName(typePropName);
propertyType.setTitle("new Title");
propertyType.setDescription("new Desc");
propertyType.setDataType("d:long"); // the original value was d:int
propertyType.setDefaultValue("5");
List<CustomModelProperty> modifiedProp = new ArrayList<>(1);
modifiedProp.add(propertyType);
updatePropTypePayload.setProperties(modifiedProp);
setRequestContext(nonAdminUserName);
// Unauthorised
put("cmm/" + modelName + "/types", typeName, RestApiUtil.toJsonAsString(updatePropTypePayload), updatePropTwoTypeQS, 403);
setRequestContext(customModelAdmin);
// Try to update an active model as a Model Administrator - Cannot change the data type of the property of an active model
put("cmm/" + modelName + "/types", typeName, RestApiUtil.toJsonAsString(updatePropTypePayload), updatePropTwoTypeQS, 409);
// Set the data type with its original value
propertyType.setDataType("d:int");
propertyType.setMultiValued(true);// the original value was false
// Cannot change the multi-valued option of the property of an active model
put("cmm/" + modelName + "/types", typeName, RestApiUtil.toJsonAsString(updatePropTypePayload), updatePropTwoTypeQS, 409);
propertyType.setMultiValued(false);
propertyType.setMandatory(true);// the original value was false
// Cannot change the mandatory option of the property of an active model
put("cmm/" + modelName + "/types", typeName, RestApiUtil.toJsonAsString(updatePropTypePayload), updatePropTwoTypeQS, 409);
propertyType.setMandatory(false);
propertyType.setMandatoryEnforced(true);// the original value was false
// Cannot change the mandatory-enforced option of the property of an active model
put("cmm/" + modelName + "/types", typeName, RestApiUtil.toJsonAsString(updatePropTypePayload), updatePropTwoTypeQS, 409);
// Set the mandatory-enforced with its original value
propertyType.setMandatoryEnforced(false);
// Update the MinMax constraint's parameters
parameters = new ArrayList<>(2);
parameters.add(buildNamedValue("maxValue", "120.0")); // the original value was 100.0
parameters.add(buildNamedValue("minValue", "20.0")); // the original value was 0.0
// Add the parameters into the constraint
inlineMinMaxConstraint.setParameters(parameters);
propertyType.setConstraints(Arrays.asList(inlineMinMaxConstraint)); // add the updated inline constraint
// Try to Update - constraint violation. The default value is 5 which is not in the MinMax range [20, 120]
put("cmm/" + modelName + "/types", typeName, RestApiUtil.toJsonAsString(updatePropTypePayload), updatePropTwoTypeQS, 409);
propertyType.setDefaultValue("25"); // we changed the MinMax constraint to be [20, 120]
put("cmm/" + modelName + "/types", typeName, RestApiUtil.toJsonAsString(updatePropTypePayload), updatePropTwoTypeQS, 200);
// Check the property has been updated
response = getSingle("cmm/" + modelName + "/types", typeName, 200);
returnedType = RestApiUtil.parseRestApiEntry(response.getJsonResponse(), CustomType.class);
assertEquals(1, returnedType.getProperties().size());
CustomModelProperty modifiedTypeProperty = returnedType.getProperties().get(0);
assertEquals("new Title", modifiedTypeProperty.getTitle());
assertEquals("new Desc", modifiedTypeProperty.getDescription());
assertEquals("25", modifiedTypeProperty.getDefaultValue());
assertEquals("Shouldn't be able to change the data type of the property of an active model." ,"d:int", modifiedTypeProperty.getDataType());
assertFalse(modifiedTypeProperty.isMandatory());
assertFalse(modifiedTypeProperty.isMultiValued());
assertFalse(modifiedTypeProperty.isMandatoryEnforced());
assertEquals(1, modifiedTypeProperty.getConstraints().size());
CustomModelConstraint modifiedConstraint = modifiedTypeProperty.getConstraints().get(0);
assertEquals("MINMAX", modifiedConstraint.getType());
assertEquals("120.0", getParameterSimpleValue(modifiedConstraint.getParameters(), "maxValue"));
assertEquals("20.0", getParameterSimpleValue(modifiedConstraint.getParameters(), "minValue"));
// Change the constraint type and parameter
inlineMinMaxConstraint.setType("LENGTH");
inlineMinMaxConstraint.setTitle("test LENGTH title");
parameters = new ArrayList<>(2);
parameters.add(buildNamedValue("maxLength", "256"));
parameters.add(buildNamedValue("minLength", "0"));
// Add the parameters into the constraint
inlineMinMaxConstraint.setParameters(parameters);
propertyType.setConstraints(Arrays.asList(inlineMinMaxConstraint));
// LENGTH can only be used with textual data type
put("cmm/" + modelName + "/types", typeName, RestApiUtil.toJsonAsString(updatePropTypePayload), updatePropTwoTypeQS, 400);
//update the property by removing the constraint
propertyType.setConstraints(Collections.<CustomModelConstraint>emptyList());
put("cmm/" + modelName + "/types", typeName, RestApiUtil.toJsonAsString(updatePropTypePayload), updatePropTwoTypeQS, 200);
response = getSingle("cmm/" + modelName + "/types", typeName, 200);
returnedType = RestApiUtil.parseRestApiEntry(response.getJsonResponse(), CustomType.class);
assertEquals(1, returnedType.getProperties().size());
modifiedTypeProperty = returnedType.getProperties().get(0);
assertEquals(0, modifiedTypeProperty.getConstraints().size());
}
}
/**
 * Verifies that a property's default value is validated against its data type: invalid
 * values (blank, text, wrong numeric form) are rejected with 400 on both create
 * (SELECT_PROPS_QS) and update (updatePropAspectQS) requests, valid values are accepted.
 */
@Test
public void testValidatePropertyDefaultValue() throws Exception
{
setRequestContext(customModelAdmin);
String modelName = "testModelPropDefaultValue" + System.currentTimeMillis();
Pair<String, String> namespacePair = getTestNamespaceUriPrefixPair();
// Create the model as a Model Administrator
createCustomModel(modelName, namespacePair, ModelStatus.DRAFT);
/*
* Create aspect and update it by adding a property
*/
String aspectName = "testAspect1" + System.currentTimeMillis();
createTypeAspect(CustomAspect.class, modelName, aspectName, null, null, null);
// Update the Aspect by adding property
CustomAspect aspectPayload = new CustomAspect();
aspectPayload.setName(aspectName);
String aspectPropName = "testAspectProp" + System.currentTimeMillis();
final String updatePropAspectQS = getPropDeleteUpdateQS(aspectPropName, false);
CustomModelProperty aspectProp = new CustomModelProperty();
aspectProp.setName(aspectPropName);
aspectProp.setTitle("aspect property title");
List<CustomModelProperty> props = new ArrayList<>(1);
props.add(aspectProp);
aspectPayload.setProperties(props);
// d:int tests
{
aspectProp.setDataType("d:int");
aspectProp.setDefaultValue(" ");// space
put("cmm/" + modelName + "/aspects", aspectName, RestApiUtil.toJsonAsString(aspectPayload), SELECT_PROPS_QS, 400);
aspectProp.setDefaultValue("abc"); // text
// try to create property - invalid default
put("cmm/" + modelName + "/aspects", aspectName, RestApiUtil.toJsonAsString(aspectPayload), SELECT_PROPS_QS, 400);
aspectProp.setDefaultValue("1.0"); // double
// try to create property - invalid default
put("cmm/" + modelName + "/aspects", aspectName, RestApiUtil.toJsonAsString(aspectPayload), SELECT_PROPS_QS, 400);
aspectProp.setDefaultValue("1,2,3"); // text
// try to create property - invalid default
put("cmm/" + modelName + "/aspects", aspectName, RestApiUtil.toJsonAsString(aspectPayload), SELECT_PROPS_QS, 400);
}
// d:float tests
{
aspectProp.setDataType("d:float");
aspectProp.setDefaultValue(" ");// space
put("cmm/" + modelName + "/aspects", aspectName, RestApiUtil.toJsonAsString(aspectPayload), SELECT_PROPS_QS, 400);
aspectProp.setDefaultValue("abc"); // text
// try to create property - invalid default
put("cmm/" + modelName + "/aspects", aspectName, RestApiUtil.toJsonAsString(aspectPayload), SELECT_PROPS_QS, 400);
aspectProp.setDefaultValue("1,2,3"); // text
// try to create property - invalid default
put("cmm/" + modelName + "/aspects", aspectName, RestApiUtil.toJsonAsString(aspectPayload), SELECT_PROPS_QS, 400);
aspectProp.setDefaultValue("1.0"); // float
// create property - valid default
put("cmm/" + modelName + "/aspects", aspectName, RestApiUtil.toJsonAsString(aspectPayload), SELECT_PROPS_QS, 200);
aspectProp.setDefaultValue("1.0f"); // float - update
// update property - valid default
put("cmm/" + modelName + "/aspects", aspectName, RestApiUtil.toJsonAsString(aspectPayload), updatePropAspectQS, 200);
aspectProp.setDefaultValue("1.0d"); // double - update
// update property - valid default
put("cmm/" + modelName + "/aspects", aspectName, RestApiUtil.toJsonAsString(aspectPayload), updatePropAspectQS, 200);
}
// d:boolean tests
{
aspectProp.setDataType("d:boolean");
aspectProp.setDefaultValue(" ");// space
put("cmm/" + modelName + "/aspects", aspectName, RestApiUtil.toJsonAsString(aspectPayload), updatePropAspectQS, 400);
aspectProp.setDefaultValue("abc"); // text
// try to update property - invalid default
put("cmm/" + modelName + "/aspects", aspectName, RestApiUtil.toJsonAsString(aspectPayload), updatePropAspectQS, 400);
aspectProp.setDefaultValue("1"); // number
// try to update property - invalid default
put("cmm/" + modelName + "/aspects", aspectName, RestApiUtil.toJsonAsString(aspectPayload), updatePropAspectQS, 400);
aspectProp.setDefaultValue("true"); // valid value
// update property
put("cmm/" + modelName + "/aspects", aspectName, RestApiUtil.toJsonAsString(aspectPayload), updatePropAspectQS, 200);
aspectProp.setDefaultValue("false"); // valid value
// update property
put("cmm/" + modelName + "/aspects", aspectName, RestApiUtil.toJsonAsString(aspectPayload), updatePropAspectQS, 200);
}
}
/**
 * Builds the query string for a property delete or update request: the standard
 * property-select query string followed by either {@code &delete=<propName>} or
 * {@code &update=<propName>}.
 *
 * @param propName the name of the property to delete or update
 * @param isDelete {@code true} for a delete request, {@code false} for an update
 * @return the complete query string
 */
private String getPropDeleteUpdateQS(String propName, boolean isDelete)
{
    StringBuilder queryString = new StringBuilder(SELECT_PROPS_QS);
    if (isDelete)
    {
        queryString.append("&delete=");
    }
    else
    {
        queryString.append("&update=");
    }
    return queryString.append(propName).toString();
}
}
| lgpl-3.0 |
polytechnice-si/4A_ISA_TheCookieFactory | j2e/src/main/java/fr/unice/polytech/isa/tcf/asynchronous/KitchenPrinter.java | 2968 | package fr.unice.polytech.isa.tcf.asynchronous;
import fr.unice.polytech.isa.tcf.entities.Order;
import org.apache.openejb.util.LogCategory;
import javax.annotation.Resource;
import javax.ejb.ActivationConfigProperty;
import javax.ejb.MessageDriven;
import javax.ejb.MessageDrivenContext;
import javax.jms.*;
import javax.jms.IllegalStateException;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
@MessageDriven(activationConfig = {
        @ActivationConfigProperty( propertyName = "destinationType", propertyValue = "javax.jms.Queue"),
        @ActivationConfigProperty( propertyName = "destination", propertyValue ="/queue/kitchen/printer") })
public class KitchenPrinter implements MessageListener {

    private static final org.apache.openejb.util.Logger log =
            org.apache.openejb.util.Logger.getInstance(LogCategory.ACTIVEMQ, KitchenPrinter.class);

    /** Simulated printing time in milliseconds (was an inline magic number). */
    private static final long PRINT_DELAY_MS = 4000;

    @Resource
    private MessageDrivenContext context;

    @PersistenceContext private EntityManager entityManager;

    /**
     ** Resources necessary to support asynchronous responses
     */
    @Resource private ConnectionFactory connectionFactory;
    @Resource(name = "KitchenPrinterAck") private Queue acknowledgmentQueue;

    /**
     * Message-based reception (automatically handled by the container).
     * A message that is not an ObjectMessage, or that fails with a JMSException, marks the
     * transaction for rollback instead of letting an exception escape the listener.
     *
     * @param message a JMS message that is expected to contain an {@link Order}
     */
    @Override
    public void onMessage(Message message) {
        if (!(message instanceof ObjectMessage)) {
            // Robustness fix: the previous blind cast let a ClassCastException escape the
            // listener when a foreign message type arrived on the queue.
            log.error("Unexpected message type (ObjectMessage expected): " + message);
            context.setRollbackOnly();
            return;
        }
        try {
            Order data = (Order) ((ObjectMessage) message).getObject();
            handle(data);
        } catch (JMSException e) {
            log.error("Java message service exception while handling " + message);
            log.error(e.getMessage(), e);
            context.setRollbackOnly();
        }
    }

    /**
     * Business logic to process an Order: logs it, simulates the printing delay, then
     * sends an acknowledgment to the response queue.
     *
     * @param data the order received over JMS (detached; re-attached via merge)
     * @throws IllegalStateException if the simulated printing is interrupted or the
     *         acknowledgment cannot be sent
     */
    private void handle(Order data) throws IllegalStateException {
        // Re-attach the detached entity received over JMS to the persistence context.
        Order d = entityManager.merge(data);
        try {
            log.info("KitchenPrinter:\n Printing order #"+d.getId());
            Thread.sleep(PRINT_DELAY_MS); // it takes time ... 4 seconds actually
            log.info("\n " + d);
            log.info("\n done ["+d.getId()+"]");
            respond(d.getId());
        } catch (InterruptedException e) {
            // Bug fix: restore the interrupt status before converting to an unchecked failure
            // (previously the interruption was swallowed).
            Thread.currentThread().interrupt();
            log.error(e.getMessage(), e);
            throw new IllegalStateException(e.toString());
        } catch (JMSException e) {
            log.error(e.getMessage(), e);
            throw new IllegalStateException(e.toString());
        }
    }

    /**
     * Send the processed order ID to the response Queue as text: "{ID};PRINTED"
     * (note: no leading '#', matching the actual message body).
     *
     * @param orderId the ID of the printed order
     * @throws JMSException if the acknowledgment cannot be sent
     */
    private void respond(int orderId) throws JMSException {
        Connection connection = connectionFactory.createConnection();
        try {
            connection.start();
            Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
            MessageProducer producer = session.createProducer(acknowledgmentQueue);
            producer.setDeliveryMode(DeliveryMode.NON_PERSISTENT);
            producer.send(session.createTextMessage(orderId + ";PRINTED"));
        } finally {
            // Leak fix: the previous version closed the session before the connection in a
            // single finally, so a JMSException from session.close() skipped connection.close().
            // Per the JMS spec, closing the connection closes its sessions and producers.
            connection.close();
        }
    }
}
| lgpl-3.0 |
jjettenn/molgenis | molgenis-core-ui/src/main/java/org/molgenis/ui/menumanager/MenuManagerController.java | 5042 | package org.molgenis.ui.menumanager;
import com.google.common.base.Predicate;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.TreeTraverser;
import org.molgenis.data.settings.AppSettings;
import org.molgenis.file.FileStore;
import org.molgenis.framework.ui.MolgenisPlugin;
import org.molgenis.ui.*;
import org.molgenis.ui.menu.Menu;
import org.molgenis.util.FileUploadUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.*;
import javax.servlet.http.Part;
import javax.validation.Valid;
import java.io.File;
import java.io.IOException;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import static org.molgenis.ui.menumanager.MenuManagerController.URI;
import static org.springframework.http.HttpStatus.OK;
import static org.springframework.web.bind.annotation.RequestMethod.GET;
import static org.springframework.web.bind.annotation.RequestMethod.POST;
/**
 * Plugin controller to view and modify the application UI menu.
 * Renders the menu-manager view, saves an edited menu, and lets a superuser upload a
 * replacement logo into the file store.
 */
@Controller
@RequestMapping(URI)
public class MenuManagerController extends MolgenisPluginController
{
public static final String ID = "menumanager";
public static final String URI = MolgenisPluginController.PLUGIN_URI_PREFIX + ID;
private final MenuManagerService menuManagerService;
private final FileStore fileStore;
private final MolgenisUi molgenisUi;
private final AppSettings appSettings;
// User-facing error shown when the uploaded logo is not an image
private static final String ERRORMESSAGE_LOGO = "The logo needs to be an image file like png or jpg.";
@Autowired
public MenuManagerController(MenuManagerService menuManagerService, FileStore fileStore, MolgenisUi molgenisUi,
AppSettings appSettings)
{
super(URI);
if (menuManagerService == null) throw new IllegalArgumentException("menuManagerService is null");
if (molgenisUi == null) throw new IllegalArgumentException("molgenisUi is null");
if (fileStore == null) throw new IllegalArgumentException("fileStore is null");
if (appSettings == null) throw new IllegalArgumentException("appSettings is null");
this.menuManagerService = menuManagerService;
this.molgenisUi = molgenisUi;
this.fileStore = fileStore;
this.appSettings = appSettings;
}
/**
 * Renders the menu-manager view: a pre-order traversal of all sub-menus (MENU items only),
 * the available plugins sorted by id, and the UI model itself.
 */
@RequestMapping(method = GET)
public String init(Model model)
{
// Pre-order traversal of the menu tree; only MENU-typed children are descended into,
// leaf (plugin) items are excluded from the "menus" list.
List<MolgenisUiMenuItem> menus = new TreeTraverser<MolgenisUiMenuItem>()
{
@Override
public Iterable<MolgenisUiMenuItem> children(MolgenisUiMenuItem root)
{
if (root.getType() == MolgenisUiMenuItemType.MENU)
{
MolgenisUiMenu menu = (MolgenisUiMenu) root;
return Iterables.filter(menu.getItems(), new Predicate<MolgenisUiMenuItem>()
{
@Override
public boolean apply(MolgenisUiMenuItem molgenisUiMenuItem)
{
return molgenisUiMenuItem.getType() == MolgenisUiMenuItemType.MENU;
}
});
}
else return Collections.emptyList();
}
}.preOrderTraversal(molgenisUi.getMenu()).toList();
List<MolgenisPlugin> plugins = Lists.newArrayList(menuManagerService.getPlugins());
// Sort plugins alphabetically by id for a stable display order
Collections.sort(plugins, new Comparator<MolgenisPlugin>()
{
@Override
public int compare(MolgenisPlugin molgenisPlugin1, MolgenisPlugin molgenisPlugin2)
{
return molgenisPlugin1.getId().compareTo(molgenisPlugin2.getId());
}
});
model.addAttribute("menus", menus);
model.addAttribute("plugins", plugins);
model.addAttribute("molgenis_ui", molgenisUi);
return "view-menumanager";
}
/** Persists an edited menu; responds 200 with no body. */
@RequestMapping(value = "/save", method = POST)
@ResponseStatus(OK)
public void save(@Valid @RequestBody Menu molgenisMenu)
{
menuManagerService.saveMenu(molgenisMenu);
}
// NOTE(review): this endpoint only prints the file name to stdout — it looks like a
// debug/stub leftover and performs no upload; presumably superseded by /upload-logo
// below. TODO confirm it is unused and remove it, or implement it properly.
@RequestMapping(value = "logo", method = POST)
public void uploadLogo(@Valid @RequestBody File newLogo)
{
System.out.println(newLogo.getName());
}
/**
 * Upload a new molgenis logo
 *
 * @param part
 * @param model
 * @return model
 * @throws IOException
 */
@PreAuthorize("hasAnyRole('ROLE_SU')")
@RequestMapping(value = "/upload-logo", method = RequestMethod.POST)
public String uploadLogo(@RequestParam("logo") Part part, Model model) throws IOException
{
// Reject non-image uploads (content type is client-supplied, so this is best-effort)
String contentType = part.getContentType();
if ((contentType == null) || !contentType.startsWith("image"))
{
model.addAttribute("errorMessage", ERRORMESSAGE_LOGO);
}
else
{
// Create the logo subdir in the filestore if it doesn't exist
File logoDir = new File(fileStore.getStorageDir() + "/logo");
if (!logoDir.exists())
{
if (!logoDir.mkdir())
{
throw new IOException("Unable to create directory [" + logoDir.getAbsolutePath() + "]");
}
}
// Store the logo in the logo dir of the filestore
// NOTE(review): the file name comes straight from the client upload; endpoint is
// superuser-only, but consider sanitising it against path traversal — TODO confirm
// FileUploadUtils.getOriginalFileName strips directory components.
String file = "/logo/" + FileUploadUtils.getOriginalFileName(part);
fileStore.store(part.getInputStream(), file);
// Set logo
appSettings.setLogoNavBarHref(file);
}
// Re-render the menu-manager view (with errorMessage set on failure)
return init(model);
}
}
| lgpl-3.0 |
kexianda/pig | src/org/apache/pig/impl/PigContext.java | 31304 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.pig.impl;
import com.google.common.base.Splitter;
import com.google.common.collect.Lists;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.io.StringWriter;
import java.lang.reflect.Constructor;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.StringTokenizer;
import java.util.Vector;
import org.antlr.runtime.tree.Tree;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.log4j.Level;
import org.apache.pig.ExecType;
import org.apache.pig.ExecTypeProvider;
import org.apache.pig.FuncSpec;
import org.apache.pig.PigException;
import org.apache.pig.backend.datastorage.DataStorage;
import org.apache.pig.backend.datastorage.DataStorageException;
import org.apache.pig.backend.datastorage.ElementDescriptor;
import org.apache.pig.backend.executionengine.ExecException;
import org.apache.pig.backend.executionengine.ExecutionEngine;
import org.apache.pig.backend.hadoop.datastorage.ConfigurationUtil;
import org.apache.pig.backend.hadoop.datastorage.HDataStorage;
import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.MRConfiguration;
import org.apache.pig.impl.streaming.ExecutableManager;
import org.apache.pig.impl.streaming.StreamingCommand;
import org.apache.pig.tools.parameters.ParameterSubstitutionPreprocessor;
import org.apache.pig.tools.parameters.ParseException;
import org.apache.pig.tools.parameters.PreprocessorContext;
public class PigContext implements Serializable {
private static final long serialVersionUID = 1L;
private static final Log log = LogFactory.getLog(PigContext.class);
private static Object instantiationLock = new Object();
public static final String JOB_NAME = "jobName";
public static final String JOB_NAME_PREFIX= "PigLatin";
public static final String JOB_PRIORITY = "jobPriority";
public static final String PIG_CMD_ARGS_REMAINDERS = "pig.cmd.args.remainders";
/* NOTE: we only serialize some of the stuff
*
*(to make it smaller given that it's not all needed on the Hadoop side,
* and also because some is not serializable e.g. the Configuration)
*/
//one of: local, mapreduce, or a custom exec type for a different execution engine
private ExecType execType;
//main file system that jobs and shell commands access
transient private DataStorage dfs;
// local file system, where jar files, etc. reside
transient private DataStorage lfs;
// handle to the back-end
transient private ExecutionEngine executionEngine;
private Properties properties;
/*
* Resources for the job (jars, scripting udf files, cached macro abstract syntax trees)
*/
// Jar files that are global to the whole Pig script, includes
// 1. registered jars
// 2. Jars defined in -Dpig.additional.jars
transient public List<URL> extraJars = new LinkedList<URL>();
// original paths each extra jar came from
// used to avoid redundant imports
transient private Map<URL, String> extraJarOriginalPaths = new HashMap<URL, String>();
// jars needed for scripting udfs - jython.jar etc
transient public List<String> scriptJars = new ArrayList<String>(2);
// jars that are predeployed to the cluster and thus should not be merged in at all (even subsets).
transient public Vector<String> predeployedJars = new Vector<String>(2);
// script files that are needed to run a job
@Deprecated
public List<String> scriptFiles = new ArrayList<String>();
private Map<String,File> aliasedScriptFiles = new LinkedHashMap<String,File>();
// record of scripting udf file path --> which namespace it was registered to
// used to avoid redundant imports
transient public Map<String, String> scriptingUDFs;
// cache of macro file path --> abstract syntax tree
// used to avoid re-parsing the same macros over and over
transient public Map<String, Tree> macros;
/**
* a table mapping function names to function specs.
*/
private Map<String, FuncSpec> definedFunctions = new HashMap<String, FuncSpec>();
/**
* a table mapping names to streaming commands.
*/
private Map<String, StreamingCommand> definedCommands =
new HashMap<String, StreamingCommand>();
private static ThreadLocal<ArrayList<String>> packageImportList =
new ThreadLocal<ArrayList<String>>();
private static ThreadLocal<Map<String,Class<?>>> classCache =
new ThreadLocal<Map<String,Class<?>>>();
private Properties log4jProperties = new Properties();
private Level defaultLogLevel = Level.INFO;
public int defaultParallel = -1;
// Says, whether we're processing an explain right now. Explain
// might skip some check in the logical plan validation (file
// existence checks, etc).
public boolean inExplain = false;
// Where we are processing a dump schema right now
public boolean inDumpSchema = false;
// whether we're processing an ILLUSTRATE right now.
public boolean inIllustrator = false;
private String last_alias = null;
// List of paths skipped for automatic shipping
List<String> skippedShipPaths = new ArrayList<String>();
//@StaticDataCleanup
/**
 * Clears the per-thread package import list, e.g. between test runs.
 */
public static void staticDataCleanup() {
    // Use remove() rather than set(null): set(null) keeps a stale entry for
    // this thread in the ThreadLocalMap, which can pin memory (and the
    // classloader) in long-lived thread pools. get() after remove() still
    // returns null, so observable behavior is unchanged.
    packageImportList.remove();
}
/**
* extends URLClassLoader to allow adding to classpath as new jars
* are registered.
*/
private static class ContextClassLoader extends URLClassLoader {

    /** Starts with an empty classpath; jars are appended later via {@link #addURL}. */
    public ContextClassLoader(ClassLoader classLoader) {
        this(new URL[0], classLoader);
    }

    public ContextClassLoader(URL[] urls, ClassLoader classLoader) {
        super(urls, classLoader);
    }

    /**
     * Widens the protected {@code URLClassLoader.addURL} to public so that
     * newly registered jars can be added to the running classpath.
     */
    @Override
    public void addURL(URL url) {
        super.addURL(url);
    }
};
static private ContextClassLoader classloader = new ContextClassLoader(PigContext.class.getClassLoader());
/*
* Parameter-related fields
* params: list of strings "key=value" from the command line
* paramFiles: list of paths to parameter files
* preprocessorContext: manages parsing params and paramFiles into an actual map
*/
private List<String> params;
private List<String> paramFiles;
transient private PreprocessorContext preprocessorContext = new PreprocessorContext(50);
public List<String> getParams() {
return params;
}
public void setParams(List<String> params) {
this.params = params;
}
public List<String> getParamFiles() {
return paramFiles;
}
public void setParamFiles(List<String> paramFiles) {
this.paramFiles = paramFiles;
}
public PreprocessorContext getPreprocessorContext() {
return preprocessorContext;
}
/**
 * Returns the fully-resolved parameter map, lazily loading {@code params} and
 * {@code paramFiles} into the preprocessor context on first use.
 *
 * @return map of parameter name to substituted value
 * @throws IOException if parameter parsing fails
 */
public Map<String, String> getParamVal() throws IOException {
    Map<String, String> paramVal = preprocessorContext.getParamVal();
    if (paramVal == null) {
        try {
            preprocessorContext.loadParamVal(params, paramFiles);
        } catch (ParseException e) {
            // Chain the original exception as the cause so the parse location
            // and stack trace survive (previously only the message did).
            throw new IOException(e.getMessage(), e);
        }
        return preprocessorContext.getParamVal();
    } else {
        return paramVal;
    }
}
public PigContext() {
this(ExecType.MAPREDUCE, new Properties());
}
public PigContext(Configuration conf) throws PigException {
this(ConfigurationUtil.toProperties(conf));
}
public PigContext(Properties properties) throws PigException {
this(ExecTypeProvider.selectExecType(properties), properties);
}
public PigContext(ExecType execType, Configuration conf) {
this(execType, ConfigurationUtil.toProperties(conf));
}
/**
 * Main constructor: all other constructors funnel here.
 *
 * @param execType execution mode (local, mapreduce, or a custom engine type)
 * @param properties configuration backing this context
 */
public PigContext(ExecType execType, Properties properties){
    this.execType = execType;
    this.properties = properties;
    // Record the chosen exec type back into the properties so components
    // configured purely from Properties can see it.
    this.properties.setProperty("exectype", this.execType.name());
    this.executionEngine = execType.getExecutionEngine(this);
    // Add the default paths to be skipped for auto-shipping of commands:
    // standard system binary directories that exist on every node anyway.
    skippedShipPaths.add("/bin");
    skippedShipPaths.add("/usr/bin");
    skippedShipPaths.add("/usr/local/bin");
    skippedShipPaths.add("/sbin");
    skippedShipPaths.add("/usr/sbin");
    skippedShipPaths.add("/usr/local/sbin");
    macros = new HashMap<String, Tree>();
    scriptingUDFs = new HashMap<String, String>();
    init();
}
/**
* This method is created with the aim of unifying the Grunt and PigServer
* approaches, so all common initializations can go in here.
*/
private void init() {
if (properties.get("udf.import.list")!=null)
PigContext.initializeImportList((String)properties.get("udf.import.list"));
}
/**
 * Merges a colon-separated list of package prefixes (from the
 * {@code udf.import.list} property) into the per-thread import list.
 * Each prefix is normalized to end with a '.'.
 */
public static void initializeImportList(String importListCommandLineProperties)
{
    // Insert the configured packages starting at index 1 so the implicit ""
    // entry (fully-qualified names) keeps the highest lookup priority.
    ArrayList<String> imports = getPackageImportList();
    int insertAt = 1;
    for (String segment : importListCommandLineProperties.split(":")) {
        if (segment.isEmpty()) {
            continue; // preserve StringTokenizer semantics: skip empty segments
        }
        String normalized = segment.endsWith(".") ? segment : segment + ".";
        imports.add(insertAt++, normalized);
    }
}
/**
 * Initializes the backend and wires up both filesystems: {@code dfs} comes
 * from the execution engine, {@code lfs} is always the local file:/// store.
 *
 * @throws ExecException if the execution engine fails to initialize
 */
public void connect() throws ExecException {
    executionEngine.init();
    dfs = executionEngine.getDataStorage();
    lfs = new HDataStorage(URI.create("file:///"), properties);
}
public void setJobtrackerLocation(String newLocation) {
executionEngine.setProperty(MRConfiguration.JOB_TRACKER, newLocation);
}
/**
 * Calls {@code addScriptFile(path, path)}, registering the file under its own
 * path so that a given path is added to the job jar at most once.
 * @param path local fs path of the script file (also used as the jar-entry name)
 */
public void addScriptFile(String path) {
    // The local path doubles as the entry name inside the job jar.
    addScriptFile(path, path);
}
/**
 * Registers a script file that must ship in the job jar under a name that may
 * differ from its local filesystem path. A leading '/' and any ':' characters
 * are stripped from the jar-entry name. A null path is silently ignored.
 *
 * @param name entry name inside the jar
 * @param path location of the file on the local fs
 */
public void addScriptFile(String name, String path) {
    if (path == null) {
        return;
    }
    String jarEntry = name.replaceFirst("^/", "").replaceAll(":", "");
    aliasedScriptFiles.put(jarEntry, new File(path));
}
/** Tracks a scripting-support jar (e.g. jython.jar) at most once; null is ignored. */
public void addScriptJar(String path) {
    if (path == null || scriptJars.contains(path)) {
        return;
    }
    scriptJars.add(path);
}
/**
 * Registers a jar given its filesystem path; the path is normalized to an
 * absolute {@code file:} URL and delegated to {@link #addJar(URL, String)}.
 *
 * @param path local filesystem path of the jar; null is ignored
 * @throws MalformedURLException if the path cannot be converted to a URL
 */
public void addJar(String path) throws MalformedURLException {
    if (path == null) {
        return;
    }
    addJar(new File(path).toURI().toURL(), path);
}
/**
 * Registers a jar URL for inclusion in the job jar, remembering the original
 * path it came from so duplicate registrations can be detected.
 */
public void addJar(URL resource, String originalPath) throws MalformedURLException{
    // Deduplicated by URL: a jar already in extraJars is not registered twice.
    if (resource != null && !extraJars.contains(resource)) {
        extraJars.add(resource);
        extraJarOriginalPaths.put(resource, originalPath);
        // Side effects beyond this instance: the shared static classloader
        // gains the jar, and the current thread's context classloader is
        // repointed at it so subsequent Class.forName calls can see it.
        classloader.addURL(resource);
        Thread.currentThread().setContextClassLoader(PigContext.classloader);
    }
}
/**
 * Returns true if a jar registered via {@link #addJar} originated from the
 * given path. extraJars and extraJarOriginalPaths are maintained in lockstep
 * by addJar, so every registered URL has an original-path entry.
 */
public boolean hasJar(String path) {
    for (URL registered : extraJars) {
        String originalPath = extraJarOriginalPaths.get(registered);
        if (originalPath.equals(path)) {
            return true;
        }
    }
    return false;
}
/**
* Adds the specified path to the predeployed jars list. These jars will
* never be included in generated job jar.
* <p>
* This can be called for jars that are pre-installed on the Hadoop
* cluster to reduce the size of the job jar.
*/
public void markJarAsPredeployed(String path) {
    // Record each predeployed jar at most once; null paths are ignored.
    if (path == null) {
        return;
    }
    if (!predeployedJars.contains(path)) {
        predeployedJars.add(path);
    }
}
public String doParamSubstitution(InputStream in,
List<String> params,
List<String> paramFiles)
throws IOException {
return doParamSubstitution(new BufferedReader(new InputStreamReader(in)),
params, paramFiles);
}
public String doParamSubstitution(BufferedReader reader,
List<String> params,
List<String> paramFiles)
throws IOException {
this.params = params;
this.paramFiles = paramFiles;
return doParamSubstitution(reader);
}
/**
 * Runs parameter substitution over the given script using the previously
 * supplied {@code params}/{@code paramFiles} and returns the substituted text.
 *
 * @param reader source of the unsubstituted script
 * @return the script text with parameters substituted
 * @throws IOException if parameter parsing or substitution fails
 */
public String doParamSubstitution(BufferedReader reader) throws IOException {
    try {
        preprocessorContext.setPigContext(this);
        preprocessorContext.loadParamVal(params, paramFiles);
        ParameterSubstitutionPreprocessor psp
            = new ParameterSubstitutionPreprocessor(preprocessorContext);
        StringWriter writer = new StringWriter();
        psp.genSubstitutedFile(reader, writer);
        return writer.toString();
    } catch (ParseException e) {
        // Log here, then rethrow with the cause attached for the caller.
        log.error(e.getLocalizedMessage());
        throw new IOException(e);
    }
}
public BufferedReader doParamSubstitutionOutputToFile(BufferedReader reader,
String outputFilePath,
List<String> params,
List<String> paramFiles)
throws IOException {
this.params = params;
this.paramFiles = paramFiles;
return doParamSubstitutionOutputToFile(reader, outputFilePath);
}
/**
 * Runs parameter substitution on {@code reader}, writing the substituted
 * script to {@code outputFilePath}, and returns a reader over that file.
 *
 * @param reader source of the unsubstituted script
 * @param outputFilePath path the substituted script is written to
 * @return a BufferedReader over the freshly written output file
 * @throws IOException on parse failure or if the output file cannot be written/read
 */
public BufferedReader doParamSubstitutionOutputToFile(BufferedReader reader, String outputFilePath)
        throws IOException {
    try {
        preprocessorContext.loadParamVal(params, paramFiles);
        ParameterSubstitutionPreprocessor psp
                = new ParameterSubstitutionPreprocessor(preprocessorContext);
        BufferedWriter writer = new BufferedWriter(new FileWriter(outputFilePath));
        psp.genSubstitutedFile(reader, writer);
        return new BufferedReader(new FileReader(outputFilePath));
    } catch (ParseException e) {
        log.error(e.getLocalizedMessage());
        throw new IOException(e);
    } catch (FileNotFoundException e) {
        // Chain the original exception instead of discarding it, so the real
        // failure (permissions, missing directory, ...) stays diagnosable.
        throw new IOException("Could not find file to substitute parameters for: " + outputFilePath, e);
    }
}
/**
* script files as name/file pairs to be added to the job jar
* @return name/file pairs
*/
public Map<String,File> getScriptFiles() {
return aliasedScriptFiles;
}
/**
 * Renames {@code oldName} to {@code newName} on the dfs, overwriting any
 * existing destination. A rename onto itself is a no-op.
 *
 * @throws IOException (as ExecException) if either path cannot be resolved
 */
public void rename(String oldName, String newName) throws IOException {
    if (oldName.equals(newName)) {
        return;
    }
    System.out.println("Renaming " + oldName + " to " + newName);
    ElementDescriptor dst = null;
    ElementDescriptor src = null;
    try {
        dst = dfs.asElement(newName);
        src = dfs.asElement(oldName);
    }
    catch (DataStorageException e) {
        // Choose the error code matching where the failure originated.
        byte errSrc = getErrorSource();
        int errCode = 0;
        switch(errSrc) {
        case PigException.REMOTE_ENVIRONMENT:
            errCode = 6005;
            break;
        case PigException.USER_ENVIRONMENT:
            errCode = 4005;
            break;
        default:
            errCode = 2038;
            break;
        }
        String msg = "Unable to rename " + oldName + " to " + newName;
        throw new ExecException(msg, errCode, errSrc, e);
    }
    // Overwrite semantics: delete an existing destination first.
    if (dst.exists()) {
        dst.delete();
    }
    src.rename(dst);
}
/**
 * Copies {@code src} (on the dfs) to {@code dst}.
 *
 * @param src source path on the dfs
 * @param dst destination path
 * @param localDst when true the destination resolves against the local fs,
 *                 otherwise against the dfs
 * @throws IOException (as ExecException) if either path cannot be resolved
 */
public void copy(String src, String dst, boolean localDst) throws IOException {
    DataStorage dstStorage = dfs;
    if (localDst) {
        dstStorage = lfs;
    }
    ElementDescriptor srcElement = null;
    ElementDescriptor dstElement = null;
    try {
        srcElement = dfs.asElement(src);
        dstElement = dstStorage.asElement(dst);
    }
    catch (DataStorageException e) {
        // Choose the error code matching where the failure originated.
        byte errSrc = getErrorSource();
        int errCode = 0;
        switch(errSrc) {
        case PigException.REMOTE_ENVIRONMENT:
            errCode = 6006;
            break;
        case PigException.USER_ENVIRONMENT:
            errCode = 4006;
            break;
        default:
            errCode = 2039;
            break;
        }
        String msg = "Unable to copy " + src + " to " + dst;
        throw new ExecException(msg, errCode, errSrc, e);
    }
    srcElement.copy(dstElement, this.properties, false);
}
public ExecutionEngine getExecutionEngine() {
return executionEngine;
}
public DataStorage getDfs() {
return dfs;
}
public DataStorage getLfs() {
return lfs;
}
public DataStorage getFs() {
return dfs;
}
/**
* Provides configuration information.
*
* @return - information about the configuration used to connect to
* execution engine
*/
public Properties getProperties() {
return this.properties;
}
/**
* @deprecated use {@link #getProperties()} instead
*/
@Deprecated
public Properties getConf() {
return getProperties();
}
public String getLastAlias() {
return this.last_alias;
}
public void setLastAlias(String value) {
this.last_alias = value;
}
/**
* Defines an alias for the given function spec. This
* is useful for functions that require arguments to the
* constructor.
*
* @param function - the new function alias to define.
* @param functionSpec - the FuncSpec object representing the name of
* the function class and any arguments to constructor.
*
*/
public void registerFunction(String function, FuncSpec functionSpec) {
    // A null spec acts as an "unregister" for the alias.
    if (functionSpec != null) {
        definedFunctions.put(function, functionSpec);
    } else {
        definedFunctions.remove(function);
    }
}
/**
* Defines an alias for the given streaming command.
*
* This is useful for complicated streaming command specs.
*
* @param alias - the new command alias to define.
* @param command - the command
*/
public void registerStreamCmd(String alias, StreamingCommand command) {
    // A null command acts as an "unregister" for the alias.
    if (command != null) {
        definedCommands.put(alias, command);
    } else {
        definedCommands.remove(alias);
    }
}
/**
* Returns the type of execution currently in effect.
*
* @return current execution type
*/
public ExecType getExecType() {
return execType;
}
/**
* Creates a Classloader based on the passed jarFile and any extra jar files.
*
* @param jarFile
* the jar file to be part of the newly created Classloader. This jar file plus any
* jars in the extraJars list will constitute the classpath.
* @return the new Classloader.
* @throws MalformedURLException
*/
public ClassLoader createCl(String jarFile) throws MalformedURLException {
    int len = extraJars.size();
    int passedJar = jarFile == null ? 0 : 1;
    URL urls[] = new URL[len + passedJar];
    if (jarFile != null) {
        // NOTE(review): "file:" + jarFile yields a relative file URL when
        // jarFile is relative, unlike addJar(String) which normalizes via
        // File.toURI().toURL(); confirm callers always pass usable paths.
        urls[0] = new URL("file:" + jarFile);
    }
    // Remaining slots hold the registered extra jars, shifted past the
    // optionally passed jar.
    for (int i = 0; i < extraJars.size(); i++) {
        urls[i + passedJar] = extraJars.get(i);
    }
    //return new URLClassLoader(urls, PigMapReduce.class.getClassLoader());
    return new ContextClassLoader(urls, PigContext.class.getClassLoader());
}
/** Lazily creates and returns the calling thread's class-resolution cache. */
private static Map<String,Class<?>> getClassCache() {
    Map<String,Class<?>> cache = classCache.get();
    if (cache != null) {
        return cache;
    }
    cache = new HashMap<String,Class<?>>();
    classCache.set(cache);
    return cache;
}
@SuppressWarnings("rawtypes")
/**
 * Resolves a (possibly unqualified) class name against the per-thread package
 * import list, caching successful lookups per thread.
 *
 * @param name simple or fully-qualified class name
 * @return the resolved Class
 * @throws IOException (as ExecException) if no import prefix resolves the name
 */
public static Class resolveClassName(String name) throws IOException{
    Map<String,Class<?>> cache = getClassCache();
    Class c = cache.get(name);
    if (c != null) {
        return c;
    }
    for(String prefix: getPackageImportList()) {
        try {
            // 'true' = initialize the class; loaded through the shared
            // PigContext classloader so registered jars are visible.
            c = Class.forName(prefix+name,true, PigContext.classloader);
            cache.put(name, c);
            return c;
        }
        catch (ClassNotFoundException e) {
            // do nothing: try the next import prefix
        }
        catch (UnsupportedClassVersionError e) {
            int errCode = 1069;
            String msg = "Problem resolving class version numbers for class " + name;
            throw new ExecException(msg, errCode, PigException.INPUT, e) ;
        }
    }
    // create ClassNotFoundException exception and attach to IOException
    // so that we don't need to buble interface changes throughout the code
    // NOTE(review): no ClassNotFoundException is actually attached below; the
    // comment above appears to predate the ExecException-based reporting.
    int errCode = 1070;
    String msg = "Could not resolve " + name + " using imports: " + packageImportList.get();
    throw new ExecException(msg, errCode, PigException.INPUT);
}
/**
* A common Pig pattern for initializing objects via system properties is to support passing
* something like this on the command line:
* <code>-Dpig.notification.listener=MyClass</code>
* <code>-Dpig.notification.listener.arg=myConstructorStringArg</code>
*
* This method will properly initialize the class with the args, if they exist.
* @param conf
* @param classParamKey the property used to identify the class
* @param argParamKey the property used to identify the class args
* @param clazz The class that is expected
* @return <T> T
*/
public static <T> T instantiateObjectFromParams(Configuration conf,
                                                String classParamKey,
                                                String argParamKey,
                                                Class<T> clazz) throws ExecException {
    String className = conf.get(classParamKey);
    // Absent class property means the feature is unconfigured: return null.
    if (className != null) {
        FuncSpec fs;
        // An optional single constructor argument may be supplied via argParamKey.
        if (conf.get(argParamKey) != null) {
            fs = new FuncSpec(className, conf.get(argParamKey));
        } else {
            fs = new FuncSpec(className);
        }
        try {
            return clazz.cast(PigContext.instantiateFuncFromSpec(fs));
        }
        catch (ClassCastException e) {
            throw new ExecException("The class defined by " + classParamKey +
                    " in conf is not of type " + clazz.getName(), e);
        }
    } else {
        return null;
    }
}
@SuppressWarnings({ "unchecked", "rawtypes" })
/**
 * Instantiates the class named by the FuncSpec, passing its ctor args as
 * Strings. Falls back to a single String[] constructor if no per-argument
 * String constructor exists.
 *
 * @param funcSpec class name plus optional constructor arguments
 * @return the new instance
 * @throws RuntimeException if the class cannot be resolved or instantiated
 */
public static Object instantiateFuncFromSpec(FuncSpec funcSpec) {
    Object ret;
    String className = funcSpec.getClassName();
    String[] args = funcSpec.getCtorArgs();
    Class objClass = null;
    try {
        objClass = resolveClassName(className);
    }
    catch (IOException ioe) {
        throw new RuntimeException("Cannot instantiate: " + className, ioe);
    }
    // OptionBuilder is not thread-safe and HBaseStorage, elephantbird SequenceFileConfig, etc
    // use them in constructor. This leads to NoSuchMethodException, UnrecognizedOptionException etc
    // when processor, inputs and outputs are initialized in parallel in Tez
    synchronized (instantiationLock) {
        try {
            // Do normal instantiation: an all-String constructor matching the
            // number of ctor args, or the no-arg constructor.
            if (args != null && args.length > 0) {
                Class paramTypes[] = new Class[args.length];
                for (int i = 0; i < paramTypes.length; i++) {
                    paramTypes[i] = String.class;
                }
                Constructor c = objClass.getConstructor(paramTypes);
                ret = c.newInstance((Object[]) args);
            } else {
                ret = objClass.newInstance();
            }
        }
        catch (NoSuchMethodException nme) {
            // Second chance. Try with a single String[] var-arg constructor.
            try {
                Constructor c = objClass.getConstructor(String[].class);
                Object[] wrappedArgs = new Object[1];
                wrappedArgs[0] = args;
                ret = c.newInstance(wrappedArgs);
            }
            catch (Throwable e) {
                // bad luck
                throw instantiationFailure(className, args, e);
            }
        }
        catch (Throwable e) {
            // bad luck
            throw instantiationFailure(className, args, e);
        }
        return ret;
    }
}

/** Builds the uniform "could not instantiate" failure (was duplicated inline). */
private static RuntimeException instantiationFailure(String className, String[] args, Throwable cause) {
    StringBuilder sb = new StringBuilder();
    sb.append("could not instantiate '");
    sb.append(className);
    sb.append("' with arguments '");
    sb.append(Arrays.toString(args));
    sb.append("'");
    return new RuntimeException(sb.toString(), cause);
}
public static Object instantiateFuncFromSpec(String funcSpec) {
return instantiateFuncFromSpec(new FuncSpec(funcSpec));
}
@SuppressWarnings("rawtypes")
/**
 * Resolves the Class behind an alias: a user-defined function alias wins;
 * otherwise the alias itself is treated as a func spec.
 */
public Class getClassForAlias(String alias) throws IOException{
    FuncSpec funcSpec = (definedFunctions == null) ? null : definedFunctions.get(alias);
    String className = (funcSpec != null)
            ? funcSpec.getClassName()
            : FuncSpec.getClassNameFromSpec(alias);
    return resolveClassName(className);
}
public Object instantiateFuncFromAlias(String alias) throws IOException {
FuncSpec funcSpec;
if (definedFunctions != null && (funcSpec = definedFunctions.get(alias))!=null)
return instantiateFuncFromSpec(funcSpec);
else
return instantiateFuncFromSpec(alias);
}
/**
* Get the {@link StreamingCommand} for the given alias.
*
* @param alias the alias for the <code>StreamingCommand</code>
* @return <code>StreamingCommand</code> for the alias
*/
public StreamingCommand getCommandForAlias(String alias) {
return definedCommands.get(alias);
}
public void setExecType(ExecType execType) {
this.execType = execType;
}
/**
* Create a new {@link ExecutableManager} depending on the ExecType.
*
* @return a new {@link ExecutableManager} depending on the ExecType
* @throws ExecException
*/
public ExecutableManager createExecutableManager() throws ExecException {
if (executionEngine != null) {
return executionEngine.getExecutableManager();
}
return null;
}
public FuncSpec getFuncSpecFromAlias(String alias) {
FuncSpec funcSpec;
if (definedFunctions != null && (funcSpec = definedFunctions.get(alias))!=null)
return funcSpec;
else
return null;
}
/**
* Add a path to be skipped while automatically shipping binaries for
* streaming.
*
* @param path path to be skipped
*/
public void addPathToSkip(String path) {
skippedShipPaths.add(path);
}
/**
* Get paths which are to skipped while automatically shipping binaries for
* streaming.
*
* @return paths which are to skipped while automatically shipping binaries
* for streaming
*/
public List<String> getPathsToSkip() {
return skippedShipPaths;
}
/**
* Check the execution mode and return the appropriate error source
*
* @return error source
*/
public byte getErrorSource() {
return PigException.REMOTE_ENVIRONMENT;
}
/**
 * Returns the calling thread's package import list, lazily seeding it with
 * the default search prefixes ("" first, so fully-qualified names win).
 */
public static ArrayList<String> getPackageImportList() {
    ArrayList<String> list = packageImportList.get();
    if (list == null) {
        list = new ArrayList<String>();
        list.add("");
        list.add("java.lang.");
        list.add("org.apache.pig.builtin.");
        list.add("org.apache.pig.impl.builtin.");
        packageImportList.set(list);
    }
    return list;
}
public static void setPackageImportList(ArrayList<String> list) {
packageImportList.set(list);
}
public void setLog4jProperties(Properties p)
{
log4jProperties = p;
}
public Properties getLog4jProperties()
{
return log4jProperties;
}
public Level getDefaultLogLevel()
{
return defaultLogLevel;
}
public void setDefaultLogLevel(Level l)
{
defaultLogLevel = l;
}
public int getDefaultParallel() {
return defaultParallel;
}
public static ClassLoader getClassLoader() {
return classloader;
}
public static void setClassLoader(ClassLoader cl) {
    // Wrap foreign classloaders so addURL stays available for jar registration.
    classloader = (cl instanceof ContextClassLoader)
            ? (ContextClassLoader) cl
            : new ContextClassLoader(cl);
}
}
| apache-2.0 |
ekirkilevics/iBatis | src/test/java/org/apache/ibatis/submitted/manyanno/AnnoPost.java | 1371 | /*
* Copyright 2009-2012 The MyBatis Team
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ibatis.submitted.manyanno;
import java.util.List;
/** Simple post bean used by the many-annotation mapping test. */
public class AnnoPost {

    // Primary key.
    private int id;
    // Post title.
    private String subject;
    // Post content.
    private String body;
    // Tags attached to this post.
    private List<AnnoPostTag> tags;

    public int getId() {
        return this.id;
    }

    public void setId(int id) {
        this.id = id;
    }

    public String getSubject() {
        return this.subject;
    }

    public void setSubject(String subject) {
        this.subject = subject;
    }

    public String getBody() {
        return this.body;
    }

    public void setBody(String body) {
        this.body = body;
    }

    public List<AnnoPostTag> getTags() {
        return this.tags;
    }

    public void setTags(List<AnnoPostTag> tags) {
        this.tags = tags;
    }
}
| apache-2.0 |
SnappyDataInc/snappy-store | gemfirexd/core/src/main/java/com/pivotal/gemfirexd/internal/iapi/jdbc/WrapperEngineBLOB.java | 3086 | /*
* Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/
package com.pivotal.gemfirexd.internal.iapi.jdbc;
import java.io.InputStream;
import java.io.OutputStream;
import java.sql.Blob;
import java.sql.SQLException;
import java.sql.SQLFeatureNotSupportedException;
/**
* Encapsulate {@link Blob} for {@link EngineLOB} interface.
*
* @author swale
* @since 7.0
*/
public class WrapperEngineBLOB extends WrapperEngineLOB implements Blob {

    // The wrapped Blob; every Blob method below is a straight delegation to it.
    private final Blob blob;

    public WrapperEngineBLOB(final EngineConnection localConn, final Blob blob) {
        super(localConn);
        this.blob = blob;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public long length() throws SQLException {
        return this.blob.length();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public byte[] getBytes(long pos, int length) throws SQLException {
        return this.blob.getBytes(pos, length);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public InputStream getBinaryStream() throws SQLException {
        return this.blob.getBinaryStream();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public long position(byte[] pattern, long start) throws SQLException {
        return this.blob.position(pattern, start);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public long position(Blob pattern, long start) throws SQLException {
        return this.blob.position(pattern, start);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public int setBytes(long pos, byte[] bytes) throws SQLException {
        return this.blob.setBytes(pos, bytes);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public int setBytes(long pos, byte[] bytes, int offset, int len)
            throws SQLException {
        return this.blob.setBytes(pos, bytes, offset, len);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public OutputStream setBinaryStream(long pos) throws SQLException {
        return this.blob.setBinaryStream(pos);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void truncate(long len) throws SQLException {
        this.blob.truncate(len);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public InputStream getBinaryStream(long pos, long length) throws SQLException {
        return this.blob.getBinaryStream(pos, length);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void free() throws SQLException {
        // Release the wrapper's own resources first, then the delegate's.
        super.free();
        try {
            this.blob.free();
        } catch (SQLFeatureNotSupportedException ignore) {
            // ignore if free is not supported by the Blob implementation
        }
    }
}
| apache-2.0 |
rancherio/cattle | modules/model/src/main/java/io/cattle/platform/core/dao/impl/AccountDaoImpl.java | 4262 | package io.cattle.platform.core.dao.impl;
import io.cattle.platform.core.constants.AccountConstants;
import io.cattle.platform.core.constants.CommonStatesConstants;
import io.cattle.platform.core.constants.CredentialConstants;
import io.cattle.platform.core.constants.ProjectConstants;
import io.cattle.platform.core.constants.ServiceConstants;
import io.cattle.platform.core.dao.AccountDao;
import io.cattle.platform.core.model.Account;
import io.cattle.platform.core.model.Credential;
import io.cattle.platform.db.jooq.dao.impl.AbstractJooqDao;
import io.cattle.platform.object.ObjectManager;
import io.cattle.platform.util.type.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.jooq.Condition;
import org.jooq.Configuration;
import org.jooq.impl.DSL;
import java.util.Arrays;
import java.util.List;
import java.util.Set;
import static io.cattle.platform.core.model.tables.AccountTable.*;
import static io.cattle.platform.core.model.tables.CredentialTable.*;
import static io.cattle.platform.core.model.tables.ProjectMemberTable.*;
public class AccountDaoImpl extends AbstractJooqDao implements AccountDao {

    // States in which an account is considered usable/active (see isActiveAccount).
    private static final Set<String> GOOD_STATES = CollectionUtils.set(
            CommonStatesConstants.CREATING,
            CommonStatesConstants.ACTIVATING,
            CommonStatesConstants.ACTIVE,
            CommonStatesConstants.UPDATING,
            ServiceConstants.STATE_UPGRADING);

    ObjectManager objectManager;

    public AccountDaoImpl(Configuration configuration, ObjectManager objectManager) {
        super(configuration);
        this.objectManager = objectManager;
    }

    /**
     * Lists the account's non-removed credentials of the given kind
     * (defaults to API keys), optionally restricted to ACTIVE ones.
     */
    @Override
    public List<? extends Credential> getApiKeys(Account account, String kind, boolean active) {
        if (kind == null) {
            kind = CredentialConstants.KIND_API_KEY;
        }
        // trueCondition() is a no-op filter when inactive keys are allowed too.
        Condition stateCondition = DSL.trueCondition();
        if ( active ) {
            stateCondition = CREDENTIAL.STATE.eq(CommonStatesConstants.ACTIVE);
        }
        return create().selectFrom(CREDENTIAL)
                .where(CREDENTIAL.ACCOUNT_ID.eq(account.getId())
                        .and(CREDENTIAL.REMOVED.isNull())
                        .and(stateCondition)
                        .and(CREDENTIAL.KIND.eq(kind)))
                .fetch();
    }

    @Override
    public Account findByUuid(String uuid) {
        return create()
                .selectFrom(ACCOUNT)
                .where(ACCOUNT.UUID.eq(uuid))
                .fetchOne();
    }

    /**
     * Removes project membership rows tied to this account: rows matching its
     * external identity (for non-project accounts with one) plus all rows of
     * the project itself.
     */
    @Override
    public void deleteProjectMemberEntries(Account account) {
        if (!ProjectConstants.TYPE.equalsIgnoreCase(account.getKind())
                && StringUtils.isNotBlank(account.getExternalId())
                && StringUtils.isNotBlank(account.getExternalIdType())){
            create().delete(PROJECT_MEMBER)
                    .where(PROJECT_MEMBER.EXTERNAL_ID.eq(account.getExternalId())
                            .and(PROJECT_MEMBER.EXTERNAL_ID_TYPE.eq(account.getExternalIdType())))
                    .execute();
        }
        create().delete(PROJECT_MEMBER)
                .where(PROJECT_MEMBER.PROJECT_ID.eq(account.getId()))
                .execute();
    }

    /**
     * Returns the lowest-id active admin account other than the given one,
     * or null if none exists.
     */
    @Override
    public Account getAdminAccountExclude(long accountId) {
        return create()
                .selectFrom(ACCOUNT)
                .where(ACCOUNT.STATE.in(getAccountActiveStates())
                        .and(ACCOUNT.KIND.eq(AccountConstants.ADMIN_KIND))
                        .and(ACCOUNT.ID.ne(accountId)))
                .orderBy(ACCOUNT.ID.asc()).limit(1).fetchOne();
    }

    /** Returns the account by id, excluding purged or removed accounts; null if absent. */
    @Override
    public Account getAccountById(Long id) {
        return create()
                .selectFrom(ACCOUNT)
                .where(
                        ACCOUNT.ID.eq(id)
                                .and(ACCOUNT.STATE.ne(AccountConstants.STATE_PURGED))
                                .and(ACCOUNT.REMOVED.isNull())
                ).fetchOne();
    }

    @Override
    public boolean isActiveAccount(Account account) {
        return GOOD_STATES.contains(account.getState());
    }

    // NOTE: intentionally narrower than GOOD_STATES — only fully ACTIVE or
    // UPGRADING states count for the query filters that use this list.
    @Override
    public List<String> getAccountActiveStates() {
        return Arrays.asList(CommonStatesConstants.ACTIVE, ServiceConstants.STATE_UPGRADING);
    }
}
| apache-2.0 |
apache/kylin | core-metadata/src/main/java/org/apache/kylin/measure/raw/RawSerializer.java | 3684 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kylin.measure.raw;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
import org.apache.kylin.common.util.ByteArray;
import org.apache.kylin.common.util.BytesUtil;
import org.apache.kylin.metadata.datatype.DataType;
import org.apache.kylin.metadata.datatype.DataTypeSerializer;
@SuppressWarnings("unused")
public class RawSerializer extends DataTypeSerializer<List<ByteArray>> {

    // One dictionary-id value needs 1~4 bytes and its length header another
    // 1~4 bytes, so this buffer can hold roughly 1024/(2 to 8) * 1024 values.
    //FIXME to config this and RowConstants.ROWVALUE_BUFFER_SIZE in properties file
    public static final int RAW_BUFFER_SIZE = 1024 * 1024;//1M

    /**
     * The data type is not used by this serializer; the parameter exists only
     * to satisfy the common serializer constructor signature.
     * NOTE(review): presumably instantiated reflectively by the serializer
     * registry — confirm against DataTypeSerializer.
     */
    public RawSerializer(DataType dataType) {
    }

    /**
     * Returns a lazily-created per-thread scratch list.
     * NOTE(review): 'current' appears to be a ThreadLocal inherited from
     * DataTypeSerializer — confirm against the base class.
     */
    private List<ByteArray> current() {
        List<ByteArray> l = (List<ByteArray>) current.get();
        if (l == null) {
            l = new ArrayList<ByteArray>();
            current.set(l);
        }
        return l;
    }

    /**
     * Computes the total serialized length, in bytes, of the value starting at
     * the buffer's current position — without consuming it: the position is
     * restored before returning.
     *
     * Assumes BytesUtil.peekByteArrayLength returns the combined size of an
     * element's vint length header plus its payload while leaving the position
     * unchanged (TODO confirm against BytesUtil); each loop iteration then
     * skips exactly one element and adds its full size to the running total,
     * which was seeded with the size of the leading element-count vint.
     */
    @Override
    public int peekLength(ByteBuffer in) {
        int mark = in.position();
        int len = 0;
        if (in.hasRemaining()) {
            int size = BytesUtil.readVInt(in);
            len = in.position() - mark;
            for (int i = 0; i < size; i++) {
                int length = BytesUtil.peekByteArrayLength(in);
                in.position(in.position() + length);
                len += length;
            }
        }
        // Restore the caller's position — this is a peek, not a read.
        in.position(mark);
        return len;
    }

    // Upper bound on the serialized size of any single value.
    @Override
    public int maxLength() {
        return RAW_BUFFER_SIZE;
    }

    // Rough per-value storage estimate used for sizing; not an exact figure.
    @Override
    public int getStorageBytesEstimate() {
        return 8;
    }

    /**
     * Writes the list as an element count (vint) followed by each element as a
     * length-prefixed byte array. A null list is encoded as a count of 0 and
     * will deserialize to an empty list rather than null.
     *
     * @throws RuntimeException when the output buffer cannot hold the next
     *         element (the buffer size is intentionally capped; see
     *         RAW_BUFFER_SIZE)
     */
    @Override
    public void serialize(List<ByteArray> values, ByteBuffer out) {
        if (values == null) {
            BytesUtil.writeVInt(0, out);
        } else {
            BytesUtil.writeVInt(values.size(), out);
            for (ByteArray array : values) {
                // NOTE(review): the first clause is implied by the second for
                // non-empty arrays, and the check does not account for the
                // element's vint header bytes — a near-full buffer could still
                // overflow inside writeByteArray. Confirm before relying on it.
                if (!out.hasRemaining() || out.remaining() < array.length()) {
                    throw new RuntimeException("BufferOverflow! Please use one higher cardinality column for dimension column when build RAW cube!");
                }
                BytesUtil.writeByteArray(BytesUtil.subarray(array.array(), array.offset(), array.offset() + array.length()), out);
            }
        }
    }

    /**
     * Reads a value previously written by {@link #serialize}. Zero-length
     * elements are silently dropped from the returned list.
     *
     * @throws RuntimeException if the decoded element count is negative,
     *         which indicates corrupt input
     */
    @Override
    public List<ByteArray> deserialize(ByteBuffer in) {
        List<ByteArray> values = new ArrayList<>();
        int size = BytesUtil.readVInt(in);
        if (size >= 0) {
            for (int i = 0; i < size; i++) {
                ByteArray ba = new ByteArray(BytesUtil.readByteArray(in));
                if (ba.length() != 0) {
                    values.add(ba);
                }
            }
        } else {
            throw new RuntimeException("Read error data size:" + size);
        }
        return values;
    }
}
| apache-2.0 |
jamesward/play-morphia | src/play/modules/morphia/utils/SilentLogrFactory.java | 337 | package play.modules.morphia.utils;
import com.google.code.morphia.logging.Logr;
import com.google.code.morphia.logging.LogrFactory;
import com.google.code.morphia.logging.SilentLogger;
/**
 * Morphia {@link LogrFactory} that suppresses all Morphia logging by handing
 * out {@link SilentLogger} instances for every requesting class.
 */
public class SilentLogrFactory implements LogrFactory {
    /**
     * @param c the class requesting a logger (ignored)
     * @return a new no-op logger
     */
    @Override
    public Logr get(Class<?> c) {
        return new SilentLogger();
    }
}
| apache-2.0 |
0359xiaodong/Android-Week-View | sample/src/main/java/com/alamkanak/weekview/sample/MainActivity.java | 10041 | package com.alamkanak.weekview.sample;
import android.app.Activity;
import android.graphics.RectF;
import android.os.Bundle;
import android.util.TypedValue;
import android.view.Menu;
import android.view.MenuItem;
import android.widget.Toast;
import com.alamkanak.weekview.WeekView;
import com.alamkanak.weekview.WeekViewEvent;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.List;
/**
 * Sample activity demonstrating the WeekView widget: switches between day /
 * three-day / week layouts and supplies sample events for each displayed month.
 * <p>
 * Created by Raquib-ul-Alam Kanak on 7/21/2014.
 * Website: http://april-shower.com
 */
public class MainActivity extends Activity implements WeekView.MonthChangeListener,
        WeekView.EventClickListener, WeekView.EventLongPressListener {

    // View-mode identifiers stored in mWeekViewType.
    private static final int TYPE_DAY_VIEW = 1;
    private static final int TYPE_THREE_DAY_VIEW = 2;
    private static final int TYPE_WEEK_VIEW = 3;

    /** Currently selected view mode; defaults to the three-day view. */
    private int mWeekViewType = TYPE_THREE_DAY_VIEW;
    private WeekView mWeekView;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);

        // Get a reference for the week view in the layout.
        mWeekView = (WeekView) findViewById(R.id.weekView);

        // Show a toast message about the touched event.
        mWeekView.setOnEventClickListener(this);

        // The week view has infinite scrolling horizontally. We have to provide the
        // events of a month every time the month changes on the week view.
        mWeekView.setMonthChangeListener(this);

        // Set long press listener for events.
        mWeekView.setEventLongPressListener(this);
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        getMenuInflater().inflate(R.menu.main, menu);
        return true;
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        switch (item.getItemId()) {
            case R.id.action_today:
                mWeekView.goToToday();
                return true;
            case R.id.action_day_view:
                switchWeekViewType(item, TYPE_DAY_VIEW, 1, 8, 12);
                return true;
            case R.id.action_three_day_view:
                switchWeekViewType(item, TYPE_THREE_DAY_VIEW, 3, 8, 12);
                return true;
            case R.id.action_week_view:
                switchWeekViewType(item, TYPE_WEEK_VIEW, 7, 2, 10);
                return true;
        }
        return super.onOptionsItemSelected(item);
    }

    /**
     * Switches the week view to the requested mode and adjusts the dimensions
     * that best fit that layout. Does nothing when the mode is already active.
     *
     * @param item                the selected menu item (its checked state is toggled)
     * @param weekViewType        the TYPE_* constant to switch to
     * @param numberOfVisibleDays how many day columns to show side by side
     * @param columnGapDp         gap between day columns, in dp
     * @param textSizeSp          header and event text size, in sp
     */
    private void switchWeekViewType(MenuItem item, int weekViewType, int numberOfVisibleDays, int columnGapDp, int textSizeSp) {
        if (mWeekViewType == weekViewType) {
            return;
        }
        item.setChecked(!item.isChecked());
        mWeekViewType = weekViewType;
        mWeekView.setNumberOfVisibleDays(numberOfVisibleDays);

        // Lets change some dimensions to best fit the view.
        mWeekView.setColumnGap((int) TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP, columnGapDp, getResources().getDisplayMetrics()));
        mWeekView.setTextSize((int) TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_SP, textSizeSp, getResources().getDisplayMetrics()));
        mWeekView.setEventTextSize((int) TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_SP, textSizeSp, getResources().getDisplayMetrics()));
    }

    /**
     * Supplies the sample events for the requested month.
     *
     * @param newYear  the year being displayed
     * @param newMonth the 1-based month being displayed
     * @return the events to render for that month
     */
    @Override
    public List<WeekViewEvent> onMonthChange(int newYear, int newMonth) {
        List<WeekViewEvent> events = new ArrayList<WeekViewEvent>();

        // 03:00 - 04:00 today.
        Calendar startTime = buildStartTime(newYear, newMonth, 3, 0);
        Calendar endTime = (Calendar) startTime.clone();
        endTime.add(Calendar.HOUR, 1);
        endTime.set(Calendar.MONTH, newMonth - 1);
        addEvent(events, 1, startTime, endTime, R.color.event_color_01);

        // 03:30 - 04:30 today.
        startTime = buildStartTime(newYear, newMonth, 3, 30);
        endTime = (Calendar) startTime.clone();
        endTime.set(Calendar.HOUR_OF_DAY, 4);
        endTime.set(Calendar.MINUTE, 30);
        endTime.set(Calendar.MONTH, newMonth - 1);
        addEvent(events, 10, startTime, endTime, R.color.event_color_02);

        // 04:20 - 05:00 today.
        // NOTE(review): id 10 is reused by the previous event (and id 5 below is
        // reused too) — confirm WeekView tolerates duplicate event ids.
        startTime = buildStartTime(newYear, newMonth, 4, 20);
        endTime = (Calendar) startTime.clone();
        endTime.set(Calendar.HOUR_OF_DAY, 5);
        endTime.set(Calendar.MINUTE, 0);
        addEvent(events, 10, startTime, endTime, R.color.event_color_03);

        // 05:30 - 07:30 today.
        startTime = buildStartTime(newYear, newMonth, 5, 30);
        endTime = (Calendar) startTime.clone();
        endTime.add(Calendar.HOUR_OF_DAY, 2);
        endTime.set(Calendar.MONTH, newMonth - 1);
        addEvent(events, 2, startTime, endTime, R.color.event_color_02);

        // 05:00 - 08:00 tomorrow.
        startTime = buildStartTime(newYear, newMonth, 5, 0);
        startTime.add(Calendar.DATE, 1);
        endTime = (Calendar) startTime.clone();
        endTime.add(Calendar.HOUR_OF_DAY, 3);
        endTime.set(Calendar.MONTH, newMonth - 1);
        addEvent(events, 3, startTime, endTime, R.color.event_color_03);

        // 03:00 - 06:00 on the 15th of the displayed month.
        startTime = buildStartTime(newYear, newMonth, 3, 0);
        startTime.set(Calendar.DAY_OF_MONTH, 15);
        endTime = (Calendar) startTime.clone();
        endTime.add(Calendar.HOUR_OF_DAY, 3);
        addEvent(events, 4, startTime, endTime, R.color.event_color_04);

        // 03:00 - 06:00 on the 1st of the displayed month.
        startTime = buildStartTime(newYear, newMonth, 3, 0);
        startTime.set(Calendar.DAY_OF_MONTH, 1);
        endTime = (Calendar) startTime.clone();
        endTime.add(Calendar.HOUR_OF_DAY, 3);
        addEvent(events, 5, startTime, endTime, R.color.event_color_01);

        // 15:00 - 18:00 on the last day of the displayed month.
        // Fix: compute getActualMaximum AFTER the month/year are set so it is the
        // last day of the *displayed* month; the original sample computed it from
        // the device's current month, which could roll the date over (e.g. day 31
        // applied to February).
        startTime = buildStartTime(newYear, newMonth, 15, 0);
        startTime.set(Calendar.DAY_OF_MONTH, startTime.getActualMaximum(Calendar.DAY_OF_MONTH));
        endTime = (Calendar) startTime.clone();
        endTime.add(Calendar.HOUR_OF_DAY, 3);
        addEvent(events, 5, startTime, endTime, R.color.event_color_02);

        return events;
    }

    /**
     * Builds a Calendar pinned to the given month/year at the given time of day.
     * The day of month is left at "today" unless the caller overrides it.
     */
    private Calendar buildStartTime(int newYear, int newMonth, int hourOfDay, int minute) {
        Calendar startTime = Calendar.getInstance();
        startTime.set(Calendar.HOUR_OF_DAY, hourOfDay);
        startTime.set(Calendar.MINUTE, minute);
        startTime.set(Calendar.MONTH, newMonth - 1);
        startTime.set(Calendar.YEAR, newYear);
        return startTime;
    }

    /** Creates a titled, colored event for the given interval and appends it to the list. */
    private void addEvent(List<WeekViewEvent> events, long id, Calendar startTime, Calendar endTime, int colorRes) {
        WeekViewEvent event = new WeekViewEvent(id, getEventTitle(startTime), startTime, endTime);
        event.setColor(getResources().getColor(colorRes));
        events.add(event);
    }

    /** Formats a human-readable title like "Event of 03:00 7/21" for the given start time. */
    private String getEventTitle(Calendar time) {
        return String.format("Event of %02d:%02d %s/%d", time.get(Calendar.HOUR_OF_DAY), time.get(Calendar.MINUTE), time.get(Calendar.MONTH)+1, time.get(Calendar.DAY_OF_MONTH));
    }

    @Override
    public void onEventClick(WeekViewEvent event, RectF eventRect) {
        Toast.makeText(MainActivity.this, "Clicked " + event.getName(), Toast.LENGTH_SHORT).show();
    }

    @Override
    public void onEventLongPress(WeekViewEvent event, RectF eventRect) {
        Toast.makeText(MainActivity.this, "Long pressed event: " + event.getName(), Toast.LENGTH_SHORT).show();
    }
}
| apache-2.0 |
daimajia/ExoMedia | library/src/main/java/com/devbrackets/android/exomedia/DefaultControls.java | 20361 | /*
* Copyright (C) 2015 Brian Wernick
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.devbrackets.android.exomedia;
import android.annotation.TargetApi;
import android.content.Context;
import android.graphics.drawable.Drawable;
import android.os.Build;
import android.os.Handler;
import android.support.annotation.DrawableRes;
import android.support.annotation.Nullable;
import android.support.v4.graphics.drawable.DrawableCompat;
import android.util.AttributeSet;
import android.view.View;
import android.view.animation.AlphaAnimation;
import android.widget.ImageButton;
import android.widget.ProgressBar;
import android.widget.RelativeLayout;
import android.widget.SeekBar;
import android.widget.TextView;
import com.devbrackets.android.exomedia.event.EMMediaNextEvent;
import com.devbrackets.android.exomedia.event.EMMediaPlayPauseEvent;
import com.devbrackets.android.exomedia.event.EMMediaPreviousEvent;
import com.devbrackets.android.exomedia.event.EMMediaProgressEvent;
import com.devbrackets.android.exomedia.event.EMVideoViewControlVisibilityEvent;
import com.devbrackets.android.exomedia.listener.EMVideoViewControlsCallback;
import com.devbrackets.android.exomedia.util.EMEventBus;
import com.devbrackets.android.exomedia.util.TimeFormatUtil;
/**
 * This is a simple abstraction for the EMVideoView to have a single "View" to add
 * or remove for the Default Video Controls.
 */
public class DefaultControls extends RelativeLayout {
    /** Duration (ms) of the fade animation used when showing or hiding the controls. */
    private static final long CONTROL_VISIBILITY_ANIMATION_LENGTH = 300;

    /**
     * Callbacks for seek-bar interaction. Returning {@code true} from either
     * method marks the event as handled and suppresses the default behavior.
     */
    public interface SeekCallbacks {
        boolean onSeekStarted();
        boolean onSeekEnded(int seekTime);
    }

    // Child views inflated from exomedia_video_controls_overlay in setup().
    private TextView currentTime;
    private TextView endTime;
    private SeekBar seekBar;
    private ImageButton playPauseButton;
    private ImageButton previousButton;
    private ImageButton nextButton;
    private ProgressBar loadingProgress;

    // Optional host-app hooks for button clicks and visibility changes.
    private EMVideoViewControlsCallback callback;
    private boolean busPostHandlesEvent = false;

    // Tinted default drawables built in updateButtonDrawables().
    private Drawable defaultPlayDrawable;
    private Drawable defaultPauseDrawable;
    private Drawable defaultPreviousDrawable;
    private Drawable defaultNextDrawable;

    //Remember, 0 is not a valid resourceId
    private int playResourceId = 0;
    private int pauseResourceId = 0;

    // Whether the previous/next buttons are removed (INVISIBLE) from the layout.
    private boolean previousButtonRemoved = true;
    private boolean nextButtonRemoved = true;

    // True while playback is paused only because the user is dragging the seek bar.
    private boolean pausedForSeek = false;

    // Delay (ms) before auto-hiding; negative means "never auto-hide".
    private long hideDelay = -1;

    // True while the user is dragging the seek bar; blocks auto-hide and
    // progress-event updates (see hideDelayed and setProgressEvent).
    private boolean userInteracting = false;
    private boolean isVisible = true;
    private boolean canViewHide = true;
    private Handler visibilityHandler = new Handler();

    private EMVideoView videoView;

    @Nullable
    private EMEventBus bus;

    private SeekCallbacks seekCallbacks;

    public DefaultControls(Context context) {
        super(context);
        setup(context);
    }

    public DefaultControls(Context context, AttributeSet attrs) {
        super(context, attrs);
        setup(context);
    }

    @TargetApi(Build.VERSION_CODES.HONEYCOMB)
    public DefaultControls(Context context, AttributeSet attrs, int defStyleAttr) {
        super(context, attrs, defStyleAttr);
        setup(context);
    }

    @TargetApi(Build.VERSION_CODES.LOLLIPOP)
    public DefaultControls(Context context, AttributeSet attrs, int defStyleAttr, int defStyleRes) {
        super(context, attrs, defStyleAttr, defStyleRes);
        setup(context);
    }

    /**
     * Sets the bus to use for dispatching Events that correspond to the callbacks
     * listed in {@link com.devbrackets.android.exomedia.listener.EMVideoViewControlsCallback}
     *
     * @param bus The EventBus to dispatch events on
     */
    public void setBus(@Nullable EMEventBus bus) {
        this.bus = bus;
    }

    /**
     * Sets the parent view to use for determining playback length, position,
     * state, etc. This should only be called once, during the setup process
     *
     * @param EMVideoView The Parent view to these controls
     */
    public void setVideoView(EMVideoView EMVideoView) {
        this.videoView = EMVideoView;
    }

    /**
     * Specifies the callback to use for informing the host app of click events
     *
     * @param callback The callback
     */
    public void setVideoViewControlsCallback(EMVideoViewControlsCallback callback) {
        this.callback = callback;
    }

    /**
     * Used to update the control view visibilities to indicate that the video
     * is loading. This is different from using {@link #loadCompleted()} and {@link #restartLoading()}
     * because those update additional information.
     *
     * @param isLoading True if loading progress should be shown
     */
    public void setLoading(boolean isLoading) {
        // Previous/next stay INVISIBLE (not VISIBLE) if they have been removed,
        // even when loading finishes.
        playPauseButton.setVisibility(isLoading ? View.GONE : View.VISIBLE);
        previousButton.setVisibility(isLoading || previousButtonRemoved ? View.INVISIBLE : View.VISIBLE);
        nextButton.setVisibility(isLoading || nextButtonRemoved ? View.INVISIBLE : View.VISIBLE);
        loadingProgress.setVisibility(isLoading ? View.VISIBLE : View.INVISIBLE);
    }

    /**
     * Used to inform the controls to finalize their setup. This
     * means replacing the loading animation with the PlayPause button
     */
    public void loadCompleted() {
        setLoading(false);
        updatePlayPauseImage(videoView.isPlaying());
    }

    /**
     * Used to inform the controls to return to the loading stage.
     * This is the opposite of {@link #loadCompleted()}
     */
    public void restartLoading() {
        setLoading(true);
    }

    /**
     * Sets the callbacks to inform of progress seek events
     *
     * @param callbacks The callbacks to inform
     */
    public void setSeekCallbacks(@Nullable SeekCallbacks callbacks) {
        this.seekCallbacks = callbacks;
    }

    /**
     * Sets the current video position, updating the seek bar
     * and the current time field
     *
     * @param position The position in milliseconds
     */
    public void setPosition(long position) {
        currentTime.setText(TimeFormatUtil.formatMs(position));
        seekBar.setProgress((int) position);
    }

    /**
     * Sets the video duration in Milliseconds to display
     * at the end of the progress bar
     *
     * @param duration The duration of the video in milliseconds
     */
    public void setDuration(long duration) {
        // Only update when the duration actually changed to avoid redundant layout work.
        if (duration != seekBar.getMax()) {
            endTime.setText(TimeFormatUtil.formatMs(duration));
            seekBar.setMax((int) duration);
        }
    }

    /**
     * Performs the progress update on the current time field,
     * and the seek bar
     *
     * @param event The most recent progress
     */
    public void setProgressEvent(EMMediaProgressEvent event) {
        // Skip updates while the user is dragging so the thumb doesn't fight the drag.
        if (!userInteracting) {
            seekBar.setSecondaryProgress((int) (seekBar.getMax() * event.getBufferPercentFloat()));
            seekBar.setProgress((int) event.getPosition());
            currentTime.setText(TimeFormatUtil.formatMs(event.getPosition()));
        }
    }

    /**
     * Sets the resource id's to use for the PlayPause button.
     *
     * @param playResourceId The resourceId or 0
     * @param pauseResourceId The resourceId or 0
     */
    public void setPlayPauseImages(@DrawableRes int playResourceId, @DrawableRes int pauseResourceId) {
        this.playResourceId = playResourceId;
        this.pauseResourceId = pauseResourceId;
        updatePlayPauseImage(videoView != null && videoView.isPlaying());
    }

    /**
     * Sets the state list drawable resource id to use for the Previous button.
     *
     * @param resourceId The resourceId or 0
     */
    public void setPreviousImageResource(@DrawableRes int resourceId) {
        if (resourceId != 0) {
            previousButton.setImageResource(resourceId);
        } else {
            previousButton.setImageDrawable(defaultPreviousDrawable);
        }
    }

    /**
     * Sets the state list drawable resource id to use for the Next button.
     *
     * @param resourceId The resourceId or 0
     */
    public void setNextImageResource(@DrawableRes int resourceId) {
        if (resourceId != 0) {
            nextButton.setImageResource(resourceId);
        } else {
            nextButton.setImageDrawable(defaultNextDrawable);
        }
    }

    /**
     * Makes sure the playPause button represents the correct playback state
     *
     * @param isPlaying If the video is currently playing
     */
    public void updatePlayPauseImage(boolean isPlaying) {
        // Custom images (non-zero resource ids) take precedence over the tinted defaults.
        if (isPlaying) {
            if (pauseResourceId != 0) {
                playPauseButton.setImageResource(pauseResourceId);
            } else {
                playPauseButton.setImageDrawable(defaultPauseDrawable);
            }
        } else {
            if (playResourceId != 0) {
                playPauseButton.setImageResource(playResourceId);
            } else {
                playPauseButton.setImageDrawable(defaultPlayDrawable);
            }
        }
    }

    /**
     * Sets the button state for the Previous button. This will just
     * change the images specified with {@link #setPreviousImageResource(int)},
     * or use the defaults if they haven't been set, and block any click events.
     * <p>
     * This method will NOT re-add buttons that have previously been removed with
     * {@link #setPreviousButtonRemoved(boolean)}.
     *
     * @param enabled If the Previous button is enabled [default: false]
     */
    public void setPreviousButtonEnabled(boolean enabled) {
        previousButton.setEnabled(enabled);
    }

    /**
     * Sets the button state for the Next button. This will just
     * change the images specified with {@link #setNextImageResource(int)},
     * or use the defaults if they haven't been set, and block any click events.
     * <p>
     * This method will NOT re-add buttons that have previously been removed with
     * {@link #setNextButtonRemoved(boolean)}.
     *
     * @param enabled If the Next button is enabled [default: false]
     */
    public void setNextButtonEnabled(boolean enabled) {
        nextButton.setEnabled(enabled);
    }

    /**
     * Adds or removes the Previous button. This will change the visibility
     * of the button, if you want to change the enabled/disabled images see {@link #setPreviousButtonEnabled(boolean)}
     *
     * @param removed If the Previous button should be removed [default: true]
     */
    public void setPreviousButtonRemoved(boolean removed) {
        previousButton.setVisibility(removed ? View.INVISIBLE : View.VISIBLE);
        previousButtonRemoved = removed;
    }

    /**
     * Adds or removes the Next button. This will change the visibility
     * of the button, if you want to change the enabled/disabled images see {@link #setNextButtonEnabled(boolean)}
     *
     * @param removed If the Next button should be removed [default: true]
     */
    public void setNextButtonRemoved(boolean removed) {
        nextButton.setVisibility(removed ? View.INVISIBLE : View.VISIBLE);
        nextButtonRemoved = removed;
    }

    /**
     * Immediately starts the animation to show the controls
     */
    public void show() {
        //Makes sure we don't have a hide animation scheduled
        visibilityHandler.removeCallbacksAndMessages(null);
        clearAnimation();

        animateVisibility(true);
    }

    /**
     * After the specified delay the view will be hidden. If the user is interacting
     * with the controls then we wait until after they are done to start the delay.
     *
     * @param delay The delay in milliseconds to wait to start the hide animation
     */
    public void hideDelayed(long delay) {
        // Remember the delay so onStopTrackingTouch can re-schedule the hide
        // after a seek completes.
        hideDelay = delay;

        if (delay < 0 || !canViewHide) {
            return;
        }

        //If the user is interacting with controls we don't want to start the delayed hide yet
        if (userInteracting) {
            return;
        }

        visibilityHandler.postDelayed(new Runnable() {
            @Override
            public void run() {
                animateVisibility(false);
            }
        }, delay);
    }

    /**
     * Sets whether this control can be hidden.
     *
     * @param canHide If this control can be hidden [default: true]
     */
    public void setCanHide(boolean canHide) {
        canViewHide = canHide;
    }

    /**
     * Sets whether the control functionality should treat the button clicks
     * as handled when a bus event is posted. This is to make Bus events
     * act like the callbacks set with {@link #setVideoViewControlsCallback(EMVideoViewControlsCallback)}
     *
     * @param finish True if the Bus events should act as handling the button clicks
     */
    public void setFinishOnBusEvents(boolean finish) {
        busPostHandlesEvent = finish;
    }

    /**
     * Updates the drawables used for the buttons to AppCompatTintDrawables
     */
    private void updateButtonDrawables() {
        defaultPlayDrawable = DrawableCompat.wrap(getDrawable(R.drawable.exomedia_ic_play_arrow_white));
        DrawableCompat.setTintList(defaultPlayDrawable, getResources().getColorStateList(R.color.exomedia_default_controls_button_selector));

        defaultPauseDrawable = DrawableCompat.wrap(getDrawable(R.drawable.exomedia_ic_pause_white));
        DrawableCompat.setTintList(defaultPauseDrawable, getResources().getColorStateList(R.color.exomedia_default_controls_button_selector));
        playPauseButton.setImageDrawable(defaultPlayDrawable);

        defaultPreviousDrawable = DrawableCompat.wrap(getDrawable(R.drawable.exomedia_ic_skip_previous_white));
        DrawableCompat.setTintList(defaultPreviousDrawable, getResources().getColorStateList(R.color.exomedia_default_controls_button_selector));
        previousButton.setImageDrawable(defaultPreviousDrawable);

        defaultNextDrawable = DrawableCompat.wrap(getDrawable(R.drawable.exomedia_ic_skip_next_white));
        DrawableCompat.setTintList(defaultNextDrawable, getResources().getColorStateList(R.color.exomedia_default_controls_button_selector));
        nextButton.setImageDrawable(defaultNextDrawable);
    }

    // Resolves a drawable using the theme-aware overload where available (API 21+).
    private Drawable getDrawable(@DrawableRes int resourceId) {
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
            return getResources().getDrawable(resourceId, getContext().getTheme());
        }

        //noinspection deprecation - depreciated in API 22
        return getResources().getDrawable(resourceId);
    }

    /**
     * Performs the functionality when the PlayPause button is clicked. This
     * includes invoking the callback method if it is enabled, posting the bus
     * event, and toggling the video playback.
     */
    private void onPlayPauseClick() {
        // The callback, then the bus (when busPostHandlesEvent is set), may each
        // consume the click before the default toggle runs.
        if (callback != null && callback.onPlayPauseClicked()) {
            return;
        }

        if (bus != null) {
            bus.post(new EMMediaPlayPauseEvent());

            if (busPostHandlesEvent) {
                return;
            }
        }

        //toggles the playback
        boolean playing = videoView.isPlaying();
        if (playing) {
            videoView.pause();
        } else {
            videoView.start();
        }
    }

    // Click handler for the Previous button: callback first, then bus.
    // NOTE(review): unlike onPlayPauseClick, busPostHandlesEvent is irrelevant
    // here since there is no default action to suppress.
    private void onPreviousClick() {
        if (callback != null && callback.onPreviousClicked()) {
            return;
        }

        if (bus != null) {
            bus.post(new EMMediaPreviousEvent());
        }
    }

    // Click handler for the Next button: callback first, then bus.
    private void onNextClick() {
        if (callback != null && callback.onNextClicked()) {
            return;
        }

        if (bus != null) {
            bus.post(new EMMediaNextEvent());
        }
    }

    // Inflates the control layout, wires up the child views and listeners, and
    // applies the tinted default drawables. Called from every constructor.
    private void setup(Context context) {
        View.inflate(context, R.layout.exomedia_video_controls_overlay, this);

        currentTime = (TextView) findViewById(R.id.exomedia_controls_current_time);
        endTime = (TextView) findViewById(R.id.exomedia_controls_end_time);
        seekBar = (SeekBar) findViewById(R.id.exomedia_controls_video_seek);
        playPauseButton = (ImageButton) findViewById(R.id.exomedia_controls_play_pause_btn);
        previousButton = (ImageButton) findViewById(R.id.exomedia_controls_previous_btn);
        nextButton = (ImageButton) findViewById(R.id.exomedia_controls_next_btn);
        loadingProgress = (ProgressBar) findViewById(R.id.exomedia_controls_video_loading);

        seekBar.setOnSeekBarChangeListener(new SeekBarChanged());

        playPauseButton.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                onPlayPauseClick();
            }
        });

        previousButton.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                onPreviousClick();
            }
        });

        nextButton.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                onNextClick();
            }
        });

        updateButtonDrawables();
    }

    /**
     * Performs the functionality to inform the callback and post bus events
     * that the DefaultControls visibility has changed
     */
    private void onVisibilityChanged() {
        // The bus event is only posted when the callback did not handle the change.
        boolean handled = false;
        if (callback != null) {
            if (isVisible) {
                handled = callback.onControlsShown();
            } else {
                handled = callback.onControlsHidden();
            }
        }

        if (!handled && bus != null) {
            bus.post(new EMVideoViewControlVisibilityEvent(isVisible));
        }
    }

    /**
     * Performs the control visibility animation for showing or hiding
     * this view
     *
     * @param toVisible True if the view should be visible at the end of the animation
     */
    private void animateVisibility(boolean toVisible) {
        if (isVisible == toVisible) {
            return;
        }

        float startAlpha = toVisible ? 0 : 1;
        float endAlpha = toVisible ? 1 : 0;

        // setFillAfter keeps the final alpha applied after the animation ends;
        // the View's visibility flag itself is never changed.
        AlphaAnimation animation = new AlphaAnimation(startAlpha, endAlpha);
        animation.setDuration(CONTROL_VISIBILITY_ANIMATION_LENGTH);
        animation.setFillAfter(true);
        startAnimation(animation);

        isVisible = toVisible;
        onVisibilityChanged();
    }

    /**
     * Listens to the seek bar change events and correctly handles the changes
     */
    private class SeekBarChanged implements SeekBar.OnSeekBarChangeListener {
        // Last user-chosen progress; applied to the video in onStopTrackingTouch.
        private int seekToTime;

        @Override
        public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
            if (!fromUser) {
                return;
            }

            seekToTime = progress;

            // A SeekCallbacks consumer may take over; otherwise preview the time.
            if (seekCallbacks != null && seekCallbacks.onSeekStarted()) {
                return;
            }

            if (currentTime != null) {
                currentTime.setText(TimeFormatUtil.formatMs(progress));
            }
        }

        @Override
        public void onStartTrackingTouch(SeekBar seekBar) {
            userInteracting = true;

            // Pause during the drag and remember to resume afterwards.
            if (videoView.isPlaying()) {
                pausedForSeek = true;
                videoView.pause();
            }

            //Make sure to keep the controls visible during seek
            show();
        }

        @Override
        public void onStopTrackingTouch(SeekBar seekBar) {
            userInteracting = false;
            if (seekCallbacks != null && seekCallbacks.onSeekEnded(seekToTime)) {
                return;
            }

            videoView.seekTo(seekToTime);

            if (pausedForSeek) {
                pausedForSeek = false;
                videoView.start();
                // Re-schedule the auto-hide that was blocked while interacting.
                hideDelayed(hideDelay);
            }
        }
    }
}
littleJava/mybatis-maven-generator | src/main/java/org/mybatis/generator/config/xml/ParserErrorHandler.java | 2588 | /*
* Copyright 2005 The Apache Software Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mybatis.generator.config.xml;
import static org.mybatis.generator.internal.util.messages.Messages.getString;
import java.util.List;
import org.xml.sax.ErrorHandler;
import org.xml.sax.SAXException;
import org.xml.sax.SAXParseException;
/**
 * SAX {@link ErrorHandler} that records parser problems into caller-supplied
 * lists instead of aborting, so configuration parsing can report every issue
 * found in a single pass.
 *
 * @author Jeff Butler
 */
public class ParserErrorHandler implements ErrorHandler {

    /** Destination for non-fatal parser warnings. */
    private final List<String> warnings;

    /** Destination for recoverable and fatal parser errors. */
    private final List<String> errors;

    /**
     * Creates a handler that appends formatted messages to the given lists.
     *
     * @param warnings
     *            the list that collects warning messages
     * @param errors
     *            the list that collects error messages
     */
    public ParserErrorHandler(List<String> warnings, List<String> errors) {
        super();
        this.warnings = warnings;
        this.errors = errors;
    }

    public void warning(SAXParseException exception) throws SAXException {
        warnings.add(format("Warning.7", exception)); //$NON-NLS-1$
    }

    public void error(SAXParseException exception) throws SAXException {
        errors.add(format("RuntimeError.4", exception)); //$NON-NLS-1$
    }

    public void fatalError(SAXParseException exception) throws SAXException {
        errors.add(format("RuntimeError.4", exception)); //$NON-NLS-1$
    }

    /** Builds the localized message containing the line number and parser detail. */
    private String format(String key, SAXParseException exception) {
        return getString(key,
                Integer.toString(exception.getLineNumber()),
                exception.getMessage());
    }
}
| apache-2.0 |
rvenkatesh25/bookkeeper | hedwig-server/src/test/java/org/apache/hedwig/server/subscriptions/StubSubscriptionManager.java | 2169 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hedwig.server.subscriptions;
import java.util.concurrent.ScheduledExecutorService;
import com.google.protobuf.ByteString;
import org.apache.hedwig.exceptions.PubSubException;
import org.apache.hedwig.protocol.PubSubProtocol.MessageSeqId;
import org.apache.hedwig.protocol.PubSubProtocol.SubscribeRequest;
import org.apache.hedwig.server.common.ServerConfiguration;
import org.apache.hedwig.server.persistence.PersistenceManager;
import org.apache.hedwig.server.topics.TopicManager;
import org.apache.hedwig.util.Callback;
/**
 * Test stub that behaves exactly like {@link InMemorySubscriptionManager}
 * unless {@link #setFail(boolean)} has armed it, in which case every
 * subscribe request is answered with a {@code ServiceDownException}.
 */
public class StubSubscriptionManager extends InMemorySubscriptionManager {
    // When true, serveSubscribeRequest short-circuits with a failure callback.
    boolean fail = false;

    public StubSubscriptionManager(TopicManager tm, PersistenceManager pm, ServerConfiguration conf, ScheduledExecutorService scheduler) {
        super(tm, pm, conf, scheduler);
    }

    public void setFail(boolean fail) {
        this.fail = fail;
    }

    @Override
    public void serveSubscribeRequest(ByteString topic, SubscribeRequest subRequest, MessageSeqId consumeSeqId,
                                      Callback<MessageSeqId> callback, Object ctx) {
        if (!fail) {
            // Not armed: delegate to the real in-memory implementation.
            super.serveSubscribeRequest(topic, subRequest, consumeSeqId, callback, ctx);
            return;
        }
        callback.operationFailed(ctx, new PubSubException.ServiceDownException("Asked to fail"));
    }
}
| apache-2.0 |
treasure-data/presto | presto-pinot/src/test/java/io/prestosql/pinot/TestBrokerQueries.java | 6120 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prestosql.pinot;
import com.google.common.collect.ImmutableList;
import io.prestosql.pinot.client.PinotClient;
import io.prestosql.pinot.client.PinotClient.BrokerResultRow;
import io.prestosql.pinot.client.PinotClient.ResultsIterator;
import io.prestosql.pinot.query.PinotQuery;
import io.prestosql.spi.Page;
import io.prestosql.spi.block.Block;
import org.apache.pinot.common.response.broker.BrokerResponseNative;
import org.apache.pinot.common.response.broker.ResultTable;
import org.apache.pinot.common.utils.DataSchema;
import org.apache.pinot.common.utils.DataSchema.ColumnDataType;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import java.util.List;
import static com.google.common.collect.Iterables.getOnlyElement;
import static io.prestosql.pinot.TestPinotSplitManager.createSessionWithNumSplits;
import static io.prestosql.pinot.client.PinotClient.fromResultTable;
import static io.prestosql.spi.type.BigintType.BIGINT;
import static io.prestosql.spi.type.VarcharType.VARCHAR;
import static org.apache.pinot.common.utils.DataSchema.ColumnDataType.LONG;
import static org.apache.pinot.common.utils.DataSchema.ColumnDataType.STRING;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertTrue;
/**
 * Unit tests for Pinot broker query execution and result-to-Page mapping.
 * <p>
 * A canned broker response containing a single (STRING, LONG, STRING) row is
 * built once in the static initializer and replayed by a {@code MockPinotClient}
 * for every query issued by these tests.
 */
public class TestBrokerQueries
        extends TestPinotQueryBase
{
    // Canned broker response shared (read-only) by all tests in this class.
    private static final BrokerResponseNative RESPONSE;
    // Schema of the canned result: col_1 STRING, col_2 LONG, col_3 STRING.
    private static final DataSchema DATA_SCHEMA;
    // Single data row backing the result table.
    private static final List<Object[]> TEST_DATA;
    private static final ResultTable RESULT_TABLE;

    // Client wired to replay RESPONSE; initialized in setup().
    private PinotClient testingPinotClient;

    static
    {
        DATA_SCHEMA = new DataSchema(new String[]{"col_1", "col_2", "col_3"}, new ColumnDataType[]{STRING, LONG, STRING});
        TEST_DATA = ImmutableList.of(new Object[] {"col_1_data", 2L, "col_3_data"});
        RESULT_TABLE = new ResultTable(DATA_SCHEMA, TEST_DATA);
        RESPONSE = new BrokerResponseNative();
        RESPONSE.setResultTable(RESULT_TABLE);
        RESPONSE.setNumServersQueried(1);
        RESPONSE.setNumServersResponded(1);
    }

    @BeforeClass
    public void setup()
            throws Exception
    {
        // The mock client serves the canned JSON response for every query.
        testingPinotClient = new MockPinotClient(pinotConfig, getTestingMetadata(), RESPONSE.toJsonString());
    }

    @Test
    public void testBrokerColumnMapping()
    {
        // Handles are listed in a different order than the result schema;
        // fromResultTable must map fields by column name, not by position.
        List<PinotColumnHandle> columnHandles = ImmutableList.<PinotColumnHandle>builder()
                .add(new PinotColumnHandle("col_3", VARCHAR))
                .add(new PinotColumnHandle("col_1", VARCHAR))
                .add(new PinotColumnHandle("col_2", BIGINT))
                .build();
        ResultsIterator resultIterator = fromResultTable(RESULT_TABLE, columnHandles);
        assertTrue(resultIterator.hasNext(), "resultIterator is empty");
        BrokerResultRow row = resultIterator.next();
        // Field order follows the handle list, not the schema order.
        assertEquals(row.getField(0), "col_3_data");
        assertEquals(row.getField(1), "col_1_data");
        assertEquals(row.getField(2), 2L);
    }

    @Test
    public void testBrokerColumnMappingWithSubset()
    {
        // Only two of the three schema columns are requested; the unrequested
        // column (col_2) must simply be skipped.
        List<PinotColumnHandle> columnHandles = ImmutableList.<PinotColumnHandle>builder()
                .add(new PinotColumnHandle("col_3", VARCHAR))
                .add(new PinotColumnHandle("col_1", VARCHAR))
                .build();
        ResultsIterator resultIterator = fromResultTable(RESULT_TABLE, columnHandles);
        assertTrue(resultIterator.hasNext(), "resultIterator is empty");
        BrokerResultRow row = resultIterator.next();
        assertEquals(row.getField(0), "col_3_data");
        assertEquals(row.getField(1), "col_1_data");
    }

    @Test
    public void testBrokerQuery()
    {
        // End-to-end: run a SELECT through the page source and verify each
        // block of the produced Page against the canned response row.
        List<PinotColumnHandle> columnHandles = ImmutableList.<PinotColumnHandle>builder()
                .add(new PinotColumnHandle("col_1", VARCHAR))
                .add(new PinotColumnHandle("col_2", BIGINT))
                .add(new PinotColumnHandle("col_3", VARCHAR))
                .build();
        PinotBrokerPageSource pageSource = new PinotBrokerPageSource(createSessionWithNumSplits(1, false, pinotConfig),
                new PinotQuery("test_table", "SELECT col_1, col_2, col_3 FROM test_table", 0),
                columnHandles,
                testingPinotClient);
        Page page = pageSource.getNextPage();
        assertEquals(page.getChannelCount(), columnHandles.size());
        assertEquals(page.getPositionCount(), RESPONSE.getResultTable().getRows().size());
        // VARCHAR columns are read back as slices, BIGINT as a long.
        Block block = page.getBlock(0);
        String value = block.getSlice(0, 0, block.getSliceLength(0)).toStringUtf8();
        assertEquals(value, getOnlyElement(RESPONSE.getResultTable().getRows())[0]);
        block = page.getBlock(1);
        assertEquals(block.getLong(0, 0), (long) getOnlyElement(RESPONSE.getResultTable().getRows())[1]);
        block = page.getBlock(2);
        value = block.getSlice(0, 0, block.getSliceLength(0)).toStringUtf8();
        assertEquals(value, getOnlyElement(RESPONSE.getResultTable().getRows())[2]);
    }

    @Test
    public void testCountStarBrokerQuery()
    {
        // COUNT(*) projects no columns: the Page must still carry the row
        // count but expose zero channels.
        PinotBrokerPageSource pageSource = new PinotBrokerPageSource(createSessionWithNumSplits(1, false, pinotConfig),
                new PinotQuery("test_table", "SELECT COUNT(*) FROM test_table", 0),
                ImmutableList.of(),
                testingPinotClient);
        Page page = pageSource.getNextPage();
        assertEquals(page.getPositionCount(), RESPONSE.getResultTable().getRows().size());
        assertEquals(page.getChannelCount(), 0);
    }
}
| apache-2.0 |
noslowerdna/kafka | streams/src/main/java/org/apache/kafka/streams/state/internals/KeyValueToTimestampedKeyValueByteStoreAdapter.java | 4463 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.streams.state.internals;
import org.apache.kafka.common.utils.Bytes;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.processor.ProcessorContext;
import org.apache.kafka.streams.processor.StateStore;
import org.apache.kafka.streams.state.KeyValueBytesStoreSupplier;
import org.apache.kafka.streams.state.KeyValueIterator;
import org.apache.kafka.streams.state.KeyValueStore;
import java.util.List;
import static org.apache.kafka.streams.state.TimestampedBytesStore.convertToTimestampedFormat;
import static org.apache.kafka.streams.state.internals.ValueAndTimestampDeserializer.rawValue;
/**
* This class is used to ensure backward compatibility at DSL level between
* {@link org.apache.kafka.streams.state.TimestampedKeyValueStore} and {@link KeyValueStore}.
* <p>
* If a user provides a supplier for plain {@code KeyValueStores} via
* {@link org.apache.kafka.streams.kstream.Materialized#as(KeyValueBytesStoreSupplier)} this adapter is used to
* translate between old a new {@code byte[]} format of the value.
*
* @see KeyValueToTimestampedKeyValueIteratorAdapter
*/
public class KeyValueToTimestampedKeyValueByteStoreAdapter implements KeyValueStore<Bytes, byte[]> {
final KeyValueStore<Bytes, byte[]> store;
KeyValueToTimestampedKeyValueByteStoreAdapter(final KeyValueStore<Bytes, byte[]> store) {
if (!store.persistent()) {
throw new IllegalArgumentException("Provided store must be a persistent store, but it is not.");
}
this.store = store;
}
@Override
public void put(final Bytes key,
final byte[] valueWithTimestamp) {
store.put(key, valueWithTimestamp == null ? null : rawValue(valueWithTimestamp));
}
@Override
public byte[] putIfAbsent(final Bytes key,
final byte[] valueWithTimestamp) {
return convertToTimestampedFormat(store.putIfAbsent(
key,
valueWithTimestamp == null ? null : rawValue(valueWithTimestamp)));
}
@Override
public void putAll(final List<KeyValue<Bytes, byte[]>> entries) {
for (final KeyValue<Bytes, byte[]> entry : entries) {
final byte[] valueWithTimestamp = entry.value;
store.put(entry.key, valueWithTimestamp == null ? null : rawValue(valueWithTimestamp));
}
}
@Override
public byte[] delete(final Bytes key) {
return convertToTimestampedFormat(store.delete(key));
}
@Override
public String name() {
return store.name();
}
@Override
public void init(final ProcessorContext context,
final StateStore root) {
store.init(context, root);
}
@Override
public void flush() {
store.flush();
}
@Override
public void close() {
store.close();
}
@Override
public boolean persistent() {
return true;
}
@Override
public boolean isOpen() {
return store.isOpen();
}
@Override
public byte[] get(final Bytes key) {
return convertToTimestampedFormat(store.get(key));
}
@Override
public KeyValueIterator<Bytes, byte[]> range(final Bytes from,
final Bytes to) {
return new KeyValueToTimestampedKeyValueIteratorAdapter<>(store.range(from, to));
}
@Override
public KeyValueIterator<Bytes, byte[]> all() {
return new KeyValueToTimestampedKeyValueIteratorAdapter<>(store.all());
}
@Override
public long approximateNumEntries() {
return store.approximateNumEntries();
}
} | apache-2.0 |
flofreud/aws-sdk-java | aws-java-sdk-cognitoidp/src/main/java/com/amazonaws/services/cognitoidp/model/transform/AddCustomAttributesResultJsonUnmarshaller.java | 1732 | /*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights
* Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.cognitoidp.model.transform;
import java.util.Map;
import java.util.Map.Entry;
import java.math.*;
import java.nio.ByteBuffer;
import com.amazonaws.services.cognitoidp.model.*;
import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*;
import com.amazonaws.transform.*;
import com.fasterxml.jackson.core.JsonToken;
import static com.fasterxml.jackson.core.JsonToken.*;
/**
 * JSON unmarshaller for {@code AddCustomAttributesResult}.
 * <p>
 * The result type has no modeled members, so unmarshalling never reads from
 * the context and simply returns a fresh, empty result object.
 */
public class AddCustomAttributesResultJsonUnmarshaller implements
        Unmarshaller<AddCustomAttributesResult, JsonUnmarshallerContext> {

    private static AddCustomAttributesResultJsonUnmarshaller instance;

    public AddCustomAttributesResult unmarshall(JsonUnmarshallerContext context)
            throws Exception {
        // Nothing to parse: the response body carries no fields for this result.
        return new AddCustomAttributesResult();
    }

    public static AddCustomAttributesResultJsonUnmarshaller getInstance() {
        if (instance == null) {
            instance = new AddCustomAttributesResultJsonUnmarshaller();
        }
        return instance;
    }
}
| apache-2.0 |
justintung/hbase | hbase-server/src/main/java/org/apache/hadoop/hbase/io/FileLink.java | 16375 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.io;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.io.IOException;
import java.io.InputStream;
import java.io.FileNotFoundException;
import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PositionedReadable;
import org.apache.hadoop.fs.Seekable;
import org.apache.hadoop.hbase.util.FSUtils;
/**
* The FileLink is a sort of hardlink, that allows access to a file given a set of locations.
*
* <p><b>The Problem:</b>
* <ul>
* <li>
* HDFS doesn't have support for hardlinks, and this make impossible to referencing
* the same data blocks using different names.
* </li>
* <li>
* HBase store files in one location (e.g. table/region/family/) and when the file is not
* needed anymore (e.g. compaction, region deletion, ...) moves it to an archive directory.
* </li>
* </ul>
* If we want to create a reference to a file, we need to remember that it can be in its
* original location or in the archive folder.
* The FileLink class tries to abstract this concept and given a set of locations
* it is able to switch between them making this operation transparent for the user.
* {@link HFileLink} is a more concrete implementation of the {@code FileLink}.
*
* <p><b>Back-references:</b>
* To help the {@link org.apache.hadoop.hbase.master.cleaner.CleanerChore} to keep track of
* the links to a particular file, during the {@code FileLink} creation, a new file is placed
* inside a back-reference directory. There's one back-reference directory for each file that
* has links, and in the directory there's one file per link.
*
* <p>HFileLink Example
* <ul>
* <li>
* /hbase/table/region-x/cf/file-k
* (Original File)
* </li>
* <li>
* /hbase/table-cloned/region-y/cf/file-k.region-x.table
* (HFileLink to the original file)
* </li>
* <li>
* /hbase/table-2nd-cloned/region-z/cf/file-k.region-x.table
* (HFileLink to the original file)
* </li>
* <li>
* /hbase/.archive/table/region-x/.links-file-k/region-y.table-cloned
* (Back-reference to the link in table-cloned)
* </li>
* <li>
* /hbase/.archive/table/region-x/.links-file-k/region-z.table-2nd-cloned
* (Back-reference to the link in table-2nd-cloned)
* </li>
* </ul>
*/
@InterfaceAudience.Private
public class FileLink {
  private static final Log LOG = LogFactory.getLog(FileLink.class);

  /** Define the Back-reference directory name prefix: .links-<hfile>/ */
  public static final String BACK_REFERENCES_DIRECTORY_PREFIX = ".links-";

  /**
   * FileLink InputStream that handles the switch between the original path
   * and the alternative locations, when the file is moved.
   *
   * <p>Every operation first tries the currently-open stream; on
   * {@code FileNotFoundException} (file was moved) — or on the NPE/AssertionError
   * that HDFS 1.x internals raise in the same situation — it reopens the file
   * from the next available location via {@link #tryOpen()} and retries once.
   */
  private static class FileLinkInputStream extends InputStream
      implements Seekable, PositionedReadable {
    private FSDataInputStream in = null;
    private Path currentPath = null;
    // Logical stream position, tracked here so a re-open can seek back to it.
    private long pos = 0;

    private final FileLink fileLink;
    private final int bufferSize;
    private final FileSystem fs;

    public FileLinkInputStream(final FileSystem fs, final FileLink fileLink)
        throws IOException {
      this(fs, fileLink, FSUtils.getDefaultBufferSize(fs));
    }

    public FileLinkInputStream(final FileSystem fs, final FileLink fileLink, int bufferSize)
        throws IOException {
      this.bufferSize = bufferSize;
      this.fileLink = fileLink;
      this.fs = fs;

      this.in = tryOpen();
    }

    @Override
    public int read() throws IOException {
      int res;
      try {
        res = in.read();
      } catch (FileNotFoundException e) {
        res = tryOpen().read();
      } catch (NullPointerException e) { // HDFS 1.x - DFSInputStream.getBlockAt()
        res = tryOpen().read();
      } catch (AssertionError e) { // assert in HDFS 1.x - DFSInputStream.getBlockAt()
        res = tryOpen().read();
      }
      // read() returns a byte value in [0, 255], or -1 at EOF. The previous
      // condition (res > 0) failed to advance pos after reading a 0x00 byte,
      // desynchronizing pos from the underlying stream position and breaking
      // the seek performed on re-open.
      if (res >= 0) pos += 1;
      return res;
    }

    @Override
    public int read(byte[] b) throws IOException {
      return read(b, 0, b.length);
    }

    @Override
    public int read(byte[] b, int off, int len) throws IOException {
      int n;
      try {
        n = in.read(b, off, len);
      } catch (FileNotFoundException e) {
        n = tryOpen().read(b, off, len);
      } catch (NullPointerException e) { // HDFS 1.x - DFSInputStream.getBlockAt()
        n = tryOpen().read(b, off, len);
      } catch (AssertionError e) { // assert in HDFS 1.x - DFSInputStream.getBlockAt()
        n = tryOpen().read(b, off, len);
      }
      if (n > 0) pos += n;
      assert(in.getPos() == pos);
      return n;
    }

    @Override
    public int read(long position, byte[] buffer, int offset, int length) throws IOException {
      // Positioned read: does not move (or update) the tracked stream position.
      int n;
      try {
        n = in.read(position, buffer, offset, length);
      } catch (FileNotFoundException e) {
        n = tryOpen().read(position, buffer, offset, length);
      } catch (NullPointerException e) { // HDFS 1.x - DFSInputStream.getBlockAt()
        n = tryOpen().read(position, buffer, offset, length);
      } catch (AssertionError e) { // assert in HDFS 1.x - DFSInputStream.getBlockAt()
        n = tryOpen().read(position, buffer, offset, length);
      }
      return n;
    }

    @Override
    public void readFully(long position, byte[] buffer) throws IOException {
      readFully(position, buffer, 0, buffer.length);
    }

    @Override
    public void readFully(long position, byte[] buffer, int offset, int length) throws IOException {
      try {
        in.readFully(position, buffer, offset, length);
      } catch (FileNotFoundException e) {
        tryOpen().readFully(position, buffer, offset, length);
      } catch (NullPointerException e) { // HDFS 1.x - DFSInputStream.getBlockAt()
        tryOpen().readFully(position, buffer, offset, length);
      } catch (AssertionError e) { // assert in HDFS 1.x - DFSInputStream.getBlockAt()
        tryOpen().readFully(position, buffer, offset, length);
      }
    }

    @Override
    public long skip(long n) throws IOException {
      long skipped;
      try {
        skipped = in.skip(n);
      } catch (FileNotFoundException e) {
        skipped = tryOpen().skip(n);
      } catch (NullPointerException e) { // HDFS 1.x - DFSInputStream.getBlockAt()
        skipped = tryOpen().skip(n);
      } catch (AssertionError e) { // assert in HDFS 1.x - DFSInputStream.getBlockAt()
        skipped = tryOpen().skip(n);
      }
      if (skipped > 0) pos += skipped;
      return skipped;
    }

    @Override
    public int available() throws IOException {
      try {
        return in.available();
      } catch (FileNotFoundException e) {
        return tryOpen().available();
      } catch (NullPointerException e) { // HDFS 1.x - DFSInputStream.getBlockAt()
        return tryOpen().available();
      } catch (AssertionError e) { // assert in HDFS 1.x - DFSInputStream.getBlockAt()
        return tryOpen().available();
      }
    }

    @Override
    public void seek(long pos) throws IOException {
      try {
        in.seek(pos);
      } catch (FileNotFoundException e) {
        tryOpen().seek(pos);
      } catch (NullPointerException e) { // HDFS 1.x - DFSInputStream.getBlockAt()
        tryOpen().seek(pos);
      } catch (AssertionError e) { // assert in HDFS 1.x - DFSInputStream.getBlockAt()
        tryOpen().seek(pos);
      }
      this.pos = pos;
    }

    @Override
    public long getPos() throws IOException {
      return pos;
    }

    @Override
    public boolean seekToNewSource(long targetPos) throws IOException {
      boolean res;
      try {
        res = in.seekToNewSource(targetPos);
      } catch (FileNotFoundException e) {
        res = tryOpen().seekToNewSource(targetPos);
      } catch (NullPointerException e) { // HDFS 1.x - DFSInputStream.getBlockAt()
        res = tryOpen().seekToNewSource(targetPos);
      } catch (AssertionError e) { // assert in HDFS 1.x - DFSInputStream.getBlockAt()
        res = tryOpen().seekToNewSource(targetPos);
      }
      if (res) pos = targetPos;
      return res;
    }

    @Override
    public void close() throws IOException {
      in.close();
    }

    @Override
    public synchronized void mark(int readlimit) {
    }

    @Override
    public synchronized void reset() throws IOException {
      throw new IOException("mark/reset not supported");
    }

    @Override
    public boolean markSupported() {
      return false;
    }

    /**
     * Try to open the file from one of the available locations.
     *
     * <p>Skips the location that is currently open (it just failed) and seeks
     * the newly-opened stream back to the tracked position {@code pos}.
     *
     * @return FSDataInputStream stream of the opened file link
     * @throws IOException on unexpected error, or file not found.
     */
    private FSDataInputStream tryOpen() throws IOException {
      for (Path path: fileLink.getLocations()) {
        if (path.equals(currentPath)) continue;
        try {
          in = fs.open(path, bufferSize);
          if (pos != 0) in.seek(pos);
          assert(in.getPos() == pos) : "Link unable to seek to the right position=" + pos;
          if (LOG.isTraceEnabled()) {
            if (currentPath == null) {
              // Was LOG.debug(): log level now matches the isTraceEnabled() guard.
              LOG.trace("link open path=" + path);
            } else {
              LOG.trace("link switch from path=" + currentPath + " to path=" + path);
            }
          }
          currentPath = path;
          return(in);
        } catch (FileNotFoundException e) {
          // Try another file location
        }
      }
      throw new FileNotFoundException("Unable to open link: " + fileLink);
    }
  }

  private Path[] locations = null;

  protected FileLink() {
    this.locations = null;
  }

  /**
   * @param originPath Original location of the file to link
   * @param alternativePaths Alternative locations to look for the linked file
   */
  public FileLink(Path originPath, Path... alternativePaths) {
    setLocations(originPath, alternativePaths);
  }

  /**
   * @param locations locations to look for the linked file
   */
  public FileLink(final Collection<Path> locations) {
    this.locations = locations.toArray(new Path[locations.size()]);
  }

  /**
   * @return the locations to look for the linked file.
   */
  public Path[] getLocations() {
    return locations;
  }

  @Override
  public String toString() {
    StringBuilder str = new StringBuilder(getClass().getName());
    str.append(" locations=[");
    for (int i = 0; i < locations.length; ++i) {
      if (i > 0) str.append(", ");
      str.append(locations[i].toString());
    }
    str.append("]");
    return str.toString();
  }

  /**
   * @return true if the file pointed by the link exists
   */
  public boolean exists(final FileSystem fs) throws IOException {
    for (int i = 0; i < locations.length; ++i) {
      if (fs.exists(locations[i])) {
        return true;
      }
    }
    return false;
  }

  /**
   * @return the path of the first available link.
   */
  public Path getAvailablePath(FileSystem fs) throws IOException {
    for (int i = 0; i < locations.length; ++i) {
      if (fs.exists(locations[i])) {
        return locations[i];
      }
    }
    throw new FileNotFoundException("Unable to open link: " + this);
  }

  /**
   * Get the FileStatus of the referenced file.
   *
   * @param fs {@link FileSystem} on which to get the file status
   * @return InputStream for the hfile link.
   * @throws IOException on unexpected error.
   */
  public FileStatus getFileStatus(FileSystem fs) throws IOException {
    for (int i = 0; i < locations.length; ++i) {
      try {
        return fs.getFileStatus(locations[i]);
      } catch (FileNotFoundException e) {
        // Try another file location
      }
    }
    throw new FileNotFoundException("Unable to open link: " + this);
  }

  /**
   * Open the FileLink for read.
   * <p>
   * It uses a wrapper of FSDataInputStream that is agnostic to the location
   * of the file, even if the file switches between locations.
   *
   * @param fs {@link FileSystem} on which to open the FileLink
   * @return InputStream for reading the file link.
   * @throws IOException on unexpected error.
   */
  public FSDataInputStream open(final FileSystem fs) throws IOException {
    return new FSDataInputStream(new FileLinkInputStream(fs, this));
  }

  /**
   * Open the FileLink for read.
   * <p>
   * It uses a wrapper of FSDataInputStream that is agnostic to the location
   * of the file, even if the file switches between locations.
   *
   * @param fs {@link FileSystem} on which to open the FileLink
   * @param bufferSize the size of the buffer to be used.
   * @return InputStream for reading the file link.
   * @throws IOException on unexpected error.
   */
  public FSDataInputStream open(final FileSystem fs, int bufferSize) throws IOException {
    return new FSDataInputStream(new FileLinkInputStream(fs, this, bufferSize));
  }

  /**
   * NOTE: This method must be used only in the constructor!
   * It creates a List with the specified locations for the link.
   */
  protected void setLocations(Path originPath, Path... alternativePaths) {
    assert this.locations == null : "Link locations already set";
    List<Path> paths = new ArrayList<Path>(alternativePaths.length +1);
    if (originPath != null) {
      paths.add(originPath);
    }
    for (int i = 0; i < alternativePaths.length; i++) {
      if (alternativePaths[i] != null) {
        paths.add(alternativePaths[i]);
      }
    }
    this.locations = paths.toArray(new Path[0]);
  }

  /**
   * Get the directory to store the link back references
   *
   * <p>To simplify the reference count process, during the FileLink creation
   * a back-reference is added to the back-reference directory of the specified file.
   *
   * @param storeDir Root directory for the link reference folder
   * @param fileName File Name with links
   * @return Path for the link back references.
   */
  public static Path getBackReferencesDir(final Path storeDir, final String fileName) {
    return new Path(storeDir, BACK_REFERENCES_DIRECTORY_PREFIX + fileName);
  }

  /**
   * Get the referenced file name from the reference link directory path.
   *
   * @param dirPath Link references directory path
   * @return Name of the file referenced
   */
  public static String getBackReferenceFileName(final Path dirPath) {
    return dirPath.getName().substring(BACK_REFERENCES_DIRECTORY_PREFIX.length());
  }

  /**
   * Checks if the specified directory path is a back reference links folder.
   *
   * @param dirPath Directory path to verify
   * @return True if the specified directory is a link references folder
   */
  public static boolean isBackReferencesDir(final Path dirPath) {
    if (dirPath == null) return false;
    return dirPath.getName().startsWith(BACK_REFERENCES_DIRECTORY_PREFIX);
  }

  @Override
  public boolean equals(Object obj) {
    // Assumes that the ordering of locations between objects are the same. This is true for the
    // current subclasses already (HFileLink, WALLink). Otherwise, we may have to sort the locations
    // or keep them presorted
    if (this == obj) {
      return true;
    }
    // Null guard: per the Object.equals contract, x.equals(null) must return
    // false; the previous code threw NullPointerException instead.
    if (obj == null || !this.getClass().equals(obj.getClass())) {
      return false;
    }
    return Arrays.equals(this.locations, ((FileLink) obj).locations);
  }

  @Override
  public int hashCode() {
    return Arrays.hashCode(locations);
  }
}
| apache-2.0 |
jgrivolla/uima-addons | Lucas/src/test/java/org/apache/uima/lucas/indexer/analysis/LowerCaseFilterFactoryTest.java | 1696 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.uima.lucas.indexer.analysis;
import org.apache.lucene.analysis.LowerCaseFilter;
import org.apache.lucene.analysis.TokenStream;
import org.apache.uima.lucas.indexer.test.util.DummyTokenStream;
import org.junit.Before;
import org.junit.Test;
import java.io.IOException;
import static org.junit.Assert.assertNotNull;
/**
 * Verifies that {@link LowerCaseFilterFactory} wraps a token stream in a
 * Lucene {@link LowerCaseFilter}.
 */
public class LowerCaseFilterFactoryTest {

    private LowerCaseFilterFactory factory;
    private TokenStream stream;

    @Before
    public void setUp() throws Exception {
        factory = new LowerCaseFilterFactory();
        stream = new DummyTokenStream("dummy", 1, 1, 0);
    }

    @Test
    public void testCreateTokenFilter() throws IOException {
        // The factory ignores its (null) properties argument; the cast asserts
        // the concrete filter type produced.
        final LowerCaseFilter filter = (LowerCaseFilter) factory.createTokenFilter(stream, null);
        assertNotNull(filter);
    }
}
| apache-2.0 |
karreiro/uberfire | uberfire-extensions/uberfire-layout-editor/uberfire-layout-editor-client/src/main/java/org/uberfire/ext/layout/editor/client/api/ModalConfigurationContext.java | 1562 | /*
* Copyright 2015 JBoss, by Red Hat, Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.uberfire.ext.layout.editor.client.api;
import java.util.function.Supplier;
import org.uberfire.ext.layout.editor.api.editor.LayoutComponent;
import org.uberfire.ext.layout.editor.api.editor.LayoutTemplate;
import org.uberfire.ext.layout.editor.client.infra.ConfigurationContext;
import org.uberfire.mvp.Command;
/**
 * Provides the context required to configure a layout component using a modal screen.
 * <p>
 * Thin specialization of {@link ConfigurationContext}: all state and behavior
 * live in the parent class; this subclass only marks the modal configuration flow.
 */
public class ModalConfigurationContext extends ConfigurationContext {

    /**
     * @param component the layout component being configured
     * @param configurationFinish command invoked when configuration completes successfully
     * @param configurationCanceled command invoked when configuration is cancelled
     * @param currentLayoutTemplateSupplier supplies the layout template currently being edited
     */
    public ModalConfigurationContext(LayoutComponent component,
                                     Command configurationFinish,
                                     Command configurationCanceled,
                                     Supplier<LayoutTemplate> currentLayoutTemplateSupplier) {
        super(component,
              configurationFinish,
              configurationCanceled,
              currentLayoutTemplateSupplier);
    }
}
| apache-2.0 |
igniterealtime/Smack | smack-sasl-provided/src/main/java/org/jivesoftware/smack/sasl/provided/SASLExternalMechanism.java | 2140 | /**
*
* Copyright © 2014 Daniele Ricci
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jivesoftware.smack.sasl.provided;
import javax.security.auth.callback.CallbackHandler;
import org.jivesoftware.smack.sasl.SASLMechanism;
import org.jivesoftware.smack.util.StringUtils;
import org.jxmpp.util.XmppStringUtils;
/**
* Implementation of the SASL-EXTERNAL authentication mechanism.
*
* @author Daniele Ricci
*/
/**
 * Implementation of the SASL-EXTERNAL authentication mechanism.
 * <p>
 * The actual authentication is performed outside of Smack (for example via a
 * TLS client certificate); this class only advertises the mechanism and
 * builds the initial authentication text.
 *
 * @author Daniele Ricci
 */
public class SASLExternalMechanism extends SASLMechanism {

    public static final String NAME = EXTERNAL;

    @Override
    protected void authenticateInternal(CallbackHandler cbh) {
        // Intentionally empty: authentication happens external to Smack, which
        // will receive the localpart after the resource binding.
    }

    @Override
    protected byte[] getAuthenticationText() {
        // An explicitly requested authorization identity takes precedence.
        if (authorizationId != null) {
            return toBytes(authorizationId.toString());
        }
        // Nothing to send when no authentication id is available.
        if (StringUtils.isNullOrEmpty(authenticationId)) {
            return null;
        }
        // Otherwise derive the full JID from the authentication id and the service name.
        final String jid = XmppStringUtils.completeJidFrom(authenticationId, serviceName);
        return toBytes(jid);
    }

    @Override
    public String getName() {
        return NAME;
    }

    @Override
    public int getPriority() {
        return 510;
    }

    @Override
    protected SASLMechanism newInstance() {
        return new SASLExternalMechanism();
    }

    @Override
    public void checkIfSuccessfulOrThrow() {
        // No check performed: success is determined externally.
    }

    @Override
    public boolean authzidSupported() {
        return true;
    }

    @Override
    public boolean requiresPassword() {
        return false;
    }
}
| apache-2.0 |
apache/archiva | archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/indexer/ArchivaIndexingContext.java | 4069 | package org.apache.archiva.indexer;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.archiva.repository.Repository;
import org.apache.archiva.repository.storage.StorageAsset;
import java.io.IOException;
import java.net.URI;
import java.time.ZonedDateTime;
import java.util.Set;
/**
* This represents a indexing context that is used to manage the index of a certain repository.
*
*/
/**
 * An indexing context manages the search index of a single repository. It
 * provides the index lifecycle operations (commit, rollback, optimize, close,
 * purge) and access to the underlying repository-specific index
 * implementation.
 */
public interface ArchivaIndexingContext {
    /**
     * The identifier of the context
     * @return the unique id of this indexing context
     */
    String getId();
    /**
     * Returns the repository this index context is associated to.
     * @return the associated repository
     */
    Repository getRepository();
    /**
     * The path where the index is stored.
     * @return the storage asset that contains the index files
     */
    StorageAsset getPath();
    /**
     * Returns true, if the index has no entries or is not initialized.
     * @return {@code true} if the index is empty or not initialized, otherwise {@code false}
     * @throws IOException if the index state cannot be read
     */
    boolean isEmpty() throws IOException;
    /**
     * Writes the last changes to the index.
     * @throws IOException if the changes cannot be written
     */
    void commit() throws IOException;
    /**
     * Throws away the last changes.
     * @throws IOException if the rollback fails
     */
    void rollback() throws IOException;
    /**
     * Optimizes the index
     * @throws IOException if the optimization fails
     */
    void optimize() throws IOException;
    /**
     * Closes any resources, this context has open.
     * @param deleteFiles True, if the index files should be deleted.
     * @throws IOException if closing (or deleting the index files) fails
     */
    void close(boolean deleteFiles) throws IOException;
    /**
     * Closes the context without deleting the files.
     * Is identical to <code>close(false)</code>
     * @throws IOException if closing fails
     */
    void close() throws IOException;
    /**
     * Returns the status of this context. This method will return <code>false</code>, after the {@link #close()} method
     * has been called.
     *
     * @return <code>true</code>, if the <code>close()</code> method has not been called, otherwise <code>false</code>
     */
    boolean isOpen();
    /**
     * Removes all entries from the index. After this method finished,
     * isEmpty() should return true.
     * @throws IOException if the purge fails
     */
    void purge() throws IOException;
    /**
     * Returns true, if this index implementation has support for the given repository specific
     * implementation class.
     * @param clazz the repository-specific index class to check
     * @return {@code true} if {@link #getBaseContext(Class)} can return an instance of the given class
     */
    boolean supports(Class<?> clazz);
    /**
     * Returns the repository specific implementation of the index. E.g. the maven index class.
     * @param clazz the specific class
     * @return the instance of the given class representing this index
     * @throws UnsupportedBaseContextException if the implementation is not supported
     */
    <T> T getBaseContext(Class<T> clazz) throws UnsupportedBaseContextException;
    /**
     * Returns the list of groups that are assigned to this index
     * @return the set of group ids assigned to this index
     * @throws IOException if the group information cannot be read
     */
    Set<String> getGroups() throws IOException;
    /**
     * Updates the timestamp of the index.
     * @param save whether the updated timestamp should be persisted
     * @throws IOException if the timestamp cannot be updated
     */
    void updateTimestamp(boolean save) throws IOException;
    /**
     * Updates the timestamp with the given time.
     * @param save whether the updated timestamp should be persisted
     * @param time the timestamp value to set
     * @throws IOException if the timestamp cannot be updated
     */
    void updateTimestamp(boolean save, ZonedDateTime time) throws IOException;
}
| apache-2.0 |
azharhashmi/brooklyn | core/src/main/java/brooklyn/util/internal/ssh/SshAbstractTool.java | 5822 | package brooklyn.util.internal.ssh;
import static brooklyn.util.net.Networking.checkPortValid;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import java.io.File;
import java.util.Map;
import java.util.Set;
import brooklyn.util.os.Os;
import com.google.common.collect.Iterables;
import com.google.common.collect.Sets;
/**
 * Base class for SSH tool implementations. Holds the connection coordinates
 * (host, port, user) and the authentication options (password, private key
 * data/file/passphrase), which are populated through the nested
 * {@link AbstractSshToolBuilder}.
 */
public abstract class SshAbstractTool extends ShellAbstractTool implements SshTool {
    // Pre-computed "user@host:port" description returned by toString().
    protected final String toString;
    protected final String host;
    protected final String user;
    protected final String password;
    protected final int port;
    protected String privateKeyPassphrase;
    protected String privateKeyData;
    protected File privateKeyFile;
    protected boolean strictHostKeyChecking;
    protected boolean allocatePTY;
    /**
     * A unit of work executed over an SSH connection: {@code create()} produces
     * the result and {@code clear()} releases whatever the action acquired.
     */
    public static interface SshAction<T> {
        void clear() throws Exception;
        T create() throws Exception;
    }
    /**
     * Fluent, self-typed builder for {@link SshAbstractTool} subclasses. Values can
     * be set individually or populated in bulk from a property map via {@link #from(Map)}.
     */
    public static abstract class AbstractSshToolBuilder<T extends SshTool, B extends AbstractSshToolBuilder<T,B>> {
        protected String host;
        protected int port = 22;
        protected String user = System.getProperty("user.name");
        protected String password;
        protected String privateKeyData;
        protected String privateKeyPassphrase;
        protected Set<String> privateKeyFiles = Sets.newLinkedHashSet();
        protected boolean strictHostKeyChecking = false;
        protected boolean allocatePTY = false;
        protected File localTempDir = null;
        // Self-typed return so subclass builders keep their concrete type across chained calls.
        @SuppressWarnings("unchecked")
        protected B self() {
            return (B) this;
        }
        /**
         * Populates this builder from the given configuration properties.
         * Only {@code host} is mandatory; all other values fall back to defaults.
         *
         * @param props the configuration properties to read
         * @return this builder, for chaining
         */
        public B from(Map<String,?> props) {
            host = getMandatoryVal(props, PROP_HOST);
            port = getOptionalVal(props, PROP_PORT);
            user = getOptionalVal(props, PROP_USER);
            password = getOptionalVal(props, PROP_PASSWORD);
            warnOnDeprecated(props, "privateKey", "privateKeyData");
            privateKeyData = getOptionalVal(props, PROP_PRIVATE_KEY_DATA);
            privateKeyPassphrase = getOptionalVal(props, PROP_PRIVATE_KEY_PASSPHRASE);
            // for backwards compatibility accept keyFiles and privateKey
            // but sshj accepts only a single privateKeyFile; leave blank to use defaults (i.e. ~/.ssh/id_rsa and id_dsa)
            warnOnDeprecated(props, "keyFiles", null);
            String privateKeyFile = getOptionalVal(props, PROP_PRIVATE_KEY_FILE);
            if (privateKeyFile != null) privateKeyFiles.add(privateKeyFile);
            strictHostKeyChecking = getOptionalVal(props, PROP_STRICT_HOST_KEY_CHECKING);
            allocatePTY = getOptionalVal(props, PROP_ALLOCATE_PTY);
            String localTempDirPath = getOptionalVal(props, PROP_LOCAL_TEMP_DIR);
            localTempDir = (localTempDirPath == null) ? null : new File(Os.tidyPath(localTempDirPath));
            return self();
        }
        public B host(String val) {
            this.host = val; return self();
        }
        public B user(String val) {
            this.user = val; return self();
        }
        public B password(String val) {
            this.password = val; return self();
        }
        public B port(int val) {
            this.port = val; return self();
        }
        public B privateKeyPassphrase(String val) {
            this.privateKeyPassphrase = val; return self();
        }
        /** @deprecated 1.4.0, use privateKeyData */
        public B privateKey(String val) {
            this.privateKeyData = val; return self();
        }
        public B privateKeyData(String val) {
            this.privateKeyData = val; return self();
        }
        public B privateKeyFile(String val) {
            this.privateKeyFiles.add(val); return self();
        }
        public B localTempDir(File val) {
            this.localTempDir = val; return self();
        }
        /** Builds the concrete SSH tool from the values collected so far. */
        public abstract T build();
    }
    /**
     * Copies the builder's values into this tool and validates them
     * (non-null, non-empty host; valid port; at most one private key file).
     *
     * @param builder the builder holding the configuration; must not be null
     */
    protected SshAbstractTool(AbstractSshToolBuilder<?,?> builder) {
        super(builder.localTempDir);
        host = checkNotNull(builder.host, "host");
        port = builder.port;
        user = builder.user;
        password = builder.password;
        strictHostKeyChecking = builder.strictHostKeyChecking;
        allocatePTY = builder.allocatePTY;
        privateKeyPassphrase = builder.privateKeyPassphrase;
        privateKeyData = builder.privateKeyData;
        if (builder.privateKeyFiles.size() > 1) {
            throw new IllegalArgumentException("sshj supports only a single private key-file; " +
                    "for defaults of ~/.ssh/id_rsa and ~/.ssh/id_dsa leave blank");
        } else if (builder.privateKeyFiles.size() == 1) {
            String privateKeyFileStr = Iterables.get(builder.privateKeyFiles, 0);
            // Expand a leading '~' to the current user's home directory.
            String amendedKeyFile = privateKeyFileStr.startsWith("~") ? (System.getProperty("user.home")+privateKeyFileStr.substring(1)) : privateKeyFileStr;
            privateKeyFile = new File(amendedKeyFile);
        } else {
            privateKeyFile = null;
        }
        checkArgument(host.length() > 0, "host value must not be an empty string");
        checkPortValid(port, "ssh port");
        toString = String.format("%s@%s:%d", user, host, port);
    }
    @Override
    public String toString() {
        // The "user@host:port" string computed once in the constructor.
        return toString;
    }
    public String getHostAddress() {
        return this.host;
    }
    public String getUsername() {
        return this.user;
    }
    /**
     * Wraps the given exception in an {@link SshException} prefixed with this
     * tool's "user@host:port" description, and throws it.
     *
     * @param e the underlying cause
     * @param message context describing the failed operation
     * @return never returns normally; declared for caller convenience
     * @throws SshException always
     */
    protected SshException propagate(Exception e, String message) throws SshException {
        throw new SshException("(" + toString() + ") " + message + ": " + e.getMessage(), e);
    }
}
| apache-2.0 |
peter-gergely-horvath/kylo | metadata/metadata-api/src/main/java/com/thinkbiganalytics/metadata/api/template/FeedManagerTemplate.java | 2215 | package com.thinkbiganalytics.metadata.api.template;
/*-
* #%L
* thinkbig-metadata-api
* %%
* Copyright (C) 2017 ThinkBig Analytics
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import java.io.Serializable;
import java.util.List;
import org.joda.time.DateTime;
import com.thinkbiganalytics.metadata.api.feed.Feed;
import com.thinkbiganalytics.metadata.api.security.AccessControlled;
/**
*/
/**
 * Metadata model of a feed-manager template: its identity, presentation
 * attributes (description, icon, ordering), behavioral flags (table
 * definition, data transformation, preconditions, streaming) and its
 * relationship to the feeds created from it.
 */
public interface FeedManagerTemplate extends AccessControlled {
    /** Returns the feeds associated with this template. */
    List<Feed> getFeeds();
    /**
     * Associates a feed with this template.
     * @param feed the feed to add
     * @return true if the feed was added
     */
    boolean addFeed(Feed feed);
    /**
     * Removes the association between the given feed and this template.
     * @param feed the feed to remove
     * @return true if the feed was removed
     */
    boolean removeFeed(Feed feed);
    /** Returns the unique identifier of this template. */
    ID getId();
    /** Returns the display name of this template. */
    String getName();
    void setName(String name);
    /** Returns the identifier of the corresponding template in NiFi. */
    String getNifiTemplateId();
    void setNifiTemplateId(String nifiTemplateId);
    /** Returns a human-readable description of this template. */
    String getDescription();
    void setDescription(String description);
    boolean isDefineTable();
    void setDefineTable(boolean defineTable);
    boolean isDataTransformation();
    void setDataTransformation(boolean dataTransformation);
    boolean isAllowPreconditions();
    void setAllowPreconditions(boolean allowedPreconditions);
    /** Returns the icon associated with this template. */
    String getIcon();
    void setIcon(String icon);
    /** Returns the icon color associated with this template. */
    String getIconColor();
    void setIconColor(String iconColor);
    /** Returns the raw JSON representation stored for this template. */
    String getJson();
    void setJson(String json);
    /** Returns the time this template was created. */
    DateTime getCreatedTime();
    /** Returns the time this template was last modified. */
    DateTime getModifiedTime();
    /** Returns the enabled/disabled state of this template. */
    State getState();
    void setState(State state);
    /** Returns the display ordering value of this template. */
    Long getOrder();
    void setOrder(Long order);
    boolean isStream();
    void setStream(boolean stream);
    String getTemplateTableOption();
    void setTemplateTableOption(String templateTableOption);
    /** Lifecycle state of a template. */
    enum State {
        ENABLED, DISABLED
    }
    /** Opaque, serializable identifier type for templates. */
    interface ID extends Serializable {
    }
}
| apache-2.0 |
dasein-cloud/dasein-cloud-tier3 | src/main/java/org/dasein/cloud/tier3/compute/Tier3OS.java | 363 | package org.dasein.cloud.tier3.compute;
/**
 * Simple data holder describing an operating system option in the Tier3
 * (CenturyLink) cloud: its id, display name, and the CPU/memory limits
 * allowed for it.
 * <p>
 * Fields are intentionally public and mutable to preserve the existing
 * caller-facing interface.
 */
public class Tier3OS {
    /** Provider-assigned identifier of the OS. */
    public int id;
    /** Display name of the OS. */
    public String name;
    /** Maximum number of CPUs allowed for this OS. */
    public int maxCpu;
    /** Maximum memory allowed for this OS (units as supplied by the provider). */
    public int maxMemory;

    /**
     * Creates a new OS description.
     *
     * @param id provider-assigned identifier
     * @param name display name
     * @param maxCpu maximum number of CPUs
     * @param maxMemory maximum memory
     */
    public Tier3OS(int id, String name, int maxCpu, int maxMemory) {
        this.id = id;
        this.name = name;
        this.maxCpu = maxCpu;
        this.maxMemory = maxMemory;
    }

    /**
     * Human-readable description of this OS, useful for logging and debugging.
     * Added as a backward-compatible improvement: the class previously
     * inherited Object's opaque identity-based toString.
     */
    @Override
    public String toString() {
        return "Tier3OS{id=" + id + ", name=" + name + ", maxCpu=" + maxCpu
                + ", maxMemory=" + maxMemory + "}";
    }
}
| apache-2.0 |
flownclouds/modeshape | modeshape-jcr/src/main/java/org/modeshape/jcr/cache/document/LocalDocumentStore.java | 16085 | /*
* ModeShape (http://www.modeshape.org)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.modeshape.jcr.cache.document;
import java.io.Serializable;
import java.util.Collection;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import javax.transaction.HeuristicMixedException;
import javax.transaction.HeuristicRollbackException;
import javax.transaction.NotSupportedException;
import javax.transaction.RollbackException;
import javax.transaction.SystemException;
import javax.transaction.TransactionManager;
import javax.transaction.xa.XAResource;
import org.infinispan.Cache;
import org.infinispan.distexec.DistributedCallable;
import org.infinispan.schematic.SchematicDb;
import org.infinispan.schematic.SchematicEntry;
import org.infinispan.schematic.document.Document;
import org.infinispan.schematic.document.EditableDocument;
import org.modeshape.common.SystemFailureException;
import org.modeshape.jcr.InfinispanUtil;
import org.modeshape.jcr.InfinispanUtil.Combiner;
import org.modeshape.jcr.InfinispanUtil.Location;
import org.modeshape.jcr.value.Name;
import org.modeshape.jcr.value.binary.ExternalBinaryValue;
/**
* An implementation of {@link DocumentStore} which always uses the local cache to store/retrieve data and which provides some
* additional methods for exposing local cache information.
*
* @author Horia Chiorean (hchiorea@redhat.com)
*/
public class LocalDocumentStore implements DocumentStore {
private final SchematicDb database;
private String localSourceKey;
/**
* Creates a new local store with the given database
*
* @param database a {@link SchematicDb} instance which must be non-null.
*/
public LocalDocumentStore( SchematicDb database ) {
this.database = database;
}
@Override
public boolean containsKey( String key ) {
return database.containsKey(key);
}
@Override
public SchematicEntry get( String key ) {
return database.get(key);
}
@Override
public SchematicEntry storeDocument( String key,
Document document ) {
return putIfAbsent(key, document);
}
@Override
public void updateDocument( String key,
Document document,
SessionNode sessionNode ) {
// do nothing, the way the local store updates is via deltas
}
@Override
public String newDocumentKey( String parentKey,
Name documentName,
Name documentPrimaryType ) {
// the local store doesn't generate explicit keys for new nodes
return null;
}
/**
* Store the supplied document and metadata at the given key.
*
* @param key the key or identifier for the document
* @param document the document that is to be stored
* @return the existing entry for the supplied key, or null if there was no entry and the put was successful
* @see SchematicDb#putIfAbsent(String, org.infinispan.schematic.document.Document,
* org.infinispan.schematic.document.Document)
*/
public SchematicEntry putIfAbsent( String key,
Document document ) {
return database.putIfAbsent(key, document, null);
}
/**
* Store the supplied document and metadata at the given key.
*
* @param key the key or identifier for the document
* @param document the document that is to be stored
* @see SchematicDb#put(String, org.infinispan.schematic.document.Document, org.infinispan.schematic.document.Document)
*/
public void put( String key,
Document document ) {
database.put(key, document, null);
}
/**
* Store the supplied document in the local db
*
* @param entryDocument the document that contains the metadata document, content document, and key
*/
public void put( Document entryDocument ) {
database.put(entryDocument);
}
/**
* Replace the existing document and metadata at the given key with the document that is supplied. This method does nothing if
* there is not an existing entry at the given key.
*
* @param key the key or identifier for the document
* @param document the new document that is to replace the existing document (or binary content) the replacement
*/
public void replace( String key,
Document document ) {
database.replace(key, document, null);
}
@Override
public boolean remove( String key ) {
return database.remove(key) != null;
}
@Override
public boolean prepareDocumentsForUpdate( Collection<String> keys ) {
return database.lock(keys);
}
@Override
public boolean updatesRequirePreparing() {
return database.isExplicitLockingEnabled();
}
@Override
public LocalDocumentStore localStore() {
return this;
}
@Override
public TransactionManager transactionManager() {
return localCache().getAdvancedCache().getTransactionManager();
}
@Override
public XAResource xaResource() {
return localCache().getAdvancedCache().getXAResource();
}
@Override
public void setLocalSourceKey( String sourceKey ) {
this.localSourceKey = sourceKey;
}
@Override
public String getLocalSourceKey() {
return this.localSourceKey;
}
@Override
public String createExternalProjection( String projectedNodeKey,
String sourceName,
String externalPath,
String alias ) {
throw new UnsupportedOperationException("External projections are not supported in the local document store");
}
@Override
public Document getChildrenBlock( String key ) {
// Look up the information in the database ...
SchematicEntry entry = get(key);
if (entry == null) {
// There is no such node ...
return null;
}
return entry.getContentAsDocument();
}
@Override
public Document getChildReference( String parentKey,
String childKey ) {
return null; // don't support this
}
/**
* Returns the local Infinispan cache.
*
* @return a {@code non-null} {@link Cache} instance.
*/
public Cache<String, SchematicEntry> localCache() {
return database.getCache();
}
@Override
public ExternalBinaryValue getExternalBinary( String sourceName,
String id ) {
throw new UnsupportedOperationException("External binaries are only supported by the federated document store");
}
/**
* Perform the supplied operation on each stored document that is accessible within this process. Each document will be
* operated upon in a separate transaction, which will be committed if the operation is successful or rolledback if the
* operation cannot be complete successfully.
* <p>
* Generally, this method executes the operation upon all documents. If there is an error processing a single document, that
* document is skipped and the execution will continue with the next document(s). However, if there is an exception with the
* transactions or another system failure, this method will terminate with an exception.
*
* @param operation the operation to be performed
* @return the summary of the number of documents that were affected
* @throws InterruptedException if the process is interrupted
* @throws ExecutionException if there is an error while getting executing the operation
*/
public DocumentOperationResults performOnEachDocument( DocumentOperation operation )
throws InterruptedException, ExecutionException {
DistributedOperation distOp = new DistributedOperation(operation);
return InfinispanUtil.execute(database.getCache(), Location.LOCALLY, distOp, distOp);
}
/**
* An operation upon a persisted document.
*/
public static abstract class DocumentOperation implements Serializable {
private static final long serialVersionUID = 1L;
protected Cache<String, SchematicEntry> cache;
/**
* Invoked by execution environment after the operation has been migrated for execution to a specific Infinispan node.
*
* @param cache cache whose keys are used as input data for this DistributedCallable task
*/
public void setEnvironment( Cache<String, SchematicEntry> cache ) {
this.cache = cache;
}
/**
* Execute the operation upon the given {@link EditableDocument}.
*
* @param key the document's key; never null
* @param document the editable document; never null
* @return true if the operation modified the document, or false otherwise
*/
public abstract boolean execute( String key,
EditableDocument document );
}
public static class DocumentOperationResults implements Serializable {
private static final long serialVersionUID = 1L;
private long modifiedCount;
private long unmodifiedCount;
private long skipCount;
private long failureCount;
/**
* Return the number of documents that were successfully updated/modified by the operation.
*
* @return the number of modified documents
*/
public long getModifiedCount() {
return modifiedCount;
}
/**
* Return the number of documents that were not updated/modified by the operation.
*
* @return the number of unmodified documents
*/
public long getUnmodifiedCount() {
return unmodifiedCount;
}
/**
* Return the number of documents that caused some failure.
*
* @return the number of failed documents
*/
public long getFailureCount() {
return failureCount;
}
/**
* Return the number of documents that were skipped by the operation because the document could not be obtained in an
* timely fashion.
*
* @return the number of skipped documents
*/
public long getSkipCount() {
return skipCount;
}
protected void recordModified() {
++modifiedCount;
}
protected void recordUnmodified() {
++unmodifiedCount;
}
protected void recordFailure() {
++failureCount;
}
protected void recordSkipped() {
++skipCount;
}
protected DocumentOperationResults combine( DocumentOperationResults other ) {
if (other != null) {
this.modifiedCount += other.modifiedCount;
this.unmodifiedCount += other.unmodifiedCount;
this.skipCount += other.skipCount;
this.failureCount += other.failureCount;
}
return this;
}
@Override
public String toString() {
return "" + modifiedCount + " documents changed, " + unmodifiedCount + " unchanged, " + skipCount + " skipped, and "
+ failureCount + " resulted in errors or failures";
}
}
protected static class DistributedOperation
implements DistributedCallable<String, SchematicEntry, DocumentOperationResults>, Serializable,
Combiner<DocumentOperationResults> {
private static final long serialVersionUID = 1L;
private transient Cache<String, SchematicEntry> cache;
private transient Set<String> inputKeys;
private transient TransactionManager txnMgr;
private transient DocumentOperation operation;
protected DistributedOperation( DocumentOperation operation ) {
this.operation = operation;
}
@Override
public void setEnvironment( Cache<String, SchematicEntry> cache,
Set<String> inputKeys ) {
assert this.cache != null;
assert this.inputKeys != null;
this.cache = cache;
this.inputKeys = inputKeys;
this.txnMgr = this.cache.getAdvancedCache().getTransactionManager();
this.operation.setEnvironment(this.cache);
}
@Override
public DocumentOperationResults call() throws Exception {
DocumentOperationResults results = new DocumentOperationResults();
for (String key : inputKeys) {
// We operate upon each document within a transaction ...
try {
txnMgr.begin();
SchematicEntry entry = cache.get(key);
EditableDocument doc = entry.editDocumentContent();
if (operation.execute(key, doc)) {
results.recordModified();
} else {
results.recordUnmodified();
}
txnMgr.commit();
} catch (org.infinispan.util.concurrent.TimeoutException e) {
// Couldn't wait long enough for the lock, so skip this for now ...
results.recordSkipped();
} catch (NotSupportedException err) {
// No nested transactions are supported ...
results.recordFailure();
throw new SystemFailureException(err);
} catch (SecurityException err) {
// No privilege to commit ...
results.recordFailure();
throw new SystemFailureException(err);
} catch (IllegalStateException err) {
// Not associated with a txn??
results.recordFailure();
throw new SystemFailureException(err);
} catch (RollbackException err) {
// Couldn't be committed, but the txn is already rolled back ...
results.recordFailure();
} catch (HeuristicMixedException err) {
// Rollback has occurred ...
results.recordFailure();
} catch (HeuristicRollbackException err) {
// Rollback has occurred ...
results.recordFailure();
} catch (SystemException err) {
// System failed unexpectedly ...
results.recordFailure();
throw new SystemFailureException(err);
} catch (Throwable t) {
// any other exception/error we should rollback and just continue (skipping this key for now) ...
txnMgr.rollback();
results.recordFailure();
continue;
}
}
return results;
}
@Override
public DocumentOperationResults combine( DocumentOperationResults priorResult,
DocumentOperationResults newResult ) {
return priorResult.combine(newResult);
}
}
}
| apache-2.0 |
mafulafunk/wicket | wicket-core/src/main/java/org/apache/wicket/AttributeModifier.java | 12302 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.wicket;
import java.io.Serializable;
import org.apache.wicket.behavior.AttributeAppender;
import org.apache.wicket.behavior.Behavior;
import org.apache.wicket.markup.ComponentTag;
import org.apache.wicket.markup.parser.XmlTag.TagType;
import org.apache.wicket.model.IComponentAssignedModel;
import org.apache.wicket.model.IDetachable;
import org.apache.wicket.model.IModel;
import org.apache.wicket.model.Model;
import org.apache.wicket.util.io.IClusterable;
import org.apache.wicket.util.lang.Args;
import org.apache.wicket.util.value.IValueMap;
/**
* This class allows a tag attribute of a component to be modified dynamically with a value obtained
* from a model object. This concept can be used to programmatically alter the attributes of
* components, overriding the values specified in the markup. The two primary uses of this class are
* to allow overriding of markup attributes based on business logic and to support dynamic
* localization. The replacement occurs as the component tag is rendered to the response.
* <p>
* The attribute whose value is to be modified must be given on construction of the instance of this
* class along with the model containing the value to replace with.
* <p>
* If an attribute is not in the markup, this modifier will add an attribute.
* <p>
* Instances of this class should be added to components via the {@link Component#add(Behavior...)}
* method after the component has been constructed.
* <p>
* It is possible to create new subclasses of {@code AttributeModifier} by overriding the
* {@link #newValue(String, String)} method. For example, you could create an
* {@code AttributeModifier} subclass which appends the replacement value like this:
*
* <pre>
* new AttributeModifier("myAttribute", model)
* {
* protected String newValue(final String currentValue, final String replacementValue)
* {
* return currentValue + replacementValue;
* }
* };
* </pre>
*
* @author Chris Turner
* @author Eelco Hillenius
* @author Jonathan Locke
* @author Martijn Dashorst
* @author Ralf Ebert
*/
public class AttributeModifier extends Behavior implements IClusterable
{
/** Marker value to have an attribute without a value added. */
public static final String VALUELESS_ATTRIBUTE_ADD = new String("VA_ADD");
/** Marker value to have an attribute without a value removed. */
public static final String VALUELESS_ATTRIBUTE_REMOVE = new String("VA_REMOVE");
private static final long serialVersionUID = 1L;
/** Attribute specification. */
private final String attribute;
/** The model that is to be used for the replacement. */
private final IModel<?> replaceModel;
/**
* Create a new attribute modifier with the given attribute name and model to replace with. The
* additional boolean flag specifies whether to add the attribute if it is not present.
*
* @param attribute
* The attribute name to replace the value for
* @param addAttributeIfNotPresent
* Whether to add the attribute if it is not present
* @param replaceModel
* The model to replace the value with
* @deprecated AttributeModifier will now always add the attribute if not present, use
* {@link #AttributeModifier(String, IModel)} instead
*/
	@Deprecated
	public AttributeModifier(final String attribute, final boolean addAttributeIfNotPresent,
		final IModel<?> replaceModel)
	{
		// The addAttributeIfNotPresent flag is ignored: missing attributes are now
		// always added, so this simply delegates to the two-argument constructor.
		this(attribute, replaceModel);
	}
/**
* Create a new attribute modifier with the given attribute name and model to replace with. The
* attribute will be added with the model value or the value will be replaced with the model
* value if the attribute is already present.
*
* @param attribute
* The attribute name to replace the value for
* @param replaceModel
* The model to replace the value with
*/
public AttributeModifier(final String attribute, final IModel<?> replaceModel)
{
Args.notNull(attribute, "attribute");
this.attribute = attribute;
this.replaceModel = replaceModel;
}
/**
* Create a new attribute modifier with the given attribute name and model to replace with. The
* attribute will be added with the model value or the value will be replaced with the value if
* the attribute is already present.
*
* @param attribute
* The attribute name to replace the value for
* @param value
* The value for the attribute
*/
	public AttributeModifier(String attribute, Serializable value)
	{
		// Wrap the plain value in a static model and delegate to the model-based constructor.
		this(attribute, Model.of(value));
	}
/**
* Detach the value if it was a {@link IDetachable}. Internal method, shouldn't be called from
* the outside. If the attribute modifier is shared, the detach method will be called multiple
* times.
*
* @param component
* the model that initiates the detachment
*/
@Override
public final void detach(Component component)
{
if (replaceModel != null)
replaceModel.detach();
}
/**
* @return the attribute name to replace the value for
*/
	public final String getAttribute()
	{
		// Name of the markup attribute this modifier targets; set once in the constructor.
		return attribute;
	}
@Override
public final void onComponentTag(Component component, ComponentTag tag)
{
if (tag.getType() != TagType.CLOSE)
replaceAttributeValue(component, tag);
}
/**
* Checks the given component tag for an instance of the attribute to modify and if all criteria
* are met then replace the value of this attribute with the value of the contained model
* object.
*
* @param component
* The component
* @param tag
* The tag to replace the attribute value for
*/
	public final void replaceAttributeValue(final Component component, final ComponentTag tag)
	{
		// Only applies when this behavior is enabled for the component.
		if (isEnabled(component))
		{
			final IValueMap attributes = tag.getAttributes();
			final Object replacementValue = getReplacementOrNull(component);
			// Identity comparison (==) is intentional: the marker constants are created
			// with 'new String(...)', so only the markers themselves ever match here —
			// an equal user-supplied string will not.
			if (VALUELESS_ATTRIBUTE_ADD == replacementValue)
			{
				// Add the attribute with no value (e.g. 'disabled').
				attributes.put(attribute, null);
			}
			else if (VALUELESS_ATTRIBUTE_REMOVE == replacementValue)
			{
				attributes.remove(attribute);
			}
			else
			{
				// Let newValue(...) decide the final value; subclasses may combine
				// the current and replacement values. A null result leaves the
				// attribute untouched.
				final String value = toStringOrNull(attributes.get(attribute));
				final String newValue = newValue(value, toStringOrNull(replacementValue));
				if (newValue != null)
				{
					attributes.put(attribute, newValue);
				}
			}
		}
	}
/**
* @see java.lang.Object#toString()
*/
@Override
public String toString()
{
	// Debug representation; output format kept identical to the concatenation-based original.
	StringBuilder sb = new StringBuilder("[AttributeModifier attribute=");
	sb.append(attribute).append(", replaceModel=").append(replaceModel).append(']');
	return sb.toString();
}
/**
* gets replacement with null check.
*
* @param component
* @return replacement value
*/
/**
 * Resolves the replacement value from the model, with a null check. Component-assigned
 * models are first wrapped for the given component.
 *
 * @param component
 *            the component the model may be assigned to
 * @return the model object, or {@code null} when there is no model
 */
private Object getReplacementOrNull(final Component component)
{
	IModel<?> model = replaceModel;
	if (model instanceof IComponentAssignedModel)
	{
		model = ((IComponentAssignedModel<?>)model).wrapOnAssignment(component);
	}
	if (model == null)
	{
		return null;
	}
	return model.getObject();
}
/**
* gets replacement as a string with null check.
*
* @param replacementValue
* @return replacement value as a string
*/
/**
 * Null-safe conversion of the replacement value to a string.
 *
 * @param replacementValue
 *            the value to convert, may be {@code null}
 * @return {@code replacementValue.toString()}, or {@code null} for a {@code null} input
 */
private String toStringOrNull(final Object replacementValue)
{
	if (replacementValue == null)
	{
		return null;
	}
	return replacementValue.toString();
}
/**
* Gets the replacement model. Allows subclasses access to replace model.
*
* @return the replace model of this attribute modifier
*/
protected final IModel<?> getReplaceModel()
{
	return replaceModel; // direct accessor; no wrapping is applied here
}
/**
* Gets the value that should replace the current attribute value. This gives users the ultimate
* means to customize what will be used as the attribute value. For instance, you might decide
* to append the replacement value to the current instead of just replacing it as is Wicket's
* default.
*
* @param currentValue
* The current attribute value. This value might be null!
* @param replacementValue
* The replacement value. This value might be null!
* @return The value that should replace the current attribute value
*/
protected String newValue(final String currentValue, final String replacementValue)
{
	// Default policy: discard the current value and use the replacement verbatim.
	// Subclasses (e.g. appenders) override this to combine the two.
	return replacementValue;
}
/**
* Creates a attribute modifier that replaces the current value with the given value.
*
* @param attributeName
* @param value
* @return the attribute modifier
* @since 1.5
*/
public static AttributeModifier replace(String attributeName, IModel<?> value)
{
	Args.notEmpty(attributeName, "attributeName");
	// Plain replacement semantics: the model value supersedes any existing attribute value.
	final AttributeModifier modifier = new AttributeModifier(attributeName, value);
	return modifier;
}
/**
* Creates a attribute modifier that replaces the current value with the given value.
*
* @param attributeName
* @param value
* @return the attribute modifier
* @since 1.5
*/
public static AttributeModifier replace(String attributeName, Serializable value)
{
	Args.notEmpty(attributeName, "attributeName");
	// Route through the model-based factory for consistency with append()/prepend(),
	// which also wrap raw values via Model.of(). Behavior is unchanged: the constructor
	// used previously performed the same Model.of() wrapping internally.
	return replace(attributeName, Model.of(value));
}
/**
* Creates a attribute modifier that appends the current value with the given {@code value}
* using a default space character (' ') separator.
*
* @param attributeName
* @param value
* @return the attribute modifier
* @since 1.5
* @see AttributeAppender
*/
public static AttributeAppender append(String attributeName, IModel<?> value)
{
	Args.notEmpty(attributeName, "attributeName");
	AttributeAppender appender = new AttributeAppender(attributeName, value);
	// Appended values are joined to the existing attribute value with a single space.
	return appender.setSeparator(" ");
}
/**
* Creates a attribute modifier that appends the current value with the given {@code value}
* using a default space character (' ') separator.
*
* @param attributeName
* @param value
* @return the attribute modifier
* @since 1.5
* @see AttributeAppender
*/
public static AttributeAppender append(String attributeName, Serializable value)
{
	Args.notEmpty(attributeName, "attributeName");
	// Wrap the raw value in a Model and reuse the model-based overload (space separator).
	return append(attributeName, Model.of(value));
}
/**
* Creates a attribute modifier that prepends the current value with the given {@code value}
* using a default space character (' ') separator.
*
* @param attributeName
* @param value
* @return the attribute modifier
* @since 1.5
* @see AttributeAppender
*/
public static AttributeAppender prepend(String attributeName, IModel<?> value)
{
	Args.notEmpty(attributeName, "attributeName");
	// Prepending is appending with the operands flipped: override newValue() so the
	// replacement value comes first when the two are concatenated.
	AttributeAppender prepender = new AttributeAppender(attributeName, value)
	{
		private static final long serialVersionUID = 1L;

		@Override
		protected String newValue(String currentValue, String replacementValue)
		{
			// swap currentValue and replacementValue in the call to the concatenator
			return super.newValue(replacementValue, currentValue);
		}
	};
	return prepender.setSeparator(" ");
}
/**
* Creates a attribute modifier that prepends the current value with the given {@code value}
* using a default space character (' ') separator.
*
* @param attributeName
* @param value
* @return the attribute modifier
* @since 1.5
* @see AttributeAppender
*/
public static AttributeAppender prepend(String attributeName, Serializable value)
{
	Args.notEmpty(attributeName, "attributeName");
	// Wrap the raw value in a Model and reuse the model-based overload (space separator).
	return prepend(attributeName, Model.of(value));
}
/**
* Creates a attribute modifier that removes an attribute with the specified name
*
* @param attributeName
* the name of the attribute to be removed
* @return the attribute modifier
* @since 1.5
*/
public static AttributeModifier remove(String attributeName)
{
	Args.notEmpty(attributeName, "attributeName");
	// The marker model value instructs replaceAttributeValue() to strip the attribute entirely.
	return replace(attributeName, Model.of(VALUELESS_ATTRIBUTE_REMOVE));
}
}
| apache-2.0 |
dkschlos/super-csv-declarative | super-csv-declarative/src/main/java/com/github/dmn1k/supercsv/io/declarative/provider/ConvertCellProcessorProvider.java | 2162 | /*
* Copyright 2007 Kasper B. Graversen
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.dmn1k.supercsv.io.declarative.provider;
import com.github.dmn1k.supercsv.model.CellProcessorFactory;
import com.github.dmn1k.supercsv.model.DeclarativeCellProcessorProvider;
import com.github.dmn1k.supercsv.model.ProcessingMetadata;
import com.github.dmn1k.supercsv.internal.util.ReflectionUtilsExt;
import org.supercsv.cellprocessor.ift.CellProcessor;
/**
* CellProcessorProvider for {@link com.github.dmn1k.supercsv.io.declarative.annotation.Convert}
*
* @since 2.5
* @author Dominik Schlosser
*/
public class ConvertCellProcessorProvider implements DeclarativeCellProcessorProvider<com.github.dmn1k.supercsv.io.declarative.annotation.Convert> {
    /**
     * {@inheritDoc}
     */
    @Override
    public CellProcessorFactory create(ProcessingMetadata<com.github.dmn1k.supercsv.io.declarative.annotation.Convert> metadata) {
        return new CellProcessorFactory() {
            @Override
            public int getOrder() {
                // Position in the processor chain is taken from the annotation's order attribute.
                return metadata.getAnnotation().order();
            }

            @Override
            public CellProcessor create(CellProcessor next) {
                // Instantiate the converter class named in the annotation and chain it before 'next'.
                return new com.github.dmn1k.supercsv.cellprocessor.Convert(ReflectionUtilsExt.instantiateBean(metadata.getAnnotation().value()),
                    next);
            }
        };
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Class<com.github.dmn1k.supercsv.io.declarative.annotation.Convert> getType() {
        return com.github.dmn1k.supercsv.io.declarative.annotation.Convert.class;
    }
}
| apache-2.0 |
cbeust/jcommander | src/test/java/com/beust/jcommander/DefaultValueTest.java | 3901 | /**
* Copyright (C) 2010 the original author or authors.
* See the notice.md file distributed with this work for additional
* information regarding copyright ownership.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.beust.jcommander;
import com.beust.jcommander.internal.Lists;
import com.beust.jcommander.internal.Sets;
import org.testng.Assert;
import org.testng.annotations.Test;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
/**
* Test behaviour of default parameter values
* @author rodionmoiseev
*/
public class DefaultValueTest {

    // An empty-collection default stays empty unless the corresponding option is supplied.
    @Test
    public void emptyDefaultValueForListParameterStaysEmptyIfNotAssignedOrIsSetOtherwise() {
        MyOptsWithEmptyDefaults opts = new MyOptsWithEmptyDefaults();
        JCommander cmd = new JCommander(opts);
        cmd.parse("-a", "anotherValue");
        // -a was supplied once: the list now holds exactly that value.
        Assert.assertEquals(opts.list.size(), 1);
        Assert.assertEquals(opts.list.get(0), "anotherValue");
        // -b was never supplied: its empty default remains empty.
        Assert.assertEquals(opts.set.size(), 0);
    }

    // A non-empty default is overwritten when the option is supplied, and kept otherwise.
    @Test
    public void defaultValueForListParametersGetsOverwrittenWithSpecifiedValueOrStaysAsDefaultOtherwise() {
        MyOptsWithDefaultValues opts = new MyOptsWithDefaultValues();
        JCommander cmd = new JCommander(opts);
        cmd.parse("-a", "anotherValue");
        // -a was supplied: "defaultValue" is replaced, not appended to.
        Assert.assertEquals(opts.list.size(), 1);
        Assert.assertEquals(opts.list.get(0), "anotherValue");
        // -b was not supplied: its default survives.
        Assert.assertEquals(opts.set.size(), 1);
        Assert.assertEquals(opts.set.iterator().next(), "defaultValue");
    }

    @Test
    public void anyNumberOfValuesCanBeSetToListParameters_ForEmptyDefaults(){
        MyOptsWithEmptyDefaults opts = new MyOptsWithEmptyDefaults();
        testSettingMultipleValuesToListTypeParameters(opts);
    }

    @Test
    public void anyNumberOfValuesCanBeSetToListParameters_ForNonEmptyDefaults(){
        MyOptsWithDefaultValues opts = new MyOptsWithDefaultValues();
        testSettingMultipleValuesToListTypeParameters(opts);
    }

    // Shared assertion helper: repeated -a/-b options accumulate in order (any defaults replaced).
    private void testSettingMultipleValuesToListTypeParameters(MyOpts opts) {
        JCommander cmd = new JCommander(opts);
        cmd.parse("-a", "anotherValue", "-a", "anotherValue2",
            "-b", "anotherValue3", "-b", "anotherValue4");
        Assert.assertEquals(opts.list.size(), 2);
        Assert.assertEquals(opts.list.get(0), "anotherValue");
        Assert.assertEquals(opts.list.get(1), "anotherValue2");
        Assert.assertEquals(opts.set.size(), 2);
        // Iteration order is relied upon here; the sets are created as insertion-ordered
        // LinkedHashSets (see MyOptsWithEmptyDefaults / singletonSet).
        Iterator<String> arg2it = opts.set.iterator();
        Assert.assertEquals(arg2it.next(), "anotherValue3");
        Assert.assertEquals(arg2it.next(), "anotherValue4");
    }

    // Options container: -a binds to a List, -b binds to a Set.
    public static class MyOpts {
        @Parameter(names = "-a")
        public List<String> list;

        @Parameter(names = "-b")
        public Set<String> set;
    }

    // Variant whose collections start out holding "defaultValue".
    public static final class MyOptsWithDefaultValues extends MyOpts {
        public MyOptsWithDefaultValues(){
            this.list = singletonList("defaultValue");
            this.set = singletonSet("defaultValue");
        }
    }

    // Variant whose collections start out empty (but non-null).
    public static final class MyOptsWithEmptyDefaults extends MyOpts {
        public MyOptsWithEmptyDefaults(){
            this.list = Lists.newArrayList();
            this.set = Sets.newLinkedHashSet();
        }
    }

    // Builds a mutable single-element list (unlike Collections.singletonList, which is immutable).
    public static final List<String> singletonList(String value) {
        List<String> list = Lists.newArrayList();
        list.add(value);
        return list;
    }

    // Builds a mutable single-element, insertion-ordered set.
    public static final Set<String> singletonSet(String value){
        Set<String> set = Sets.newLinkedHashSet();
        set.add(value);
        return set;
    }
}
| apache-2.0 |
OpenSkywalking/skywalking | apm-webapp/src/main/java/org/apache/skywalking/oap/server/webapp/ApplicationStartUp.java | 1150 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.oap.server.webapp;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
@SpringBootApplication
public class ApplicationStartUp {

    /**
     * Boots the web application via Spring Boot's auto-configuration.
     *
     * @param args command-line arguments forwarded to Spring
     */
    public static void main(String[] args) {
        SpringApplication.run(ApplicationStartUp.class, args);
    }
}
| apache-2.0 |
Saulis/gerrit | gerrit-server/src/main/java/com/google/gerrit/server/plugins/Plugin.java | 4497 | // Copyright (C) 2012 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.server.plugins;
import com.google.common.base.Strings;
import com.google.common.collect.Lists;
import com.google.gerrit.common.Nullable;
import com.google.gerrit.extensions.registration.RegistrationHandle;
import com.google.gerrit.extensions.registration.ReloadableRegistrationHandle;
import com.google.gerrit.lifecycle.LifecycleManager;
import com.google.gerrit.server.PluginUser;
import com.google.inject.Injector;
import org.eclipse.jgit.internal.storage.file.FileSnapshot;
import java.io.File;
import java.util.Collections;
import java.util.List;
import java.util.jar.Attributes;
import java.util.jar.JarFile;
import java.util.jar.Manifest;
/**
 * In-memory representation of a plugin loaded from a file on disk. Concrete
 * subclasses supply the plugin's injectors, lifecycle hooks and version.
 */
public abstract class Plugin {
  /** How the plugin integrates: extension jar, full plugin, or JavaScript. */
  public static enum ApiType {
    EXTENSION, PLUGIN, JS
  }

  /** Unique key that changes whenever a plugin reloads. */
  public static final class CacheKey {
    private final String name;

    CacheKey(String name) {
      this.name = name;
    }

    @Override
    public String toString() {
      // Identity hash makes each reload's key render distinctly.
      int id = System.identityHashCode(this);
      return String.format("Plugin[%s@%x]", name, id);
    }
  }

  /**
   * Parses the "Gerrit-ApiType" manifest attribute; a missing or empty value
   * defaults to {@link ApiType#EXTENSION}.
   *
   * @throws InvalidPluginException if the attribute holds an unrecognized value
   */
  static ApiType getApiType(Manifest manifest) throws InvalidPluginException {
    Attributes main = manifest.getMainAttributes();
    String v = main.getValue("Gerrit-ApiType");
    if (Strings.isNullOrEmpty(v)
        || ApiType.EXTENSION.name().equalsIgnoreCase(v)) {
      return ApiType.EXTENSION;
    } else if (ApiType.PLUGIN.name().equalsIgnoreCase(v)) {
      return ApiType.PLUGIN;
    } else if (ApiType.JS.name().equalsIgnoreCase(v)) {
      return ApiType.JS;
    } else {
      throw new InvalidPluginException("Invalid Gerrit-ApiType: " + v);
    }
  }

  private final String name;
  private final File srcFile;
  private final ApiType apiType;
  // True when the source file name carries the ".disabled" suffix.
  private final boolean disabled;
  private final CacheKey cacheKey;
  private final PluginUser pluginUser;
  // Snapshot of the source file taken at load time; used by isModified().
  private final FileSnapshot snapshot;

  protected LifecycleManager manager;
  // Lazily created; only populated with handles that can be re-bound on reload.
  private List<ReloadableRegistrationHandle<?>> reloadableHandles;

  public Plugin(String name,
      File srcFile,
      PluginUser pluginUser,
      FileSnapshot snapshot,
      ApiType apiType) {
    this.name = name;
    this.srcFile = srcFile;
    this.apiType = apiType;
    this.snapshot = snapshot;
    this.pluginUser = pluginUser;
    this.cacheKey = new Plugin.CacheKey(name);
    this.disabled = srcFile.getName().endsWith(".disabled");
  }

  PluginUser getPluginUser() {
    return pluginUser;
  }

  public File getSrcFile() {
    return srcFile;
  }

  public String getName() {
    return name;
  }

  @Nullable
  public abstract String getVersion();

  public ApiType getApiType() {
    return apiType;
  }

  public Plugin.CacheKey getCacheKey() {
    return cacheKey;
  }

  public boolean isDisabled() {
    return disabled;
  }

  /** Starts the plugin within the given Guice environment. */
  abstract void start(PluginGuiceEnvironment env) throws Exception;

  /** Stops the plugin within the given Guice environment. */
  abstract void stop(PluginGuiceEnvironment env);

  public abstract JarFile getJarFile();

  public abstract Injector getSysInjector();

  @Nullable
  public abstract Injector getSshInjector();

  @Nullable
  public abstract Injector getHttpInjector();

  /**
   * Registers a handle with this plugin's lifecycle manager. Reloadable handles
   * are additionally tracked (see {@link #getReloadableHandles()}). No-op when
   * the plugin has no lifecycle manager yet.
   */
  public void add(RegistrationHandle handle) {
    if (manager != null) {
      if (handle instanceof ReloadableRegistrationHandle) {
        if (reloadableHandles == null) {
          reloadableHandles = Lists.newArrayList();
        }
        reloadableHandles.add((ReloadableRegistrationHandle<?>) handle);
      }
      manager.add(handle);
    }
  }

  List<ReloadableRegistrationHandle<?>> getReloadableHandles() {
    if (reloadableHandles != null) {
      return reloadableHandles;
    }
    return Collections.emptyList();
  }

  @Override
  public String toString() {
    return "Plugin [" + name + "]";
  }

  abstract boolean canReload();

  /** @return true when the jar's last-modified time differs from the load-time snapshot. */
  boolean isModified(File jar) {
    return snapshot.lastModified() != jar.lastModified();
  }
}
| apache-2.0 |
robertoschwald/cas | core/cas-server-core-tickets-api/src/main/java/org/apereo/cas/ticket/support/MultiTimeUseOrTimeoutExpirationPolicy.java | 4826 | package org.apereo.cas.ticket.support;
import org.apereo.cas.ticket.TicketState;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import lombok.EqualsAndHashCode;
import lombok.NoArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import lombok.val;
import org.springframework.util.Assert;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.time.temporal.ChronoUnit;
/**
* ExpirationPolicy that is based on certain number of uses of a ticket or a
* certain time period for a ticket to exist.
*
* @author Scott Battaglia
* @since 3.0.0
*/
@JsonTypeInfo(use = JsonTypeInfo.Id.CLASS)
@Slf4j
@NoArgsConstructor
@EqualsAndHashCode(callSuper = true)
public class MultiTimeUseOrTimeoutExpirationPolicy extends AbstractCasExpirationPolicy {
private static final long serialVersionUID = -5704993954986738308L;
@JsonProperty(value = "timeToLive")
private long timeToKillInSeconds;
@JsonProperty("numberOfUses")
private int numberOfUses;
/**
* Instantiates a new multi time use or timeout expiration policy.
*
* @param numberOfUses the number of uses
* @param timeToKillInSeconds the time to kill in seconds
*/
@JsonCreator
public MultiTimeUseOrTimeoutExpirationPolicy(@JsonProperty("numberOfUses") final int numberOfUses, @JsonProperty("timeToLive") final long timeToKillInSeconds) {
this.timeToKillInSeconds = timeToKillInSeconds;
this.numberOfUses = numberOfUses;
Assert.isTrue(this.numberOfUses > 0, "numberOfUses must be greater than 0.");
Assert.isTrue(this.timeToKillInSeconds > 0, "timeToKillInSeconds must be greater than 0.");
}
@Override
public boolean isExpired(final TicketState ticketState) {
if (ticketState == null) {
LOGGER.debug("Ticket state is null for [{}]. Ticket has expired.", this.getClass().getSimpleName());
return true;
}
val countUses = ticketState.getCountOfUses();
if (countUses >= this.numberOfUses) {
LOGGER.debug("Ticket usage count [{}] is greater than or equal to [{}]. Ticket has expired", countUses, this.numberOfUses);
return true;
}
val systemTime = getCurrentSystemTime();
val lastTimeUsed = ticketState.getLastTimeUsed();
val expirationTime = lastTimeUsed.plus(this.timeToKillInSeconds, ChronoUnit.SECONDS);
if (systemTime.isAfter(expirationTime)) {
LOGGER.debug("Ticket has expired because the difference between current time [{}] and ticket time [{}] is greater than or equal to [{}].",
systemTime, lastTimeUsed, this.timeToKillInSeconds);
return true;
}
return super.isExpired(ticketState);
}
/**
* Gets current system time.
*
* @return the current system time
*/
protected ZonedDateTime getCurrentSystemTime() {
return ZonedDateTime.now(ZoneOffset.UTC);
}
@Override
public Long getTimeToLive() {
return this.timeToKillInSeconds;
}
@JsonIgnore
@Override
public Long getTimeToIdle() {
return 0L;
}
/**
* The Proxy ticket expiration policy.
*/
@JsonTypeInfo(use = JsonTypeInfo.Id.CLASS)
public static class ProxyTicketExpirationPolicy extends MultiTimeUseOrTimeoutExpirationPolicy {
private static final long serialVersionUID = -5814201080268311070L;
/**
* Instantiates a new proxy ticket expiration policy.
*
* @param numberOfUses the number of uses
* @param timeToKillInSeconds the time to kill in seconds
*/
@JsonCreator
public ProxyTicketExpirationPolicy(@JsonProperty("numberOfUses") final int numberOfUses, @JsonProperty("timeToLive") final long timeToKillInSeconds) {
super(numberOfUses, timeToKillInSeconds);
}
}
/**
* The Service ticket expiration policy.
*/
@JsonTypeInfo(use = JsonTypeInfo.Id.CLASS)
public static class ServiceTicketExpirationPolicy extends MultiTimeUseOrTimeoutExpirationPolicy {
private static final long serialVersionUID = -5814201080268311070L;
/**
* Instantiates a new Service ticket expiration policy.
*
* @param numberOfUses the number of uses
* @param timeToKillInSeconds the time to kill in seconds
*/
@JsonCreator
public ServiceTicketExpirationPolicy(@JsonProperty("numberOfUses") final int numberOfUses, @JsonProperty("timeToLive") final long timeToKillInSeconds) {
super(numberOfUses, timeToKillInSeconds);
}
}
}
| apache-2.0 |
Twister915/Trident | src/main/java/net/tridentsdk/server/packets/play/out/PacketPlayOutUpdateEntityNBT.java | 1191 | /*
* Trident - A Multithreaded Server Alternative
* Copyright 2014 The TridentSDK Team
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.tridentsdk.server.packets.play.out;
import io.netty.buffer.ByteBuf;
import net.tridentsdk.server.netty.Codec;
import net.tridentsdk.server.netty.packet.OutPacket;
/**
 * Outbound (clientbound) play-state packet, id 0x49, carrying an entity id whose
 * NBT data is to be updated. NBT payload encoding is not implemented yet.
 */
public class PacketPlayOutUpdateEntityNBT extends OutPacket {
    /** Network id of the entity whose NBT data is being sent. */
    protected int entityId;

    /** @return the packet id (0x49) */
    @Override
    public int id() {
        return 0x49;
    }

    /** @return the network id of the target entity */
    public int entityId() {
        return this.entityId;
    }

    /** Writes the payload: the entity id as a protocol VarInt. */
    @Override
    public void encode(ByteBuf buf) {
        Codec.writeVarInt32(buf, this.entityId);
        // TODO: NBT tag writing
    }
}
| apache-2.0 |
jingwei/krati | krati-main/src/retention/java/krati/retention/EventBatchCursor.java | 913 | /*
* Copyright (c) 2010-2012 LinkedIn, Inc
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package krati.retention;
/**
* EventBatchCursor
*
* @version 0.4.2
* @author jwu
*
* <p>
* 07/31, 2011 - Created <br/>
*/
public interface EventBatchCursor {

    /**
     * @return the lookup value of this cursor — presumably the batch's position in
     *         the retention store; verify against implementations.
     */
    public int getLookup();

    /** @return the header of the event batch this cursor refers to */
    public EventBatchHeader getHeader();

    /**
     * Updates the header of the event batch this cursor refers to.
     *
     * @param header the new batch header
     */
    public void setHeader(EventBatchHeader header);
}
| apache-2.0 |
mmaracic/elasticsearch | core/src/test/java/org/elasticsearch/rest/BytesRestResponseTests.java | 8953 | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.rest;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.action.search.SearchPhaseExecutionException;
import org.elasticsearch.action.search.ShardSearchFailure;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.index.Index;
import org.elasticsearch.search.SearchShardTarget;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.rest.FakeRestRequest;
import org.elasticsearch.transport.RemoteTransportException;
import java.io.FileNotFoundException;
import java.io.IOException;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.notNullValue;
/**
*
*/
/**
 * Unit tests for {@code BytesRestResponse}: header propagation, simple vs. detailed
 * exception rendering, error_trace handling, root-cause guessing and conversion of
 * wrapped search failures.
 */
public class BytesRestResponseTests extends ESTestCase {

    // Custom headers attached to an ElasticsearchException must appear on the response.
    public void testWithHeaders() throws Exception {
        RestRequest request = new FakeRestRequest();
        RestChannel channel = randomBoolean() ? new DetailedExceptionRestChannel(request) : new SimpleExceptionRestChannel(request);

        BytesRestResponse response = new BytesRestResponse(channel, new WithHeadersException());
        assertThat(response.getHeaders().get("n1"), notNullValue());
        assertThat(response.getHeaders().get("n1"), contains("v11", "v12"));
        assertThat(response.getHeaders().get("n2"), notNullValue());
        assertThat(response.getHeaders().get("n2"), contains("v21", "v22"));
    }

    // Non-detailed channel: only the top-level exception is rendered; the cause is suppressed.
    public void testSimpleExceptionMessage() throws Exception {
        RestRequest request = new FakeRestRequest();
        RestChannel channel = new SimpleExceptionRestChannel(request);

        Throwable t = new ElasticsearchException("an error occurred reading data", new FileNotFoundException("/foo/bar"));
        BytesRestResponse response = new BytesRestResponse(channel, t);
        String text = response.content().toUtf8();
        assertThat(text, containsString("ElasticsearchException[an error occurred reading data]"));
        assertThat(text, not(containsString("FileNotFoundException")));
        assertThat(text, not(containsString("/foo/bar")));
        assertThat(text, not(containsString("error_trace")));
    }

    // Detailed channel: both the exception and its cause are rendered as structured JSON.
    public void testDetailedExceptionMessage() throws Exception {
        RestRequest request = new FakeRestRequest();
        RestChannel channel = new DetailedExceptionRestChannel(request);

        Throwable t = new ElasticsearchException("an error occurred reading data", new FileNotFoundException("/foo/bar"));
        BytesRestResponse response = new BytesRestResponse(channel, t);
        String text = response.content().toUtf8();
        assertThat(text, containsString("{\"type\":\"exception\",\"reason\":\"an error occurred reading data\"}"));
        assertThat(text, containsString("{\"type\":\"file_not_found_exception\",\"reason\":\"/foo/bar\"}"));
    }

    // A plain Throwable (not an ElasticsearchException) is not exposed through a simple channel.
    public void testNonElasticsearchExceptionIsNotShownAsSimpleMessage() throws Exception {
        RestRequest request = new FakeRestRequest();
        RestChannel channel = new SimpleExceptionRestChannel(request);

        Throwable t = new Throwable("an error occurred reading data", new FileNotFoundException("/foo/bar"));
        BytesRestResponse response = new BytesRestResponse(channel, t);
        String text = response.content().toUtf8();
        assertThat(text, not(containsString("Throwable[an error occurred reading data]")));
        assertThat(text, not(containsString("FileNotFoundException[/foo/bar]")));
        assertThat(text, not(containsString("error_trace")));
        assertThat(text, containsString("\"error\":\"No ElasticsearchException found\""));
    }

    // With error_trace=true on a detailed channel, the stack trace is included in the body.
    public void testErrorTrace() throws Exception {
        RestRequest request = new FakeRestRequest();
        request.params().put("error_trace", "true");
        RestChannel channel = new DetailedExceptionRestChannel(request);

        Throwable t = new Throwable("an error occurred reading data", new FileNotFoundException("/foo/bar"));
        BytesRestResponse response = new BytesRestResponse(channel, t);
        String text = response.content().toUtf8();
        assertThat(text, containsString("\"type\":\"throwable\",\"reason\":\"an error occurred reading data\""));
        assertThat(text, containsString("{\"type\":\"file_not_found_exception\""));
        assertThat(text, containsString("\"stack_trace\":\"[an error occurred reading data]"));
    }

    // The root_cause element reflects the innermost meaningful exception.
    public void testGuessRootCause() throws IOException {
        RestRequest request = new FakeRestRequest();
        RestChannel channel = new DetailedExceptionRestChannel(request);
        {
            // ElasticsearchException wrapping a cause: the wrapper itself is the root cause.
            Throwable t = new ElasticsearchException("an error occurred reading data", new FileNotFoundException("/foo/bar"));
            BytesRestResponse response = new BytesRestResponse(channel, t);
            String text = response.content().toUtf8();
            assertThat(text, containsString("{\"root_cause\":[{\"type\":\"exception\",\"reason\":\"an error occurred reading data\"}]"));
        }
        {
            // Bare JDK exception: it becomes the root cause directly.
            Throwable t = new FileNotFoundException("/foo/bar");
            BytesRestResponse response = new BytesRestResponse(channel, t);
            String text = response.content().toUtf8();
            assertThat(text, containsString("{\"root_cause\":[{\"type\":\"file_not_found_exception\",\"reason\":\"/foo/bar\"}]"));
        }
    }

    // A null throwable renders as "unknown" and never includes a stack trace.
    public void testNullThrowable() throws Exception {
        RestRequest request = new FakeRestRequest();
        RestChannel channel = new SimpleExceptionRestChannel(request);

        BytesRestResponse response = new BytesRestResponse(channel, null);
        String text = response.content().toUtf8();
        assertThat(text, containsString("\"error\":\"unknown\""));
        assertThat(text, not(containsString("error_trace")));
    }

    // A RemoteTransportException wrapper is unwrapped; grouped shard failures are rendered.
    public void testConvert() throws IOException {
        RestRequest request = new FakeRestRequest();
        RestChannel channel = new DetailedExceptionRestChannel(request);
        ShardSearchFailure failure = new ShardSearchFailure(new ParsingException(1, 2, "foobar", null),
            new SearchShardTarget("node_1", new Index("foo", "_na_"), 1));
        ShardSearchFailure failure1 = new ShardSearchFailure(new ParsingException(1, 2, "foobar", null),
            new SearchShardTarget("node_1", new Index("foo", "_na_"), 2));
        SearchPhaseExecutionException ex = new SearchPhaseExecutionException("search", "all shards failed", new ShardSearchFailure[] {failure, failure1});
        BytesRestResponse response = new BytesRestResponse(channel, new RemoteTransportException("foo", ex));
        String text = response.content().toUtf8();
        String expected = "{\"error\":{\"root_cause\":[{\"type\":\"parsing_exception\",\"reason\":\"foobar\",\"line\":1,\"col\":2}],\"type\":\"search_phase_execution_exception\",\"reason\":\"all shards failed\",\"phase\":\"search\",\"grouped\":true,\"failed_shards\":[{\"shard\":1,\"index\":\"foo\",\"node\":\"node_1\",\"reason\":{\"type\":\"parsing_exception\",\"reason\":\"foobar\",\"line\":1,\"col\":2}}]},\"status\":400}";
        assertEquals(expected.trim(), text.trim());
        String stackTrace = ExceptionsHelper.stackTrace(ex);
        assertTrue(stackTrace.contains("Caused by: ParsingException[foobar]"));
    }

    // Fixture exception carrying two multi-valued headers.
    public static class WithHeadersException extends ElasticsearchException {

        WithHeadersException() {
            super("");
            this.addHeader("n1", "v11", "v12");
            this.addHeader("n2", "v21", "v22");
        }
    }

    // Channel that renders errors without detail (detailed=false); sendResponse is a no-op.
    private static class SimpleExceptionRestChannel extends AbstractRestChannel {

        SimpleExceptionRestChannel(RestRequest request) {
            super(request, false);
        }

        @Override
        public void sendResponse(RestResponse response) {
        }
    }

    // Channel that renders errors with detail (detailed=true); sendResponse is a no-op.
    private static class DetailedExceptionRestChannel extends AbstractRestChannel {

        DetailedExceptionRestChannel(RestRequest request) {
            super(request, true);
        }

        @Override
        public void sendResponse(RestResponse response) {
        }
    }
}
| apache-2.0 |
gradle/gradle | subprojects/internal-performance-testing/src/main/groovy/org/gradle/performance/results/PerformanceScenario.java | 2296 | /*
* Copyright 2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.performance.results;
import java.util.Comparator;
import java.util.Objects;
public class PerformanceScenario implements Comparable<PerformanceScenario> {
private static final Comparator<PerformanceScenario> PERFORMANCE_SCENARIO_COMPARATOR = Comparator
.comparing(PerformanceScenario::getTestName)
.thenComparing(PerformanceScenario::getClassName);
private final String className;
private final String testName;
public PerformanceScenario(String className, String testName) {
this.className = className;
this.testName = testName;
}
public String getClassName() {
return className;
}
public String getSimpleClassName() {
return className.substring(className.lastIndexOf('.') + 1);
}
public String getTestName() {
return testName;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
PerformanceScenario that = (PerformanceScenario) o;
return Objects.equals(className, that.className) &&
Objects.equals(testName, that.testName);
}
@Override
public int hashCode() {
return Objects.hash(className, testName);
}
@Override
public String toString() {
return "PerformanceScenario{" +
"className='" + className + '\'' +
", scenario='" + testName + '\'' +
'}';
}
@Override
public int compareTo(PerformanceScenario o) {
return PERFORMANCE_SCENARIO_COMPARATOR.compare(this, o);
}
}
| apache-2.0 |
manstis/drools | kie-pmml-trusty/kie-pmml-compiler/kie-pmml-compiler-commons/src/main/java/org/kie/pmml/compiler/commons/codegenfactories/KiePMMLSimpleSetPredicateFactory.java | 5615 | /*
* Copyright 2021 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.pmml.compiler.commons.codegenfactories;
import java.util.List;
import com.github.javaparser.ast.CompilationUnit;
import com.github.javaparser.ast.NodeList;
import com.github.javaparser.ast.body.ClassOrInterfaceDeclaration;
import com.github.javaparser.ast.body.MethodDeclaration;
import com.github.javaparser.ast.body.VariableDeclarator;
import com.github.javaparser.ast.expr.Expression;
import com.github.javaparser.ast.expr.MethodCallExpr;
import com.github.javaparser.ast.expr.NameExpr;
import com.github.javaparser.ast.expr.StringLiteralExpr;
import com.github.javaparser.ast.stmt.BlockStmt;
import org.dmg.pmml.SimpleSetPredicate;
import org.kie.pmml.api.enums.ARRAY_TYPE;
import org.kie.pmml.api.enums.IN_NOTIN;
import org.kie.pmml.api.exceptions.KiePMMLException;
import org.kie.pmml.compiler.commons.utils.JavaParserUtils;
import static org.kie.pmml.commons.Constants.MISSING_BODY_TEMPLATE;
import static org.kie.pmml.commons.Constants.MISSING_VARIABLE_INITIALIZER_TEMPLATE;
import static org.kie.pmml.commons.Constants.MISSING_VARIABLE_IN_BODY;
import static org.kie.pmml.compiler.commons.utils.CommonCodegenUtils.getChainedMethodCallExprFrom;
import static org.kie.pmml.compiler.commons.utils.CommonCodegenUtils.getExpressionForObject;
import static org.kie.pmml.compiler.commons.utils.CommonCodegenUtils.getVariableDeclarator;
import static org.kie.pmml.compiler.commons.utils.JavaParserUtils.MAIN_CLASS_NOT_FOUND;
import static org.kie.pmml.compiler.api.utils.ModelUtils.getObjectsFromArray;
/**
 * Class meant to provide <i>helper</i> method to retrieve <code>KiePMMLSimpleSetPredicate</code> code-generators
 * out of <code>SimpleSetPredicate</code>s.
 * <p>
 * Works by loading a Java source template, locating a variable declaration inside it,
 * and rewriting the declaration's builder-call arguments from the given PMML predicate.
 */
public class KiePMMLSimpleSetPredicateFactory {

    // Template file name and the names of the class/method/variable expected inside it.
    static final String KIE_PMML_SIMPLESET_PREDICATE_TEMPLATE_JAVA = "KiePMMLSimpleSetPredicateTemplate.tmpl";
    static final String KIE_PMML_SIMPLESET_PREDICATE_TEMPLATE = "KiePMMLSimpleSetPredicateTemplate";
    static final String GETKIEPMMLSIMPLESETPREDICATE = "getKiePMMLSimpleSetPredicate";
    static final String SIMPLESET_PREDICATE = "simpleSetPredicate";

    // Parsed template class, loaded once; fails fast with KiePMMLException if missing.
    static final ClassOrInterfaceDeclaration SIMPLESET_PREDICATE_TEMPLATE;

    static {
        CompilationUnit cloneCU = JavaParserUtils.getFromFileName(KIE_PMML_SIMPLESET_PREDICATE_TEMPLATE_JAVA);
        SIMPLESET_PREDICATE_TEMPLATE = cloneCU.getClassByName(KIE_PMML_SIMPLESET_PREDICATE_TEMPLATE)
                .orElseThrow(() -> new KiePMMLException(MAIN_CLASS_NOT_FOUND + ": " + KIE_PMML_SIMPLESET_PREDICATE_TEMPLATE));
        // NOTE(review): the result of this clone() is discarded — presumably an eager
        // check that the template method exists (get(0) throws if absent); confirm intent.
        SIMPLESET_PREDICATE_TEMPLATE.getMethodsByName(GETKIEPMMLSIMPLESETPREDICATE).get(0).clone();
    }

    private KiePMMLSimpleSetPredicateFactory() {
        // Avoid instantiation
    }

    /**
     * Builds a {@code BlockStmt} declaring a KiePMMLSimpleSetPredicate variable.
     *
     * @param variableName       name to give the declared variable in the generated code
     * @param simpleSetPredicate PMML predicate supplying field name, array values,
     *                           array type and IN/NOT_IN operator
     * @return a new block containing the template's statements with the declaration rewritten
     * @throws KiePMMLException if the template method body, variable or initializer is missing
     */
    static BlockStmt getSimpleSetPredicateVariableDeclaration(final String variableName, final SimpleSetPredicate simpleSetPredicate) {
        // Clone the template method so the shared template is never mutated.
        final MethodDeclaration methodDeclaration =
                SIMPLESET_PREDICATE_TEMPLATE.getMethodsByName(GETKIEPMMLSIMPLESETPREDICATE).get(0).clone();
        final BlockStmt simpleSetPredicateBody =
                methodDeclaration.getBody().orElseThrow(() -> new KiePMMLException(String.format(MISSING_BODY_TEMPLATE, methodDeclaration)));
        final VariableDeclarator variableDeclarator =
                getVariableDeclarator(simpleSetPredicateBody, SIMPLESET_PREDICATE).orElseThrow(() -> new KiePMMLException(String.format(MISSING_VARIABLE_IN_BODY, SIMPLESET_PREDICATE, simpleSetPredicateBody)));
        variableDeclarator.setName(variableName);
        final BlockStmt toReturn = new BlockStmt();
        // Convert each value of the PMML array into a code-generation expression.
        final NodeList<Expression> arguments = new NodeList<>();
        List<Object> values = getObjectsFromArray(simpleSetPredicate.getArray());
        for (Object value : values) {
            arguments.add(getExpressionForObject(value));
        }
        // Map PMML enums to the corresponding KiePMML enum constant references.
        final ARRAY_TYPE arrayType = ARRAY_TYPE.byName(simpleSetPredicate.getArray().getType().value());
        final NameExpr arrayTypeExpr = new NameExpr(ARRAY_TYPE.class.getName() + "." + arrayType.name());
        final IN_NOTIN inNotIn = IN_NOTIN.byName(simpleSetPredicate.getBooleanOperator().value());
        final NameExpr inNotInExpr = new NameExpr(IN_NOTIN.class.getName() + "." + inNotIn.name());
        final MethodCallExpr initializer = variableDeclarator.getInitializer()
                .orElseThrow(() -> new KiePMMLException(String.format(MISSING_VARIABLE_INITIALIZER_TEMPLATE, SIMPLESET_PREDICATE, simpleSetPredicateBody)))
                .asMethodCallExpr();
        // Rewrite the chained builder(...) call: arg 0 = field name, 2 = array type, 3 = operator.
        final MethodCallExpr builder = getChainedMethodCallExprFrom("builder", initializer);
        builder.setArgument(0, new StringLiteralExpr(simpleSetPredicate.getField().getValue()));
        builder.setArgument(2, arrayTypeExpr);
        builder.setArgument(3, inNotInExpr);
        // Replace the asList(...) arguments with the predicate's values.
        getChainedMethodCallExprFrom("asList", initializer).setArguments(arguments);
        simpleSetPredicateBody.getStatements().forEach(toReturn::addStatement);
        return toReturn;
    }
}
| apache-2.0 |
HappyRay/azkaban | az-hadoop-jobtype-plugin/src/main/java/azkaban/jobtype/SparkJobArg.java | 1938 | package azkaban.jobtype;
/**
 * Enumerates the arguments Azkaban can forward to {@code spark-submit}.
 * Each constant maps an Azkaban property name to a spark-submit parameter;
 * {@code needSpecialTreatment} marks arguments that require custom handling
 * (prefix expansion, value splitting, etc.) rather than plain pass-through.
 */
public enum SparkJobArg {
    // standard spark submit arguments, ordered in the spark-submit --help order
    MASTER("master", false), // just to trick the eclipse formatter
    DEPLOY_MODE("deploy-mode", false), //
    CLASS("class", false), //
    NAME("name", false), //
    SPARK_JARS("jars", true), //
    // NOTE(review): SPARK_PACKAGES and PACKAGES both map the same "packages"
    // property/parameter — looks like an accidental duplicate; confirm which
    // one callers use before removing either (removal would break callers).
    SPARK_PACKAGES("packages", false),
    PACKAGES("packages", false), //
    REPOSITORIES("repositories", false), //
    PY_FILES("py-files", false), //
    FILES("files", false), //
    // Prefix-style property: any "conf.X" Azkaban property becomes "--conf X".
    SPARK_CONF_PREFIX("conf.", "--conf", true), //
    PROPERTIES_FILE("properties-file", false), //
    DRIVER_MEMORY("driver-memory", false), //
    DRIVER_JAVA_OPTIONS("driver-java-options", true), //
    DRIVER_LIBRARY_PATH("driver-library-path", false), //
    DRIVER_CLASS_PATH("driver-class-path", false), //
    EXECUTOR_MEMORY("executor-memory", false), //
    PROXY_USER("proxy-user", false), //
    SPARK_FLAG_PREFIX("flag.", "--", true), // --help, --verbose, --supervise, --version
    // Yarn only Arguments
    EXECUTOR_CORES("executor-cores", false), //
    DRIVER_CORES("driver-cores", false), //
    QUEUE("queue", false), //
    NUM_EXECUTORS("num-executors", false), //
    ARCHIVES("archives", false), //
    PRINCIPAL("principal", false), //
    KEYTAB("keytab", false), //
    // Not SparkSubmit arguments: only exists in azkaban (sparkParamName is null)
    EXECUTION_JAR("execution-jar", null, true), //
    PARAMS("params", null, true), //
    SPARK_VERSION("spark-version", null, true),
    ;

    // Delimiter used when joining/splitting multi-valued argument strings (ASCII SUB).
    public static final String delimiter = "\u001A";

    // Convenience constructor: spark parameter name is "--" + the Azkaban property name.
    SparkJobArg(String propName, boolean specialTreatment) {
        this(propName, "--" + propName, specialTreatment);
    }

    SparkJobArg(String azPropName, String sparkParamName, boolean specialTreatment) {
        this.azPropName = azPropName;
        this.sparkParamName = sparkParamName;
        this.needSpecialTreatment = specialTreatment;
    }

    // Azkaban job property key for this argument.
    final String azPropName;
    // Corresponding spark-submit flag, or null when the argument is Azkaban-only.
    final String sparkParamName;
    // True when the argument cannot be forwarded verbatim and needs custom handling.
    final boolean needSpecialTreatment;
}
| apache-2.0 |
wso2/wso2-marketing-demos | samples/USEAnalyticsSystem/src/election-siddhi-extensions/popularhtag/src/test/java/org/wso2/siddhi/extension/popularhtag/TopKTagsExtensionExtensionTestCase.java | 3395 | package org.wso2.siddhi.extension.popularhtag;
import java.util.concurrent.atomic.AtomicInteger;
import junit.framework.Assert;
import org.apache.log4j.Logger;
import org.junit.Before;
import org.junit.Test;
import org.wso2.siddhi.core.ExecutionPlanRuntime;
import org.wso2.siddhi.core.SiddhiManager;
import org.wso2.siddhi.core.event.Event;
import org.wso2.siddhi.core.query.output.callback.QueryCallback;
import org.wso2.siddhi.core.stream.input.InputHandler;
import org.wso2.siddhi.extension.popularhtag.test.util.SiddhiTestHelper;
import org.wso2.siddhi.core.util.EventPrinter;
/**
 * Tests for the custom Siddhi extension {@code HTag:getTopTag}, which extracts
 * the most popular hashtag(s) from a comma-separated hashtag list while
 * excluding a configured set of tags.
 */
public class TopKTagsExtensionExtensionTestCase {
    static final Logger log = Logger.getLogger(TopKTagsExtensionExtensionTestCase.class);
    // Number of output events received so far (incremented from the Siddhi callback thread).
    private AtomicInteger count = new AtomicInteger(0);
    // Set once any output event arrives; volatile because it is written by the callback thread.
    private volatile boolean eventArrived;

    @Before
    public void init() {
        // Reset per-test state so assertions are independent between test methods.
        count.set(0);
        eventArrived = false;
    }

    @Test
    public void testContainsFunctionExtension() throws InterruptedException {
        log.info("TopKTagsExtensionExtensionTestCase TestCase ");
        SiddhiManager siddhiManager = new SiddhiManager();
        // Input: hashtag list plus retweet/favorite counts.
        String inStreamDefinition = "@config(async = 'true')define stream inputStream (htaglist string,Rt int ,Ft int);";
        // getTopTag(list, k, threshold, excludedTags): emits Index and the winning tag;
        // "TRUMP2016" and "MAKEAMERICAGREATAGAIN" are excluded from consideration.
        String query = ("@info(name = 'query1') "
                + "from inputStream#HTag:getTopTag(htaglist,1,500,'TRUMP2016,MAKEAMERICAGREATAGAIN') "
                + "select Index as count,htaglist as ishtaglist " + "insert into outputStream;");
        ExecutionPlanRuntime executionPlanRuntime = siddhiManager
                .createExecutionPlanRuntime(inStreamDefinition + query);
        executionPlanRuntime.addCallback("query1", new QueryCallback() {
            @Override
            public void receive(long timeStamp, Event[] inEvents, Event[] removeEvents) {
                EventPrinter.print(timeStamp, inEvents, removeEvents);
                for (Event inEvent : inEvents) {
                    count.incrementAndGet();
                    // First emitted winner is expected to be TRUMP, second BERNIE.
                    if (count.get() == 1) {
                        Assert.assertEquals(1, inEvent.getData(0));
                        Assert.assertEquals("TRUMP", inEvent.getData(1));
                    }
                    if (count.get() == 2) {
                        Assert.assertEquals(1, inEvent.getData(0));
                        Assert.assertEquals("BERNIE", inEvent.getData(1));
                    }
                    eventArrived = true;
                }
            }
        });
        InputHandler inputHandler = executionPlanRuntime.getInputHandler("inputStream");
        executionPlanRuntime.start();
        // Feed events; the last one contains only excluded tags, so it should not add a winner.
        inputHandler.send(new Object[] { "TRUMP , TRUMP , TRUMP , BERNIE ", 10, 5 });
        inputHandler.send(new Object[] { "TRUMP , TRUMP , TRUMP , BERNIE , BERNIE , BERNIE ", 1, 50 });
        inputHandler
                .send(new Object[] {
                        "BERNIE , BERNIE , BERNIE , BERNIE , BERNIE , BERNIE , BERNIE , BERNIE , BERNIE , BERNIE , BERNIE , BERNIE ",
                        10, 90 });
        inputHandler.send(new Object[] {
                "CLINTON , TRUMP2016 , TRUMP2016 , TRUMP2016 , TRUMP2016 , TRUMP2016 , TRUMP2016 ", 94, 75 });
        // Wait (up to 60s) until exactly 2 events have been observed.
        SiddhiTestHelper.waitForEvents(100, 2, count, 60000);
        Assert.assertEquals(2, count.get());
        Assert.assertTrue(eventArrived);
        executionPlanRuntime.shutdown();
    }
} | apache-2.0 |
jean-merelis/keycloak | adapters/saml/core/src/main/java/org/keycloak/adapters/saml/profile/webbrowsersso/SamlEndpoint.java | 2093 | /*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.adapters.saml.profile.webbrowsersso;
import org.keycloak.adapters.saml.OnSessionCreated;
import org.keycloak.adapters.saml.SamlDeployment;
import org.keycloak.adapters.saml.SamlSessionStore;
import org.keycloak.adapters.saml.profile.SamlInvocationContext;
import org.keycloak.adapters.spi.AuthOutcome;
import org.keycloak.adapters.spi.HttpFacade;
import org.keycloak.saml.common.constants.GeneralConstants;
/**
 * SAML endpoint handler: inspects the incoming request for a SAML request or
 * response parameter and dispatches to the matching web-browser-SSO handler.
 *
 * @author <a href="mailto:bill@burkecentral.com">Bill Burke</a>
 * @version $Revision: 1 $
 */
public class SamlEndpoint extends WebBrowserSsoAuthenticationHandler {

    public SamlEndpoint(HttpFacade facade, SamlDeployment deployment, SamlSessionStore sessionStore) {
        super(facade, deployment, sessionStore);
    }

    /**
     * Dispatches based on which SAML parameter is present: a SAML request takes
     * precedence over a SAML response; with neither, nothing was attempted.
     */
    @Override
    public AuthOutcome handle(OnSessionCreated onCreateSession) {
        // Read all three parameters up front, exactly as the protocol handlers expect them.
        String request = facade.getRequest().getFirstParam(GeneralConstants.SAML_REQUEST_KEY);
        String response = facade.getRequest().getFirstParam(GeneralConstants.SAML_RESPONSE_KEY);
        String relay = facade.getRequest().getFirstParam(GeneralConstants.RELAY_STATE);
        if (request != null) {
            return handleSamlRequest(request, relay);
        }
        if (response != null) {
            return handleSamlResponse(response, relay, onCreateSession);
        }
        return AuthOutcome.NOT_ATTEMPTED;
    }
}
| apache-2.0 |
dolfdijkstra/gst-foundation | gsf-core/src/main/java/tools/gsf/facade/runtag/workflowaction/SetElementName.java | 1121 | /*
* Copyright 2008 Metastratus Web Solutions Limited. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package tools.gsf.facade.runtag.workflowaction;
import tools.gsf.facade.runtag.render.TagRunnerWithRenderArguments;
/**
 * Tag runner for the {@code WORKFLOWENGINE.SETELEMENTNAME} tag, exposing its
 * {@code NAME} and {@code VALUE} arguments as setters.
 *
 * @author David Chesebro
 * @since 3/16/12
 */
public class SetElementName extends TagRunnerWithRenderArguments {

    public SetElementName() {
        super("WORKFLOWENGINE.SETELEMENTNAME");
    }

    /** Sets the tag's NAME argument. */
    public void setName(String name) {
        set("NAME", name);
    }

    /** Sets the tag's VALUE argument. */
    public void setValue(String value) {
        set("VALUE", value);
    }
}
| apache-2.0 |
gustavoanatoly/hbase | hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestSnapshotQuotaObserverChore.java | 15981 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.quotas;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import java.io.IOException;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Map;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellScanner;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.SnapshotDescription;
import org.apache.hadoop.hbase.client.SnapshotType;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.master.HMaster;
import org.apache.hadoop.hbase.quotas.SnapshotQuotaObserverChore.SnapshotWithSize;
import org.apache.hadoop.hbase.quotas.SpaceQuotaHelperForTests.NoFilesToDischarge;
import org.apache.hadoop.hbase.quotas.SpaceQuotaHelperForTests.SpaceQuotaSnapshotPredicate;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.HashMultimap;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Iterables;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Multimap;
/**
 * Test class for the {@link SnapshotQuotaObserverChore}: verifies that snapshot
 * sizes are computed for quota-covered tables/namespaces and persisted to the
 * quota table, using a single-node mini-cluster.
 */
@Category(MediumTests.class)
public class TestSnapshotQuotaObserverChore {
  private static final Log LOG = LogFactory.getLog(TestSnapshotQuotaObserverChore.class);
  private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
  // Shared counter the helper uses to generate unique table names across tests.
  private static final AtomicLong COUNTER = new AtomicLong();

  @Rule
  public TestName testName = new TestName();

  private Connection conn;
  private Admin admin;
  private SpaceQuotaHelperForTests helper;
  private HMaster master;
  // Chore under test; constructed manually in setup() rather than run on a schedule.
  private SnapshotQuotaObserverChore testChore;

  @BeforeClass
  public static void setUp() throws Exception {
    Configuration conf = TEST_UTIL.getConfiguration();
    SpaceQuotaHelperForTests.updateConfigForQuotas(conf);
    // Clean up the compacted files faster than normal (15s instead of 2mins)
    conf.setInt("hbase.hfile.compaction.discharger.interval", 15 * 1000);
    TEST_UTIL.startMiniCluster(1);
  }

  @AfterClass
  public static void tearDown() throws Exception {
    TEST_UTIL.shutdownMiniCluster();
  }

  @Before
  public void setup() throws Exception {
    conn = TEST_UTIL.getConnection();
    admin = TEST_UTIL.getAdmin();
    helper = new SpaceQuotaHelperForTests(TEST_UTIL, testName, COUNTER);
    master = TEST_UTIL.getHBaseCluster().getMaster();
    // Start each test from a clean quota state.
    helper.removeAllQuotas(conn);
    testChore = new SnapshotQuotaObserverChore(
        TEST_UTIL.getConnection(), TEST_UTIL.getConfiguration(), master.getFileSystem(), master,
        null);
  }

  /**
   * Verifies persistSnapshotSizes() writes one quota-table record per snapshot
   * and that each recorded size can be read back.
   */
  @Test
  public void testSnapshotSizePersistence() throws IOException {
    final Admin admin = TEST_UTIL.getAdmin();
    final TableName tn = TableName.valueOf("quota_snapshotSizePersistence");
    // Recreate the scratch table from scratch if a previous run left it behind.
    if (admin.tableExists(tn)) {
      admin.disableTable(tn);
      admin.deleteTable(tn);
    }
    HTableDescriptor desc = new HTableDescriptor(tn);
    desc.addFamily(new HColumnDescriptor(QuotaTableUtil.QUOTA_FAMILY_USAGE));
    admin.createTable(desc);
    Multimap<TableName,SnapshotWithSize> snapshotsWithSizes = HashMultimap.create();
    try (Table table = conn.getTable(tn)) {
      // Writing no values will result in no records written.
      verify(table, () -> {
        testChore.persistSnapshotSizes(table, snapshotsWithSizes);
        assertEquals(0, count(table));
      });
      // Two snapshots of the same table -> two records with the right sizes.
      verify(table, () -> {
        TableName originatingTable = TableName.valueOf("t1");
        snapshotsWithSizes.put(originatingTable, new SnapshotWithSize("ss1", 1024L));
        snapshotsWithSizes.put(originatingTable, new SnapshotWithSize("ss2", 4096L));
        testChore.persistSnapshotSizes(table, snapshotsWithSizes);
        assertEquals(2, count(table));
        assertEquals(1024L, extractSnapshotSize(table, originatingTable, "ss1"));
        assertEquals(4096L, extractSnapshotSize(table, originatingTable, "ss2"));
      });
      snapshotsWithSizes.clear();
      // Snapshots of three different tables -> three records.
      verify(table, () -> {
        snapshotsWithSizes.put(TableName.valueOf("t1"), new SnapshotWithSize("ss1", 1024L));
        snapshotsWithSizes.put(TableName.valueOf("t2"), new SnapshotWithSize("ss2", 4096L));
        snapshotsWithSizes.put(TableName.valueOf("t3"), new SnapshotWithSize("ss3", 8192L));
        testChore.persistSnapshotSizes(table, snapshotsWithSizes);
        assertEquals(3, count(table));
        assertEquals(1024L, extractSnapshotSize(table, TableName.valueOf("t1"), "ss1"));
        assertEquals(4096L, extractSnapshotSize(table, TableName.valueOf("t2"), "ss2"));
        assertEquals(8192L, extractSnapshotSize(table, TableName.valueOf("t3"), "ss3"));
      });
    }
  }

  /**
   * Only snapshots of tables with a table-level space quota should be selected
   * for size computation; snapshots of unquoted tables are ignored.
   */
  @Test
  public void testSnapshotsFromTables() throws Exception {
    TableName tn1 = helper.createTableWithRegions(1);
    TableName tn2 = helper.createTableWithRegions(1);
    TableName tn3 = helper.createTableWithRegions(1);
    // Set a space quota on table 1 and 2 (but not 3)
    admin.setQuota(QuotaSettingsFactory.limitTableSpace(
        tn1, SpaceQuotaHelperForTests.ONE_GIGABYTE, SpaceViolationPolicy.NO_INSERTS));
    admin.setQuota(QuotaSettingsFactory.limitTableSpace(
        tn2, SpaceQuotaHelperForTests.ONE_GIGABYTE, SpaceViolationPolicy.NO_INSERTS));
    // Create snapshots on each table (we didn't write any data, so just skipflush)
    admin.snapshot(new SnapshotDescription(tn1 + "snapshot", tn1, SnapshotType.SKIPFLUSH));
    admin.snapshot(new SnapshotDescription(tn2 + "snapshot", tn2, SnapshotType.SKIPFLUSH));
    admin.snapshot(new SnapshotDescription(tn3 + "snapshot", tn3, SnapshotType.SKIPFLUSH));
    // Only tn1 and tn2 snapshots should be picked up (tn3 has no quota).
    Multimap<TableName,String> mapping = testChore.getSnapshotsToComputeSize();
    assertEquals(2, mapping.size());
    assertEquals(1, mapping.get(tn1).size());
    assertEquals(tn1 + "snapshot", mapping.get(tn1).iterator().next());
    assertEquals(1, mapping.get(tn2).size());
    assertEquals(tn2 + "snapshot", mapping.get(tn2).iterator().next());
    // Additional snapshots: the new tn2 snapshot appears, the tn3 one still doesn't.
    admin.snapshot(new SnapshotDescription(tn2 + "snapshot1", tn2, SnapshotType.SKIPFLUSH));
    admin.snapshot(new SnapshotDescription(tn3 + "snapshot1", tn3, SnapshotType.SKIPFLUSH));
    mapping = testChore.getSnapshotsToComputeSize();
    assertEquals(3, mapping.size());
    assertEquals(1, mapping.get(tn1).size());
    assertEquals(tn1 + "snapshot", mapping.get(tn1).iterator().next());
    assertEquals(2, mapping.get(tn2).size());
    assertEquals(
        new HashSet<String>(Arrays.asList(tn2 + "snapshot", tn2 + "snapshot1")), mapping.get(tn2));
  }

  /**
   * Same selection rule at the namespace level: only snapshots of tables in a
   * namespace with a namespace space quota are selected.
   */
  @Test
  public void testSnapshotsFromNamespaces() throws Exception {
    NamespaceDescriptor ns = NamespaceDescriptor.create("snapshots_from_namespaces").build();
    admin.createNamespace(ns);
    TableName tn1 = helper.createTableWithRegions(ns.getName(), 1);
    TableName tn2 = helper.createTableWithRegions(ns.getName(), 1);
    // tn3 lives in the (unquoted) default namespace.
    TableName tn3 = helper.createTableWithRegions(1);
    // Set a space quota on the namespace
    admin.setQuota(QuotaSettingsFactory.limitNamespaceSpace(
        ns.getName(), SpaceQuotaHelperForTests.ONE_GIGABYTE, SpaceViolationPolicy.NO_INSERTS));
    // Create snapshots on each table (we didn't write any data, so just skipflush)
    admin.snapshot(new SnapshotDescription(
        tn1.getQualifierAsString() + "snapshot", tn1, SnapshotType.SKIPFLUSH));
    admin.snapshot(new SnapshotDescription(
        tn2.getQualifierAsString() + "snapshot", tn2, SnapshotType.SKIPFLUSH));
    admin.snapshot(new SnapshotDescription(
        tn3.getQualifierAsString() + "snapshot", tn3, SnapshotType.SKIPFLUSH));
    // Only the two tables inside the quoted namespace should be selected.
    Multimap<TableName,String> mapping = testChore.getSnapshotsToComputeSize();
    assertEquals(2, mapping.size());
    assertEquals(1, mapping.get(tn1).size());
    assertEquals(tn1.getQualifierAsString() + "snapshot", mapping.get(tn1).iterator().next());
    assertEquals(1, mapping.get(tn2).size());
    assertEquals(tn2.getQualifierAsString() + "snapshot", mapping.get(tn2).iterator().next());
    // A second snapshot on tn2 is picked up; tn3's snapshots are still excluded.
    admin.snapshot(new SnapshotDescription(
        tn2.getQualifierAsString() + "snapshot1", tn2, SnapshotType.SKIPFLUSH));
    admin.snapshot(new SnapshotDescription(
        tn3.getQualifierAsString() + "snapshot2", tn3, SnapshotType.SKIPFLUSH));
    mapping = testChore.getSnapshotsToComputeSize();
    assertEquals(3, mapping.size());
    assertEquals(1, mapping.get(tn1).size());
    assertEquals(tn1.getQualifierAsString() + "snapshot", mapping.get(tn1).iterator().next());
    assertEquals(2, mapping.get(tn2).size());
    assertEquals(
        new HashSet<String>(Arrays.asList(tn2.getQualifierAsString() + "snapshot",
            tn2.getQualifierAsString() + "snapshot1")), mapping.get(tn2));
  }

  /**
   * Snapshot size semantics: a snapshot is "free" while the live table still
   * references all of its files; after a major compaction replaces those files,
   * the snapshot is charged for the size of the files it alone retains.
   */
  @Test
  public void testSnapshotSize() throws Exception {
    // Create a table and set a quota
    TableName tn1 = helper.createTableWithRegions(5);
    admin.setQuota(QuotaSettingsFactory.limitTableSpace(
        tn1, SpaceQuotaHelperForTests.ONE_GIGABYTE, SpaceViolationPolicy.NO_INSERTS));
    // Write some data and flush it
    helper.writeData(tn1, 256L * SpaceQuotaHelperForTests.ONE_KILOBYTE);
    admin.flush(tn1);
    final AtomicReference<Long> lastSeenSize = new AtomicReference<>();
    // Wait for the Master chore to run to see the usage (with a fudge factor)
    TEST_UTIL.waitFor(30_000, new SpaceQuotaSnapshotPredicate(conn, tn1) {
      @Override
      boolean evaluate(SpaceQuotaSnapshot snapshot) throws Exception {
        lastSeenSize.set(snapshot.getUsage());
        return snapshot.getUsage() > 230L * SpaceQuotaHelperForTests.ONE_KILOBYTE;
      }
    });
    // Create a snapshot on the table
    final String snapshotName = tn1 + "snapshot";
    admin.snapshot(new SnapshotDescription(snapshotName, tn1, SnapshotType.SKIPFLUSH));
    // Get the snapshots
    Multimap<TableName,String> snapshotsToCompute = testChore.getSnapshotsToComputeSize();
    assertEquals(
        "Expected to see the single snapshot: " + snapshotsToCompute, 1, snapshotsToCompute.size());
    // Get the size of our snapshot
    Multimap<TableName,SnapshotWithSize> snapshotsWithSize = testChore.computeSnapshotSizes(
        snapshotsToCompute);
    assertEquals(1, snapshotsWithSize.size());
    SnapshotWithSize sws = Iterables.getOnlyElement(snapshotsWithSize.get(tn1));
    assertEquals(snapshotName, sws.getName());
    // The snapshot should take up no space since the table refers to it completely
    assertEquals(0, sws.getSize());
    // Write some more data, flush it, and then major_compact the table
    helper.writeData(tn1, 256L * SpaceQuotaHelperForTests.ONE_KILOBYTE);
    admin.flush(tn1);
    TEST_UTIL.compact(tn1, true);
    // Test table should reflect it's original size since ingest was deterministic
    TEST_UTIL.waitFor(30_000, new SpaceQuotaSnapshotPredicate(conn, tn1) {
      @Override
      boolean evaluate(SpaceQuotaSnapshot snapshot) throws Exception {
        LOG.debug("Current usage=" + snapshot.getUsage() + " lastSeenSize=" + lastSeenSize.get());
        return closeInSize(
            snapshot.getUsage(), lastSeenSize.get(), SpaceQuotaHelperForTests.ONE_KILOBYTE);
      }
    });
    // Wait for no compacted files on the regions of our table
    TEST_UTIL.waitFor(30_000, new NoFilesToDischarge(TEST_UTIL.getMiniHBaseCluster(), tn1));
    // Still should see only one snapshot
    snapshotsToCompute = testChore.getSnapshotsToComputeSize();
    assertEquals(
        "Expected to see the single snapshot: " + snapshotsToCompute, 1, snapshotsToCompute.size());
    snapshotsWithSize = testChore.computeSnapshotSizes(
        snapshotsToCompute);
    assertEquals(1, snapshotsWithSize.size());
    sws = Iterables.getOnlyElement(snapshotsWithSize.get(tn1));
    assertEquals(snapshotName, sws.getName());
    // The snapshot should take up the size the table originally took up
    assertEquals(lastSeenSize.get().longValue(), sws.getSize());
  }

  /**
   * groupSnapshotSizesByNamespace() should sum snapshot sizes per namespace,
   * mapping tables with no explicit namespace to the default namespace.
   */
  @Test
  public void testPersistingSnapshotsForNamespaces() throws Exception {
    Multimap<TableName,SnapshotWithSize> snapshotsWithSizes = HashMultimap.create();
    TableName tn1 = TableName.valueOf("ns1:tn1");
    TableName tn2 = TableName.valueOf("ns1:tn2");
    TableName tn3 = TableName.valueOf("ns2:tn1");
    TableName tn4 = TableName.valueOf("ns2:tn2");
    TableName tn5 = TableName.valueOf("tn1");
    snapshotsWithSizes.put(tn1, new SnapshotWithSize("", 1024L));
    snapshotsWithSizes.put(tn2, new SnapshotWithSize("", 1024L));
    snapshotsWithSizes.put(tn3, new SnapshotWithSize("", 512L));
    snapshotsWithSizes.put(tn4, new SnapshotWithSize("", 1024L));
    snapshotsWithSizes.put(tn5, new SnapshotWithSize("", 3072L));
    Map<String,Long> nsSizes = testChore.groupSnapshotSizesByNamespace(snapshotsWithSizes);
    assertEquals(3, nsSizes.size());
    assertEquals(2048L, (long) nsSizes.get("ns1"));
    assertEquals(1536L, (long) nsSizes.get("ns2"));
    assertEquals(3072L, (long) nsSizes.get(NamespaceDescriptor.DEFAULT_NAMESPACE_NAME_STR));
  }

  /** Counts the number of cells visible via a full scan of the given table. */
  private long count(Table t) throws IOException {
    try (ResultScanner rs = t.getScanner(new Scan())) {
      long sum = 0;
      for (Result r : rs) {
        while (r.advance()) {
          sum++;
        }
      }
      return sum;
    }
  }

  /** Reads back the persisted size for one (table, snapshot) pair from the quota table. */
  private long extractSnapshotSize(
      Table quotaTable, TableName tn, String snapshot) throws IOException {
    Get g = QuotaTableUtil.makeGetForSnapshotSize(tn, snapshot);
    Result r = quotaTable.get(g);
    assertNotNull(r);
    CellScanner cs = r.cellScanner();
    cs.advance();
    Cell c = cs.current();
    assertNotNull(c);
    return QuotaTableUtil.extractSnapshotSize(
        c.getValueArray(), c.getValueOffset(), c.getValueLength());
  }

  /** Truncates the table to an empty state, then runs the given check against it. */
  private void verify(Table t, IOThrowingRunnable test) throws IOException {
    admin.disableTable(t.getName());
    admin.truncateTable(t.getName(), false);
    test.run();
  }

  /** Runnable variant whose body may throw {@link IOException}. */
  @FunctionalInterface
  private interface IOThrowingRunnable {
    void run() throws IOException;
  }

  /**
   * Computes if {@code size2} is within {@code delta} of {@code size1}, inclusive.
   */
  boolean closeInSize(long size1, long size2, long delta) {
    long lower = size1 - delta;
    long upper = size1 + delta;
    return lower <= size2 && size2 <= upper;
  }
}
| apache-2.0 |
milcom/hibernate-redis | hibernate-examples/src/test/java/org/hibernate/examples/mapping/usertype/JodaDateTimeTZEntity.java | 1760 | package org.hibernate.examples.mapping.usertype;
import lombok.Getter;
import lombok.Setter;
import org.hibernate.annotations.*;
import org.hibernate.examples.model.AbstractHibernateEntity;
import org.hibernate.examples.utils.HashTool;
import org.hibernate.examples.utils.ToStringHelper;
import org.joda.time.DateTime;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
/**
 * org.hibernate.examples.mapping.usertype.JodaDateTimeTZEntity
 * <p>
 * Example entity demonstrating a custom Hibernate user type that maps a Joda
 * {@link DateTime} onto two columns: the instant and its time zone.
 *
 * @author 배성혁 sunghyouk.bae@gmail.com
 * @since 2013. 12. 9. 오전 9:40
 */
@Entity
@org.hibernate.annotations.Cache(region = "example", usage = CacheConcurrencyStrategy.READ_WRITE)
@DynamicInsert
@DynamicUpdate
@Getter
@Setter
public class JodaDateTimeTZEntity extends AbstractHibernateEntity<Long> {

    @Id
    @GeneratedValue
    private Long id;

    /**
     * Stored as a UTC DateTime plus a separate TimeZone column; the two are
     * recombined into a single {@link DateTime} when loaded.
     */
    @Columns(columns = { @Column(name = "startTime"), @Column(name = "startTimeZone") })
    @Type(type = "org.hibernate.examples.usertype.JodaDateTimeTZUserType")
    private DateTime startTZ;

    // Same two-column mapping as startTZ, for the end of the interval.
    @Columns(columns = { @Column(name = "endTime"), @Column(name = "endTimeZone") })
    @Type(type = "org.hibernate.examples.usertype.JodaDateTimeTZUserType")
    private DateTime endTZ;

    // NOTE(review): hashCode is overridden here without a matching equals override —
    // presumably equals is defined in AbstractHibernateEntity in terms of this hash;
    // confirm the base class contract before changing.
    @Override
    public int hashCode() {
        return HashTool.compute(startTZ, endTZ);
    }

    @Override
    public ToStringHelper buildStringHelper() {
        return super.buildStringHelper()
                .add("startTZ", startTZ)
                .add("endTZ", endTZ);
    }

    private static final long serialVersionUID = 5531635835898743185L;
}
| apache-2.0 |
zwets/flowable-engine | modules/flowable-idm-engine/src/main/java/org/flowable/idm/engine/impl/persistence/entity/data/impl/MybatisByteArrayDataManager.java | 1929 | /* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.flowable.idm.engine.impl.persistence.entity.data.impl;
import java.util.List;
import org.flowable.idm.engine.IdmEngineConfiguration;
import org.flowable.idm.engine.impl.persistence.entity.IdmByteArrayEntity;
import org.flowable.idm.engine.impl.persistence.entity.IdmByteArrayEntityImpl;
import org.flowable.idm.engine.impl.persistence.entity.data.AbstractIdmDataManager;
import org.flowable.idm.engine.impl.persistence.entity.data.ByteArrayDataManager;
/**
 * MyBatis-backed {@link ByteArrayDataManager} for the IDM engine: maps
 * {@link IdmByteArrayEntity} instances to the byte-array table via the
 * engine's SQL session.
 *
 * @author Joram Barrez
 */
public class MybatisByteArrayDataManager extends AbstractIdmDataManager<IdmByteArrayEntity> implements ByteArrayDataManager {

    public MybatisByteArrayDataManager(IdmEngineConfiguration idmEngineConfiguration) {
        super(idmEngineConfiguration);
    }

    /** Instantiates a fresh, not-yet-persisted byte-array entity. */
    @Override
    public IdmByteArrayEntity create() {
        return new IdmByteArrayEntityImpl();
    }

    /** The concrete implementation class this manager persists. */
    @Override
    public Class<? extends IdmByteArrayEntity> getManagedEntityClass() {
        return IdmByteArrayEntityImpl.class;
    }

    /** Loads every stored byte-array row. */
    @Override
    @SuppressWarnings("unchecked")
    public List<IdmByteArrayEntity> findAll() {
        List<IdmByteArrayEntity> allByteArrays = getDbSqlSession().selectList("selectIdmByteArrays");
        return allByteArrays;
    }

    /** Deletes the row with the given id, skipping the optimistic-lock revision check. */
    @Override
    public void deleteByteArrayNoRevisionCheck(String byteArrayEntityId) {
        getDbSqlSession().delete("deleteIdmByteArrayNoRevisionCheck", byteArrayEntityId);
    }
}
| apache-2.0 |
stephraleigh/flowable-engine | modules/flowable-bpmn-model/src/main/java/org/flowable/bpmn/model/Association.java | 1893 | /* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.flowable.bpmn.model;
/**
 * BPMN association artifact: links a source element to a target element,
 * optionally with a direction (defaults to {@link AssociationDirection#NONE}).
 *
 * @author Tijs Rademakers
 */
public class Association extends Artifact {

    protected AssociationDirection associationDirection = AssociationDirection.NONE;
    protected String sourceRef;
    protected String targetRef;

    public AssociationDirection getAssociationDirection() {
        return associationDirection;
    }

    public void setAssociationDirection(AssociationDirection associationDirection) {
        this.associationDirection = associationDirection;
    }

    public String getSourceRef() {
        return sourceRef;
    }

    public void setSourceRef(String sourceRef) {
        this.sourceRef = sourceRef;
    }

    public String getTargetRef() {
        return targetRef;
    }

    public void setTargetRef(String targetRef) {
        this.targetRef = targetRef;
    }

    /** Creates a new association carrying the same values as this one. */
    public Association clone() {
        Association copy = new Association();
        copy.setValues(this);
        return copy;
    }

    /** Copies all values from the given association onto this instance. */
    public void setValues(Association otherElement) {
        super.setValues(otherElement);
        setSourceRef(otherElement.getSourceRef());
        setTargetRef(otherElement.getTargetRef());
        AssociationDirection otherDirection = otherElement.getAssociationDirection();
        if (otherDirection != null) {
            // Keep the current (default) direction if the other element has none.
            setAssociationDirection(otherDirection);
        }
    }
}
| apache-2.0 |
dkhwangbo/druid | processing/src/test/java/org/apache/druid/query/IntervalChunkingQueryRunnerTest.java | 3703 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.query;
import com.google.common.collect.ImmutableMap;
import org.apache.druid.java.util.common.guava.Sequences;
import org.apache.druid.java.util.emitter.service.ServiceEmitter;
import org.apache.druid.query.Druids.TimeseriesQueryBuilder;
import org.apache.druid.query.aggregation.CountAggregatorFactory;
import org.easymock.EasyMock;
import org.junit.Before;
import org.junit.Test;
import java.util.Collections;
import java.util.concurrent.ExecutorService;
/**
 * Tests for {@link IntervalChunkingQueryRunnerDecorator}: when a query carries a
 * {@code chunkPeriod} context value, its interval is split into per-period chunks
 * and one task is submitted to the executor per chunk; without it, the query is
 * passed straight through to the base runner.
 */
public class IntervalChunkingQueryRunnerTest
{
  private IntervalChunkingQueryRunnerDecorator decorator;
  private ExecutorService executors;
  private QueryRunner baseRunner;
  private QueryToolChest toolChest;

  private final TimeseriesQueryBuilder queryBuilder;

  public IntervalChunkingQueryRunnerTest()
  {
    queryBuilder = Druids.newTimeseriesQueryBuilder()
                         .dataSource("test")
                         .aggregators(Collections.singletonList(new CountAggregatorFactory("count")));
  }

  @Before
  public void setup()
  {
    executors = EasyMock.createMock(ExecutorService.class);
    ServiceEmitter emitter = EasyMock.createNiceMock(ServiceEmitter.class);
    decorator = new IntervalChunkingQueryRunnerDecorator(executors,
            QueryRunnerTestHelper.NOOP_QUERYWATCHER, emitter);
    baseRunner = EasyMock.createMock(QueryRunner.class);
    toolChest = EasyMock.createNiceMock(QueryToolChest.class);
  }

  /** No chunkPeriod: the decorated runner must delegate directly to the base runner. */
  @Test
  public void testDefaultNoChunking()
  {
    QueryPlus queryPlus = QueryPlus.wrap(queryBuilder.intervals("2014/2016").build());

    // Collections.emptyMap() replaces the deprecated raw-typed Collections.EMPTY_MAP;
    // it returns the same singleton instance, so EasyMock argument matching is unchanged.
    EasyMock.expect(baseRunner.run(queryPlus, Collections.emptyMap())).andReturn(Sequences.empty());
    EasyMock.replay(baseRunner);

    QueryRunner runner = decorator.decorate(baseRunner, toolChest);
    runner.run(queryPlus, Collections.emptyMap());

    EasyMock.verify(baseRunner);
  }

  /** A 10-day interval chunked by P1D must submit exactly 10 tasks. */
  @Test
  public void testChunking()
  {
    Query query = queryBuilder.intervals("2015-01-01T00:00:00.000/2015-01-11T00:00:00.000").context(ImmutableMap.of("chunkPeriod", "P1D")).build();

    executors.execute(EasyMock.anyObject(Runnable.class));
    EasyMock.expectLastCall().times(10);

    EasyMock.replay(executors);
    EasyMock.replay(toolChest);

    QueryRunner runner = decorator.decorate(baseRunner, toolChest);
    runner.run(QueryPlus.wrap(query), Collections.emptyMap());

    EasyMock.verify(executors);
  }

  /** An interval spanning two months chunked by P1M must submit exactly 2 tasks. */
  @Test
  public void testChunkingOnMonths()
  {
    Query query = queryBuilder.intervals("2015-01-01T00:00:00.000/2015-02-11T00:00:00.000").context(ImmutableMap.of("chunkPeriod", "P1M")).build();

    executors.execute(EasyMock.anyObject(Runnable.class));
    EasyMock.expectLastCall().times(2);

    EasyMock.replay(executors);
    EasyMock.replay(toolChest);

    QueryRunner runner = decorator.decorate(baseRunner, toolChest);
    runner.run(QueryPlus.wrap(query), Collections.emptyMap());

    EasyMock.verify(executors);
  }
}
| apache-2.0 |
codescale/logging-log4j2 | log4j-api/src/main/java/org/apache/logging/log4j/message/ThreadDumpMessage.java | 7028 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache license, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the license for the specific language governing permissions and
* limitations under the license.
*/
package org.apache.logging.log4j.message;
import java.io.InvalidObjectException;
import java.io.ObjectInputStream;
import java.io.Serializable;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.ServiceConfigurationError;
import java.util.ServiceLoader;
import org.apache.logging.log4j.status.StatusLogger;
import org.apache.logging.log4j.util.StringBuilderFormattable;
import org.apache.logging.log4j.util.Strings;
/**
 * Captures information about all running Threads.
 * <p>
 * Thread data is captured eagerly at construction time (via a pluggable
 * {@link ThreadInfoFactory}); formatting into text is deferred until the
 * message is actually rendered, which is why the class is marked
 * {@link AsynchronouslyFormattable}.
 */
@AsynchronouslyFormattable
public class ThreadDumpMessage implements Message, StringBuilderFormattable {

    private static final long serialVersionUID = -1103400781608841088L;
    // Lazily-initialized singleton factory.
    // NOTE(review): initialization is neither synchronized nor volatile; concurrent
    // first use may create the factory more than once. Appears benign (the factory
    // is stateless) — confirm this is intentional.
    private static ThreadInfoFactory FACTORY;
    // Snapshot of all threads and their stacks, taken in the constructor.
    private volatile Map<ThreadInformation, StackTraceElement[]> threads;
    private final String title;
    // Cached formatted text; only non-null after deserialization via the proxy.
    private String formattedMessage;

    /**
     * Generate a ThreadDumpMessage with a title.
     * @param title The title.
     */
    public ThreadDumpMessage(final String title) {
        this.title = title == null ? Strings.EMPTY : title;
        threads = getFactory().createThreadInfo();
    }

    // Used by the serialization proxy: restores a message whose text is already rendered.
    private ThreadDumpMessage(final String formattedMsg, final String title) {
        this.formattedMessage = formattedMsg;
        this.title = title == null ? Strings.EMPTY : title;
    }

    // Returns the shared factory, creating it on first use.
    private static ThreadInfoFactory getFactory() {
        if (FACTORY == null) {
            FACTORY = initFactory(ThreadDumpMessage.class.getClassLoader());
        }
        return FACTORY;
    }

    // Loads the first ThreadInfoFactory found via ServiceLoader; falls back to
    // BasicThreadInfoFactory when none can be loaded (e.g. java.management absent).
    private static ThreadInfoFactory initFactory(final ClassLoader classLoader) {
        final ServiceLoader<ThreadInfoFactory> serviceLoader = ServiceLoader.load(ThreadInfoFactory.class, classLoader);
        ThreadInfoFactory result = null;
        try {
            final Iterator<ThreadInfoFactory> iterator = serviceLoader.iterator();
            while (result == null && iterator.hasNext()) {
                result = iterator.next();
            }
        } catch (ServiceConfigurationError | LinkageError | Exception unavailable) { // if java management classes not available
            StatusLogger.getLogger().info("ThreadDumpMessage uses BasicThreadInfoFactory: " +
                    "could not load extended ThreadInfoFactory: {}", unavailable.toString());
            result = null;
        }
        return result == null ? new BasicThreadInfoFactory() : result;
    }

    @Override
    public String toString() {
        return getFormattedMessage();
    }

    /**
     * Returns the ThreadDump in printable format.
     * @return the ThreadDump suitable for logging.
     */
    @Override
    public String getFormattedMessage() {
        if (formattedMessage != null) {
            return formattedMessage;
        }
        final StringBuilder sb = new StringBuilder(255);
        formatTo(sb);
        return sb.toString();
    }

    /**
     * Appends the title (if any) followed by one "thread info + stack" section
     * per captured thread.
     */
    @Override
    public void formatTo(final StringBuilder sb) {
        sb.append(title);
        if (title.length() > 0) {
            sb.append('\n');
        }
        for (final Map.Entry<ThreadInformation, StackTraceElement[]> entry : threads.entrySet()) {
            final ThreadInformation info = entry.getKey();
            info.printThreadInfo(sb);
            info.printStack(sb, entry.getValue());
            sb.append('\n');
        }
    }

    /**
     * Returns the title.
     * @return the title.
     */
    @Override
    public String getFormat() {
        return title == null ? Strings.EMPTY : title;
    }

    /**
     * Always returns null: a thread dump message carries no parameters.
     * @return null.
     */
    @Override
    public Object[] getParameters() {
        return null;
    }

    /**
     * Creates a ThreadDumpMessageProxy that can be serialized.
     * @return a ThreadDumpMessageProxy.
     */
    protected Object writeReplace() {
        return new ThreadDumpMessageProxy(this);
    }

    // Serialization-proxy pattern: direct deserialization is forbidden.
    private void readObject(final ObjectInputStream stream)
            throws InvalidObjectException {
        throw new InvalidObjectException("Proxy required");
    }

    /**
     * Proxy pattern used to serialize the ThreadDumpMessage.
     * Serializes the already-formatted text rather than the live thread map.
     */
    private static class ThreadDumpMessageProxy implements Serializable {

        private static final long serialVersionUID = -3476620450287648269L;
        private final String formattedMsg;
        private final String title;

        ThreadDumpMessageProxy(final ThreadDumpMessage msg) {
            this.formattedMsg = msg.getFormattedMessage();
            this.title = msg.title;
        }

        /**
         * Returns a ThreadDumpMessage using the data in the proxy.
         * @return a ThreadDumpMessage.
         */
        protected Object readResolve() {
            return new ThreadDumpMessage(formattedMsg, title);
        }
    }

    /**
     * Factory to create Thread information.
     * <p>
     * Implementations of this class are loaded via the standard java Service Provider interface.
     * </p>
     * @see /log4j-core/src/main/resources/META-INF/services/org.apache.logging.log4j.message.ThreadDumpMessage$ThreadInfoFactory
     */
    public static interface ThreadInfoFactory {
        Map<ThreadInformation, StackTraceElement[]> createThreadInfo();
    }

    /**
     * Factory to create basic thread information.
     * Uses Thread.getAllStackTraces(), which needs no java.management classes.
     */
    private static class BasicThreadInfoFactory implements ThreadInfoFactory {
        @Override
        public Map<ThreadInformation, StackTraceElement[]> createThreadInfo() {
            final Map<Thread, StackTraceElement[]> map = Thread.getAllStackTraces();
            final Map<ThreadInformation, StackTraceElement[]> threads =
                new HashMap<>(map.size());
            for (final Map.Entry<Thread, StackTraceElement[]> entry : map.entrySet()) {
                threads.put(new BasicThreadInformation(entry.getKey()), entry.getValue());
            }
            return threads;
        }
    }
}
| apache-2.0 |
greg-dove/flex-falcon | debugger/src/main/java/flash/tools/debugger/VersionException.java | 1250 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package flash.tools.debugger;
/**
 * Thrown when the Session is connected to a Player that does not support a
 * requested operation. The message text is localized.
 */
public class VersionException extends PlayerDebugException
{
	private static final long serialVersionUID = 4966523681921720567L;

	/** Returns the localized "unexpected player version" message. */
	@Override
	public String getMessage()
	{
		final String messageKey = "unexpectedPlayerVersion"; //$NON-NLS-1$
		return Bootstrap.getLocalizationManager().getLocalizedTextString(messageKey);
	}
}
| apache-2.0 |
janstey/fabric8 | gateway/gateway-core/src/main/java/io/fabric8/gateway/handlers/http/HttpGateway.java | 2018 | /**
* Copyright 2005-2014 Red Hat, Inc.
*
* Red Hat licenses this file to you under the Apache License, version
* 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package io.fabric8.gateway.handlers.http;
import io.fabric8.gateway.CallDetailRecord;
import java.net.InetSocketAddress;
import java.util.Map;
/**
 * An interface to working with a HTTP gateway which has the responsibility for
 * configuring a number of mapping rules so that the
 * {@link HttpGatewayHandler} can interrogate
 * the mapping rules via the {@link #getMappedServices()} method so it can decide which
 * services to proxy requests to.
 */
public interface HttpGateway {

    /**
     * Adds a mapping rule for exposing a number of services at URI prefixes.
     *
     * @param mappingRule the rule to register
     */
    void addMappingRuleConfiguration(HttpMappingRule mappingRule);

    /**
     * Removes a mapping rule previously added via
     * {@link #addMappingRuleConfiguration(HttpMappingRule)}.
     *
     * @param mappingRule the rule to unregister
     */
    void removeMappingRuleConfiguration(HttpMappingRule mappingRule);

    /**
     * Returns the currently mapped services indexed by URI prefix on this HTTP gateway.
     *
     * @return map of URI prefix to the services mapped at that prefix
     */
    Map<String, MappedServices> getMappedServices();

    /**
     * Returns true if the mapping index is enabled which by default
     * returns a JSON document describing the mapping of URI prefixes to services.
     */
    boolean isEnableIndex();

    /**
     * Returns address the gateway service is listening on.
     */
    public InetSocketAddress getLocalAddress();

    /**
     * Adds a CallDetailRecord for reporting purposes.
     *
     * @param cdr the call detail record to report
     */
    public void addCallDetailRecord(CallDetailRecord cdr);
}
| apache-2.0 |
xschildw/Synapse-Repository-Services | lib/stackConfiguration/src/main/java/org/sagebionetworks/StackConfigurationGuiceModule.java | 992 | package org.sagebionetworks;
import org.sagebionetworks.aws.AwsClientFactory;
import org.sagebionetworks.aws.SynapseS3Client;
import com.amazonaws.services.kms.AWSKMS;
import com.google.inject.Provides;
/**
 * Provides dependency injection mapping for the StackConfiguration project.
 * <p>
 * Binds each configuration interface to its implementation and exposes
 * provider methods for the AWS clients the configuration layer needs.
 */
public class StackConfigurationGuiceModule extends com.google.inject.AbstractModule {

	@Override
	protected void configure() {
		// Interface-to-implementation wiring for the configuration stack.
		bind(LoggerProvider.class).to(LoggerProviderImpl.class);
		bind(PropertyProvider.class).to(PropertyProviderImpl.class);
		bind(ConfigurationProperties.class).to(ConfigurationPropertiesImpl.class);
		bind(StackConfiguration.class).to(StackConfigurationImpl.class);
		bind(StackEncrypter.class).to(StackEncrypterImpl.class);
	}

	/**
	 * Provides the AWS Key Management Service client used for encryption/decryption.
	 */
	@Provides
	public AWSKMS provideAWSKMSClient() {
		return AwsClientFactory.createAmazonKeyManagementServiceClient();
	}

	/**
	 * Provides the Synapse wrapper around the Amazon S3 client.
	 */
	@Provides
	public SynapseS3Client provideAmazonS3Client() {
		return AwsClientFactory.createAmazonS3Client();
	}
}
| apache-2.0 |
zhengxgs/elasticsearch-2.4.1 | core/src/main/java/org/elasticsearch/index/mapper/internal/ParentFieldMapper.java | 17581 | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper.internal;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.SortedDocValuesField;
import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.queries.TermsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.lucene.BytesRefs;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.loader.SettingsLoader;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MetadataFieldMapper;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.index.query.QueryParseContext;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import static org.elasticsearch.common.settings.Settings.settingsBuilder;
import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeMapValue;
/**
 * Metadata mapper for the {@code _parent} field, which links a child document
 * type to a parent type within the same index. When active, each child
 * document indexes the parent's uid (and, on 2.0+ indices, a sorted doc-values
 * "join" field) so parent/child queries can resolve the relationship.
 */
public class ParentFieldMapper extends MetadataFieldMapper {

    public static final String NAME = "_parent";
    public static final String CONTENT_TYPE = "_parent";

    public static class Defaults {
        public static final String NAME = ParentFieldMapper.NAME;

        public static final MappedFieldType FIELD_TYPE = new ParentFieldType();
        public static final MappedFieldType JOIN_FIELD_TYPE = new ParentFieldType();

        static {
            // _parent itself: stored, untokenized keyword field without norms.
            FIELD_TYPE.setIndexOptions(IndexOptions.DOCS);
            FIELD_TYPE.setTokenized(false);
            FIELD_TYPE.setStored(true);
            FIELD_TYPE.setOmitNorms(true);
            FIELD_TYPE.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER);
            FIELD_TYPE.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER);
            FIELD_TYPE.setNames(new MappedFieldType.Names(NAME));
            FIELD_TYPE.freeze();

            // The join field: sorted doc values (disabled later for pre-2.0 indices).
            JOIN_FIELD_TYPE.setHasDocValues(true);
            JOIN_FIELD_TYPE.setDocValuesType(DocValuesType.SORTED);
            JOIN_FIELD_TYPE.freeze();
        }
    }

    public static class Builder extends MetadataFieldMapper.Builder<Builder, ParentFieldMapper> {

        private String parentType;

        protected String indexName;

        private final String documentType;

        private final MappedFieldType parentJoinFieldType = Defaults.JOIN_FIELD_TYPE.clone();

        private final MappedFieldType childJoinFieldType = Defaults.JOIN_FIELD_TYPE.clone();

        public Builder(String documentType) {
            super(Defaults.NAME, Defaults.FIELD_TYPE, Defaults.FIELD_TYPE);
            this.indexName = name;
            this.documentType = documentType;
            builder = this;
        }

        /** Sets the parent type this document type points at. Required. */
        public Builder type(String type) {
            this.parentType = type;
            return builder;
        }

        @Override
        public Builder fieldDataSettings(Settings fieldDataSettings) {
            // Merge custom settings over the defaults of the child join field.
            Settings settings = Settings.builder().put(childJoinFieldType.fieldDataType().getSettings()).put(fieldDataSettings).build();
            childJoinFieldType.setFieldDataType(new FieldDataType(childJoinFieldType.fieldDataType().getType(), settings));
            return this;
        }

        @Override
        public ParentFieldMapper build(BuilderContext context) {
            if (parentType == null) {
                throw new MapperParsingException("[_parent] field mapping must contain the [type] option");
            }
            parentJoinFieldType.setNames(new MappedFieldType.Names(joinField(documentType)));
            parentJoinFieldType.setFieldDataType(null);
            childJoinFieldType.setNames(new MappedFieldType.Names(joinField(parentType)));
            // Indices created before 2.0.0-beta1 did not have doc values on the join fields.
            if (context.indexCreatedVersion().before(Version.V_2_0_0_beta1)) {
                childJoinFieldType.setHasDocValues(false);
                childJoinFieldType.setDocValuesType(DocValuesType.NONE);
                parentJoinFieldType.setHasDocValues(false);
                parentJoinFieldType.setDocValuesType(DocValuesType.NONE);
            }
            return new ParentFieldMapper(fieldType, parentJoinFieldType, childJoinFieldType, parentType, context.indexSettings());
        }
    }

    public static class TypeParser implements MetadataFieldMapper.TypeParser {
        @Override
        public MetadataFieldMapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
            Builder builder = new Builder(parserContext.type());
            for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
                Map.Entry<String, Object> entry = iterator.next();
                String fieldName = Strings.toUnderscoreCase(entry.getKey());
                Object fieldNode = entry.getValue();
                if (fieldName.equals("type")) {
                    builder.type(fieldNode.toString());
                    iterator.remove();
                } else if (fieldName.equals("postings_format") && parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) {
                    // ignore before 2.0, reject on and after 2.0
                    iterator.remove();
                } else if (fieldName.equals("fielddata")) {
                    // Only take over `loading`, since that is the only option now that is configurable:
                    Map<String, String> fieldDataSettings = SettingsLoader.Helper.loadNestedFromMap(nodeMapValue(fieldNode, "fielddata"));
                    if (fieldDataSettings.containsKey(MappedFieldType.Loading.KEY)) {
                        Settings settings = settingsBuilder().put(MappedFieldType.Loading.KEY, fieldDataSettings.get(MappedFieldType.Loading.KEY)).build();
                        builder.fieldDataSettings(settings);
                    }
                    iterator.remove();
                }
            }
            return builder;
        }

        @Override
        public MetadataFieldMapper getDefault(Settings indexSettings, MappedFieldType fieldType, String parentType) {
            return new ParentFieldMapper(indexSettings, fieldType, parentType);
        }
    }

    static final class ParentFieldType extends MappedFieldType {

        public ParentFieldType() {
            // Parent field data is loaded eagerly by default.
            setFieldDataType(new FieldDataType("_parent", settingsBuilder().put(MappedFieldType.Loading.KEY, Loading.EAGER_VALUE)));
        }

        protected ParentFieldType(ParentFieldType ref) {
            super(ref);
        }

        @Override
        public MappedFieldType clone() {
            return new ParentFieldType(this);
        }

        @Override
        public String typeName() {
            return CONTENT_TYPE;
        }

        @Override
        public Uid value(Object value) {
            if (value == null) {
                return null;
            }
            return Uid.createUid(value.toString());
        }

        @Override
        public Object valueForSearch(Object value) {
            if (value == null) {
                return null;
            }
            String sValue = value.toString();
            if (sValue == null) {
                return null;
            }
            // Strip the "type#" prefix from the stored uid, returning only the id part.
            int index = sValue.indexOf(Uid.DELIMITER);
            if (index == -1) {
                return sValue;
            }
            return sValue.substring(index + 1);
        }

        /**
         * We don't need to analyzer the text, and we need to convert it to UID...
         */
        @Override
        public boolean useTermQueryWithQueryString() {
            return true;
        }

        @Override
        public Query termQuery(Object value, @Nullable QueryParseContext context) {
            return termsQuery(Collections.singletonList(value), context);
        }

        @Override
        public Query termsQuery(List values, @Nullable QueryParseContext context) {
            if (context == null) {
                return super.termsQuery(values, context);
            }
            // Collect all types whose _parent mapping is inactive: a bare id could
            // belong to any of them, so expand it to a uid per candidate type.
            List<String> types = new ArrayList<>(context.mapperService().types().size());
            for (DocumentMapper documentMapper : context.mapperService().docMappers(false)) {
                if (!documentMapper.parentFieldMapper().active()) {
                    types.add(documentMapper.type());
                }
            }
            List<BytesRef> bValues = new ArrayList<>(values.size());
            for (Object value : values) {
                BytesRef bValue = BytesRefs.toBytesRef(value);
                if (Uid.hasDelimiter(bValue)) {
                    // Already a full "type#id" uid, use as-is.
                    bValues.add(bValue);
                } else {
                    // we use all non child types, cause we don't know if its exact or not...
                    for (String type : types) {
                        bValues.add(Uid.createUidAsBytes(type, bValue));
                    }
                }
            }
            return new TermsQuery(names().indexName(), bValues);
        }
    }

    // The type of the parent documents; null means _parent is not configured.
    private final String parentType;

    // determines the field data settings
    private MappedFieldType childJoinFieldType;

    // has no impact of field data settings, is just here for creating a join field, the parent field mapper in the child type pointing to this type determines the field data settings for this join field
    private final MappedFieldType parentJoinFieldType;

    private ParentFieldMapper(MappedFieldType fieldType, MappedFieldType parentJoinFieldType, MappedFieldType childJoinFieldType, String parentType, Settings indexSettings) {
        super(NAME, fieldType, Defaults.FIELD_TYPE, indexSettings);
        this.parentType = parentType;
        this.parentJoinFieldType = parentJoinFieldType;
        this.parentJoinFieldType.freeze();
        this.childJoinFieldType = childJoinFieldType;
        if (childJoinFieldType != null) {
            this.childJoinFieldType.freeze();
        }
    }

    // Default-mapper constructor: builds an inactive mapper (parentType == null)
    // with only the "this type may be a parent" join field.
    private ParentFieldMapper(Settings indexSettings, MappedFieldType existing, String parentType) {
        this(existing == null ? Defaults.FIELD_TYPE.clone() : existing.clone(), joinFieldTypeForParentType(parentType, indexSettings), null, null, indexSettings);
    }

    private static MappedFieldType joinFieldTypeForParentType(String parentType, Settings indexSettings) {
        MappedFieldType parentJoinFieldType = Defaults.JOIN_FIELD_TYPE.clone();
        parentJoinFieldType.setNames(new MappedFieldType.Names(joinField(parentType)));

        Version indexCreated = Version.indexCreated(indexSettings);
        if (indexCreated.before(Version.V_2_0_0_beta1)) {
            // Pre-2.0 indices have no doc values on the join field.
            parentJoinFieldType.setHasDocValues(false);
            parentJoinFieldType.setDocValuesType(DocValuesType.NONE);
        }
        parentJoinFieldType.freeze();
        return parentJoinFieldType;
    }

    public MappedFieldType getParentJoinFieldType() {
        return parentJoinFieldType;
    }

    public MappedFieldType getChildJoinFieldType() {
        return childJoinFieldType;
    }

    /** The configured parent type, or null when _parent is not active. */
    public String type() {
        return parentType;
    }

    @Override
    public void preParse(ParseContext context) throws IOException {
    }

    @Override
    public void postParse(ParseContext context) throws IOException {
        if (context.sourceToParse().flyweight() == false) {
            parse(context);
        }
    }

    @Override
    protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
        // Every document of a type that is itself a parent gets a join field keyed by its own id.
        boolean parent = context.docMapper().isParent(context.type());
        if (parent) {
            addJoinFieldIfNeeded(fields, parentJoinFieldType, context.id());
        }

        if (!active()) {
            return;
        }

        if (context.parser().currentName() != null && context.parser().currentName().equals(Defaults.NAME)) {
            // we are in the parsing of _parent phase
            String parentId = context.parser().text();
            context.sourceToParse().parent(parentId);
            fields.add(new Field(fieldType().names().indexName(), Uid.createUid(context.stringBuilder(), parentType, parentId), fieldType()));
            addJoinFieldIfNeeded(fields, childJoinFieldType, parentId);
        } else {
            // otherwise, we are running it post processing of the xcontent
            String parsedParentId = context.doc().get(Defaults.NAME);
            if (context.sourceToParse().parent() != null) {
                String parentId = context.sourceToParse().parent();
                if (parsedParentId == null) {
                    if (parentId == null) {
                        throw new MapperParsingException("No parent id provided, not within the document, and not externally");
                    }
                    // we did not add it in the parsing phase, add it now
                    fields.add(new Field(fieldType().names().indexName(), Uid.createUid(context.stringBuilder(), parentType, parentId), fieldType()));
                    addJoinFieldIfNeeded(fields, childJoinFieldType, parentId);
                } else if (parentId != null && !parsedParentId.equals(Uid.createUid(context.stringBuilder(), parentType, parentId))) {
                    throw new MapperParsingException("Parent id mismatch, document value is [" + Uid.createUid(parsedParentId).id() + "], while external value is [" + parentId + "]");
                }
            }
        }
        // we have parent mapping, yet no value was set, ignore it...
    }

    // Adds a sorted doc-values join entry, but only when the join field type has doc values
    // (i.e. the index was created on 2.0+).
    private void addJoinFieldIfNeeded(List<Field> fields, MappedFieldType fieldType, String id) {
        if (fieldType.hasDocValues()) {
            fields.add(new SortedDocValuesField(fieldType.names().indexName(), new BytesRef(id)));
        }
    }

    /** Builds the join field name for a type: {@code _parent#<type>}. */
    public static String joinField(String parentType) {
        return ParentFieldMapper.NAME + "#" + parentType;
    }

    @Override
    protected String contentType() {
        return CONTENT_TYPE;
    }

    private boolean joinFieldHasCustomFieldDataSettings() {
        return childJoinFieldType != null && childJoinFieldType.fieldDataType() != null && childJoinFieldType.fieldDataType().equals(Defaults.JOIN_FIELD_TYPE.fieldDataType()) == false;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        if (!active()) {
            // Inactive _parent mappings are not serialized at all.
            return builder;
        }
        boolean includeDefaults = params.paramAsBoolean("include_defaults", false);

        builder.startObject(CONTENT_TYPE);
        builder.field("type", parentType);
        if (includeDefaults || joinFieldHasCustomFieldDataSettings()) {
            builder.field("fielddata", (Map) childJoinFieldType.fieldDataType().getSettings().getAsMap());
        }
        builder.endObject();
        return builder;
    }

    @Override
    protected void doMerge(Mapper mergeWith, boolean updateAllTypes) {
        super.doMerge(mergeWith, updateAllTypes);
        ParentFieldMapper fieldMergeWith = (ParentFieldMapper) mergeWith;
        if (Objects.equals(parentType, fieldMergeWith.parentType) == false) {
            throw new IllegalArgumentException("The _parent field's type option can't be changed: [" + parentType + "]->[" + fieldMergeWith.parentType + "]");
        }

        List<String> conflicts = new ArrayList<>();
        fieldType().checkCompatibility(fieldMergeWith.fieldType(), conflicts, true); // always strict, this cannot change
        parentJoinFieldType.checkCompatibility(fieldMergeWith.parentJoinFieldType, conflicts, true); // same here
        if (childJoinFieldType != null) {
            // TODO: this can be set to false when the old parent/child impl is removed, we can do eager global ordinals loading per type.
            childJoinFieldType.checkCompatibility(fieldMergeWith.childJoinFieldType, conflicts, updateAllTypes == false);
        }
        if (conflicts.isEmpty() == false) {
            throw new IllegalArgumentException("Merge conflicts: " + conflicts);
        }

        if (active()) {
            childJoinFieldType = fieldMergeWith.childJoinFieldType.clone();
        }
    }

    /**
     * @return Whether the _parent field is actually configured.
     */
    public boolean active() {
        return parentType != null;
    }

}
| apache-2.0 |
KeyNexus/netty | src/main/java/org/jboss/netty/handler/codec/socks/SocksCmdResponse.java | 3158 | /*
* Copyright 2012 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package org.jboss.netty.handler.codec.socks;
import org.jboss.netty.buffer.ChannelBuffer;
/**
 * A SOCKS cmd response.
 *
 * @see SocksCmdRequest
 * @see SocksCmdResponseDecoder
 */
public final class SocksCmdResponse extends SocksResponse {
    private final CmdStatus cmdStatus;
    private final AddressType addressType;

    // Responses produced by this class always advertise an all-zero bound address.
    // The JVM zero-fills newly allocated arrays, so no explicit initializer is needed.
    private static final byte[] IPv4_HOSTNAME_ZEROED = new byte[4];
    private static final byte[] IPv6_HOSTNAME_ZEROED = new byte[16];

    /**
     * Creates a SOCKS cmd response.
     *
     * @param cmdStatus   the command status to report to the client
     * @param addressType the address type of the (zeroed) bound address
     * @throws NullPointerException if either argument is {@code null}
     */
    public SocksCmdResponse(CmdStatus cmdStatus, AddressType addressType) {
        super(SocksResponseType.CMD);
        // Manual null checks keep this class compatible with the pre-Java-7
        // runtimes targeted by Netty 3.x (java.util.Objects is unavailable there).
        if (cmdStatus == null) {
            throw new NullPointerException("cmdStatus");
        }
        if (addressType == null) {
            throw new NullPointerException("addressType");
        }
        this.cmdStatus = cmdStatus;
        this.addressType = addressType;
    }

    /**
     * Returns the {@link CmdStatus} of this {@link SocksCmdResponse}
     *
     * @return The {@link CmdStatus} of this {@link SocksCmdResponse}
     */
    public CmdStatus getCmdStatus() {
        return cmdStatus;
    }

    /**
     * Returns the {@link AddressType} of this {@link SocksCmdResponse}
     *
     * @return The {@link AddressType} of this {@link SocksCmdResponse}
     */
    public AddressType getAddressType() {
        return addressType;
    }

    /**
     * Encodes this response in SOCKS wire format:
     * VER, REP (status), RSV (0x00), ATYP, BND.ADDR, BND.PORT.
     * The bound address and port written by this class are always zeroed placeholders.
     */
    @Override
    public void encodeAsByteBuf(ChannelBuffer channelBuffer) {
        channelBuffer.writeByte(getProtocolVersion().getByteValue());
        channelBuffer.writeByte(cmdStatus.getByteValue());
        channelBuffer.writeByte(0x00); // reserved byte
        channelBuffer.writeByte(addressType.getByteValue());
        switch (addressType) {
            case IPv4: {
                channelBuffer.writeBytes(IPv4_HOSTNAME_ZEROED);
                channelBuffer.writeShort(0);
                break;
            }
            case DOMAIN: {
                channelBuffer.writeByte(1); // domain length
                channelBuffer.writeByte(0); // domain value
                channelBuffer.writeShort(0); // port value
                break;
            }
            case IPv6: {
                channelBuffer.writeBytes(IPv6_HOSTNAME_ZEROED);
                channelBuffer.writeShort(0);
                break;
            }
            // NOTE(review): any other address type (e.g. UNKNOWN) falls through
            // without writing address/port bytes, producing a truncated packet —
            // confirm that case is unreachable for responses built by this class.
        }
    }
}
| apache-2.0 |
eclipse/gemini.blueprint | test-support/src/main/java/org/eclipse/gemini/blueprint/test/internal/support/Activator.java | 2050 | /******************************************************************************
* Copyright (c) 2006, 2010 VMware Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* and Apache License v2.0 which accompanies this distribution.
* The Eclipse Public License is available at
* http://www.eclipse.org/legal/epl-v10.html and the Apache License v2.0
* is available at http://www.opensource.org/licenses/apache2.0.php.
* You may elect to redistribute this code under either of these licenses.
*
* Contributors:
* VMware Inc.
*****************************************************************************/
package org.eclipse.gemini.blueprint.test.internal.support;
import org.eclipse.gemini.blueprint.test.internal.TestRunnerService;
import org.eclipse.gemini.blueprint.test.internal.holder.HolderLoader;
import org.osgi.framework.BundleActivator;
import org.osgi.framework.BundleContext;
import org.osgi.framework.ServiceRegistration;
/**
 * Default activator for Spring/OSGi test support. This class can be seen as
 * the 'server-side' of the framework: it registers the {@link TestRunnerService}
 * implementation (the OsgiJUnitTest executor) with the OSGi service registry.
 *
 * @author Costin Leau
 */
public class Activator implements BundleActivator {

    private ServiceRegistration<TestRunnerService> registration;

    public void start(BundleContext context) throws Exception {
        final OsgiJUnitService service = new OsgiJUnitService();
        registration = context.registerService(TestRunnerService.class, service, null);
        // Record the owning bundle id so that AbstractOsgiTest can determine its
        // BundleContext when used in an environment where the system bundle is
        // treated as a special case.
        final long bundleId = context.getBundle().getBundleId();
        HolderLoader.INSTANCE.getHolder().setTestBundleId(bundleId);
    }

    public void stop(BundleContext context) throws Exception {
        // The framework should unregister services automatically on stop,
        // but be explicit about cleaning up our registration.
        final ServiceRegistration<TestRunnerService> localRegistration = registration;
        if (localRegistration != null) {
            localRegistration.unregister();
        }
    }
}
| apache-2.0 |
shs96c/buck | src/com/facebook/buck/features/python/toolchain/impl/LazyPythonPlatform.java | 3629 | /*
* Copyright 2018-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.features.python.toolchain.impl;
import com.facebook.buck.core.model.BuildTarget;
import com.facebook.buck.core.model.Flavor;
import com.facebook.buck.core.toolchain.ToolchainProvider;
import com.facebook.buck.features.python.PythonBuckConfig;
import com.facebook.buck.features.python.toolchain.PythonEnvironment;
import com.facebook.buck.features.python.toolchain.PythonInterpreter;
import com.facebook.buck.features.python.toolchain.PythonPlatform;
import com.facebook.buck.features.python.toolchain.PythonVersion;
import com.facebook.buck.util.ProcessExecutor;
import com.google.common.base.Suppliers;
import java.nio.file.Path;
import java.util.Optional;
import java.util.function.Supplier;
/**
 * An implementation of {@link PythonPlatform} that lazily creates {@link PythonEnvironment} and cxx
 * library.
 *
 * <p>This should be used to avoid creating all registered Python platforms up front: the
 * interpreter path and version are only resolved the first time {@link #getEnvironment()} is
 * called, and the result is memoized.
 */
public class LazyPythonPlatform implements PythonPlatform {

  private final PythonBuckConfig pythonBuckConfig;
  private final ProcessExecutor processExecutor;
  private final Flavor flavor;
  private final String configSection;
  // Memoized so the (potentially expensive) interpreter lookup and version probe
  // run at most once per platform instance.
  private final Supplier<PythonEnvironment> pythonEnvironmentSupplier;

  public LazyPythonPlatform(
      ToolchainProvider toolchainProvider,
      PythonBuckConfig pythonBuckConfig,
      ProcessExecutor processExecutor,
      Flavor flavor,
      String configSection) {
    this.pythonBuckConfig = pythonBuckConfig;
    this.processExecutor = processExecutor;
    this.flavor = flavor;
    this.configSection = configSection;
    pythonEnvironmentSupplier =
        Suppliers.memoize(
            () -> {
              PythonInterpreter pythonInterpreter =
                  toolchainProvider.getByName(
                      PythonInterpreter.DEFAULT_NAME, PythonInterpreter.class);
              Path pythonPath = pythonInterpreter.getPythonInterpreterPath(configSection);
              PythonVersion pythonVersion =
                  getVersion(pythonBuckConfig, this.processExecutor, configSection, pythonPath);
              return new PythonEnvironment(pythonPath, pythonVersion);
            });
  }

  /**
   * Determines the Python version for this platform: an explicitly configured version in the
   * buckconfig section wins; otherwise the interpreter binary itself is queried.
   */
  private PythonVersion getVersion(
      PythonBuckConfig pythonBuckConfig,
      ProcessExecutor processExecutor,
      String section,
      Path path) {
    Optional<PythonVersion> configuredVersion =
        pythonBuckConfig.getConfiguredVersion(section).map(PythonVersionFactory::fromString);
    if (configuredVersion.isPresent()) {
      return configuredVersion.get();
    }
    try {
      return PythonVersionFactory.fromInterpreter(processExecutor, path);
    } catch (InterruptedException e) {
      // Restore the interrupt status so callers higher up the stack can observe
      // the interruption; swallowing it here would hide the request entirely.
      Thread.currentThread().interrupt();
      throw new RuntimeException(e);
    }
  }

  @Override
  public Flavor getFlavor() {
    return flavor;
  }

  @Override
  public PythonEnvironment getEnvironment() {
    return pythonEnvironmentSupplier.get();
  }

  @Override
  public Optional<BuildTarget> getCxxLibrary() {
    return pythonBuckConfig.getCxxLibrary(configSection);
  }
}
| apache-2.0 |
wildfly-swarm/wildfly-swarm | core/container/src/main/java/org/wildfly/swarm/internal/SwarmConfigMessages.java | 2460 | /**
* Copyright 2015-2017 Red Hat, Inc, and individual contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wildfly.swarm.internal;
import org.jboss.logging.BasicLogger;
import org.jboss.logging.Logger;
import org.jboss.logging.annotations.Cause;
import org.jboss.logging.annotations.LogMessage;
import org.jboss.logging.annotations.Message;
import org.jboss.logging.annotations.MessageLogger;
/**
 * JBoss Logging message bundle for configuration-related log output.
 * <p>
 * Each method corresponds to one log statement; the message id, level and
 * format string are declared via the annotations below and the implementation
 * is generated by the JBoss Logging tooling.
 *
 * @author <a href="mailto:jperkins@redhat.com">James R. Perkins</a>
 */
@MessageLogger(projectCode = "TTCONFIG", length = 4)
public interface SwarmConfigMessages extends BasicLogger {

    // Shared logger instance bound to the "org.wildfly.swarm.config" category.
    SwarmConfigMessages MESSAGES = Logger.getMessageLogger(SwarmConfigMessages.class, "org.wildfly.swarm.config");

    /** Logs (DEBUG) that a Project Stage property is being marshalled. */
    @LogMessage(level = Logger.Level.DEBUG)
    @Message(id = 1, value = "Marshalling Project Stage property %s")
    void marshalProjectStageProperty(String key);

    /** Logs (DEBUG) the XML being marshalled from the given location. */
    @LogMessage(level = Logger.Level.DEBUG)
    @Message(id = 2, value = "Marshalling XML from %s as: \n %s")
    void marshalXml(String location, String xml);

    /** Logs (DEBUG) which loader loaded standalone.xml and from where. */
    @LogMessage(level = Logger.Level.DEBUG)
    @Message(id = 3, value = "Load standalone.xml via %s from %s")
    void loadingStandaloneXml(String loader, String location);

    /** Logs (DEBUG) the full configuration text. */
    @LogMessage(level = Logger.Level.DEBUG)
    @Message(id = 4, value = "Configuration:\n%s")
    void configuration(String configuration);

    /** Logs (ERROR) a failure while resolving a configurable value for a key. */
    @LogMessage(level = Logger.Level.ERROR)
    @Message(id = 5, value = "Error resolving configurable value for %s.")
    void errorResolvingConfigurableValue(String key, @Cause Throwable cause);

    /** Logs (ERROR) a failure while loading a module. */
    @LogMessage(level = Logger.Level.ERROR)
    @Message(id = 6, value = "Error loading module.")
    void errorLoadingModule(@Cause Throwable cause);

    /** Logs (ERROR) a failure creating an extension class from a module. */
    @LogMessage(level = Logger.Level.ERROR)
    @Message(id = 7, value = "Error create extension %s from module %s.")
    void errorCreatingExtension(String extensionClassName, String extensionModuleName, @Cause Throwable cause);
}
| apache-2.0 |
gingerwizard/elasticsearch | server/src/main/java/org/elasticsearch/search/aggregations/bucket/BestBucketsDeferringCollector.java | 9329 | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.bucket;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.CollectionTerminatedException;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreMode;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Weight;
import org.apache.lucene.util.packed.PackedInts;
import org.apache.lucene.util.packed.PackedLongValues;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.LongHash;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.BucketCollector;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.LeafBucketCollector;
import org.elasticsearch.search.aggregations.MultiBucketCollector;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
/**
 * A specialization of {@link DeferringBucketCollector} that collects all
 * matches and then is able to replay a given subset of buckets which represent
 * the survivors from a pruning process performed by the aggregator that owns
 * this collector.
 */
public class BestBucketsDeferringCollector extends DeferringBucketCollector {

    /**
     * Per-segment record of what was collected: the segment's reader context
     * plus two parallel packed sequences — the delta-encoded doc ids and the
     * bucket ordinal each doc was collected into.
     */
    static class Entry {
        final LeafReaderContext context;
        final PackedLongValues docDeltas;
        final PackedLongValues buckets;

        Entry(LeafReaderContext context, PackedLongValues docDeltas, PackedLongValues buckets) {
            this.context = Objects.requireNonNull(context);
            this.docDeltas = Objects.requireNonNull(docDeltas);
            this.buckets = Objects.requireNonNull(buckets);
        }
    }

    // One Entry per segment that produced at least one match; filled by finishLeaf().
    protected List<Entry> entries = new ArrayList<>();
    // The deferred collectors to replay into, set via setDeferredCollector(...).
    protected BucketCollector collector;
    protected final SearchContext searchContext;
    // Whether this collector visits all documents (global aggregation context).
    protected final boolean isGlobal;
    // State for the segment currently being collected; lazily allocated on the
    // first match in the segment and flushed to `entries` by finishLeaf().
    protected LeafReaderContext context;
    protected PackedLongValues.Builder docDeltasBuilder;
    protected PackedLongValues.Builder bucketsBuilder;
    // Highest bucket ordinal seen so far; -1 if nothing has been collected.
    protected long maxBucket = -1;
    // Set to true once postCollection() has run; replay is only legal after that.
    protected boolean finished = false;
    // Maps original bucket ordinals to their rebased (surviving) ordinals;
    // non-null once prepareSelectedBuckets(...) has been called.
    protected LongHash selectedBuckets;

    /**
     * Sole constructor.
     * @param context The search context
     * @param isGlobal Whether this collector visits all documents (global context)
     */
    public BestBucketsDeferringCollector(SearchContext context, boolean isGlobal) {
        this.searchContext = context;
        this.isGlobal = isGlobal;
    }

    // Delegates to the deferred collectors; requires setDeferredCollector(...) first.
    @Override
    public ScoreMode scoreMode() {
        if (collector == null) {
            throw new IllegalStateException();
        }
        return collector.scoreMode();
    }

    /** Set the deferred collectors. */
    @Override
    public void setDeferredCollector(Iterable<BucketCollector> deferredCollectors) {
        this.collector = MultiBucketCollector.wrap(deferredCollectors);
    }

    // Flushes the builders for the current segment (if any matches were seen)
    // into an immutable Entry.
    private void finishLeaf() {
        if (context != null) {
            assert docDeltasBuilder != null && bucketsBuilder != null;
            entries.add(new Entry(context, docDeltasBuilder.build(), bucketsBuilder.build()));
        }
    }

    @Override
    public LeafBucketCollector getLeafCollector(LeafReaderContext ctx) throws IOException {
        finishLeaf();

        context = null;
        // allocates the builder lazily in case this segment doesn't contain any match
        docDeltasBuilder = null;
        bucketsBuilder = null;

        return new LeafBucketCollector() {
            int lastDoc = 0;

            @Override
            public void collect(int doc, long bucket) throws IOException {
                // First match in this segment: allocate the builders now.
                if (context == null) {
                    context = ctx;
                    docDeltasBuilder = PackedLongValues.packedBuilder(PackedInts.DEFAULT);
                    bucketsBuilder = PackedLongValues.packedBuilder(PackedInts.DEFAULT);
                }
                // Store doc ids delta-encoded to keep the packed values small.
                docDeltasBuilder.add(doc - lastDoc);
                bucketsBuilder.add(bucket);
                lastDoc = doc;
                maxBucket = Math.max(maxBucket, bucket);
            }
        };
    }

    @Override
    public void preCollection() throws IOException {
        collector.preCollection();
    }

    @Override
    public void postCollection() throws IOException {
        finishLeaf();
        finished = true;
    }

    /**
     * Replay the wrapped collector, but only on a selection of buckets.
     */
    @Override
    public void prepareSelectedBuckets(long... selectedBuckets) throws IOException {
        if (finished == false) {
            throw new IllegalStateException("Cannot replay yet, collection is not finished: postCollect() has not been called");
        }
        if (this.selectedBuckets != null) {
            throw new IllegalStateException("Already been replayed");
        }

        // Build the ordinal -> rebased-ordinal mapping for the surviving buckets.
        this.selectedBuckets = new LongHash(selectedBuckets.length, BigArrays.NON_RECYCLING_INSTANCE);
        for (long ord : selectedBuckets) {
            this.selectedBuckets.add(ord);
        }

        boolean needsScores = scoreMode().needsScores();
        Weight weight = null;
        if (needsScores) {
            // Re-create a scorer to recompute scores during replay; in the global
            // context every document matches, hence the MatchAllDocsQuery.
            Query query = isGlobal ? new MatchAllDocsQuery() : searchContext.query();
            weight = searchContext.searcher().createWeight(searchContext.searcher().rewrite(query), ScoreMode.COMPLETE, 1f);
        }

        for (Entry entry : entries) {
            assert entry.docDeltas.size() > 0 : "segment should have at least one document to replay, got 0";
            try {
                final LeafBucketCollector leafCollector = collector.getLeafCollector(entry.context);
                DocIdSetIterator scoreIt = null;
                if (needsScores) {
                    Scorer scorer = weight.scorer(entry.context);
                    // We don't need to check if the scorer is null
                    // since we are sure that there are documents to replay (entry.docDeltas it not empty).
                    scoreIt = scorer.iterator();
                    leafCollector.setScorer(scorer);
                }
                // Walk the delta-encoded doc ids and replay only docs whose bucket survived pruning.
                final PackedLongValues.Iterator docDeltaIterator = entry.docDeltas.iterator();
                final PackedLongValues.Iterator buckets = entry.buckets.iterator();
                int doc = 0;
                for (long i = 0, end = entry.docDeltas.size(); i < end; ++i) {
                    doc += docDeltaIterator.next();
                    final long bucket = buckets.next();
                    final long rebasedBucket = this.selectedBuckets.find(bucket);
                    if (rebasedBucket != -1) {
                        if (needsScores) {
                            if (scoreIt.docID() < doc) {
                                scoreIt.advance(doc);
                            }
                            // aggregations should only be replayed on matching documents
                            assert scoreIt.docID() == doc;
                        }
                        leafCollector.collect(doc, rebasedBucket);
                    }
                }
            } catch (CollectionTerminatedException e) {
                // collection was terminated prematurely
                // continue with the following leaf
            }
        }
        collector.postCollection();
    }

    /**
     * Wrap the provided aggregator so that it behaves (almost) as if it had
     * been collected directly.
     */
    @Override
    public Aggregator wrap(final Aggregator in) {
        return new WrappedAggregator(in) {
            @Override
            public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException {
                if (selectedBuckets == null) {
                    throw new IllegalStateException("Collection has not been replayed yet.");
                }
                // Translate the caller's bucket ordinals into the rebased ordinals
                // used during replay before delegating to the wrapped aggregator.
                long[] rebasedOrds = new long[owningBucketOrds.length];
                for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) {
                    rebasedOrds[ordIdx] = selectedBuckets.find(owningBucketOrds[ordIdx]);
                    if (rebasedOrds[ordIdx] == -1) {
                        throw new IllegalStateException("Cannot build for a bucket which has not been collected");
                    }
                }
                return in.buildAggregations(rebasedOrds);
            }
        };
    }
}
| apache-2.0 |